diff --git "a/perf-df-awq-1xT4.csv" "b/perf-df-awq-1xT4.csv" --- "a/perf-df-awq-1xT4.csv" +++ "b/perf-df-awq-1xT4.csv" @@ -4775,7 +4775,7 @@ ChildProcessError: Traceback (most recent call last): ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " -4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -4846,7 +4846,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -4996,7 +4996,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -5266,7 +5266,7 @@ ChildProcessError: Traceback (most recent call last): ValueError: RecurrentGemmaForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmphu5n64in/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " -4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -5473,7 +5473,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" -4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -5518,10 +5518,10 @@ ChildProcessError: Traceback (most recent call last): self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 6.12 MiB is free. Process 110514 has 14.73 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 241.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 6.12 MiB is free. Process 109305 has 14.73 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 241.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " -4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -5625,7 +5625,7 @@ ChildProcessError: Traceback (most recent call last): ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpyh_zsdh5/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " -4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -5714,7 +5714,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): 
+4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -5837,7 +5837,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -5966,7 +5966,7 @@ ChildProcessError: Traceback (most recent call last): AssertionError " 
-4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -6029,7 +6029,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" -4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -6092,7 +6092,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" -4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -6137,10 +6137,10 @@ ChildProcessError: Traceback (most recent call last): self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 22.12 MiB is free. Process 25195 has 14.72 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 193.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 22.12 MiB is free. Process 24997 has 14.72 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 193.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " -4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -6273,7 +6273,7 @@ ChildProcessError: Traceback (most recent call last): ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpx4ipzc_j/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " -4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -6363,7 +6363,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): 
+4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -6648,7 +6648,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -6765,7 +6765,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
-4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -6981,7 +6981,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" -4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -7044,7 +7044,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" -4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -7260,7 +7260,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -7332,7 +7332,7 @@ RuntimeError: FlashAttention only supports Ampere GPUs or newer. raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " -4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -7395,7 +7395,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" -4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -7458,7 +7458,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" -4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -7875,7 +7875,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -8000,7 +8000,7 @@ ChildProcessError: Traceback (most recent call last): ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp8w2wbor4/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " -4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -9179,7 +9179,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' " -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -9250,7 +9250,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -9400,7 +9400,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no 
attribute '_scheduler' " -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -9670,7 +9670,7 @@ ChildProcessError: Traceback (most recent call last): ValueError: RecurrentGemmaForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp8iya7579/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -9877,7 +9877,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -9922,10 +9922,10 @@ ChildProcessError: Traceback (most recent call last): self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 4.12 MiB is free. Process 111013 has 14.73 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 242.96 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 4.12 MiB is free. Process 109798 has 14.73 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 242.96 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -10029,7 +10029,7 @@ ChildProcessError: Traceback (most recent call last): ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpy9gngpwv/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -10118,7 +10118,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): 
+4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -10241,7 +10241,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -10370,7 +10370,7 @@ ChildProcessError: Traceback (most recent call last): AssertionError " 
-4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -10433,7 +10433,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -10496,7 +10496,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -10541,10 +10541,10 @@ ChildProcessError: Traceback (most recent call last): self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 22.12 MiB is free. Process 25685 has 14.72 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 193.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 22.12 MiB is free. Process 25492 has 14.72 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 193.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -10677,7 +10677,7 @@ ChildProcessError: Traceback (most recent call last): ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpxsoh4v0b/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -10767,7 +10767,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): 
+4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -11052,7 +11052,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -11169,7 +11169,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
-4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -11385,7 +11385,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -11448,7 +11448,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -11664,7 +11664,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -11736,7 +11736,7 @@ RuntimeError: FlashAttention only supports Ampere GPUs or newer. raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -11799,7 +11799,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -11862,7 +11862,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -12279,7 +12279,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -12404,7 +12404,7 @@ ChildProcessError: Traceback (most recent call last): ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp7m94xslu/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " -4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -12808,7 +12808,7 @@ RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -12905,7 +12905,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -13049,7 +13049,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -13235,7 +13235,7 @@ ChildProcessError: Traceback (most recent call last): ValueError: RecurrentGemmaForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -13352,7 +13352,7 @@ AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,7160.643584,11301.421056,0.0,10898.898944,10500.155392,s,1,15.0760732421875,15.0760732421875,0.0,15.0760732421875,15.0760732421875,15.0760732421875,15.0760732421875,[15.0760732421875],,kWh,0.0002322880344749933,2.5615526863700306e-05,0.00010319674922400213,0.0003611003105626957,,MB,3009.093632,11320.295424,0.0,10903.093248,10048.928256,s,10,58.723462890624994,5.8723462890625004,0.0032497952895616344,5.871911865234376,5.8770778808593755,5.877555786132812,5.877938110351562,"[5.86891748046875, 5.87013818359375, 5.8675283203125, 5.87061083984375, 5.873212890625, 5.87040234375, 5.87406689453125, 5.87358056640625, 5.8769716796875, 
5.87803369140625]",tokens/s,43.594159369792465,kWh,0.00017127343585916606,1.889205722104725e-05,0.00011380903549159937,0.0003039745285718127,tokens/kWh,842175.8270430907,MB,3009.093632,11320.295424,0.0,10903.093248,10072.241664,s,10,29.368702880859374,2.9368702880859376,0.0015310096670841527,2.9373739013671876,2.93856103515625,2.938778564453125,2.938952587890625,"[2.93443505859375, 2.93500927734375, 2.934990966796875, 2.936197265625, 2.937611083984375, 2.937291259765625, 2.93745654296875, 2.93899609375, 2.93820263671875, 2.9385126953125]",tokens/s,21.451407049052662,kWh,8.595068042166874e-05,9.481013761268298e-06,5.7408323704401246e-05,0.0001528400178873382,tokens/kWh,412195.7120316402,,s,630,29.365865379333496,0.04661248472910079,0.0003796470001503052,0.04660024070739746,0.04696365661621094,0.047063209915161136,0.04845045204162598,"[0.048330753326416016, 0.046202880859375, 0.04609024047851563, 0.04595302581787109, 0.045829952239990236, 0.046098751068115236, 0.04611468887329102, 0.04604108810424805, 0.04654652786254883, 0.04640553665161133, 0.04644009780883789, 0.046363391876220704, 0.04688409423828125, 0.04650889587402344, 0.04607494354248047, 0.04659296035766602, 0.046491424560546876, 0.04639494323730469, 0.04635100936889648, 0.04652032089233398, 0.04646297454833984, 0.0461578254699707, 0.04638924789428711, 0.04652601623535156, 0.04627654266357422, 0.04626393508911133, 0.046543201446533206, 0.0462993278503418, 0.04633744049072266, 0.046478271484375, 0.046580833435058595, 0.046418846130371096, 0.04675174331665039, 0.046721023559570314, 0.046507999420166014, 0.04687785720825195, 0.046668670654296876, 0.046344192504882815, 0.046868480682373044, 0.046680065155029295, 0.04654694366455078, 0.046663551330566405, 0.046799041748046874, 0.04668204879760742, 0.046937854766845706, 0.04677603149414063, 0.046790592193603514, 0.046400096893310545, 0.04665926361083984, 0.04660870361328125, 0.04660793685913086, 0.04677471923828125, 0.04683366394042969, 0.046761024475097654, 0.047065025329589845, 0.04704473495483399, 0.04675455856323242, 0.04677369689941406, 0.047061695098876956, 0.04674560165405273, 0.046740959167480466, 0.04688332748413086, 0.04687801742553711, 0.048417598724365234, 0.04627568054199219, 0.045967681884765625, 0.046021217346191405, 0.04604313659667969, 0.046308895111083985, 0.04657404708862305, 0.04636876678466797, 0.04618239974975586, 0.04637081527709961, 0.046383102416992186, 0.04596736145019531, 0.04626198577880859, 0.04658396911621094, 0.04628083038330078, 0.04628092956542969, 0.0466255989074707, 0.04639603042602539, 0.04635388946533203, 0.04691750335693359, 0.04674784088134765, 0.046268577575683596, 0.04635919952392578, 0.046465023040771485, 0.0461578254699707, 0.046340255737304686, 0.04629628753662109, 0.046161537170410154, 0.046166561126708985, 0.046720703125, 0.04655984115600586, 0.046368545532226565, 0.0464686393737793, 0.046922622680664064, 0.046604286193847655, 0.04679679870605469, 0.04664115142822266, 0.046467071533203126, 0.04649539184570312, 0.046887264251708985, 0.046936065673828124, 0.04686000061035156, 0.04677568054199219, 0.046750625610351565, 0.04641350555419922, 0.04663532638549805, 0.04674969482421875, 0.046505760192871094, 0.04650723266601563, 0.046779422760009765, 0.04658377456665039, 0.046524417877197265, 0.046732414245605466, 0.046941055297851565, 0.04674339294433594, 0.047024288177490235, 0.04678860855102539, 0.046944255828857424, 0.046956127166748046, 0.04689142227172852, 0.04701385498046875, 0.047081504821777344, 0.047151103973388675, 0.048540897369384765, 
0.04625078582763672, 0.04600831985473633, 0.04597555160522461, 0.04641948699951172, 0.045948734283447264, 0.04620969772338867, 0.046378368377685546, 0.04633459091186523, 0.046275775909423826, 0.04641449737548828, 0.04630876922607422, 0.0460211181640625, 0.04629529571533203, 0.04635033416748047, 0.04623155212402344, 0.0462760009765625, 0.04651068878173828, 0.046333953857421874, 0.046489601135253904, 0.04699135971069336, 0.04676992034912109, 0.046334209442138674, 0.04637033462524414, 0.04648783874511719, 0.046362815856933595, 0.04638508987426758, 0.04668803024291992, 0.046530208587646484, 0.046192543029785156, 0.04647804641723633, 0.0466247673034668, 0.0462391357421875, 0.0465250244140625, 0.04691664123535156, 0.04645926284790039, 0.046397342681884765, 0.0465909423828125, 0.04668182373046875, 0.046575454711914065, 0.04690143966674805, 0.046933792114257813, 0.046583999633789064, 0.04737615966796875, 0.04661884689331055, 0.04655023956298828, 0.04674639892578125, 0.046704639434814454, 0.04648672103881836, 0.046635200500488284, 0.04695862579345703, 0.046795360565185545, 0.046860286712646484, 0.046695873260498046, 0.046828033447265625, 0.04650604629516602, 0.04700166320800781, 0.04684588623046875, 0.04672275161743164, 0.0468823356628418, 0.047005760192871095, 0.04684463882446289, 0.04707942581176758, 0.04874892807006836, 0.04662681579589844, 0.045946880340576174, 0.04601446533203125, 0.0464153938293457, 0.04634268951416016, 0.04606560134887695, 0.0462295036315918, 0.046515743255615236, 0.04622288131713867, 0.0460335693359375, 0.04643664169311523, 0.046214176177978517, 0.04599932861328125, 0.04661769485473633, 0.046453536987304686, 0.04629436874389648, 0.04641231918334961, 0.04666896057128906, 0.04636963272094727, 0.046028190612792966, 0.04665200042724609, 0.04653055953979492, 0.04648755264282227, 0.04673126220703125, 0.04654403305053711, 0.04649382400512695, 0.0464284782409668, 0.046733024597167966, 0.04648211288452148, 0.04637651062011719, 0.046561729431152346, 0.046461952209472655, 0.046437374114990236, 0.04680089569091797, 0.04674764633178711, 0.04655104064941406, 0.04652851104736328, 0.046721023559570314, 0.046669822692871094, 0.04630527877807617, 0.046878719329833986, 0.04700774383544922, 0.046593280792236326, 0.04697756958007813, 0.04708784103393555, 0.046599456787109375, 0.04649852752685547, 0.04670054244995117, 0.04669440078735351, 0.046936065673828124, 0.04676095962524414, 0.04665446472167969, 0.04697292709350586, 0.046825313568115236, 0.04675804901123047, 0.046639102935791016, 0.04705279922485352, 0.04693116760253906, 0.04665628814697265, 0.046886913299560545, 0.04698316955566406, 0.04693932723999023, 0.04870899200439453, 0.04654348754882812, 0.045948638916015624, 0.0461091194152832, 0.04653039932250977, 0.04631129455566406, 0.04624732971191406, 0.04656611251831055, 0.04650931167602539, 0.046193214416503904, 0.046892608642578125, 0.04648812866210938, 0.04607385635375977, 0.046241310119628905, 0.046272670745849606, 0.04602304077148438, 0.04600128173828125, 0.04663539123535156, 0.046455295562744144, 0.04604927825927734, 0.04681475067138672, 0.047093856811523435, 0.04653094482421875, 0.04636671829223633, 0.046542720794677736, 0.04644467163085937, 0.04610047912597656, 0.04675369644165039, 0.04654703903198242, 0.046473217010498044, 0.04658790588378906, 0.04684799957275391, 0.04648076629638672, 0.04638579177856445, 0.04660374450683594, 0.04659254455566406, 0.04640134429931641, 0.046593246459960935, 0.04686332702636719, 0.046465023040771485, 0.04688246536254883, 0.04673574447631836, 
0.0467547836303711, 0.04674457550048828, 0.046781566619873045, 0.0467198715209961, 0.04693196868896484, 0.04690512084960938, 0.046755615234375, 0.04668870544433594, 0.04691558456420898, 0.04685823822021484, 0.04669766235351563, 0.04699990463256836, 0.04697135925292969, 0.04667801666259765, 0.04669619369506836, 0.04674582290649414, 0.046721023559570314, 0.0469290885925293, 0.04695526504516601, 0.046857791900634764, 0.04710454559326172, 0.048640575408935544, 0.046249568939208986, 0.04572934341430664, 0.0461484489440918, 0.046413982391357425, 0.04643414306640625, 0.04603903961181641, 0.04651212692260742, 0.04634534454345703, 0.04607270431518555, 0.046626785278320315, 0.0464730224609375, 0.04635785675048828, 0.0465863037109375, 0.04686073684692383, 0.046465023040771485, 0.04612483215332031, 0.04624816131591797, 0.046360576629638675, 0.0462479362487793, 0.047064449310302736, 0.046889278411865236, 0.04616633605957031, 0.04635193634033203, 0.04637699127197266, 0.04632972717285156, 0.04617475128173828, 0.04675324630737305, 0.046887359619140624, 0.04647331237792969, 0.04651395034790039, 0.04657958221435547, 0.04654127883911133, 0.0469728012084961, 0.04681913757324219, 0.04662700653076172, 0.046862335205078126, 0.04677017593383789, 0.04647116851806641, 0.046252033233642575, 0.04692377471923828, 0.046695968627929685, 0.04651651382446289, 0.046774463653564455, 0.04676803207397461, 0.04680303955078125, 0.04665887832641601, 0.046844478607177734, 0.04674697494506836, 0.04692380905151367, 0.046949153900146486, 0.04675376129150391, 0.04694160079956055, 0.04696329498291016, 0.046813056945800784, 0.046868606567382814, 0.04671001434326172, 0.046883583068847656, 0.04655104064941406, 0.04685619354248047, 0.04689715194702149, 0.046661182403564455, 0.04673318481445313, 0.048535552978515625, 0.04635647964477539, 0.04594483184814453, 0.0462110710144043, 0.046489601135253904, 0.04625612640380859, 0.04612710571289062, 0.04635033416748047, 0.0461844482421875, 0.04627046585083008, 0.04659404754638672, 0.0462479362487793, 0.04600976181030274, 0.04655718231201172, 0.04628950500488281, 0.04590387344360351, 0.046223201751708985, 0.046683521270751954, 0.04658256149291992, 0.046405441284179685, 0.046962623596191404, 0.04686463928222656, 0.046190399169921875, 0.04637305450439453, 0.0464793586730957, 0.046202880859375, 0.04671078491210937, 0.046566783905029295, 0.04638297653198242, 0.04623846435546875, 0.0466063346862793, 0.04685388946533203, 0.04629939270019531, 0.04698051071166992, 0.04696246337890625, 0.04653039932250977, 0.04645516967773437, 0.046389854431152344, 0.04665494537353516, 0.04696118545532227, 0.04694015884399414, 0.04694015884399414, 0.046903297424316405, 0.04665958404541016, 0.046671871185302735, 0.046895103454589845, 0.04691763305664062, 0.046728416442871096, 0.04682217788696289, 0.0466596794128418, 0.04675369644165039, 0.046516223907470705, 0.04669388961791992, 0.04688886260986328, 0.046778976440429686, 0.04683091354370117, 0.046844608306884764, 0.04664432144165039, 0.04680956649780273, 0.04708806228637695, 0.04687011337280273, 0.04742390441894531, 0.04699545669555664, 0.04846387100219727, 0.04624588775634766, 0.04636262512207031, 0.04614070510864258, 0.04608278274536133, 0.04624588775634766, 0.046312576293945314, 0.046570369720458984, 0.046266368865966793, 0.04631552124023437, 0.04652774429321289, 0.04611475372314453, 0.04660102462768555, 0.04652236938476562, 0.046473217010498044, 0.04635238265991211, 0.04645872116088867, 0.04627872085571289, 0.046063838958740236, 0.04675571060180664, 0.046966911315917965, 
0.04649692916870117, 0.046686943054199216, 0.04652588653564453, 0.04630339050292969, 0.04603126525878906, 0.04640467071533203, 0.046506942749023436, 0.04631552124023437, 0.04674886322021484, 0.04674233627319336, 0.04640576171875, 0.04675775909423828, 0.046644798278808595, 0.04664112091064453, 0.04654127883911133, 0.046626049041748045, 0.046752513885498045, 0.04649574279785156, 0.04661376190185547, 0.047180545806884765, 0.04685635375976562, 0.04698700714111328, 0.04676822280883789, 0.046704639434814454, 0.04665865707397461, 0.04687891387939453, 0.04674623870849609, 0.04697507095336914, 0.04688256072998047, 0.046753311157226564, 0.046545631408691404, 0.046986495971679684, 0.046841697692871095, 0.04697958374023437, 0.0470838394165039, 0.04685168075561524, 0.04663929748535156, 0.047039840698242186, 0.04703039932250976, 0.046930721282958984, 0.04707673645019531, 0.04687532806396484, 0.0480785903930664, 0.04637516784667969, 0.0460656623840332, 0.046034942626953124, 0.046288257598876954, 0.046473857879638675, 0.04621209716796875, 0.046031776428222655, 0.046409248352050785, 0.04655571365356445, 0.04630697631835937, 0.046659168243408204, 0.04646080017089844, 0.046313888549804685, 0.04600182342529297, 0.04635087966918945, 0.04666396713256836, 0.04634828948974609, 0.046637054443359374, 0.046772224426269535, 0.04636671829223633, 0.046635009765625, 0.04649760055541992, 0.04627065658569336, 0.04612300872802735, 0.04655104064941406, 0.04632572937011719, 0.046247871398925784, 0.046871841430664064, 0.046633792877197267, 0.046437984466552736, 0.04668662261962891, 0.04660163116455078, 0.046602046966552735, 0.04651830291748047, 0.04685491180419922, 0.046923454284667966, 0.046467391967773435, 0.046583263397216794, 0.047180030822753904, 0.04680323028564453, 0.04683366394042969, 0.04676784133911133, 0.04660444641113281, 0.046721153259277344, 0.04688217544555664, 0.046760574340820316, 0.04690937423706055, 0.046761150360107424, 0.0465766716003418, 0.04692566299438477, 0.046949951171875, 0.0469728012084961, 0.04697462463378906, 0.04693494415283203, 0.04675539016723633, 0.04690300750732422, 0.046879390716552734, 0.04684806442260742, 0.04666572952270508, 0.04706051254272461, 0.047095870971679686, 0.0468298225402832, 0.04857241439819336, 0.04630521774291992, 0.04590188980102539, 0.045755455017089844, 0.046072769165039065, 0.046018558502197264, 0.04617184066772461, 0.04644672012329101, 0.04639353561401367, 0.04639712142944336, 0.046693729400634765, 0.04644144058227539, 0.04622335815429687, 0.0465546875, 0.046612159729003906, 0.04637772750854492, 0.0466060791015625, 0.046926078796386717, 0.046599201202392575, 0.04603593444824219, 0.04688889694213867, 0.04662486267089844, 0.046368736267089844, 0.046241790771484374, 0.0462213134765625, 0.04631961441040039, 0.04629913711547851, 0.046460033416748044, 0.04657651138305664, 0.04638294219970703, 0.04678876876831055, 0.04679679870605469, 0.04640678405761719, 0.046774559020996094, 0.046621280670166014, 0.04647107315063476, 0.046790752410888675, 0.04694220733642578, 0.04666556930541992, 0.0470852165222168, 0.04709222412109375, 0.046668991088867184, 0.04647200012207031, 0.04653036880493164, 0.04669161605834961, 0.046308063507080076, 0.04647676849365234, 0.04655104064941406, 0.046701278686523434, 0.04664934539794922, 0.04694220733642578, 0.04677017593383789, 0.04698316955566406, 0.046992446899414064, 0.0468551025390625, 0.04707894515991211, 0.04703267288208008, 0.04688019180297852, 0.04704735946655274, 0.04729241561889649, 0.047306751251220705, 0.04714486312866211, 
0.04693350219726562]",tokens/s,21.453479809362893,,, -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -13397,10 +13397,10 @@ ChildProcessError: Traceback (most recent call last): self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 6.12 MiB is free. Process 109491 has 14.73 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 241.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 6.12 MiB is free. Process 108320 has 14.73 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 241.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -13470,7 +13470,7 @@ ChildProcessError: Traceback (most recent call last): ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -13559,7 +13559,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -13648,7 +13648,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -13908,7 +13908,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): 
+4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -14628,7 +14628,7 @@ ValueError: CodeGenForCausalLM does not support an attention implementation thro " 4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1028.931584,965.67296,0.0,570.425344,525.840896,s,1,8.756587890625,8.756587890625,0.0,8.756587890625,8.756587890625,8.756587890625,8.756587890625,[8.756587890625],,kWh,3.2744425933303015e-05,3.60483427928465e-06,1.2091120784013532e-05,4.8440380996601195e-05,,MB,1234.219008,1005.518848,0.0,597.68832,584.940544,s,10,1.2835491333007811,0.12835491333007812,0.0008062953252037128,0.1281603240966797,0.12885750885009767,0.1297009635925293,0.13037572738647463,"[0.13054441833496094, 0.12826966857910158, 0.12867007446289064, 0.1277258529663086, 0.12850579833984374, 0.12770556640625, 0.12805097961425782, 0.12850090026855468, 0.12774406433105467, 0.12783180999755858]",tokens/s,1994.4698131007199,kWh,3.962032842454707e-06,4.369392790356769e-07,2.6291199711619972e-06,7.028092092652381e-06,tokens/kWh,36425248.36401032,MB,1245.32736,1020.198912,0.0,612.368384,597.290496,s,10,10.94657263183594,1.0946572631835938,0.017085611733700476,1.0881235961914062,1.1167506591796874,1.118696008300781,1.1202522875976562,"[1.0778565673828124, 1.084336181640625, 1.0810809326171875, 1.069919189453125, 1.0909920654296874, 1.120641357421875, 1.107659912109375, 1.085255126953125, 1.112512939453125, 1.116318359375]",tokens/s,57.55226052835659,kWh,3.2114559489632315e-05,3.541848400897781e-06,1.332450390283957e-05,4.8980911793369663e-05,tokens/kWh,1286215.3376354263,,s,630,10.940668748855604,0.01736614087119935,0.0004322146221876801,0.017292176246643066,0.017899991035461426,0.018038656044006347,0.018869532661437993,"[0.018772064208984376, 0.017807743072509766, 0.017474048614501952, 0.017560800552368163, 0.01753718376159668, 0.01734662437438965, 0.01703318405151367, 0.01691913604736328, 
0.016983264923095702, 0.016830432891845704, 0.01706505584716797, 0.01690595245361328, 0.0170732479095459, 0.016969728469848632, 0.01701718330383301, 0.016942720413208007, 0.01697273635864258, 0.017477184295654296, 0.016902591705322264, 0.016916479110717773, 0.016887519836425783, 0.016920671463012696, 0.016910528182983397, 0.016906240463256835, 0.017026399612426756, 0.016926816940307617, 0.017056447982788086, 0.01707379150390625, 0.01711948776245117, 0.017551616668701173, 0.01711692810058594, 0.017020896911621095, 0.01710492706298828, 0.016979455947875977, 0.01698975944519043, 0.017187776565551757, 0.017101055145263673, 0.016858112335205077, 0.016898815155029296, 0.017084384918212892, 0.016897727966308593, 0.017330528259277344, 0.017354751586914064, 0.016965408325195313, 0.016926464080810548, 0.017324512481689452, 0.017031295776367188, 0.01694643211364746, 0.016988800048828124, 0.017163360595703125, 0.01715702438354492, 0.017075872421264647, 0.017508703231811522, 0.017302719116210938, 0.016974655151367188, 0.017153568267822265, 0.016898368835449217, 0.016957504272460937, 0.016775264739990234, 0.016887359619140625, 0.016833984375, 0.016809024810791016, 0.016869312286376954, 0.017879072189331054, 0.01745792007446289, 0.01732211112976074, 0.01715622329711914, 0.017129344940185545, 0.017690624237060547, 0.017164287567138673, 0.01714566421508789, 0.01712761688232422, 0.017008352279663085, 0.017328384399414063, 0.01723776054382324, 0.017232160568237304, 0.017143808364868163, 0.017442592620849608, 0.017550655364990234, 0.01750912094116211, 0.017464736938476562, 0.017349376678466796, 0.01721507263183594, 0.01713203239440918, 0.01712019157409668, 0.017122079849243164, 0.017231136322021483, 0.017343391418457033, 0.017139711380004884, 0.017286527633666993, 0.01752239990234375, 0.017259424209594726, 0.017510400772094727, 0.017201152801513672, 0.0172258243560791, 0.018036895751953125, 0.019379711151123045, 0.017354848861694337, 0.017074335098266603, 0.01702911949157715, 0.016959487915039064, 0.01699430465698242, 0.01702911949157715, 0.017099775314331055, 0.01692527961730957, 0.0169946231842041, 0.016945247650146485, 0.017070207595825195, 0.01708457565307617, 0.017002208709716797, 0.01676288032531738, 0.016788639068603516, 0.016802047729492186, 0.016798240661621094, 0.01682975959777832, 0.01676691246032715, 0.01677395248413086, 0.017487871170043946, 0.017001663208007813, 0.016910688400268555, 0.017002975463867188, 0.016985183715820314, 0.01712748718261719, 0.016986976623535155, 0.01702092742919922, 0.017102848052978514, 0.01782595252990723, 0.017573728561401367, 0.017944576263427735, 0.017744064331054688, 0.017585248947143556, 0.01753766441345215, 0.017342016220092772, 0.017195199966430662, 0.017181024551391602, 0.017506303787231444, 0.017722944259643554, 0.0174268798828125, 0.01730352020263672, 0.017301536560058593, 0.01757209587097168, 0.017348352432250976, 0.017290592193603516, 0.017869024276733397, 0.01711532783508301, 0.017000703811645507, 0.016893951416015626, 0.01678335952758789, 0.0168407039642334, 0.016704864501953125, 0.016765600204467775, 0.01682636833190918, 0.016855039596557618, 0.01680384063720703, 0.016887231826782225, 0.016863807678222657, 0.0168853759765625, 0.01698649597167969, 0.016959455490112303, 0.01697372817993164, 0.017092735290527343, 0.017160192489624023, 0.016990272521972657, 0.016919872283935548, 0.01695414352416992, 0.017079967498779297, 0.016949440002441408, 0.0169881591796875, 0.017074176788330078, 0.017092607498168946, 0.01692982482910156, 0.016859935760498046, 
0.01684217643737793, 0.01679020881652832, 0.016826271057128906, 0.016758975982666017, 0.016852960586547852, 0.01723910331726074, 0.016927679061889647, 0.016893951416015626, 0.016863231658935548, 0.016969728469848632, 0.016865280151367186, 0.01696767997741699, 0.01768387222290039, 0.01769059181213379, 0.01784281539916992, 0.017375423431396485, 0.01763270378112793, 0.01795359992980957, 0.01754710388183594, 0.017178815841674806, 0.016924671173095703, 0.01700364875793457, 0.01722867202758789, 0.016946687698364257, 0.01665184020996094, 0.01674950408935547, 0.016928319931030274, 0.0168288631439209, 0.016948448181152344, 0.016691999435424806, 0.016689151763916017, 0.016754016876220704, 0.016611391067504883, 0.016715967178344726, 0.016697759628295897, 0.016760671615600586, 0.01675507164001465, 0.016764320373535157, 0.016648576736450194, 0.016778783798217775, 0.01677359962463379, 0.016740352630615234, 0.016693119049072266, 0.016709760665893556, 0.016742399215698242, 0.016804031372070313, 0.01677622413635254, 0.016820415496826172, 0.01676348876953125, 0.016850719451904295, 0.016762592315673827, 0.017019392013549805, 0.016865440368652344, 0.016957504272460937, 0.0168056640625, 0.01700454330444336, 0.016943103790283204, 0.016990207672119142, 0.01683456039428711, 0.01784832000732422, 0.01676288032531738, 0.016773120880126953, 0.01683430480957031, 0.017082624435424805, 0.0170150089263916, 0.01698588752746582, 0.01712758445739746, 0.01734009552001953, 0.017123487472534178, 0.017184768676757813, 0.017149696350097655, 0.017108415603637694, 0.01700044822692871, 0.017079103469848634, 0.01720854377746582, 0.017179487228393554, 0.017067968368530275, 0.017104896545410156, 0.017121248245239258, 0.018423103332519532, 0.01776710319519043, 0.017552608489990233, 0.01732246398925781, 0.01736355209350586, 0.017381504058837892, 0.01720307159423828, 0.017149791717529297, 0.017040672302246093, 0.017154272079467774, 0.01702895927429199, 0.017271200180053712, 0.017066015243530273, 0.017106847763061525, 0.017041727066040038, 0.016997568130493163, 0.0169418888092041, 0.017010528564453124, 0.017116607666015624, 0.017333120346069337, 0.01710905647277832, 0.01691231918334961, 0.016975616455078123, 0.017249984741210936, 0.01732211112976074, 0.017506399154663087, 0.017563711166381835, 0.01760678482055664, 0.017380767822265625, 0.01730415916442871, 0.017225791931152343, 0.01717219161987305, 0.017070304870605468, 0.016977855682373047, 0.016990272521972657, 0.017041568756103517, 0.016999776840209962, 0.016970239639282226, 0.01699542427062988, 0.017167488098144532, 0.017272607803344726, 0.01744895935058594, 0.01751862335205078, 0.017579999923706055, 0.01752272033691406, 0.01766953659057617, 0.017660480499267578, 0.01768435287475586, 0.017669504165649413, 0.01760233688354492, 0.01765475273132324, 0.01756483268737793, 0.017555456161499023, 0.01745337677001953, 0.017458911895751952, 0.017498943328857423, 0.017551328659057615, 0.017624223709106445, 0.01763603210449219, 0.01730384063720703, 0.017260448455810547, 0.017254400253295898, 0.017268735885620116, 0.017290815353393555, 0.01960140800476074, 0.018509599685668947, 0.01923708724975586, 0.017690624237060547, 0.017824960708618165, 0.017793983459472657, 0.017545087814331055, 0.017520511627197265, 0.017740991592407225, 0.01761401557922363, 0.017678144454956055, 0.017704896926879883, 0.017663999557495116, 0.01890934371948242, 0.01765155220031738, 0.01733568000793457, 0.017320512771606445, 0.0174715518951416, 0.017545087814331055, 0.017518047332763673, 0.01763372802734375, 
0.017612127304077147, 0.017748863220214843, 0.017711103439331053, 0.017650976181030273, 0.018195167541503906, 0.019247295379638672, 0.017751615524291994, 0.017660160064697266, 0.01780531120300293, 0.017565248489379882, 0.01741804885864258, 0.01753971290588379, 0.0173253116607666, 0.017305696487426758, 0.017427104949951172, 0.017334175109863282, 0.017255840301513673, 0.01759916877746582, 0.01724415969848633, 0.017137664794921875, 0.018577056884765623, 0.01753327941894531, 0.017342527389526366, 0.017602495193481445, 0.017776416778564452, 0.017725568771362304, 0.017680479049682618, 0.01779088020324707, 0.01783203125, 0.017776607513427734, 0.01793846321105957, 0.01777663993835449, 0.01799577522277832, 0.017954944610595703, 0.017840000152587892, 0.01790755271911621, 0.017875328063964843, 0.017835840225219727, 0.017800447463989257, 0.017814239501953124, 0.01777199935913086, 0.01778332710266113, 0.018114559173583983, 0.018169055938720702, 0.0179965763092041, 0.01805721664428711, 0.017905664443969727, 0.01791958427429199, 0.017937984466552735, 0.01792255973815918, 0.01901398468017578, 0.017779935836791994, 0.017920896530151366, 0.01776639938354492, 0.017704864501953126, 0.0177457275390625, 0.017764575958251955, 0.017880960464477538, 0.01777027130126953, 0.01782329559326172, 0.017690975189208983, 0.017807775497436524, 0.01780531120300293, 0.01794047927856445, 0.017889280319213868, 0.017924095153808595, 0.01807151985168457, 0.01792207908630371, 0.0176312313079834, 0.01764249610900879, 0.017693056106567382, 0.01759846305847168, 0.017477439880371093, 0.01753481674194336, 0.017538015365600585, 0.01751785659790039, 0.017446943283081055, 0.01720185661315918, 0.017182336807250977, 0.017227743148803713, 0.0173121280670166, 0.017338399887084962, 0.01722153663635254, 0.017295455932617186, 0.017275903701782228, 0.01736729621887207, 0.017308416366577147, 0.01722163200378418, 0.01720319938659668, 0.017129791259765624, 0.017179647445678712, 0.017127712249755858, 0.017135040283203125, 0.017099391937255858, 0.0172109432220459, 0.01721014404296875, 0.0171824951171875, 0.017160415649414062, 0.017708032608032227, 0.017711135864257814, 0.017167327880859375, 0.01718396759033203, 0.01714796829223633, 0.017084415435791016, 0.017194911956787108, 0.017934303283691406, 0.017719072341918947, 0.017641056060791017, 0.017436288833618165, 0.01728371238708496, 0.017110687255859375, 0.017174560546875, 0.017179136276245118, 0.017282976150512695, 0.017090272903442384, 0.01749788856506348, 0.017620832443237304, 0.01710995292663574, 0.01706825637817383, 0.01699523162841797, 0.016999263763427735, 0.017039392471313478, 0.01715814399719238, 0.0171909122467041, 0.017029024124145507, 0.017931552886962892, 0.01716307258605957, 0.017102848052978514, 0.016963584899902344, 0.017102848052978514, 0.017104671478271483, 0.016972000122070313, 0.01710201644897461, 0.01694803237915039, 0.016917823791503906, 0.017154943466186522, 0.016873472213745116, 0.017004352569580078, 0.016854976654052733, 0.01700592041015625, 0.01696339225769043, 0.017245088577270508, 0.017124383926391602, 0.017218528747558595, 0.017360895156860352, 0.017328128814697266, 0.01717987251281738, 0.017406911849975587, 0.01714364814758301, 0.017268863677978516, 0.017542015075683592, 0.01713808059692383, 0.01712393569946289, 0.017196319580078126, 0.017203424453735353, 0.017281536102294923, 0.01728102493286133, 0.017294399261474608, 0.017445152282714843, 0.01739228820800781, 0.017294784545898438, 0.017304128646850585, 0.017297088623046877, 0.017326400756835936, 0.017289567947387695, 
0.017267744064331056, 0.01708095932006836, 0.016977567672729493, 0.018184192657470705, 0.017681888580322266, 0.01768294334411621, 0.01757801628112793, 0.01729100799560547, 0.017215744018554687, 0.0174583683013916, 0.017213823318481446, 0.017293344497680663, 0.01733468818664551, 0.01723187255859375, 0.01778179168701172, 0.01748908805847168, 0.01763100814819336, 0.01762940788269043, 0.01784566307067871, 0.017927648544311524, 0.017695680618286132, 0.01788105583190918, 0.01780531120300293, 0.017735679626464843, 0.017711103439331053, 0.017809600830078126, 0.017871904373168945, 0.017750944137573242, 0.01789529609680176, 0.018027807235717775, 0.017879776000976563, 0.017961984634399415, 0.017937408447265626, 0.01782579231262207, 0.01784560012817383, 0.01783875274658203, 0.017571584701538086, 0.017464607238769532, 0.01746019172668457, 0.017477632522583008, 0.017707008361816406, 0.017743871688842772, 0.01760256004333496, 0.017739776611328126, 0.017623136520385742, 0.01747052764892578, 0.017537471771240234, 0.017604192733764647, 0.017474176406860352, 0.01811631965637207, 0.017688959121704102, 0.01759651184082031, 0.017383424758911133, 0.017262592315673828, 0.01737673568725586, 0.017537343978881837, 0.017514720916748047, 0.01731724739074707, 0.017656448364257813, 0.01764761543273926, 0.017872831344604493, 0.01801628875732422, 0.017947872161865233, 0.01758195114135742, 0.0175216007232666, 0.01746112060546875, 0.01786591911315918, 0.01752556800842285, 0.017302848815917968, 0.017377983093261717, 0.017204416275024413, 0.017103328704833984, 0.017148256301879883, 0.01730544090270996, 0.01722368049621582, 0.017295520782470705, 0.01747884750366211, 0.017360992431640625, 0.017152063369750975, 0.017457311630249023, 0.01727743911743164, 0.017260032653808592, 0.01744060707092285, 0.01750886344909668, 0.017462656021118163, 0.01746614456176758, 0.01734623908996582, 0.01717030334472656, 0.017383264541625976, 0.017097312927246092, 0.01720921516418457, 0.017190528869628907, 0.017250240325927733, 0.017579872131347655, 0.017611743927001954, 0.017573888778686524, 0.017759775161743162, 0.017616159439086915, 0.017879999160766602, 0.01814463996887207, 0.017924928665161134, 0.01804284858703613, 0.01831065559387207, 0.017912160873413085, 0.017930240631103517, 0.019090431213378906, 0.018377216339111328, 0.018013952255249023, 0.018182912826538087, 0.01789936065673828, 0.01803455924987793, 0.018040096282958985, 0.017884159088134767, 0.017834175109863282, 0.017833791732788085, 0.017831680297851562, 0.018064895629882814, 0.018292543411254882, 0.018266431808471678, 0.018264703750610352, 0.018222240447998046, 0.018083999633789063, 0.01801900863647461, 0.018149375915527344, 0.017991680145263672, 0.018231296539306642, 0.017768447875976562, 0.01770086479187012, 0.017483903884887696]",tokens/s,57.58331729638546,,, -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -14725,7 +14725,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -14841,7 +14841,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
-4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -14919,7 +14919,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -15105,7 +15105,7 @@ ChildProcessError: Traceback (most recent call last): ValueError: RecurrentGemmaForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -15222,7 +15222,7 @@ AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,7160.696832,10248.650752,0.0,9862.905856,9797.835264,s,1,12.6677099609375,12.6677099609375,0.0,12.6677099609375,12.6677099609375,12.6677099609375,12.6677099609375,[12.6677099609375],,kWh,0.0001564087551833192,1.724564190963619e-05,5.322004257599794e-05,0.00022687443966895333,,MB,3037.663232,10626.138112,0.0,10211.033088,10097.347072,s,10,6.949856872558593,0.6949856872558593,0.0016327644485380204,0.6956404418945312,0.6960719543457031,0.6961941497802734,0.6962919061279297,"[0.6908757934570312, 0.6930463256835937, 0.6948036499023438, 0.6956387939453125, 0.6963163452148438, 0.6959058837890625, 0.6955938720703125, 0.6960447998046875, 0.6959893188476562, 0.69564208984375]",tokens/s,368.35290955532076,kWh,2.0330627241944765e-05,2.242116567629049e-06,1.3515288589999637e-05,3.608803239957345e-05,tokens/kWh,7093764.413795689,MB,3039.010816,10628.235264,0.0,10213.13024,10097.349632,s,10,33.050490234375,3.3050490234375003,0.009476094742290402,3.3068792724609377,3.312250415039063,3.314654992675781,3.316578654785156,"[3.2810458984375, 3.29672509765625, 3.303554443359375, 3.306720458984375, 3.3070380859375, 3.311716064453125, 3.3170595703125, 3.31084716796875, 3.30630126953125, 3.309482177734375]",tokens/s,19.06174448646309,kWh,9.664655558055466e-05,1.0660639885171222e-05,6.440607930260033e-05,0.0001717132747683262,tokens/kWh,366890.6791568617,,s,630,33.047305759429925,0.05245604088798402,0.0007997177349419684,0.05240371131896973,0.05318418121337891,0.05353638820648193,0.056124251632690435,"[0.05483919906616211, 0.051114078521728515, 0.05127782440185547, 0.052346336364746095, 0.0521418228149414, 0.051079071044921875, 0.05183071899414062, 0.0521328010559082, 0.051533824920654295, 0.051727935791015624, 0.05122092819213867, 0.051585025787353515, 0.05151334381103516, 0.051490814208984374, 0.05116108703613281, 0.051574337005615235, 0.05171244812011719, 0.05254332733154297, 0.05219449615478516, 0.05194803237915039, 0.05212815856933594, 0.05158256149291992, 0.05215660858154297, 0.05214031982421875, 0.05161574554443359, 0.052192928314208985, 0.05166320037841797, 0.052193279266357424, 0.05102592086791992, 0.05175091171264649, 0.05203542327880859, 0.05190671920776367, 0.05233635330200195, 0.05156687927246094, 0.052170753479003906, 0.05167308807373047, 0.052345951080322264, 0.05262204742431641, 0.0523388786315918, 0.05250371170043945, 0.05233135986328125, 0.052400127410888675, 0.052208641052246096, 0.05247427368164063, 0.05260044860839844, 0.052257759094238285, 0.051955711364746096, 0.05207654571533203, 0.05243699264526367, 0.0521638069152832, 0.05200678253173828, 0.05220240020751953, 0.051748863220214845, 0.052226047515869144, 0.05262745666503906, 0.052373504638671874, 0.05267657470703125, 0.05233164978027344, 0.0525599365234375, 0.05258444976806641, 0.05221196746826172, 0.05290659332275391, 0.05243494415283203, 0.05495798492431641, 0.0518021125793457, 0.051435649871826174, 0.051162593841552736, 0.05178217697143555, 0.05086207962036133, 0.05193091201782227, 0.05171958541870117, 0.052049888610839846, 0.052832382202148434, 0.05216310501098633, 0.0518903694152832, 0.05128806304931641, 0.05196169662475586, 0.05163433456420898, 0.05203558349609375, 0.052170753479003906, 0.053037025451660155, 0.0531190071105957, 0.052080127716064455, 0.05245347213745117, 0.05220547103881836, 0.051512832641601565, 0.05199155044555664, 0.05181548690795899, 
0.05253011322021484, 0.052113025665283204, 0.052455806732177736, 0.052590591430664066, 0.051763137817382815, 0.05259215927124023, 0.05147097778320313, 0.052651935577392575, 0.052225566864013674, 0.05189379119873047, 0.05277510452270508, 0.05239270401000977, 0.05280931091308594, 0.05326473617553711, 0.05240838241577148, 0.05245542526245117, 0.05320223999023437, 0.05199737548828125, 0.05285059356689453, 0.05254070281982422, 0.05245779037475586, 0.05178598403930664, 0.05292057418823242, 0.05287046432495117, 0.05197391891479492, 0.05231708908081055, 0.0514785270690918, 0.05267443084716797, 0.0529409294128418, 0.05275596618652344, 0.05304166412353516, 0.05216825485229492, 0.05301011276245117, 0.05301862335205078, 0.05264051055908203, 0.052577342987060544, 0.05185804748535156, 0.053037376403808595, 0.05605055999755859, 0.051873023986816404, 0.050805057525634766, 0.05142777633666992, 0.05194956970214844, 0.05204582214355469, 0.05185494232177734, 0.051909023284912106, 0.05220115280151367, 0.051183902740478515, 0.05187356948852539, 0.051969345092773435, 0.05227411270141601, 0.05214585494995117, 0.05171574401855469, 0.05245609664916992, 0.051638080596923826, 0.053639358520507815, 0.0535464973449707, 0.053172737121582034, 0.05210831832885742, 0.051243423461914066, 0.052021343231201174, 0.05204220962524414, 0.051515071868896485, 0.05261958312988281, 0.05183276748657226, 0.052627521514892577, 0.05213561630249024, 0.05206835174560547, 0.0523015022277832, 0.05224921417236328, 0.05223014450073242, 0.05631734466552735, 0.051303009033203124, 0.05312921524047851, 0.052291584014892575, 0.053172222137451174, 0.05284611129760742, 0.05262326431274414, 0.05267103958129883, 0.05134131240844726, 0.053413375854492184, 0.05239798355102539, 0.052099231719970704, 0.05278108978271484, 0.05174099349975586, 0.05192918395996094, 0.05171331024169922, 0.05269696044921875, 0.053465312957763675, 0.05295372772216797, 0.052512767791748044, 0.05269615936279297, 0.052724510192871096, 0.05299008178710937, 0.05246156692504883, 0.0529705924987793, 0.053504928588867184, 0.05263529586791992, 0.05253276824951172, 0.05209171295166016, 0.05250576019287109, 0.0561803207397461, 0.05215887832641602, 0.05087641525268555, 0.051328510284423826, 0.05116310501098633, 0.051866142272949216, 0.05189363098144531, 0.05189081573486328, 0.051953662872314454, 0.051593215942382815, 0.05235452651977539, 0.052789791107177735, 0.052279232025146484, 0.052800575256347654, 0.05211443328857422, 0.052547584533691405, 0.052553150177001955, 0.05265011215209961, 0.0539284782409668, 0.052776256561279294, 0.05240278244018555, 0.052142078399658204, 0.05200595092773438, 0.05195052719116211, 0.051395744323730466, 0.05210198211669922, 0.05163363265991211, 0.05207043075561523, 0.052076576232910156, 0.05242486572265625, 0.05265235137939453, 0.05216255950927735, 0.05276211166381836, 0.053080577850341794, 0.052541278839111326, 0.05313308715820313, 0.05279980850219727, 0.05318217468261719, 0.05322742462158203, 0.052300159454345706, 0.05277497482299805, 0.05228691101074219, 0.05202569580078125, 0.052866752624511716, 0.05182479858398437, 0.052389312744140625, 0.05195980834960937, 0.052738624572753905, 0.05286336135864258, 0.05201100921630859, 0.05292031860351563, 0.052393985748291017, 0.052951038360595705, 0.052910079956054686, 0.05291212844848633, 0.053286911010742184, 0.05267030334472656, 0.05297782516479492, 0.053433952331542967, 0.052437408447265625, 0.05264156723022461, 0.052969215393066406, 0.05243337631225586, 0.055416736602783206, 0.051969825744628904, 
0.05170223999023438, 0.051816192626953125, 0.05147603225708008, 0.05145462417602539, 0.05114038467407227, 0.05075369644165039, 0.052278526306152345, 0.05205635070800781, 0.05228201675415039, 0.05243084716796875, 0.05194956970214844, 0.05233868789672851, 0.051471488952636715, 0.05251772689819336, 0.05268073654174805, 0.05328486251831055, 0.05352403259277344, 0.05226540756225586, 0.05303500747680664, 0.05255987167358398, 0.05203292846679688, 0.05218159866333008, 0.052094974517822266, 0.051681278228759765, 0.051991775512695314, 0.052049983978271486, 0.05212643051147461, 0.05216179275512695, 0.05234355163574219, 0.05286697769165039, 0.052144222259521485, 0.052680225372314454, 0.05264841461181641, 0.05270732879638672, 0.05312716674804688, 0.05278515243530273, 0.05367574310302734, 0.05245980834960937, 0.052641342163085934, 0.05286342239379883, 0.05258028793334961, 0.05225244903564453, 0.05136124801635742, 0.05216134262084961, 0.052483745574951175, 0.052711776733398434, 0.05248409652709961, 0.052391937255859375, 0.0525250244140625, 0.052881439208984374, 0.05364902496337891, 0.05294883346557617, 0.05209552001953125, 0.053419265747070316, 0.05348838424682617, 0.05478780746459961, 0.05250665664672852, 0.052934913635253905, 0.05269913482666016, 0.05288140869140625, 0.05180112075805664, 0.05650457763671875, 0.05192879867553711, 0.05173276901245117, 0.05506047821044922, 0.05095129776000976, 0.05224127960205078, 0.05142272186279297, 0.051476993560791016, 0.05118099212646485, 0.05191737747192383, 0.05146419143676758, 0.05235279846191406, 0.05243721771240235, 0.0522911376953125, 0.05254502487182617, 0.05228009414672852, 0.052537246704101564, 0.05424335861206055, 0.05327872085571289, 0.052954719543457034, 0.05260284805297852, 0.05276467132568359, 0.05254620742797852, 0.05163756942749023, 0.05248684692382812, 0.05142259216308594, 0.051722145080566405, 0.05223292922973633, 0.052089855194091796, 0.05225164794921875, 0.05219942474365234, 0.0522608642578125, 0.05256758499145508, 0.05219990539550781, 0.0530513916015625, 0.052994049072265625, 0.05346255874633789, 0.0536478385925293, 0.052596736907958984, 0.05276224136352539, 0.051904895782470706, 0.052837921142578126, 0.05285715103149414, 0.05234627151489258, 0.052163135528564455, 0.051323070526123046, 0.0522608642578125, 0.052182334899902344, 0.0523570556640625, 0.05287308883666992, 0.05238662338256836, 0.05254560089111328, 0.05257401657104492, 0.05287865447998047, 0.05383257675170899, 0.05269465637207031, 0.05309430313110351, 0.0534031982421875, 0.05268368148803711, 0.053121311187744144, 0.05265926361083984, 0.05306639862060547, 0.05305132675170898, 0.05672505569458008, 0.052404640197753906, 0.05100751876831055, 0.051963905334472656, 0.05120000076293945, 0.051335166931152344, 0.05085551834106445, 0.05198601531982422, 0.051998817443847656, 0.05228003311157227, 0.05240115356445312, 0.05187398529052734, 0.05249478530883789, 0.052017536163330075, 0.05228307342529297, 0.05288991928100586, 0.052553409576416014, 0.053326145172119144, 0.053055072784423826, 0.05312963104248047, 0.052579742431640625, 0.052090526580810544, 0.052656223297119144, 0.05260927963256836, 0.052009567260742184, 0.05193523025512695, 0.051641761779785154, 0.05267923355102539, 0.05212934494018555, 0.05229379272460938, 0.052590911865234374, 0.051568031311035156, 0.05273603057861328, 0.05225529479980469, 0.05304729461669922, 0.0537740478515625, 0.053131553649902345, 0.053217281341552736, 0.05361663818359375, 0.05250646209716797, 0.05276803207397461, 0.052368030548095704, 0.05268521499633789, 
0.05605561447143555, 0.05127916717529297, 0.05283910369873047, 0.05283020782470703, 0.05264384078979492, 0.052768768310546874, 0.05191846466064453, 0.052722049713134767, 0.05286092758178711, 0.05296332931518555, 0.05301248168945313, 0.05331353759765625, 0.05330659103393555, 0.05379900741577148, 0.05301443099975586, 0.05338528060913086, 0.05262384033203125, 0.052951297760009765, 0.05365760040283203, 0.05212575912475586, 0.05615228652954102, 0.05149472045898437, 0.051776863098144534, 0.05159203338623047, 0.051310142517089846, 0.051222976684570314, 0.05224649429321289, 0.05203532791137695, 0.05207183837890625, 0.05211584091186523, 0.052066814422607424, 0.05246156692504883, 0.05213916778564453, 0.05195792007446289, 0.052458175659179686, 0.052547584533691405, 0.053008384704589843, 0.05261270523071289, 0.05331539154052734, 0.05550700759887695, 0.05140646362304688, 0.05270415878295898, 0.052598785400390625, 0.0522158088684082, 0.052045440673828124, 0.05196428680419922, 0.05209702301025391, 0.05168332672119141, 0.05246105575561524, 0.05268326568603516, 0.05224857711791992, 0.052346687316894534, 0.05183916854858398, 0.05336883163452148, 0.052749568939208985, 0.05275660705566406, 0.05311862564086914, 0.053217823028564454, 0.05253984069824219, 0.0528317756652832, 0.05214665603637696, 0.052563392639160156, 0.05254345703125, 0.052505184173583984, 0.052653377532958984, 0.05217145538330078, 0.05253529739379883, 0.05217267227172852, 0.05235929489135742, 0.05286707305908203, 0.052291584014892575, 0.05283430480957031, 0.052531200408935545, 0.05265510559082031, 0.05381119918823242, 0.0529496955871582, 0.052642017364501956, 0.05337247848510742, 0.05278752136230469, 0.05301424026489258, 0.052494144439697264, 0.05304348754882812, 0.052630943298339845, 0.0569090576171875, 0.052230911254882814, 0.05127577590942383, 0.05145971298217773, 0.05146809768676758, 0.051378753662109374, 0.05111808013916016, 0.051979679107666016, 0.05242736053466797, 0.05212160110473633, 0.0521146240234375, 0.05180294418334961, 0.052512767791748044, 0.05167887878417969, 0.052604705810546874, 0.05215030288696289, 0.05217948913574219, 0.05351628875732422, 0.053133312225341796, 0.05296083068847656, 0.052740543365478516, 0.05224857711791992, 0.05190860748291016, 0.05163417434692383, 0.052514942169189456, 0.05186751937866211, 0.05204547119140625, 0.05232060623168945, 0.052006622314453126, 0.052305343627929685, 0.051993438720703125, 0.05252505493164063, 0.05280912017822265, 0.05212015914916992, 0.05253228759765625, 0.05277382278442383, 0.05314889526367188, 0.05330793762207031, 0.05293695831298828, 0.05278915023803711, 0.05307308959960937, 0.052294208526611326, 0.05248060989379883, 0.05215411376953125, 0.05231206512451172, 0.0524183349609375, 0.052453601837158206, 0.05286297607421875, 0.05227724838256836, 0.052574207305908206, 0.05222195053100586, 0.05257164764404297, 0.053058048248291016, 0.05262745666503906, 0.053378238677978515, 0.053449024200439454, 0.052711936950683595, 0.052979423522949216, 0.05261116790771484, 0.052628929138183594, 0.052595199584960936, 0.05206991958618164, 0.0526360969543457, 0.05696633529663086, 0.05194937515258789, 0.05119692611694336, 0.0520533447265625, 0.051444320678710936, 0.0510728645324707, 0.052484321594238284, 0.051676990509033204, 0.05169375991821289, 0.052086784362792966, 0.05207440185546875, 0.05158224105834961, 0.05212051010131836, 0.05268572616577148, 0.05257315063476563, 0.05249990463256836, 0.052136512756347654, 0.05360844802856445, 0.05359791946411133, 0.05270489501953125, 0.05222803115844726, 
0.0522632942199707, 0.0521506233215332, 0.052200511932373045, 0.052061119079589845, 0.05215014266967773, 0.051636192321777345, 0.05226694488525391, 0.05226835250854492, 0.052128063201904294, 0.05265673446655274, 0.052245887756347656, 0.052421249389648435, 0.05205811309814453, 0.05247734451293945, 0.05356115341186524, 0.05306985473632812, 0.053179134368896486, 0.05347084808349609, 0.05255923080444336, 0.05253811264038086, 0.05234092712402344, 0.05250873565673828, 0.05248988723754883, 0.052085086822509764, 0.0528504638671875, 0.05194364929199219, 0.052641342163085934, 0.05218291091918945, 0.05260960006713867, 0.05290393447875977, 0.052299774169921875, 0.05291417694091797, 0.05339750289916992, 0.05293183898925781, 0.05333465576171875, 0.05302489471435547, 0.05255097579956055, 0.05364806365966797, 0.052414016723632814, 0.05260537719726562, 0.0530247688293457, 0.05262646484375]",tokens/s,19.063581297250888,,, -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -15267,10 +15267,10 @@ ChildProcessError: Traceback (most recent call last): self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 4.12 MiB is free. Process 108973 has 14.73 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 242.96 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 4.12 MiB is free. Process 107808 has 14.73 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 242.96 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -15340,7 +15340,7 @@ ChildProcessError: Traceback (most recent call last): ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -15429,7 +15429,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -15518,7 +15518,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -15783,7 +15783,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): 
+4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -16503,7 +16503,7 @@ ValueError: CodeGenForCausalLM does not support an attention implementation thro " 4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1012.629504,867.106816,0.0,488.636416,482.553856,s,1,8.0609853515625,8.0609853515625,0.0,8.0609853515625,8.0609853515625,8.0609853515625,8.0609853515625,[8.0609853515625],,kWh,2.8416175170870397e-05,3.1269823558455027e-06,8.546673504000735e-06,4.0089831030716635e-05,,MB,1239.527424,1024.393216,0.0,616.562688,582.974464,s,10,0.27054739379882814,0.027054739379882814,0.000286009729749811,0.026956192016601564,0.027177914237976077,0.027537452983856202,0.027825083980560304,"[0.02789699172973633, 0.02697020721435547, 0.026962175369262695, 0.026941280364990234, 0.026929088592529297, 0.02695020866394043, 0.02691299247741699, 0.027000160217285157, 0.027098016738891603, 0.02688627243041992]",tokens/s,9462.29776622261,kWh,8.612702605701969e-07,9.498283774394124e-08,5.70487182053024e-07,1.5267402803671622e-06,tokens/kWh,167677504.3483068,MB,1250.967552,1036.976128,0.0,629.1456,597.192192,s,10,11.528859130859376,1.1528859130859375,0.013783837027398216,1.1535343017578126,1.1710545288085938,1.1736548278808594,1.175735067138672,"[1.1483973388671875, 1.157601318359375, 1.1445784912109376, 1.128986328125, 1.14946728515625, 1.1581435546875, 1.135988525390625, 1.176255126953125, 1.1589644775390624, 1.1704766845703125]",tokens/s,54.645476438659465,kWh,3.303405783776308e-05,3.64319500948955e-06,1.3625607508146755e-05,5.0302860355399364e-05,tokens/kWh,1252413.8698056713,,s,630,11.52320091629028,0.01829079510522267,0.0005419719436823987,0.018167872428894043,0.018848256111145018,0.01896842737197876,0.019749512386322023,"[0.017847360610961913, 0.017965568542480468, 0.018098560333251953, 0.018417472839355468, 0.018214399337768555, 0.017998592376708984, 0.01790777587890625, 0.017827775955200194, 
0.0179652156829834, 0.01795055961608887, 0.01780121612548828, 0.017927391052246094, 0.01788934326171875, 0.017883871078491213, 0.0178155517578125, 0.0178985595703125, 0.017884096145629882, 0.01797324752807617, 0.017950016021728514, 0.01790979194641113, 0.01780784034729004, 0.017891519546508788, 0.018081792831420897, 0.017856224060058594, 0.017899808883666993, 0.01845043182373047, 0.01801366424560547, 0.017934879302978515, 0.01805516815185547, 0.01799782371520996, 0.018082847595214845, 0.018039775848388673, 0.01966044807434082, 0.017974815368652343, 0.017895584106445313, 0.017866432189941408, 0.017930944442749022, 0.01852035140991211, 0.01975663948059082, 0.01851798439025879, 0.018310815811157226, 0.01817884826660156, 0.018152671813964842, 0.018063455581665038, 0.01804342460632324, 0.01812224006652832, 0.018064031600952147, 0.018214752197265625, 0.01839302444458008, 0.018623903274536134, 0.01873107147216797, 0.01918617630004883, 0.01857356834411621, 0.019163135528564454, 0.01863270378112793, 0.018753856658935548, 0.018578111648559572, 0.01836953544616699, 0.018874368667602538, 0.018527456283569336, 0.01844918441772461, 0.01835212707519531, 0.01816166305541992, 0.01779408073425293, 0.018195423126220703, 0.018294143676757812, 0.01831590461730957, 0.01845452880859375, 0.018568864822387697, 0.018526559829711915, 0.01859993553161621, 0.019179519653320314, 0.01928544044494629, 0.019657279968261717, 0.027027456283569336, 0.018192384719848635, 0.018225151062011717, 0.01872822380065918, 0.0186265926361084, 0.018621280670166017, 0.01853775978088379, 0.01842848014831543, 0.01823744010925293, 0.018426912307739258, 0.01848828887939453, 0.01853545570373535, 0.01840224075317383, 0.018439359664916992, 0.018068319320678712, 0.017979391098022462, 0.017968544006347655, 0.018022815704345704, 0.017965248107910156, 0.018108415603637695, 0.018155263900756835, 0.018168064117431642, 0.018169183731079102, 0.018137760162353515, 0.018288639068603514, 0.01851372718811035, 0.017970815658569336, 0.01798201560974121, 0.017944063186645508, 0.017856096267700194, 0.017904544830322267, 0.01791328048706055, 0.01800454330444336, 0.017966495513916016, 0.018004575729370118, 0.017900800704956053, 0.01806822395324707, 0.017909311294555665, 0.017951391220092774, 0.017894720077514647, 0.017844703674316405, 0.01778700828552246, 0.018076992034912108, 0.017996288299560546, 0.017791040420532228, 0.017952768325805665, 0.01793132781982422, 0.017882047653198244, 0.01782956886291504, 0.0178035831451416, 0.019271072387695314, 0.018202495574951173, 0.017990976333618163, 0.01796780776977539, 0.01807974433898926, 0.017747488021850586, 0.01781705665588379, 0.018241600036621095, 0.017855424880981446, 0.01778188705444336, 0.017891616821289064, 0.017903936386108397, 0.01791209602355957, 0.01796089553833008, 0.017844287872314454, 0.01784377670288086, 0.01777299118041992, 0.01791391944885254, 0.01780726432800293, 0.017915136337280275, 0.017904415130615234, 0.017864704132080078, 0.01780735969543457, 0.01807360076904297, 0.01973206329345703, 0.01810883140563965, 0.01802444839477539, 0.017922048568725587, 0.018001920700073244, 0.01811849594116211, 0.01831056022644043, 0.017926496505737306, 0.017848735809326173, 0.017967103958129883, 0.0180097599029541, 0.01923072052001953, 0.018368864059448244, 0.018095327377319337, 0.01816655921936035, 0.019165184020996092, 0.018245567321777345, 0.018174016952514648, 0.01821615982055664, 0.018201248168945312, 0.018282623291015626, 0.018214527130126952, 0.01828006362915039, 0.01837727928161621, 0.018311359405517577, 
0.018215999603271485, 0.01814358329772949, 0.01814588737487793, 0.018251232147216797, 0.018322175979614257, 0.018707584381103516, 0.018612672805786133, 0.01845065689086914, 0.018374656677246092, 0.018312671661376952, 0.018589792251586915, 0.0183089599609375, 0.018165567398071288, 0.018274879455566405, 0.018030624389648437, 0.017982751846313476, 0.017631263732910157, 0.018114559173583983, 0.017987712860107422, 0.017986719131469726, 0.017875295639038086, 0.017837984085083008, 0.017864511489868163, 0.017746591567993165, 0.017846271514892577, 0.017750015258789064, 0.017909696578979492, 0.017872575759887696, 0.017852800369262695, 0.01794867134094238, 0.018079456329345704, 0.018014047622680666, 0.01780928039550781, 0.017920576095581054, 0.017790048599243165, 0.017822240829467772, 0.017856895446777345, 0.0178155517578125, 0.01784796714782715, 0.017878816604614257, 0.017840703964233397, 0.01779302406311035, 0.017887231826782226, 0.017923807144165037, 0.01791209602355957, 0.01782748794555664, 0.017881439208984374, 0.01780940818786621, 0.01780748748779297, 0.01786662483215332, 0.01779302406311035, 0.01781068801879883, 0.017801984786987305, 0.0178155517578125, 0.01781372833251953, 0.017810783386230468, 0.017942975997924805, 0.017960704803466798, 0.017930496215820314, 0.01803468894958496, 0.01803376007080078, 0.018080671310424803, 0.018059263229370116, 0.018062528610229493, 0.018297664642333983, 0.01805516815185547, 0.017989599227905273, 0.01802025604248047, 0.017859872817993165, 0.01798659133911133, 0.017884992599487306, 0.017903615951538086, 0.017889280319213868, 0.018126623153686523, 0.017938655853271486, 0.017945632934570313, 0.017970144271850588, 0.018077695846557617, 0.017988927841186525, 0.017742656707763673, 0.018051103591918947, 0.018124799728393554, 0.017960960388183594, 0.01790959930419922, 0.017903776168823243, 0.0178767032623291, 0.01802217674255371, 0.017947071075439452, 0.018091903686523438, 0.01807993507385254, 0.01834815979003906, 0.018573183059692383, 0.01877180862426758, 0.018969823837280273, 0.019212928771972657, 0.018620832443237305, 0.01856121635437012, 0.018681568145751955, 0.018653472900390624, 0.01852182388305664, 0.01857535934448242, 0.018933855056762695, 0.018614015579223632, 0.018431264877319335, 0.01828134346008301, 0.01890265655517578, 0.018216703414916994, 0.017981407165527343, 0.017869152069091798, 0.017961280822753906, 0.017862752914428712, 0.018077407836914063, 0.01820857620239258, 0.018133472442626954, 0.01807513618469238, 0.01807606315612793, 0.01815353584289551, 0.018077280044555662, 0.018405567169189452, 0.01795430374145508, 0.018092544555664062, 0.01833590316772461, 0.01806540870666504, 0.01794867134094238, 0.017969152450561524, 0.017860639572143556, 0.017983455657958985, 0.01801817512512207, 0.018135072708129883, 0.01819251251220703, 0.018352256774902344, 0.01823535919189453, 0.018014080047607423, 0.018019519805908202, 0.01818502426147461, 0.018704383850097657, 0.018280031204223633, 0.018313631057739258, 0.018201728820800782, 0.018191232681274414, 0.018130239486694337, 0.018250240325927734, 0.017895040512084962, 0.018139520645141603, 0.018149375915527344, 0.018091264724731444, 0.018579519271850586, 0.01864569664001465, 0.018605215072631836, 0.01853036880493164, 0.01865603256225586, 0.018605728149414063, 0.018774143218994142, 0.01872630310058594, 0.01864089584350586, 0.01865715217590332, 0.018502592086791992, 0.018467967987060546, 0.01826700782775879, 0.018110368728637697, 0.018014303207397463, 0.018208383560180664, 0.018530176162719725, 0.018700639724731447, 
0.01865513610839844, 0.01886832046508789, 0.018845855712890627, 0.01875948715209961, 0.01873871994018555, 0.018651359558105467, 0.01852569580078125, 0.018545600891113283, 0.018413568496704103, 0.018370559692382812, 0.018452192306518556, 0.018827552795410155, 0.018204864501953126, 0.018177183151245117, 0.018178720474243164, 0.01827599906921387, 0.018310752868652344, 0.018363136291503906, 0.01823744010925293, 0.018147327423095702, 0.018008064270019532, 0.01806278419494629, 0.018116575241088867, 0.018268768310546874, 0.01821696090698242, 0.018316896438598632, 0.018288543701171875, 0.01829052734375, 0.018329599380493163, 0.018292512893676758, 0.018185184478759765, 0.01821878433227539, 0.018216543197631836, 0.018342079162597655, 0.01813711929321289, 0.018206943511962892, 0.018097248077392578, 0.01813811111450195, 0.018569215774536133, 0.018210912704467775, 0.018083295822143554, 0.017761568069458007, 0.018022239685058592, 0.018148223876953126, 0.018013248443603514, 0.018005056381225584, 0.017940351486206055, 0.01801158332824707, 0.017914527893066405, 0.017978303909301756, 0.017871488571166994, 0.017955167770385742, 0.017845504760742186, 0.01787571144104004, 0.017794912338256835, 0.0178832950592041, 0.017758367538452148, 0.017870687484741212, 0.01778819274902344, 0.017893407821655275, 0.017856800079345703, 0.017801631927490236, 0.017811456680297853, 0.017819648742675782, 0.018138303756713867, 0.0181276798248291, 0.017788703918457032, 0.017926048278808594, 0.017818111419677735, 0.0178175048828125, 0.017885087966918945, 0.017841440200805664, 0.017838016510009765, 0.017836639404296875, 0.017834175109863282, 0.017908927917480468, 0.01787593650817871, 0.017882591247558595, 0.017869440078735352, 0.0180031681060791, 0.01787059211730957, 0.018051872253417967, 0.017983488082885742, 0.01807360076904297, 0.01803468894958496, 0.018167680740356445, 0.018030208587646486, 0.018162176132202147, 0.0182108154296875, 0.018097375869750975, 0.01805392074584961, 0.018116416931152343, 0.018124544143676757, 0.018297279357910156, 0.018188608169555663, 0.018069183349609375, 0.0182476806640625, 0.018274303436279296, 0.01851408004760742, 0.018390815734863283, 0.018491104125976564, 0.018698591232299805, 0.018628543853759765, 0.018549951553344726, 0.01841379165649414, 0.018676959991455078, 0.018732959747314454, 0.01862723159790039, 0.01875289535522461, 0.018737279891967773, 0.01876838493347168, 0.01877363204956055, 0.018750112533569337, 0.018716384887695312, 0.01902387237548828, 0.018593311309814453, 0.018416095733642578, 0.01841391944885254, 0.018680767059326173, 0.01831923294067383, 0.01841375923156738, 0.018487136840820314, 0.018619199752807618, 0.01859993553161621, 0.01855404853820801, 0.018333631515502928, 0.018309696197509766, 0.018161088943481445, 0.018082687377929688, 0.01805267143249512, 0.018177824020385744, 0.018102880477905273, 0.01814352035522461, 0.0182205753326416, 0.018330015182495118, 0.018433055877685546, 0.018299936294555664, 0.01845840072631836, 0.01828976058959961, 0.018436704635620117, 0.01864896011352539, 0.018886560440063475, 0.01880873680114746, 0.01873369598388672, 0.019148704528808593, 0.01936191940307617, 0.018957311630249024, 0.01887945556640625, 0.01884774398803711, 0.018966720581054686, 0.019066688537597656, 0.018999551773071287, 0.01892076873779297, 0.01883795166015625, 0.01885968017578125, 0.01884156799316406, 0.018962175369262695, 0.018983552932739258, 0.01885753631591797, 0.018927743911743164, 0.01887433624267578, 0.018902944564819335, 0.01884819221496582, 0.018890752792358398, 
0.01884172821044922, 0.01893987274169922, 0.01887753677368164, 0.018735200881958007, 0.018895296096801757, 0.01890883255004883, 0.019278175354003908, 0.01883135986328125, 0.018630847930908204, 0.018636255264282225, 0.018491519927978515, 0.018380224227905275, 0.018254623413085938, 0.018124799728393554, 0.018321407318115233, 0.018394176483154296, 0.018676671981811523, 0.02060006332397461, 0.018762527465820314, 0.01835935974121094, 0.01820355224609375, 0.01824492835998535, 0.018021055221557617, 0.01797324752807617, 0.01799123191833496, 0.017961408615112303, 0.018250751495361327, 0.01817523193359375, 0.01816694450378418, 0.018086496353149413, 0.018104320526123048, 0.018155519485473632, 0.018067455291748045, 0.01803264045715332, 0.0181014404296875, 0.01826464080810547, 0.018415456771850587, 0.01834239959716797, 0.018528160095214845, 0.019941120147705077, 0.018479360580444335, 0.018435840606689454, 0.018409727096557617, 0.018386943817138672, 0.018308832168579103, 0.01834217643737793, 0.01822719955444336, 0.01819011116027832, 0.018178272247314452, 0.018233407974243165, 0.01822015953063965, 0.018954111099243165, 0.0185599365234375, 0.01820163154602051, 0.01804617691040039, 0.01811164855957031, 0.01818009567260742, 0.018444992065429686, 0.018081792831420897, 0.018343103408813476, 0.018559711456298828, 0.018257919311523436, 0.01816307258605957, 0.018051712036132813, 0.017911808013916015, 0.017897472381591797, 0.01784182357788086, 0.01811008071899414, 0.01802511978149414, 0.017948352813720703, 0.017948991775512697, 0.018045984268188476, 0.019989503860473632, 0.020513856887817383, 0.0179866886138916, 0.01797088050842285, 0.017952863693237304, 0.018054399490356445, 0.017857280731201172, 0.01798748779296875, 0.01801955223083496, 0.01791200065612793, 0.018180192947387694, 0.01808211135864258, 0.018082080841064455, 0.01839308738708496, 0.01838719940185547, 0.01801116752624512, 0.017988319396972655, 0.01820467185974121, 0.01803878402709961, 0.017936384201049805, 0.0180633602142334, 0.018329599380493163, 0.018397184371948243, 0.018448383331298827, 0.018386175155639648, 0.018914047241210936, 0.0184333438873291, 0.01854316711425781, 0.018796672821044923, 0.01878790473937988, 0.018891199111938477, 0.018832895278930666, 0.018921215057373045, 0.019004383087158204, 0.018935840606689455, 0.018824960708618162, 0.01885593605041504, 0.018804767608642577, 0.019825664520263672, 0.018900192260742188, 0.018848831176757813, 0.019108543395996092, 0.018970624923706055, 0.01886591911315918, 0.018827520370483398, 0.018851839065551757, 0.018843839645385742, 0.01869923210144043, 0.019450719833374024, 0.01880473518371582, 0.018903039932250978, 0.018827104568481447, 0.018771711349487304, 0.018823583602905272, 0.01901468849182129, 0.019045343399047853, 0.018797760009765626]",tokens/s,54.67230889894254,,, -4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -16600,7 +16600,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -16744,7 +16744,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
-4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -16930,7 +16930,7 @@ ChildProcessError: Traceback (most recent call last): ValueError: RecurrentGemmaForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " -4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -17047,7 +17047,7 @@ AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,7160.369152,10246.5536,0.0,9860.808704,9797.323264,s,1,12.4646513671875,12.4646513671875,0.0,12.4646513671875,12.4646513671875,12.4646513671875,12.4646513671875,[12.4646513671875],,kWh,0.00015677204867916618,1.7285749644863794e-05,5.235254188200311e-05,0.00022641034020603307,,MB,3021.86496,10626.138112,0.0,10211.033088,10096.835072,s,10,7.4292164916992185,0.7429216491699219,0.006665599289488292,0.745841552734375,0.7476935180664063,0.7480543090820313,0.7483429418945312,"[0.724827880859375, 0.7396456298828125, 0.7405772705078125, 0.7466072387695313, 0.742830810546875, 0.746778564453125, 0.7450758666992188, 0.7468447875976563, 0.7484151000976562, 
0.7476133422851563]",tokens/s,344.5854623916706,kWh,2.1602723358332317e-05,2.3816632340083317e-06,1.4358027359428244e-05,3.834241395176889e-05,tokens/kWh,6676679.259736323,MB,3032.02304,10628.235264,0.0,10213.13024,10096.837632,s,10,34.91660595703125,3.491660595703125,0.0033925189934705976,3.4927337646484373,3.4947266357421873,3.495775061035156,3.496613801269531,"[3.4855791015625, 3.49337255859375, 3.48746923828125, 3.492094970703125, 3.4898974609375, 3.488794189453125, 3.49424609375, 3.493835205078125, 3.496823486328125, 3.49449365234375]",tokens/s,18.042990798569733,kWh,0.00010236015592375187,1.1291553674374102e-05,6.807223302917203e-05,0.00018172394262729795,tokens/kWh,346679.6894738754,,s,630,34.91343323135378,0.055418147986275805,0.0006959057097443701,0.05533124923706055,0.05586218299865722,0.05606899890899658,0.05975755733489991,"[0.059810592651367185, 0.05535657501220703, 0.0547476806640625, 0.05444992065429687, 0.05470790481567383, 0.05490345764160156, 0.05468796920776367, 0.05472988891601562, 0.05507977676391602, 0.05479401779174805, 0.05489459228515625, 0.055245025634765625, 0.05528985595703125, 0.05506028747558594, 0.05479443359375, 0.05519564819335938, 0.055344993591308594, 0.055715999603271484, 0.05568716812133789, 0.05533235168457031, 0.05511743927001953, 0.054827743530273435, 0.05509545516967773, 0.05528307342529297, 0.054994815826416014, 0.05482358551025391, 0.05517081451416016, 0.05533116912841797, 0.055174720764160155, 0.05498720169067383, 0.055162113189697264, 0.05522713470458984, 0.055041152954101565, 0.05533935928344726, 0.05553129577636719, 0.055470848083496095, 0.05608243179321289, 0.05520147323608399, 0.05537814331054688, 0.055203937530517576, 0.05506662368774414, 0.05509939193725586, 0.055314430236816405, 0.0550645751953125, 0.055076862335205076, 0.055154399871826174, 0.05548812866210937, 0.055177886962890624, 0.05527142333984375, 0.05525299072265625, 0.05571379089355469, 0.055384063720703126, 0.055431167602539064, 0.05564166259765625, 0.0559354248046875, 0.05698355102539063, 0.05543494415283203, 0.055296321868896485, 0.05552556610107422, 0.05544121551513672, 0.055314590454101566, 0.0552973747253418, 0.05561395263671875, 0.06015087890625, 0.05583145523071289, 0.060641281127929686, 0.05434777450561523, 0.05456076812744141, 0.054830142974853516, 0.05461062240600586, 0.05482726287841797, 0.054506942749023436, 0.054423454284667966, 0.05496284866333008, 0.05493145751953125, 0.05515484619140625, 0.05510550308227539, 0.05534502410888672, 0.055166400909423825, 0.055607872009277345, 0.0559529914855957, 0.05621596908569336, 0.05531587219238281, 0.05480303955078125, 0.05487411117553711, 0.055242752075195314, 0.05494784164428711, 0.054831104278564455, 0.05476988983154297, 0.0553592643737793, 0.0550830078125, 0.055107040405273436, 0.05523817443847656, 0.05526630401611328, 0.05529942321777344, 0.05511552047729492, 0.05580278396606445, 0.055976993560791014, 0.0559769287109375, 0.05561328125, 0.05553782272338867, 0.05533695983886719, 0.05522998428344727, 0.05529420852661133, 0.054949310302734374, 0.05505513763427734, 0.05508822250366211, 0.055069503784179685, 0.05525715255737305, 0.05507279968261719, 0.05524684906005859, 0.05543731307983398, 0.05561548614501953, 0.05531033706665039, 0.05558476638793945, 0.056702945709228514, 0.05574844741821289, 0.055836673736572265, 0.05557449722290039, 0.05536979293823242, 0.05537603378295899, 0.05580595016479492, 0.05545574569702148, 0.055244800567626956, 0.05542214584350586, 0.05564067077636719, 0.05984815979003906, 0.055327262878417965, 
0.05478364944458008, 0.05463904190063477, 0.054753185272216794, 0.054605056762695316, 0.054919937133789065, 0.05473689651489258, 0.05467136001586914, 0.054658912658691404, 0.054960289001464845, 0.055211742401123046, 0.0551297607421875, 0.05536377716064453, 0.055152992248535156, 0.05522774505615234, 0.055478431701660155, 0.05620336151123047, 0.05551769638061523, 0.05546393585205078, 0.055375873565673826, 0.055070465087890624, 0.05469388961791992, 0.05496403121948242, 0.054822689056396486, 0.05524127960205078, 0.05493353652954101, 0.054821952819824216, 0.05528678512573242, 0.05529395294189453, 0.05508915328979492, 0.05522431945800781, 0.055467742919921875, 0.05540678405761719, 0.05549900817871094, 0.055487712860107424, 0.055968704223632815, 0.055785152435302736, 0.055363582611083983, 0.055112735748291015, 0.05523545455932617, 0.055029537200927736, 0.05522428894042969, 0.05522467041015625, 0.05528371047973633, 0.05503398513793945, 0.0553675537109375, 0.05529587173461914, 0.05572211074829102, 0.05554332733154297, 0.05551929473876953, 0.05546435165405274, 0.05592195129394531, 0.055712001800537106, 0.055587200164794924, 0.05573436737060547, 0.05577072143554687, 0.055640350341796874, 0.05535702514648438, 0.05533135986328125, 0.05527292633056641, 0.05603587341308594, 0.05527923202514649, 0.05988800048828125, 0.05533542251586914, 0.054779903411865234, 0.05456486511230469, 0.05463606262207031, 0.05469404983520508, 0.0548969612121582, 0.054681598663330076, 0.05465087890625, 0.054891777038574216, 0.05491987228393555, 0.05507648086547851, 0.055183616638183594, 0.05540883255004883, 0.05553091049194336, 0.05514057540893555, 0.055603584289550784, 0.056338432312011716, 0.055769088745117185, 0.05530121612548828, 0.05514915084838867, 0.05509059143066406, 0.05475439834594727, 0.05477340698242188, 0.05491513442993164, 0.055296096801757816, 0.05507276916503906, 0.0548568000793457, 0.0553803825378418, 0.055325183868408206, 0.05571123123168945, 0.055304702758789064, 0.055506240844726565, 0.05566550445556641, 0.055871456146240235, 0.05641551971435547, 0.05564422225952149, 0.055618080139160156, 0.05527961730957031, 0.05513391876220703, 0.055040287017822265, 0.055154048919677734, 0.05499763107299805, 0.05513420867919922, 0.055207744598388675, 0.055675071716308595, 0.055246688842773437, 0.05523062515258789, 0.0553779182434082, 0.055812255859375, 0.05579964828491211, 0.05577078247070313, 0.055601310729980466, 0.05614543914794922, 0.05582710266113281, 0.05646649551391601, 0.05563283157348633, 0.055828479766845705, 0.05554975891113281, 0.05529209518432617, 0.05580710220336914, 0.055226814270019534, 0.05589651107788086, 0.05868134307861328, 0.05517478561401367, 0.055093631744384766, 0.054977985382080076, 0.0546965103149414, 0.05466316986083984, 0.054870014190673826, 0.05466067123413086, 0.05475577545166015, 0.05480243301391602, 0.05534678268432617, 0.055248481750488285, 0.05538659286499024, 0.055275871276855466, 0.055431167602539064, 0.05532211303710938, 0.055388671875, 0.055846847534179685, 0.055816001892089843, 0.05523891067504883, 0.05505583953857422, 0.05500096130371094, 0.05541545486450195, 0.05511526489257813, 0.05484511947631836, 0.055020351409912106, 0.05495808029174805, 0.05489625549316406, 0.05531276702880859, 0.05552537536621094, 0.0551649284362793, 0.05524889755249023, 0.055638015747070314, 0.055810047149658204, 0.055506240844726565, 0.05549663925170899, 0.0556387825012207, 0.05575475311279297, 0.05542828750610351, 0.0552374382019043, 0.05540220642089844, 0.055295936584472655, 0.05524924850463867, 
0.05497446441650391, 0.05515468978881836, 0.05541888046264649, 0.05560281753540039, 0.05534352111816406, 0.05573769760131836, 0.05566847991943359, 0.05609113693237305, 0.055734657287597654, 0.055773025512695314, 0.05554185485839844, 0.05591427230834961, 0.055543102264404294, 0.05546912002563477, 0.05566454315185547, 0.05576287841796875, 0.05541638565063477, 0.055193248748779296, 0.055226848602294924, 0.05562406539916992, 0.05886025619506836, 0.055049087524414064, 0.05479430389404297, 0.054664031982421875, 0.05460089492797852, 0.0544284782409668, 0.05441654586791992, 0.055093727111816405, 0.05475571060180664, 0.054980609893798826, 0.05517311859130859, 0.05549260711669922, 0.055458976745605466, 0.055274337768554685, 0.055204864501953124, 0.05532160186767578, 0.05551103973388672, 0.055836673736572265, 0.05570553588867187, 0.0554005126953125, 0.055232192993164064, 0.05497417449951172, 0.0547845458984375, 0.05481887817382813, 0.05504000091552735, 0.05489459228515625, 0.05509894561767578, 0.05507859039306641, 0.05535628890991211, 0.055283103942871094, 0.05569785690307617, 0.05535737609863281, 0.05542102432250977, 0.05558476638793945, 0.05583222579956055, 0.05557408142089844, 0.05558524703979492, 0.055425342559814454, 0.055458976745605466, 0.05536854553222656, 0.05526732635498047, 0.05508643341064453, 0.05512006378173828, 0.05498313522338867, 0.0551649284362793, 0.05525299072265625, 0.055787521362304686, 0.05554995346069336, 0.055570430755615234, 0.055497825622558596, 0.05587251281738281, 0.05577481460571289, 0.05577043151855469, 0.05558953475952148, 0.05589360046386719, 0.05592550277709961, 0.05560076904296875, 0.05547455978393555, 0.0553963508605957, 0.05570560073852539, 0.05524860763549805, 0.05585539245605469, 0.05519551849365235, 0.059991905212402344, 0.05547126388549805, 0.05485548782348633, 0.05660790252685547, 0.05451785659790039, 0.0545164794921875, 0.054831104278564455, 0.05487152099609375, 0.05507740783691406, 0.054967681884765626, 0.05499353790283203, 0.05509225463867187, 0.05562374496459961, 0.055368606567382815, 0.0550645751953125, 0.055175167083740234, 0.05583420944213867, 0.05582275390625, 0.05570764923095703, 0.05544345474243164, 0.05543526458740235, 0.055074272155761717, 0.054843936920166016, 0.05482851028442383, 0.055245342254638674, 0.05511577606201172, 0.05502975845336914, 0.05509939193725586, 0.0551005744934082, 0.05537980651855469, 0.055618560791015625, 0.055475486755371096, 0.05543395233154297, 0.05554156875610351, 0.05578361511230469, 0.05567670440673828, 0.05607766342163086, 0.055484958648681644, 0.05551712036132812, 0.055226238250732425, 0.05544963073730469, 0.05534566497802734, 0.055226367950439455, 0.0550010871887207, 0.055433216094970705, 0.055272800445556644, 0.05545846557617187, 0.05543936157226562, 0.05575475311279297, 0.055656448364257816, 0.055656448364257816, 0.0557916145324707, 0.05598348617553711, 0.055760673522949215, 0.05566083145141602, 0.05560377502441406, 0.055911712646484375, 0.0553131217956543, 0.055477470397949216, 0.055591712951660155, 0.05556838226318359, 0.0555601921081543, 0.05518950271606445, 0.0600719985961914, 0.05534848022460938, 0.05494451141357422, 0.054642078399658206, 0.05463100814819336, 0.05454227066040039, 0.05504000091552735, 0.05481068801879883, 0.05481372833251953, 0.055032958984375, 0.05527536010742187, 0.05508652877807617, 0.05548704147338867, 0.05538816070556641, 0.05523455810546875, 0.05506047821044922, 0.05543955230712891, 0.05655881500244141, 0.05652131271362305, 0.05560729598999024, 0.05513785552978516, 0.05506467056274414, 
0.05493532943725586, 0.05507241439819336, 0.05509622573852539, 0.05501270294189453, 0.05509356689453125, 0.055089569091796874, 0.05542291259765625, 0.05545097732543945, 0.055271839141845705, 0.05528102493286133, 0.05526383972167969, 0.055871646881103514, 0.05573030471801758, 0.055910400390625, 0.05569472122192383, 0.05600473785400391, 0.05504179382324219, 0.05518320083618164, 0.05521500778198242, 0.0554700813293457, 0.055211391448974606, 0.05517964935302734, 0.055255233764648436, 0.05527097702026367, 0.055331329345703124, 0.05544723129272461, 0.0553988151550293, 0.05586115264892578, 0.055787521362304686, 0.05573782348632812, 0.05590185546875, 0.056073089599609376, 0.055721023559570315, 0.0553807373046875, 0.05602844619750977, 0.05550339126586914, 0.055680416107177735, 0.05542956924438477, 0.05539894485473633, 0.0554700813293457, 0.05560681533813477, 0.058896224975585935, 0.05566230392456055, 0.05480809783935547, 0.05470912170410156, 0.05480243301391602, 0.05464883041381836, 0.054886398315429685, 0.05482710266113281, 0.054646686553955076, 0.05471548843383789, 0.05660764694213867, 0.055414302825927735, 0.05526166534423828, 0.05516191864013672, 0.055087200164794924, 0.05598704147338867, 0.05589424133300781, 0.05630543899536133, 0.05563910293579102, 0.05544847869873047, 0.055191585540771484, 0.05496387100219727, 0.054878559112548825, 0.05514368057250976, 0.05498336029052735, 0.05480044937133789, 0.0551649284362793, 0.055316478729248046, 0.05539779281616211, 0.05523721694946289, 0.05516463851928711, 0.0567606086730957, 0.055662368774414064, 0.05558294296264649, 0.05606399917602539, 0.055766719818115235, 0.05577878570556641, 0.05546425628662109, 0.05551721572875976, 0.056054302215576175, 0.05555401611328125, 0.055279552459716795, 0.05526534271240234, 0.05518905639648437, 0.055768863677978515, 0.055351966857910155, 0.05551491165161133, 0.05527068710327149, 0.055935935974121095, 0.05569446563720703, 0.05577804946899414, 0.055803680419921876, 0.056271198272705075, 0.05599603271484375, 0.05580543899536133, 0.055575328826904295, 0.05593097686767578, 0.05535948944091797, 0.055398048400878905, 0.055144798278808596, 0.0555601921081543, 0.05537334442138672, 0.055400096893310546, 0.05962771224975586, 0.05537411117553711, 0.05511948776245117, 0.05465248107910156, 0.05460416030883789, 0.0548994255065918, 0.05503372955322266, 0.0549370231628418, 0.05490041732788086, 0.05474371337890625, 0.05513993453979492, 0.055470718383789065, 0.055330814361572264, 0.05545369720458984, 0.05529135894775391, 0.05521257781982422, 0.05561955261230469, 0.05615824127197266, 0.055623680114746096, 0.05531615829467774, 0.05534342575073242, 0.055318145751953124, 0.054894977569580075, 0.05500233459472656, 0.055005985260009764, 0.055192897796630856, 0.05504889678955078, 0.05502361679077149, 0.05526287841796875, 0.055607521057128906, 0.05545792007446289, 0.055314430236816405, 0.05563596725463867, 0.055952896118164064, 0.05574099349975586, 0.05563299179077148, 0.05559587097167969, 0.05595046234130859, 0.05552012634277344, 0.05529763031005859, 0.05526713562011719, 0.05536380767822266, 0.054972606658935545, 0.055105728149414064, 0.05518710327148438, 0.055613536834716794, 0.055622943878173826, 0.05550908660888672, 0.05541155242919922, 0.05589606475830078, 0.05578099060058594, 0.05566108703613281, 0.055779071807861326, 0.05571596908569336, 0.05639782333374024, 0.055932926177978515, 0.05562777709960937, 0.05546188735961914, 0.05557788848876953, 0.05540668869018555, 0.05530220794677734, 0.05544198226928711, 
0.05584281539916992]",tokens/s,18.04463043852797,,, -4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -17092,10 +17092,10 @@ ChildProcessError: Traceback (most recent call last): self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 6.12 MiB is free. Process 108477 has 14.73 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 241.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 6.12 MiB is free. Process 107343 has 14.73 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 241.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " -4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -17165,7 +17165,7 @@ ChildProcessError: Traceback (most recent call last): ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " -4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -17254,7 +17254,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -17343,7 +17343,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -17608,7 +17608,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): 
+4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -19164,7 +19164,7 @@ torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 h " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,3575.926784,5272.109056,0.0,4869.586944,4520.068608,s,1,11.9082158203125,11.9082158203125,0.0,11.9082158203125,11.9082158203125,11.9082158203125,11.9082158203125,[11.9082158203125],,kWh,0.00011748109869997732,1.2951751967291584e-05,5.073587392200041e-05,0.0001811687245892693,,MB,1572.0448,5305.663488,0.0,4888.461312,4194.018304,s,10,1.7972826843261718,0.1797282684326172,0.0001738198442844156,0.1796596145629883,0.17999109497070312,0.18004680938720702,0.18009138092041016,"[0.17963618469238282, 0.17959027099609376, 0.17979440307617187, 0.17958934020996092, 0.17953240966796874, 0.1796703338623047, 0.17973960876464845, 0.18010252380371095, 0.17964889526367187, 0.1799787139892578]",tokens/s,1424.3724831521326,kWh,5.31986542818176e-06,5.866864578697227e-07,3.5244119104362667e-06,9.430963796487749e-06,tokens/kWh,27144627.58253178,MB,1580.617728,5314.052096,0.0,4896.84992,4194.020864,s,10,17.081702392578126,1.7081702392578126,0.0056161762317267555,1.7110252685546876,1.7125834228515626,1.7134856201171875,1.7142073779296874,"[1.7121220703125, 1.6977357177734376, 1.7143878173828124, 1.7123829345703125, 1.708708740234375, 1.7110892333984375, 1.702832275390625, 1.699511474609375, 1.7109613037109375, 1.7119708251953125]",tokens/s,36.881569852998396,kWh,5.008176658306997e-05,5.523793393050986e-06,3.2168533310563484e-05,8.777409328668445e-05,tokens/kWh,717751.6467670223,,s,630,17.079105426788324,0.027109691153632272,0.0003871795010973703,0.027033616065979003,0.02741643466949463,0.02765972652435303,0.028699747276306156,"[0.02764591979980469, 0.026936767578125, 0.027089504241943358, 0.026810239791870118, 0.02691904067993164, 0.02692915153503418, 0.027225568771362306, 0.027033632278442382, 0.027195903778076173, 
0.02698854446411133, 0.027123775482177735, 0.02688323211669922, 0.026767744064331054, 0.02683283233642578, 0.026644832611083986, 0.026685407638549805, 0.0265665283203125, 0.02719977569580078, 0.02693110466003418, 0.026812511444091795, 0.026861568450927735, 0.027158527374267577, 0.030638336181640625, 0.028607807159423827, 0.02733919906616211, 0.028241920471191406, 0.026908191680908203, 0.027139968872070312, 0.02687446403503418, 0.026822944641113282, 0.026930912017822266, 0.02690025520324707, 0.026872032165527342, 0.026824703216552736, 0.027172864913940428, 0.027158527374267577, 0.026933183670043947, 0.026931232452392578, 0.027045791625976562, 0.02728767967224121, 0.026887359619140624, 0.026889024734497072, 0.027285343170166017, 0.026939552307128908, 0.02695583915710449, 0.027501855850219727, 0.02780022430419922, 0.02729190444946289, 0.027063648223876954, 0.02724857521057129, 0.027147872924804688, 0.02704252815246582, 0.026992223739624024, 0.027340511322021484, 0.02716540718078613, 0.027795583724975585, 0.027655872344970703, 0.027328319549560547, 0.027093727111816405, 0.02705526351928711, 0.027486848831176757, 0.02707865524291992, 0.02694963264465332, 0.027532672882080077, 0.02706265640258789, 0.027143583297729493, 0.027011327743530274, 0.02690518379211426, 0.02694758415222168, 0.02692095947265625, 0.026840927124023438, 0.026720319747924805, 0.027064384460449217, 0.02690255928039551, 0.026866943359375, 0.026942207336425782, 0.026955904006958006, 0.0271092472076416, 0.027406335830688477, 0.02702332878112793, 0.02686342430114746, 0.026869983673095704, 0.027034751892089842, 0.026947616577148437, 0.02705289649963379, 0.026738687515258788, 0.02697420883178711, 0.026910720825195314, 0.026832895278930666, 0.026713279724121092, 0.026749759674072265, 0.026583040237426758, 0.026755071640014647, 0.02669158363342285, 0.026894336700439454, 0.026785791397094725, 0.02677555274963379, 0.026845184326171875, 0.02689023971557617, 0.02665990447998047, 0.026706527709960938, 0.027386079788208006, 0.027362655639648438, 0.02685158348083496, 0.026664735794067383, 0.026722784042358397, 0.026706207275390626, 0.02753331184387207, 0.026654495239257812, 0.026696063995361327, 0.026654207229614257, 0.026673503875732422, 0.02667910385131836, 0.026924352645874023, 0.02698534393310547, 0.026808319091796876, 0.026951679229736326, 0.027033599853515625, 0.027000192642211915, 0.02700320053100586, 0.027016576766967774, 0.02704697608947754, 0.027825376510620118, 0.027462303161621095, 0.027178655624389647, 0.027019615173339843, 0.027639808654785155, 0.02705939292907715, 0.026942144393920897, 0.026969919204711913, 0.026888511657714845, 0.02873139190673828, 0.02840166473388672, 0.027424448013305663, 0.02721414375305176, 0.02712166404724121, 0.027117151260375977, 0.027496864318847656, 0.02716166305541992, 0.027079456329345702, 0.027000991821289063, 0.02711142349243164, 0.027148096084594727, 0.02720992088317871, 0.027150335311889647, 0.027108448028564453, 0.027007904052734375, 0.027205631256103514, 0.027268415451049806, 0.02723686408996582, 0.02713382339477539, 0.027320640563964844, 0.02773196792602539, 0.027410688400268553, 0.027122623443603517, 0.027181888580322267, 0.027043903350830078, 0.027129791259765626, 0.027125759124755858, 0.02731827163696289, 0.027187231063842774, 0.027355104446411132, 0.026978303909301758, 0.02710323143005371, 0.02721513557434082, 0.027122335433959963, 0.02725279998779297, 0.027133951187133788, 0.027031551361083983, 0.027197343826293945, 0.02709820747375488, 0.027214847564697265, 0.02703171157836914, 
0.027120607376098633, 0.027375520706176756, 0.027315168380737304, 0.02713804817199707, 0.02694963264465332, 0.02716262435913086, 0.027073631286621092, 0.02693212890625, 0.027084800720214845, 0.026990463256835937, 0.027039871215820313, 0.02703139114379883, 0.027147680282592773, 0.027146560668945312, 0.027252607345581055, 0.027224639892578124, 0.02781862449645996, 0.027123712539672853, 0.027035648345947266, 0.02711961555480957, 0.0271011848449707, 0.027025568008422853, 0.026801536560058594, 0.02757596778869629, 0.026921791076660158, 0.027022464752197266, 0.02692799949645996, 0.026978303909301758, 0.02862227249145508, 0.02807046318054199, 0.0270930233001709, 0.027504192352294923, 0.026919328689575195, 0.026932384490966795, 0.02696278381347656, 0.026912416458129883, 0.026853824615478517, 0.0268625602722168, 0.02747488021850586, 0.02698854446411133, 0.0270250244140625, 0.02698681640625, 0.02697225570678711, 0.026922975540161133, 0.02711961555480957, 0.02719651222229004, 0.027216800689697264, 0.027850431442260744, 0.027399871826171877, 0.027331199645996094, 0.027328512191772462, 0.02756937599182129, 0.027355936050415038, 0.027387903213500975, 0.027183103561401366, 0.027201536178588868, 0.02709440040588379, 0.027086816787719726, 0.027094879150390626, 0.02712454414367676, 0.027201440811157225, 0.027074655532836913, 0.027119647979736327, 0.027018783569335937, 0.027083072662353515, 0.02718694305419922, 0.026928735733032227, 0.02711631965637207, 0.027299840927124022, 0.027148288726806642, 0.027340799331665038, 0.02711756706237793, 0.027239423751831054, 0.027189823150634767, 0.027099552154541014, 0.027002912521362304, 0.026967424392700196, 0.02695577621459961, 0.02694003105163574, 0.028007295608520506, 0.027148288726806642, 0.02713702392578125, 0.027036672592163087, 0.02707593536376953, 0.02714076805114746, 0.027003904342651368, 0.026952703475952147, 0.026889471054077147, 0.026931968688964844, 0.026918304443359374, 0.026971904754638672, 0.026921920776367188, 0.026810272216796875, 0.026927072525024413, 0.02691689682006836, 0.026894336700439454, 0.027084159851074218, 0.03003455924987793, 0.027606111526489258, 0.027089727401733397, 0.027092416763305663, 0.027005599975585937, 0.027029504776000978, 0.02703683280944824, 0.027093856811523438, 0.02720358467102051, 0.027123712539672853, 0.02774790382385254, 0.02843622398376465, 0.027371679306030273, 0.027306528091430665, 0.027026432037353516, 0.026973472595214844, 0.02693507194519043, 0.027150272369384765, 0.027309951782226564, 0.026779424667358397, 0.026936864852905272, 0.02666783905029297, 0.02674278450012207, 0.02679964828491211, 0.027199264526367187, 0.02724246406555176, 0.027085535049438475, 0.02706537628173828, 0.02710806465148926, 0.027089120864868164, 0.02711759948730469, 0.02692131233215332, 0.02714793586730957, 0.027025407791137695, 0.027006975173950197, 0.02693939208984375, 0.02691481590270996, 0.026953727722167968, 0.026946592330932616, 0.027016000747680666, 0.026896320343017577, 0.026927263259887695, 0.026871871948242188, 0.026847232818603517, 0.026870880126953125, 0.02769264030456543, 0.02723062324523926, 0.02742095947265625, 0.027596351623535156, 0.027162784576416014, 0.027074623107910156, 0.02707449531555176, 0.026938400268554687, 0.02689084815979004, 0.026824703216552736, 0.02677564811706543, 0.02692460823059082, 0.02706496047973633, 0.027002176284790038, 0.02695452880859375, 0.0268984317779541, 0.026963712692260743, 0.027004831314086913, 0.0269105281829834, 0.026915359497070312, 0.027054304122924804, 0.02708252716064453, 0.027056127548217773, 
0.027278400421142577, 0.026997343063354492, 0.027062240600585936, 0.02717123222351074, 0.027138015747070313, 0.027006975173950197, 0.02696396827697754, 0.02694758415222168, 0.027040031433105467, 0.027043552398681642, 0.02694144058227539, 0.02701024055480957, 0.027177791595458984, 0.026968063354492186, 0.027094207763671874, 0.027190080642700197, 0.027123872756958007, 0.027371360778808595, 0.027224063873291016, 0.02746905517578125, 0.027361696243286132, 0.03070755195617676, 0.028845695495605467, 0.02753420829772949, 0.02705526351928711, 0.02696668815612793, 0.02705622482299805, 0.027102975845336913, 0.027046239852905274, 0.026857471466064452, 0.027017215728759765, 0.027052032470703126, 0.02697216033935547, 0.026906591415405273, 0.027043935775756835, 0.026986431121826172, 0.026884096145629883, 0.027031551361083983, 0.026714111328125, 0.026955360412597655, 0.02742348861694336, 0.02694508743286133, 0.026835391998291016, 0.026705919265747072, 0.027096736907958986, 0.0269069766998291, 0.027309568405151367, 0.027283935546875, 0.027750431060791017, 0.027082752227783204, 0.027174911499023437, 0.026973663330078126, 0.026905120849609374, 0.026925056457519532, 0.02687385559082031, 0.02684880065917969, 0.02688252830505371, 0.026808319091796876, 0.026820608139038086, 0.026802175521850585, 0.026883743286132813, 0.026895904541015626, 0.026864448547363282, 0.026955007553100586, 0.026938112258911132, 0.026793983459472655, 0.02692300796508789, 0.027019296646118164, 0.02697929573059082, 0.027026432037353516, 0.026851327896118164, 0.02696566390991211, 0.026927040100097655, 0.02680182456970215, 0.02692140769958496, 0.027111391067504882, 0.026808160781860352, 0.026739200592041015, 0.02713599967956543, 0.026787839889526367, 0.027021312713623048, 0.02684880065917969, 0.027034080505371094, 0.027142143249511717, 0.027467775344848632, 0.02716057586669922, 0.02715238380432129, 0.027396095275878905, 0.027336191177368165, 0.027312639236450196, 0.02735228729248047, 0.02708745574951172, 0.02699849510192871, 0.02712214469909668, 0.027047296524047852, 0.026949344635009767, 0.02697417640686035, 0.0270447998046875, 0.026992256164550782, 0.02722649574279785, 0.027182336807250976, 0.027091712951660157, 0.02695577621459961, 0.027691007614135742, 0.027244543075561522, 0.02697420883178711, 0.026902528762817384, 0.026936960220336915, 0.02688198471069336, 0.026904064178466795, 0.026940351486206056, 0.0268984317779541, 0.026971359252929688, 0.027371583938598634, 0.027224800109863282, 0.027054079055786134, 0.027023040771484375, 0.027152479171752928, 0.02697648048400879, 0.02702662467956543, 0.027134784698486326, 0.027043264389038087, 0.02702601623535156, 0.027129695892333983, 0.02712188720703125, 0.026845087051391603, 0.026785791397094725, 0.02685040092468262, 0.02710416030883789, 0.02858367919921875, 0.02742092704772949, 0.026851327896118164, 0.026662912368774414, 0.026619903564453123, 0.02676902389526367, 0.026967487335205077, 0.0268623046875, 0.027015552520751954, 0.026938976287841795, 0.026712160110473632, 0.026674591064453124, 0.026745279312133788, 0.026837312698364257, 0.026879999160766603, 0.026719999313354493, 0.026943744659423827, 0.0269803524017334, 0.026858976364135742, 0.027011615753173828, 0.027146240234375, 0.026879999160766603, 0.02686566352844238, 0.02691859245300293, 0.026863935470581055, 0.02692691230773926, 0.026921152114868164, 0.02682032012939453, 0.026882335662841796, 0.026939327239990235, 0.026982751846313477, 0.026803936004638672, 0.026822048187255858, 0.026819168090820314, 0.02675916862487793, 
0.026867168426513672, 0.026777887344360353, 0.027664384841918944, 0.026975936889648437, 0.026812736511230468, 0.02686534309387207, 0.026838720321655272, 0.02686796760559082, 0.02759881591796875, 0.02680022430419922, 0.02671648025512695, 0.02689023971557617, 0.026971647262573242, 0.026993024826049806, 0.026968191146850586, 0.026959871292114256, 0.027480064392089845, 0.027303680419921875, 0.027744512557983398, 0.027389408111572266, 0.02734748840332031, 0.02727337646484375, 0.027289440155029297, 0.027678655624389648, 0.027340864181518553, 0.02741593551635742, 0.027662879943847658, 0.027459360122680663, 0.02735686492919922, 0.027255424499511717, 0.02745260810852051, 0.02721670341491699, 0.0270743350982666, 0.027074783325195313, 0.026965728759765627, 0.027826528549194336, 0.02801043128967285, 0.027480064392089845, 0.02726911926269531, 0.027062271118164064, 0.027053184509277343, 0.0269686393737793, 0.02707046318054199, 0.026971744537353515, 0.02699951934814453, 0.0274736328125, 0.02705558395385742, 0.027007808685302736, 0.026996511459350586, 0.027058336257934572, 0.02704902458190918, 0.027382783889770508, 0.027143680572509765, 0.027412992477416992, 0.026976255416870116, 0.02702047920227051, 0.026763456344604492, 0.02670185661315918, 0.02674502372741699, 0.026888608932495117, 0.026830848693847657, 0.026813535690307616, 0.026780319213867188, 0.0268720645904541, 0.027292896270751953, 0.027602399826049805, 0.027097631454467773, 0.0269803524017334, 0.026940544128417968, 0.027168928146362306, 0.027357120513916016, 0.02733660888671875, 0.027019615173339843, 0.026906656265258788, 0.027804159164428712, 0.02903798484802246, 0.027134239196777345, 0.027158847808837892, 0.027151872634887695, 0.027076480865478515, 0.027037824630737305, 0.027080671310424805, 0.02719603157043457, 0.02749750328063965, 0.027648992538452148, 0.02711747169494629, 0.027000511169433594, 0.02699452781677246, 0.027082624435424803, 0.02710793685913086, 0.027107295989990236, 0.027058176040649414, 0.027069887161254882, 0.02698860740661621, 0.026919456481933595, 0.027094079971313478, 0.02728044891357422, 0.02734809684753418, 0.027080543518066408, 0.027024063110351562, 0.02715056037902832, 0.02697420883178711, 0.0268984317779541, 0.02696396827697754, 0.02693452835083008, 0.026953535079956056, 0.02707551956176758, 0.027090591430664064, 0.02701955223083496, 0.02694883155822754, 0.02685833549499512, 0.02691276741027832, 0.026873247146606445, 0.02685398483276367, 0.02738163185119629, 0.029198463439941407, 0.027064319610595702, 0.026941280364990234, 0.027010944366455077, 0.027148576736450197, 0.027209375381469728, 0.0272159366607666, 0.02727555274963379, 0.027143936157226562, 0.027002368927001953, 0.02696063995361328, 0.02714543914794922, 0.026972959518432617]",tokens/s,36.88717788531559,,, -4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -19512,7 +19512,7 @@ AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,7160.46336,11301.421056,0.0,10898.898944,10500.155392,s,1,16.0841962890625,16.0841962890625,0.0,16.0841962890625,16.0841962890625,16.0841962890625,16.0841962890625,[16.0841962890625],,kWh,0.00023903515877501605,2.635578892492956e-05,0.00010644591848999602,0.0003718368661899416,,MB,2981.781504,11320.295424,0.0,10903.093248,10049.059328,s,10,58.82700244140624,5.882700244140625,0.004587707694910432,5.88457568359375,5.8869137207031255,5.887116528320313,5.887278774414062,"[5.872099609375, 5.878892578125, 5.87893798828125, 5.88180908203125, 5.88490576171875, 5.88424560546875, 5.88512255859375, 5.88680126953125, 5.8873193359375, 5.88686865234375]",tokens/s,43.51743066544738,kWh,0.0001715680417150016,1.892438748033575e-05,0.00011416198021839893,0.00030465440941373627,tokens/kWh,840296.3885953113,MB,2990.690304,11320.295424,0.0,10903.093248,10072.241664,s,10,34.996525634765625,3.4996525634765625,0.012966653567288045,3.4956083984374997,3.5149693603515626,3.522730651855469,3.5289396850585937,"[3.499573486328125, 3.489941650390625, 3.50587646484375, 3.493220703125, 3.487412353515625, 3.49799609375, 3.487426025390625, 3.49134228515625, 3.51324462890625, 3.530491943359375]",tokens/s,18.001786993796795,kWh,8.709912472041503e-05,9.60775226422537e-06,5.789665742840164e-05,0.000154603534413042,tokens/kWh,407493.9181641728,,s,630,34.993715587615974,0.05554558029780312,0.0007844414827793098,0.05537020874023438,0.056168786621093744,0.05680088062286377,0.05904880897521973,"[0.05752035140991211, 0.05568723297119141, 
0.05533200073242187, 0.05539487838745117, 0.05532185745239258, 0.05565283203125, 0.05611139297485351, 0.06058419036865234, 0.055643489837646484, 0.05541007995605469, 0.055081985473632813, 0.05492464065551758, 0.054833824157714844, 0.054988800048828126, 0.05497241592407227, 0.05585100936889648, 0.056330238342285156, 0.0558546257019043, 0.056334400177001955, 0.05558476638793945, 0.05540496063232422, 0.05532672119140625, 0.05550694274902344, 0.05599014282226562, 0.05616857528686523, 0.05579980850219726, 0.0551649284362793, 0.05569740676879883, 0.055803905487060546, 0.05541888046264649, 0.05506636810302734, 0.05524316787719727, 0.055220062255859376, 0.05512195205688476, 0.055441280364990235, 0.055054431915283204, 0.05497446441650391, 0.05486796951293945, 0.05508832168579102, 0.054983070373535156, 0.05496982574462891, 0.05489670562744141, 0.05505276870727539, 0.05513638305664063, 0.0552973747253418, 0.05524294281005859, 0.05522639846801758, 0.05508784103393555, 0.05557014465332031, 0.05600694274902344, 0.05583052825927735, 0.05527142333984375, 0.056076255798339844, 0.05578905487060547, 0.055715518951416014, 0.055274337768554685, 0.05529395294189453, 0.0554516487121582, 0.055501953125, 0.05534755325317383, 0.0555463981628418, 0.05544537734985352, 0.05550249481201172, 0.05679225540161133, 0.05586822509765625, 0.055313983917236326, 0.05497651290893555, 0.05503184127807617, 0.05538431930541992, 0.05536492919921875, 0.05531644821166992, 0.05510780715942383, 0.0549771842956543, 0.060708000183105466, 0.055829345703125, 0.05586943817138672, 0.055136257171630856, 0.055303871154785154, 0.05519142532348633, 0.05516336059570313, 0.054932926177978514, 0.05778451156616211, 0.056076641082763674, 0.05539632034301758, 0.05518544006347656, 0.05500092697143555, 0.05513999938964844, 0.054977024078369144, 0.05536665725708008, 0.055790592193603515, 0.05495603179931641, 0.05505023956298828, 0.055097278594970704, 0.05549375915527344, 0.05530310440063477, 0.05520329666137695, 0.05505283355712891, 0.05514796829223633, 0.055109760284423825, 0.05474092864990234, 0.055281696319580076, 0.05493561553955078, 0.054935806274414065, 0.05496847915649414, 0.0548289909362793, 0.05559708786010742, 0.05517667388916016, 0.05533462524414062, 0.05476393508911133, 0.05490521621704102, 0.05592067337036133, 0.05585513687133789, 0.055416831970214846, 0.055158302307128905, 0.05524460983276367, 0.055075294494628904, 0.05505199813842773, 0.055373985290527346, 0.05542841720581055, 0.05564630508422851, 0.05524563217163086, 0.055005279541015625, 0.05508121490478515, 0.055226112365722654, 0.05518502426147461, 0.05491545486450195, 0.056815616607666014, 0.055578624725341794, 0.05852569580078125, 0.055801025390625, 0.055530303955078124, 0.0554598388671875, 0.05585919952392578, 0.05538431930541992, 0.05509305572509766, 0.05487187194824219, 0.05538172912597656, 0.05519196701049805, 0.05554742431640625, 0.055763423919677736, 0.05557823944091797, 0.05541238403320312, 0.05543190383911133, 0.05543251037597656, 0.055261505126953124, 0.05525689697265625, 0.05945718383789062, 0.057696640014648436, 0.0562426872253418, 0.05578140640258789, 0.05584076690673828, 0.055524608612060544, 0.05524528121948242, 0.0551058235168457, 0.05554585647583008, 0.06011904144287109, 0.05586739349365234, 0.05580099105834961, 0.05551395034790039, 0.05529766464233398, 0.05524623870849609, 0.05517820739746094, 0.05505843353271484, 0.05509529495239258, 0.05551103973388672, 0.055193599700927735, 0.05561958312988281, 0.055363040924072265, 0.05562831878662109, 0.05534220886230469, 
0.055034751892089846, 0.05537996673583984, 0.055772449493408205, 0.05527331161499023, 0.05505318450927734, 0.05527961730957031, 0.05611471939086914, 0.05525551986694336, 0.055222335815429686, 0.05510483169555664, 0.055050846099853515, 0.05563190460205078, 0.055299072265625, 0.055415264129638674, 0.05504668807983398, 0.055003135681152344, 0.05510758590698242, 0.054973953247070315, 0.055110145568847656, 0.056807937622070315, 0.05591641616821289, 0.05575228881835938, 0.055500545501708985, 0.05521036911010742, 0.05565686416625976, 0.05613935852050781, 0.05547436904907226, 0.0553023681640625, 0.05537996673583984, 0.055037120819091796, 0.055153438568115234, 0.05496620941162109, 0.05514806365966797, 0.05492998504638672, 0.054951934814453124, 0.05489459228515625, 0.054973918914794924, 0.05558480072021484, 0.05579622268676758, 0.05547417449951172, 0.055464191436767576, 0.05518259048461914, 0.05554227066040039, 0.055640064239501956, 0.055211647033691406, 0.05511411285400391, 0.05512310409545899, 0.05501424026489258, 0.05517926406860352, 0.055744510650634765, 0.055795711517333986, 0.0559288330078125, 0.05724140930175781, 0.05543718338012695, 0.05558911895751953, 0.05498220825195312, 0.055228256225585935, 0.055199905395507814, 0.05554201507568359, 0.0568873291015625, 0.056776927947998046, 0.055812095642089846, 0.05485567855834961, 0.05494988632202148, 0.05488230514526367, 0.05491097640991211, 0.05491302490234375, 0.05587968063354492, 0.05557968139648437, 0.0548691520690918, 0.055379169464111325, 0.055091808319091794, 0.05507215881347656, 0.05476003265380859, 0.05554166412353516, 0.05754889678955078, 0.055817569732666016, 0.05533747100830078, 0.05501353454589844, 0.05506390380859375, 0.054892318725585934, 0.05492972946166992, 0.056740222930908205, 0.05543766403198242, 0.055398303985595705, 0.0551563835144043, 0.05532844924926758, 0.05617331314086914, 0.05543027114868164, 0.05537171173095703, 0.055045055389404296, 0.055169025421142576, 0.054910526275634766, 0.055180896759033204, 0.055220287322998045, 0.05500188827514649, 0.05526732635498047, 0.05488435363769531, 0.05498470306396484, 0.055524478912353514, 0.05519996643066406, 0.055314910888671874, 0.05544160079956055, 0.055201793670654295, 0.055076416015625, 0.0552737922668457, 0.05529126358032226, 0.055695934295654295, 0.055119838714599606, 0.05518972778320313, 0.05532262420654297, 0.055619838714599606, 0.05511548614501953, 0.0552470703125, 0.055360416412353515, 0.05559782409667969, 0.05504220962524414, 0.054902782440185545, 0.05503327941894531, 0.05482144165039062, 0.05483481597900391, 0.05473641586303711, 0.05497942352294922, 0.05505843353271484, 0.054957313537597655, 0.05514316940307617, 0.058982078552246096, 0.055949119567871096, 0.05546585464477539, 0.05557276916503906, 0.05503142547607422, 0.055083744049072264, 0.055414783477783204, 0.055431167602539064, 0.058472095489501955, 0.055583072662353517, 0.055094913482666014, 0.05489702224731445, 0.055003135681152344, 0.05509529495239258, 0.05505033493041992, 0.054816574096679685, 0.05484143829345703, 0.05506047821044922, 0.05549260711669922, 0.056691646575927734, 0.05517475128173828, 0.055228160858154296, 0.05478985595703125, 0.05471244812011719, 0.05509971237182617, 0.05483161544799805, 0.054779712677001956, 0.05536796951293945, 0.055537567138671876, 0.05887945556640625, 0.05546448135375977, 0.05523212814331055, 0.055015583038330075, 0.05507727813720703, 0.05509715270996094, 0.05671513748168945, 0.056516735076904294, 0.055504894256591795, 0.05575411224365234, 0.05560720062255859, 0.0552496337890625, 
0.05496012878417969, 0.05500723266601563, 0.055272800445556644, 0.05568915176391601, 0.05624496078491211, 0.055903968811035154, 0.05575094223022461, 0.056149185180664064, 0.05565862274169922, 0.0550366096496582, 0.055408641815185546, 0.05529190444946289, 0.0553779182434082, 0.05513203048706055, 0.05753459167480469, 0.05531363296508789, 0.05546409606933594, 0.0550775032043457, 0.05506662368774414, 0.055019039154052735, 0.05488880157470703, 0.05478412628173828, 0.05535334396362305, 0.05544345474243164, 0.055365631103515625, 0.05540249633789063, 0.055310176849365233, 0.05558028793334961, 0.05544195175170898, 0.055126014709472655, 0.05502099227905274, 0.05558143997192383, 0.0558724479675293, 0.05889555358886719, 0.05606572723388672, 0.05550080108642578, 0.05526063919067383, 0.0549791030883789, 0.054763519287109375, 0.05497817611694336, 0.05540499114990234, 0.056672863006591793, 0.0574463996887207, 0.056643585205078124, 0.05557241439819336, 0.05561139297485351, 0.05496223831176758, 0.055060127258300784, 0.055111457824707034, 0.05497708892822266, 0.05480038452148438, 0.05489622497558594, 0.054895008087158206, 0.05480652618408203, 0.05505187225341797, 0.05496259307861328, 0.05477580642700195, 0.055003135681152344, 0.05504931259155273, 0.05485456085205078, 0.055317920684814455, 0.054935840606689455, 0.055177536010742184, 0.0591278076171875, 0.05507276916503906, 0.054975967407226566, 0.05510982513427734, 0.054935455322265625, 0.05561177444458008, 0.054830528259277346, 0.055618175506591795, 0.05579894256591797, 0.05499580764770508, 0.05488611221313477, 0.05537984085083008, 0.055304607391357424, 0.0550230712890625, 0.0550173454284668, 0.05527619171142578, 0.05518950271606445, 0.05579718399047852, 0.055750720977783205, 0.05546799850463867, 0.055158687591552735, 0.05512870407104492, 0.05541068649291992, 0.055045631408691405, 0.05496268844604492, 0.05513216018676758, 0.05501337432861328, 0.055182689666748046, 0.0549807357788086, 0.0548454704284668, 0.054806846618652344, 0.05569673538208008, 0.054983329772949216, 0.05581638336181641, 0.055200832366943356, 0.055648574829101564, 0.055374080657958985, 0.05558515167236328, 0.055959552764892576, 0.055784671783447266, 0.05564495849609375, 0.05705775833129883, 0.055416831970214846, 0.0553221435546875, 0.055683006286621095, 0.05526950454711914, 0.05533849716186524, 0.055547870635986325, 0.05497897720336914, 0.05495577621459961, 0.054806400299072265, 0.05487913513183594, 0.05485123062133789, 0.0546860466003418, 0.06041705703735351, 0.055234783172607424, 0.05532675170898437, 0.05534281539916992, 0.05563449478149414, 0.05584918212890625, 0.05603942489624023, 0.05521539306640625, 0.055199775695800785, 0.055180191040039066, 0.05550236892700195, 0.05578710556030273, 0.05508163070678711, 0.05485385513305664, 0.05475532913208008, 0.05482291030883789, 0.05475923156738281, 0.0552092170715332, 0.05493443298339844, 0.05469187164306641, 0.0547677116394043, 0.05829935836791992, 0.055661502838134765, 0.0553963508605957, 0.05516435241699219, 0.055917022705078125, 0.05575689697265625, 0.05551449584960937, 0.05573593521118164, 0.055634944915771485, 0.05550233459472656, 0.0551223030090332, 0.05542639923095703, 0.05532889556884766, 0.055061054229736325, 0.055029537200927736, 0.05553184127807617, 0.05523251342773437, 0.05540249633789063, 0.05499289703369141, 0.05503385543823242, 0.05509939193725586, 0.05495606231689453, 0.0548515510559082, 0.05498371124267578, 0.054893535614013673, 0.055063583374023437, 0.055497695922851566, 0.05585100936889648, 0.05571583938598633, 
0.05708278274536133, 0.055787391662597656, 0.05601696014404297, 0.0552182388305664, 0.055572479248046876, 0.05558476638793945, 0.055721343994140624, 0.05522700881958008, 0.05542911911010742, 0.05699599838256836, 0.055444480895996094, 0.05515350341796875, 0.055493888854980467, 0.05539654541015625, 0.05499142456054688, 0.05487411117553711, 0.05496422576904297, 0.05483273696899414, 0.05495849609375, 0.05490687942504883, 0.055109630584716796, 0.05492736053466797, 0.05519705581665039, 0.05524259185791015, 0.058765632629394535, 0.05551699066162109, 0.05523251342773437, 0.05518982315063477, 0.05489904022216797, 0.05546096038818359, 0.05604755020141602, 0.0558743667602539, 0.055414913177490234, 0.05539228820800781, 0.055347198486328124, 0.056737247467041015, 0.05617068862915039, 0.05565039825439453, 0.055494911193847654, 0.055282974243164064, 0.055255775451660154, 0.05601484680175781, 0.05650841522216797, 0.055714977264404296, 0.05562028884887695, 0.05907606506347656, 0.05645177459716797, 0.05583462524414062, 0.056289279937744144, 0.05696688079833984, 0.05606633758544922, 0.05572774505615234, 0.055546241760253905, 0.05544937515258789, 0.055543136596679685, 0.05561798477172852, 0.05579750442504883, 0.056379745483398434, 0.05561993789672852, 0.05573222351074219, 0.05717606353759765, 0.05620028686523437, 0.05578435134887695, 0.05707334518432617, 0.05615622329711914, 0.05643667221069336, 0.05616831970214844, 0.05564876937866211, 0.05588787078857422, 0.05595257568359375, 0.055699966430664063, 0.056186721801757815, 0.05612796783447266, 0.056190975189208986, 0.05639987182617188, 0.0559797134399414, 0.05565670394897461, 0.055809951782226565, 0.055990081787109375, 0.056151424407958984, 0.05577212905883789, 0.055756031036376955, 0.055667457580566404, 0.056027137756347656, 0.05570716857910156, 0.05545750427246094, 0.05607273483276367, 0.05593929672241211, 0.05592601776123047, 0.05609913635253906, 0.0556416015625, 0.055419422149658205, 0.05603193664550781, 0.05536870574951172, 0.05524553680419922, 0.05530828857421875, 0.05545369720458984, 0.05673123168945313, 0.05643036651611328, 0.05591718292236328, 0.05585039901733398, 0.05562223815917969, 0.05612748718261719, 0.05587353515625, 0.055785247802734375, 0.056038753509521484, 0.05636185455322266, 0.056027008056640626, 0.05583270263671875, 0.056183967590332035, 0.05585321426391601, 0.05589676666259766, 0.055932926177978515, 0.056137313842773436, 0.05613951873779297, 0.05638790512084961, 0.056164608001708985, 0.056621150970458986, 0.056360160827636716, 0.058641185760498045, 0.05611520004272461, 0.05553926467895508, 0.05593952178955078, 0.05743199920654297, 0.0559529914855957, 0.05585884857177734]",tokens/s,18.00323256393364,,, -4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): 
+4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -19557,12 +19557,12 @@ ChildProcessError: Traceback (most recent call last): self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 6.12 MiB is free. Process 107457 has 14.73 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 241.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 6.12 MiB is free. Process 106300 has 14.73 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 241.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,870.170624,617.545728,0.0,222.298112,199.93344,s,1,8.03411474609375,8.03411474609375,0.0,8.03411474609375,8.03411474609375,8.03411474609375,8.03411474609375,[8.03411474609375],,kWh,2.450999462917025e-05,2.69318842136144e-06,7.537506029997587e-06,3.474068908052927e-05,,MB,1211.572224,669.974528,0.0,262.144,220.881408,s,16,0.5669669075012207,0.035435431718826294,0.0010705433270812425,0.035248767852783205,0.03584315109252929,0.036712830543518066,0.03876446552276611,"[0.03927737426757812, 0.03503222274780273, 0.03528144073486328, 0.034322624206542966, 0.03507164764404297, 0.03585798263549805, 0.03582831954956055, 0.035390846252441405, 0.03559091186523437, 0.035216094970703125, 0.035463329315185546, 0.03534169769287109, 0.03486675262451172, 0.034920894622802734, 0.03455049514770508, 0.03495427322387695]",tokens/s,7224.407537385559,kWh,1.2342303559247405e-06,1.3611274231380592e-07,8.165512861518986e-07,2.186894384390445e-06,tokens/kWh,117060980.09454402,MB,1245.51168,684.654592,0.0,276.824064,220.883968,s,16,9.711272521972656,0.606954532623291,0.008585633852691918,0.6104183959960938,0.6147964477539063,0.6172033386230469,0.6172731384277343,"[0.5949608154296875, 0.58690283203125, 0.5928167114257813, 0.6113905639648437, 0.6124186401367188, 0.6122392578125, 0.6092311401367188, 0.6065200805664063, 0.6100323486328125, 0.6172905883789063, 0.6171742553710937, 0.610804443359375, 0.6068065795898437, 0.6120545043945312, 0.6117877807617188, 0.5988419799804687]",tokens/s,103.79690176743638,kWh,1.7168328469075072e-05,1.8933499684787224e-06,6.887743071723286e-06,2.594942150927708e-05,tokens/kWh,2427799.7864991752,,s,1008,9.702431542396553,0.009625428117456892,0.00026042284172965544,0.009625679969787599,0.009851100540161133,0.009908850908279419,0.01048333580970764,"[0.009447872161865234, 0.009883872032165527, 0.009592991828918457, 0.009680928230285645, 0.009769023895263672, 0.009662367820739747, 0.00961945629119873, 0.009543904304504395, 0.009411968231201173, 0.009510944366455078, 0.009316127777099609, 0.00942959976196289, 0.009633536338806153, 0.009889151573181152, 0.009661343574523926, 0.00935308837890625, 0.009390175819396973, 0.009297056198120117, 0.009340800285339356, 0.009318400382995605, 0.009380031585693359, 0.009345824241638184, 0.009279232025146484, 0.009408767700195313, 0.009334303855895997, 0.009332480430603028, 0.009302751541137695, 0.009256159782409668, 0.009249567985534668, 0.009538687705993653, 0.009282431602478027, 0.009288800239562989, 0.009294816017150879, 0.009297856330871582, 0.009354432106018067, 0.009312640190124512, 0.009260671615600586, 0.00941487979888916, 0.009314144134521484, 0.009253631591796875, 0.00919961643218994, 0.00927948760986328, 0.00922323226928711, 
0.009255871772766114, 0.009237919807434082, 0.009220383644104003, 0.009230655670166016, 0.009252256393432617, 0.00960905647277832, 0.009428959846496582, 0.009360063552856445, 0.009312543869018555, 0.00996127986907959, 0.009870559692382812, 0.010033632278442383, 0.009816384315490723, 0.009544832229614258, 0.009667455673217773, 0.009404416084289552, 0.009328255653381348, 0.009349504470825195, 0.009328160285949707, 0.009316831588745118, 0.009525343894958497, 0.009607104301452637, 0.009431232452392578, 0.009420255661010742, 0.009408512115478516, 0.009412351608276368, 0.009341919898986816, 0.009263839721679688, 0.009296863555908204, 0.009326239585876465, 0.009684800148010254, 0.009287327766418456, 0.009227359771728515, 0.009227807998657227, 0.009212160110473632, 0.009289183616638184, 0.009218720436096192, 0.0091278076171875, 0.009142271995544434, 0.00932863998413086, 0.009203776359558106, 0.009279135704040528, 0.009216032028198241, 0.00928384017944336, 0.009214271545410156, 0.009309887886047363, 0.009207776069641113, 0.009220128059387206, 0.009185152053833008, 0.00923408031463623, 0.009265631675720214, 0.009371135711669922, 0.00929635238647461, 0.009306240081787109, 0.009285504341125489, 0.009322527885437012, 0.00929587173461914, 0.009286751747131347, 0.009294848442077636, 0.009238431930541992, 0.009256095886230468, 0.009261919975280761, 0.009430591583251952, 0.009429439544677735, 0.009507871627807618, 0.009454784393310546, 0.009492256164550782, 0.009390080451965332, 0.00931552028656006, 0.009323328018188477, 0.009362848281860351, 0.009230912208557129, 0.009256735801696777, 0.00925107192993164, 0.00942899227142334, 0.009298015594482421, 0.009239935874938964, 0.009159199714660645, 0.009328512191772461, 0.009303232192993165, 0.009180095672607422, 0.009190591812133789, 0.009220064163208008, 0.009267071723937988, 0.00943561553955078, 0.00954643154144287, 0.009376735687255859, 0.009472224235534668, 0.009382335662841797, 0.009531392097473144, 0.009433024406433106, 0.009355680465698242, 0.009446335792541503, 0.009403103828430176, 0.009461759567260742, 0.009308095932006835, 0.009351231575012208, 0.009328479766845704, 0.009283743858337402, 0.00929097557067871, 0.00927619171142578, 0.009274911880493165, 0.009390432357788086, 0.009349247932434083, 0.009261055946350098, 0.009506239891052246, 0.009422464370727539, 0.009231040000915527, 0.00928998374938965, 0.009314016342163086, 0.009215583801269531, 0.009251744270324706, 0.00929155158996582, 0.009220095634460449, 0.009334272384643554, 0.009203328132629394, 0.009170975685119629, 0.009195648193359374, 0.009214688301086425, 0.009293791770935058, 0.009230367660522461, 0.00923852825164795, 0.00927948760986328, 0.00937936019897461, 0.009452159881591798, 0.009427840232849122, 0.009343968391418458, 0.009350848197937012, 0.009389408111572265, 0.009313152313232423, 0.009361311912536622, 0.009377984046936036, 0.009385215759277343, 0.00963167953491211, 0.00966329574584961, 0.009664544105529785, 0.009652192115783692, 0.009612480163574218, 0.009548319816589355, 0.00981430435180664, 0.009943039894104003, 0.009624768257141113, 0.009614144325256348, 0.009523200035095216, 0.009523200035095216, 0.009533439636230469, 0.009478624343872071, 0.009737215995788574, 0.009750528335571289, 0.00968832015991211, 0.009748224258422852, 0.009639936447143555, 0.009650176048278808, 0.009576448440551758, 0.009660415649414063, 0.009524736404418945, 0.009575967788696288, 0.009534720420837403, 0.009532416343688965, 0.009501407623291016, 0.009538816452026368, 0.009546496391296387, 
0.009613311767578125, 0.009697343826293945, 0.009756608009338379, 0.009654272079467773, 0.0097259521484375, 0.009699328422546387, 0.009706720352172852, 0.009686112403869629, 0.00983420753479004, 0.009747584342956543, 0.009736063957214356, 0.009894880294799804, 0.009772959709167481, 0.009694656372070313, 0.009681568145751954, 0.00976460838317871, 0.009732352256774903, 0.009707200050354003, 0.009885536193847657, 0.009771391868591309, 0.009736384391784668, 0.009703328132629394, 0.009703424453735352, 0.009764415740966797, 0.009817983627319336, 0.009847359657287598, 0.009705471992492675, 0.009762080192565917, 0.009691871643066407, 0.009650176048278808, 0.009794560432434082, 0.00993177604675293, 0.009791744232177735, 0.009791199684143066, 0.00973417568206787, 0.009665920257568359, 0.009630335807800292, 0.009607135772705078, 0.009599007606506348, 0.009582592010498046, 0.009631744384765625, 0.009594335556030273, 0.009579039573669434, 0.00962179183959961, 0.009606207847595215, 0.009966367721557618, 0.009864224433898925, 0.009728768348693847, 0.009780991554260253, 0.009867584228515625, 0.009820032119750976, 0.009682815551757812, 0.009683072090148926, 0.00966659164428711, 0.009727775573730469, 0.009598496437072754, 0.009607839584350585, 0.00975158405303955, 0.009662848472595215, 0.009642144203186036, 0.009896384239196778, 0.009752351760864258, 0.009782976150512695, 0.009587231636047364, 0.009539584159851074, 0.009578207969665527, 0.009640064239501953, 0.0097609281539917, 0.009832448005676269, 0.009678848266601562, 0.009674143791198731, 0.009722463607788086, 0.009668352127075196, 0.00967091178894043, 0.00967676830291748, 0.009576479911804199, 0.009721440315246582, 0.009687456130981445, 0.009823583602905273, 0.009754847526550293, 0.009850496292114258, 0.009798175811767578, 0.009890080451965332, 0.00964310359954834, 0.00962384033203125, 0.009656959533691407, 0.009654080390930176, 0.00962390422821045, 0.009645503997802734, 0.009607808113098144, 0.009596128463745118, 0.009746912002563477, 0.009666655540466309, 0.009727935791015625, 0.009897791862487793, 0.009782655715942383, 0.010109855651855468, 0.009703392028808594, 0.00971776008605957, 0.009703424453735352, 0.009557567596435546, 0.00975648021697998, 0.009560288429260254, 0.009544351577758788, 0.009524991989135742, 0.009566399574279785, 0.009518912315368652, 0.010208415985107421, 0.009755647659301758, 0.009953120231628417, 0.009440064430236816, 0.009772319793701172, 0.009629504203796387, 0.00984768009185791, 0.009584287643432617, 0.009733728408813477, 0.009590975761413574, 0.009642720222473145, 0.009518688201904296, 0.00959494400024414, 0.009668319702148438, 0.009644512176513673, 0.009582240104675293, 0.009628000259399415, 0.009624896049499511, 0.00961196804046631, 0.009709280014038086, 0.009664799690246582, 0.009739999771118163, 0.009668512344360352, 0.009656512260437011, 0.00959280014038086, 0.009611392021179199, 0.009633888244628906, 0.009545727729797364, 0.009674752235412597, 0.009637920379638672, 0.00965833568572998, 0.009666463851928712, 0.00971292781829834, 0.00967683219909668, 0.009672896385192871, 0.010103391647338868, 0.009857088088989259, 0.009727295875549316, 0.00967347240447998, 0.00975004768371582, 0.009642335891723633, 0.009559295654296874, 0.010099103927612305, 0.010545503616333008, 0.010921983718872071, 0.010004480361938477, 0.00971776008605957, 0.009684831619262695, 0.009625760078430176, 0.009709695816040038, 0.009615232467651367, 0.009707200050354003, 0.009793536186218262, 0.009676223754882812, 0.00973299217224121, 
0.00988479995727539, 0.009646719932556152, 0.009568511962890625, 0.009506815910339356, 0.009621503829956055, 0.00954543972015381, 0.009546079635620118, 0.009545663833618163, 0.009560064315795898, 0.009516863822937012, 0.009967807769775391, 0.009519776344299317, 0.009664511680603028, 0.0096112642288208, 0.010798912048339843, 0.012132543563842774, 0.009691455841064452, 0.009690912246704101, 0.009564224243164063, 0.009650015830993652, 0.009531328201293945, 0.009705535888671875, 0.009598496437072754, 0.009545472145080567, 0.009618368148803711, 0.009574175834655762, 0.009661888122558594, 0.009585151672363281, 0.009570367813110351, 0.009614527702331543, 0.009620287895202636, 0.009600223541259766, 0.009645024299621581, 0.009611071586608887, 0.009715392112731933, 0.009715423583984376, 0.009599583625793457, 0.009594592094421386, 0.009596384048461915, 0.009569087982177734, 0.00952457618713379, 0.009534367561340332, 0.009571423530578613, 0.009612959861755371, 0.00976319980621338, 0.009531744003295898, 0.009554176330566407, 0.009568287849426269, 0.009592831611633301, 0.009661472320556641, 0.009608160018920899, 0.009574399948120118, 0.009570303916931153, 0.00958182430267334, 0.009619263648986817, 0.009495231628417969, 0.00952575969696045, 0.009541376113891602, 0.00971776008605957, 0.009644031524658203, 0.0095862398147583, 0.009658559799194336, 0.009570143699645995, 0.009585344314575195, 0.0095250883102417, 0.009553119659423829, 0.009638079643249512, 0.009620032310485839, 0.009583871841430664, 0.00970748805999756, 0.009548480033874512, 0.00951734447479248, 0.009573760032653809, 0.009591039657592773, 0.009543680191040039, 0.00970956802368164, 0.009793248176574707, 0.010053919792175294, 0.009944448471069336, 0.00968569564819336, 0.009922623634338379, 0.009713536262512206, 0.009567392349243165, 0.009699616432189941, 0.009695743560791016, 0.009801792144775391, 0.00960102367401123, 0.009666432380676269, 0.009656671524047852, 0.009633888244628906, 0.009620256423950195, 0.00968387222290039, 0.009682656288146973, 0.009783583641052246, 0.009635583877563476, 0.009656479835510254, 0.009541279792785644, 0.009611712455749512, 0.009586688041687011, 0.009625311851501464, 0.009572640419006347, 0.009660415649414063, 0.009564160346984863, 0.009552895545959473, 0.00953990364074707, 0.00965283203125, 0.009556384086608886, 0.009648927688598633, 0.009597855567932129, 0.009565728187561035, 0.009482815742492675, 0.00955401611328125, 0.009505951881408691, 0.009536160469055175, 0.00951910400390625, 0.009491840362548827, 0.009509568214416503, 0.009482111930847168, 0.009525376319885254, 0.00952086353302002, 0.009519328117370605, 0.00951910400390625, 0.009558015823364258, 0.009566176414489745, 0.009531295776367188, 0.009578623771667481, 0.009557663917541504, 0.009505120277404785, 0.009473983764648437, 0.009491552352905273, 0.009593184471130372, 0.009656096458435058, 0.009792351722717285, 0.009664511680603028, 0.009590784072875976, 0.009630816459655762, 0.009575615882873536, 0.009500320434570313, 0.009721152305603028, 0.009724703788757325, 0.009793536186218262, 0.00961740779876709, 0.00970537567138672, 0.009683296203613282, 0.009624959945678711, 0.00970911979675293, 0.009576671600341797, 0.009623647689819336, 0.009660479545593261, 0.009537631988525391, 0.009567935943603516, 0.009535967826843262, 0.00962559986114502, 0.009785247802734374, 0.009834464073181153, 0.009742655754089356, 0.009690688133239747, 0.009636287689208984, 0.009652095794677734, 0.009656448364257813, 0.009643296241760253, 0.009556703567504883, 0.009797632217407226, 
0.009616543769836425, 0.009657183647155761, 0.009613311767578125, 0.009576448440551758, 0.009585760116577148, 0.009597855567932129, 0.00960048007965088, 0.009615903854370117, 0.009562111854553223, 0.009595135688781738, 0.009604063987731934, 0.009709856033325196, 0.009588768005371093, 0.009638208389282227, 0.009702560424804687, 0.009933440208435059, 0.009869215965270996, 0.009767231941223144, 0.009719615936279296, 0.009688575744628907, 0.009654879570007324, 0.00981606388092041, 0.00961945629119873, 0.009653696060180664, 0.009635711669921875, 0.009644991874694824, 0.009623552322387695, 0.009682080268859864, 0.009741151809692383, 0.009689087867736817, 0.009725664138793945, 0.00982806396484375, 0.009787967681884766, 0.009786944389343261, 0.009730496406555177, 0.00972390365600586, 0.009674976348876953, 0.00956287956237793, 0.009942815780639649, 0.00992899227142334, 0.010126272201538085, 0.010017056465148926, 0.009740544319152831, 0.00986460781097412, 0.00966649627685547, 0.009731040000915528, 0.009623488426208497, 0.00972390365600586, 0.00966864013671875, 0.009692768096923828, 0.009736576080322266, 0.009637887954711915, 0.009670656204223632, 0.009690943717956544, 0.009646464347839355, 0.00976467227935791, 0.010141695976257324, 0.009758720397949219, 0.009764863967895507, 0.009704607963562012, 0.009705984115600585, 0.009733983993530274, 0.009763487815856934, 0.009772480010986328, 0.009728416442871094, 0.009709856033325196, 0.009919391632080079, 0.009644576072692871, 0.00964031982421875, 0.009716799736022949, 0.00967353630065918, 0.00975376033782959, 0.009779775619506836, 0.009780768394470216, 0.00971673583984375, 0.009682720184326171, 0.009842880249023437, 0.010102720260620117, 0.009794943809509278, 0.009883999824523926, 0.009904671669006347, 0.009862784385681153, 0.009801728248596191, 0.00979155158996582, 0.009877440452575683, 0.009789440155029297, 0.00987110424041748, 0.009892352104187012, 0.00977894401550293, 0.009884960174560547, 0.009851615905761718, 0.009746527671813965, 0.009746335983276367, 0.009867551803588868, 0.00989087963104248, 0.009789728164672851, 0.009808256149291992, 0.009758015632629395, 0.009848640441894532, 0.009743231773376465, 0.009546112060546875, 0.009905983924865722, 0.009841312408447265, 0.009899423599243165, 0.009830080032348632, 0.009835200309753418, 0.009769503593444823, 0.009813055992126465, 0.00956287956237793, 0.009850655555725098, 0.009641823768615722, 0.009960960388183594, 0.009818943977355957, 0.00989132785797119, 0.009744256019592286, 0.009803647994995118, 0.009762847900390625, 0.009863519668579101, 0.00974448013305664, 0.009750335693359376, 0.009734560012817382, 0.009750528335571289, 0.009672639846801757, 0.009715456008911133, 0.009760640144348144, 0.009836480140686036, 0.009687423706054688, 0.009940223693847656, 0.00968291187286377, 0.009702431678771972, 0.009680000305175782, 0.009728768348693847, 0.009756704330444337, 0.00978940773010254, 0.009752575874328612, 0.00971116828918457, 0.00971731185913086, 0.009851776123046874, 0.00975692844390869, 0.009752320289611817, 0.009836799621582031, 0.00984447956085205, 0.00993660831451416, 0.009849120140075683, 0.009821279525756836, 0.009927488327026368, 0.009797727584838867, 0.00990835189819336, 0.00979475212097168, 0.009806879997253418, 0.009747551918029786, 0.009840288162231446, 0.009851807594299317, 0.009901856422424317, 0.00985324764251709, 0.009880576133728027, 0.009890527725219726, 0.009778495788574218, 0.009818431854248046, 0.009611840248107911, 0.009684991836547852, 0.00968892765045166, 0.009718048095703124, 
0.00955571174621582, 0.010055935859680176, 0.009907839775085449, 0.009775456428527831, 0.00981123161315918, 0.009704192161560059, 0.009858783721923827, 0.009752863883972169, 0.009902239799499512, 0.01001046371459961, 0.009804032325744628, 0.009832127571105957, 0.00969324779510498, 0.009701215744018554, 0.009683327674865723, 0.009790623664855957, 0.009710495948791503, 0.009688384056091308, 0.009709983825683595, 0.009578783988952637, 0.009635552406311036, 0.009619135856628418, 0.009787712097167969, 0.009596927642822266, 0.009591103553771973, 0.009625280380249023, 0.009691360473632813, 0.009634655952453613, 0.009638079643249512, 0.009618176460266113, 0.009539135932922363, 0.009559935569763183, 0.010627103805541991, 0.009675295829772949, 0.009717887878417968, 0.009706687927246094, 0.009699999809265137, 0.009649727821350098, 0.009739871978759765, 0.00962060832977295, 0.009592576026916504, 0.009613311767578125, 0.009567328453063965, 0.009614239692687989, 0.009545408248901367, 0.009650495529174805, 0.009576255798339844, 0.00950819206237793, 0.00986182403564453, 0.009847999572753906, 0.009671648025512696, 0.009841952323913574, 0.00962435245513916, 0.00966211223602295, 0.009511199951171875, 0.009535264015197754, 0.009478367805480958, 0.009441280364990234, 0.009464863777160645, 0.009521216392517089, 0.009451583862304687, 0.009549920082092284, 0.009616127967834473, 0.00946793556213379, 0.00981049633026123, 0.009789055824279785, 0.009763520240783691, 0.00969696044921875, 0.009662336349487304, 0.010201215744018555, 0.01009059238433838, 0.009874367713928222, 0.009585023880004883, 0.009709407806396484, 0.009634655952453613, 0.009622688293457032, 0.009713408470153808, 0.009616512298583985, 0.009625472068786622, 0.009566335678100587, 0.00954150390625, 0.009667712211608887, 0.009665696144104004, 0.009688799858093261, 0.009669952392578125, 0.009570207595825195, 0.009588607788085938, 0.009584832191467284, 0.009519424438476563, 0.00958505630493164, 0.009573760032653809, 0.009697055816650391, 0.009556832313537598, 0.00960102367401123, 0.009555968284606933, 0.009668288230895996, 0.009566816329956054, 0.0096212158203125, 0.009615039825439453, 0.009517472267150879, 0.009524127960205079, 0.009548800468444824, 0.0094967041015625, 0.009534560203552245, 0.00950761604309082, 0.009504768371582031, 0.009524543762207032, 0.009503487586975098, 0.009540896415710449, 0.009869983673095703, 0.009561152458190917, 0.00952620792388916, 0.00961945629119873, 0.009545727729797364, 0.009526816368103028, 0.009525823593139648, 0.0095763521194458, 0.009511967658996583, 0.00963811206817627, 0.009548416137695312, 0.009611071586608887, 0.009576416015625, 0.009695775985717773, 0.00957420825958252, 0.009566240310668946, 0.009603039741516113, 0.00952768039703369, 0.009738719940185547, 0.009861120223999023, 0.009818112373352051, 0.009744383811950684, 0.00962559986114502, 0.009682239532470703, 0.00958672046661377, 0.009664959907531738, 0.009726335525512695, 0.009683903694152831, 0.009587807655334473, 0.009512672424316406, 0.009642080307006836, 0.009613311767578125, 0.009621503829956055, 0.009776255607604981, 0.009474368095397949, 0.009523776054382324, 0.009672736167907715, 0.009891807556152344, 0.009889792442321778, 0.009649984359741211, 0.009689279556274414, 0.009746432304382324, 0.009663711547851563, 0.009643808364868164, 0.009675775527954102, 0.009602656364440918, 0.009586943626403808, 0.009820320129394532, 0.009684703826904296, 0.009560288429260254, 0.009746560096740723, 0.009849023818969727, 0.009887264251708985, 0.0097193603515625, 
0.009724575996398926, 0.009797792434692382, 0.009717599868774414, 0.009834495544433594, 0.009754143714904785, 0.009795104026794434, 0.009737152099609375, 0.009885567665100098, 0.009764991760253906, 0.0097609281539917, 0.009752415657043456, 0.009756352424621582, 0.009711487770080567, 0.009693792343139648, 0.009682784080505371, 0.00974847984313965, 0.009674688339233399, 0.009621888160705567, 0.00977683162689209, 0.009731679916381837, 0.009638015747070312, 0.009693375587463379, 0.009630047798156738, 0.00963161563873291, 0.00981174373626709, 0.00974847984313965, 0.00959718418121338, 0.009842720031738281, 0.009944992065429687, 0.009842687606811524, 0.009942432403564454, 0.009909119606018067, 0.009858688354492188, 0.009818207740783692, 0.009850879669189454, 0.009889535903930664, 0.00989414405822754, 0.009795871734619141, 0.010454527854919434, 0.010950655937194824, 0.010774751663208008, 0.01005568027496338, 0.01006175994873047, 0.009836607933044434, 0.009920512199401856, 0.009981951713562011, 0.009844736099243164, 0.009824352264404297, 0.009877408027648926, 0.009795583724975587, 0.009773216247558593, 0.009760607719421386, 0.009688639640808105, 0.009607872009277344, 0.009682687759399414, 0.009739359855651856, 0.009507871627807618, 0.009461503982543945, 0.009545856475830079, 0.009477343559265137, 0.009415488243103027, 0.009435104370117188, 0.009422656059265136, 0.009462271690368652, 0.009432191848754884, 0.009552448272705079, 0.009549247741699218, 0.009648927688598633, 0.009645855903625489, 0.00963584041595459, 0.009586432456970214, 0.009662943840026856, 0.009497695922851563, 0.00947270393371582, 0.009524255752563477, 0.009488991737365723, 0.009594592094421386, 0.009447263717651368, 0.009540703773498535, 0.009443008422851562, 0.009438976287841797, 0.009400320053100587, 0.009470239639282226, 0.009656319618225098, 0.009555968284606933, 0.00939417552947998, 0.009334783554077148, 0.00944547176361084, 0.009328543663024902, 0.009187328338623046, 0.009523200035095216, 0.009322784423828124, 0.009346495628356934, 0.009333215713500976, 0.009364319801330566, 0.00942409610748291, 0.009347071647644043, 0.00953916835784912, 0.009448863983154298, 0.009579263687133788, 0.009508864402770996, 0.01324169635772705, 0.010420576095581054, 0.010524991989135743, 0.010485504150390625, 0.009701631546020507, 0.009324543952941895, 0.009377216339111328, 0.009525823593139648, 0.00939417552947998, 0.00928553581237793, 0.009527392387390136, 0.00930611228942871, 0.009223648071289062, 0.009226911544799805, 0.009233311653137207, 0.009210047721862792, 0.009220864295959473, 0.009220128059387206, 0.009224320411682129, 0.00934284782409668, 0.009584639549255371, 0.009237855911254882, 0.009636480331420899, 0.009351200103759765, 0.009354720115661621, 0.00937564754486084, 0.009327232360839844, 0.00932585620880127, 0.009260831832885742, 0.00932755184173584, 0.009272480010986328, 0.0093274564743042, 0.009308159828186035, 0.009255231857299805, 0.009359040260314942, 0.00935321617126465, 0.009248543739318847, 0.009302240371704101, 0.00936297607421875, 0.010651712417602538, 0.009429408073425292, 0.00934928035736084, 0.009588031768798829, 0.009392767906188966, 0.009439167976379394, 0.009336064338684082, 0.009327327728271485, 0.009303872108459472, 0.009316512107849121, 0.009338560104370118, 0.009338368415832519]",tokens/s,103.8914828303977,,, 
4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1196.367872,1149.108224,0.0,746.586112,728.01536,s,1,8.62387109375,8.62387109375,0.0,8.62387109375,8.62387109375,8.62387109375,8.62387109375,[8.62387109375],,kWh,3.0236546608375648e-05,3.3209170310235012e-06,9.779730045983426e-06,4.3337193685382574e-05,,MB,1572.92544,1457.389568,0.0,1040.187392,985.00096,s,10,1.589587371826172,0.1589587371826172,0.0011242750814520921,0.15868964385986328,0.15946807403564453,0.16077782974243165,0.16182563430786134,"[0.16208758544921875, 0.15779299926757812, 0.15914378356933595, 0.15866018676757812, 0.15893980407714844, 0.15843463134765626, 0.15803158569335937, 0.15871910095214845, 0.15860067749023438, 0.15917701721191407]",tokens/s,1610.48083633112,kWh,4.852531642554226e-06,5.351485358657892e-07,3.2067284305896596e-06,8.594408609009674e-06,tokens/kWh,29786808.10354194,MB,1605.65248,1505.624064,0.0,1088.421888,985.00352,s,10,11.109411743164065,1.1109411743164062,0.006887684538624719,1.1124687500000001,1.1183736694335937,1.1193114074707031,1.1200615979003907,"[1.101510498046875, 1.11509423828125, 1.118165283203125, 1.1052757568359375, 1.1135023193359375, 1.1061446533203125, 1.10007080078125, 1.1202491455078125, 1.1114351806640625, 1.1179638671875]",tokens/s,56.70867320114019,kWh,3.205926829119471e-05,3.53574678766871e-06,1.4968063887012112e-05,5.056307896587554e-05,tokens/kWh,1245968.427724071,,s,630,11.106172786712637,0.017628845693194676,0.0004051979093477331,0.017573904037475586,0.017849810600280763,0.018003361511230467,0.01855540641784668,"[0.017670688629150392, 0.01768604850769043, 0.01747235107421875, 0.017709056854248048, 0.017524736404418945, 0.01739708709716797, 0.01764998435974121, 0.017381664276123046, 0.01732371139526367, 0.01748134422302246, 0.01731865692138672, 0.017410400390625, 0.017512096405029296, 0.017399808883666993, 0.017292415618896485, 0.017548160552978517, 0.017425504684448243, 0.01723619270324707, 0.01740870475769043, 0.01738956832885742, 0.01742201614379883, 0.01724652862548828, 0.017284095764160155, 0.01741312026977539, 0.017270784378051757, 0.017202272415161132, 0.01740687942504883, 0.017399808883666993, 0.01742438316345215, 0.01764249610900879, 0.01741721534729004, 0.01745305633544922, 0.017495840072631837, 0.017424608230590822, 0.01780860710144043, 0.017707807540893555, 0.017375232696533204, 0.017378463745117187, 0.017437536239624022, 0.017612991333007814, 0.017434431076049806, 0.017516544342041016, 0.01772876739501953, 0.017576704025268553, 0.01734886360168457, 0.017329919815063478, 0.017479232788085938, 0.017762752532958986, 0.017573888778686524, 0.017491071701049805, 0.017357280731201172, 0.017510751724243163, 0.017434688568115236, 0.017350656509399414, 0.017375232696533204, 0.01739068794250488, 0.01733692741394043, 0.017472896575927734, 0.017469919204711915, 0.01800035285949707, 0.017910879135131837, 0.017709503173828123, 
0.01757391929626465, 0.017772544860839845, 0.01761894416809082, 0.017655391693115235, 0.017790687561035155, 0.017722047805786133, 0.0175861759185791, 0.017551359176635743, 0.01760054397583008, 0.01767420768737793, 0.017532928466796875, 0.017754112243652344, 0.017712703704833986, 0.017739423751831053, 0.017573728561401367, 0.017630111694335936, 0.01755459213256836, 0.01746121597290039, 0.01794550323486328, 0.022190080642700196, 0.02043289566040039, 0.017580032348632812, 0.01778659248352051, 0.017614751815795898, 0.01788147163391113, 0.017603935241699217, 0.017535648345947265, 0.01762646484375, 0.017515167236328125, 0.017524736404418945, 0.017514495849609374, 0.017491615295410157, 0.017578336715698244, 0.017556671142578126, 0.017535423278808592, 0.01754764747619629, 0.017725439071655275, 0.017909759521484374, 0.017590272903442384, 0.01760665512084961, 0.017514080047607423, 0.017602975845336915, 0.01745510482788086, 0.017328128814697266, 0.017413728713989256, 0.01766032028198242, 0.01754070472717285, 0.01738153648376465, 0.0174021110534668, 0.017459199905395507, 0.017367040634155274, 0.017294464111328126, 0.017429279327392577, 0.01746895980834961, 0.017578048706054686, 0.017553728103637697, 0.01753926467895508, 0.017620384216308595, 0.017447519302368163, 0.017348608016967772, 0.017516544342041016, 0.017520639419555666, 0.017526784896850587, 0.01759846305847168, 0.018551679611206056, 0.017909759521484374, 0.01841289520263672, 0.017756128311157228, 0.017584831237792968, 0.017690624237060547, 0.01777663993835449, 0.017543167114257813, 0.017530784606933594, 0.01791804885864258, 0.017709056854248048, 0.017587551116943358, 0.017522464752197264, 0.017551712036132813, 0.017578239440917968, 0.017469728469848633, 0.017772544860839845, 0.017760255813598632, 0.01784956741333008, 0.017729440689086915, 0.017711296081542968, 0.017682367324829102, 0.017781408309936523, 0.01810985565185547, 0.018471616744995117, 0.018263168334960937, 0.017653696060180663, 0.017511072158813475, 0.017621280670166016, 0.017612800598144532, 0.01773676872253418, 0.017713792800903322, 0.017527103424072266, 0.01743667221069336, 0.017496063232421876, 0.01786579132080078, 0.017664608001708985, 0.017680736541748048, 0.01797302436828613, 0.017878719329833984, 0.017578847885131837, 0.017729055404663085, 0.018522592544555665, 0.01760630416870117, 0.017489952087402345, 0.01769267272949219, 0.017560928344726563, 0.01765433692932129, 0.01764726448059082, 0.017526720046997072, 0.017576448440551756, 0.018135040283203126, 0.017688575744628905, 0.017547071456909178, 0.017612991333007814, 0.01765171241760254, 0.017633279800415038, 0.017465599060058595, 0.017950464248657226, 0.01784012794494629, 0.017743616104125975, 0.01754751968383789, 0.017837503433227538, 0.01791155242919922, 0.01803241539001465, 0.017768575668334962, 0.017705759048461913, 0.017765600204467772, 0.018167999267578124, 0.017707679748535158, 0.017655807495117186, 0.017711103439331053, 0.017844224929809572, 0.017678272247314452, 0.017587839126586916, 0.017709247589111327, 0.017664255142211913, 0.01764761543273926, 0.017622240066528322, 0.017685344696044922, 0.017510208129882812, 0.017459104537963867, 0.017510080337524415, 0.017433120727539064, 0.017440095901489257, 0.01729097557067871, 0.017238208770751953, 0.017492544174194335, 0.01764476776123047, 0.017558271408081055, 0.017445087432861328, 0.017603904724121093, 0.017394367218017577, 0.017388671875, 0.017486719131469725, 0.0174072322845459, 0.017382144927978516, 0.017534175872802735, 0.017720096588134764, 0.0174202880859375, 
0.017235872268676757, 0.0173089599609375, 0.017481855392456055, 0.017512575149536132, 0.017568031311035157, 0.017466848373413085, 0.017407808303833008, 0.017368064880371094, 0.01737113571166992, 0.017307647705078123, 0.017512351989746093, 0.01746339225769043, 0.017373119354248047, 0.017461055755615233, 0.01742220878601074, 0.017586463928222655, 0.017363040924072266, 0.018005823135375975, 0.01743459129333496, 0.017510623931884767, 0.017536767959594725, 0.017338176727294922, 0.017519039154052736, 0.01739948844909668, 0.017374879837036134, 0.0174182071685791, 0.01754582405090332, 0.017772544860839845, 0.01778892707824707, 0.01781532859802246, 0.017843584060668945, 0.017617759704589845, 0.017557472229003907, 0.01754319953918457, 0.017497760772705078, 0.017682783126831053, 0.01776780891418457, 0.01772198486328125, 0.01806035232543945, 0.017693567276000976, 0.017716991424560548, 0.017822015762329103, 0.017612064361572265, 0.01755731201171875, 0.01761577606201172, 0.017655807495117186, 0.017655807495117186, 0.017770496368408203, 0.01762441635131836, 0.01765033531188965, 0.017889087677001952, 0.01767647933959961, 0.01778825569152832, 0.01776019287109375, 0.018180831909179688, 0.017765535354614257, 0.017496383666992188, 0.017705503463745116, 0.017721343994140625, 0.017604608535766602, 0.01751641654968262, 0.01756787109375, 0.01762505531311035, 0.017625120162963866, 0.0177108154296875, 0.017643808364868164, 0.017639551162719726, 0.017618175506591796, 0.01794646453857422, 0.017568544387817384, 0.01756492805480957, 0.017504383087158203, 0.017745792388916017, 0.01805183982849121, 0.017786880493164063, 0.0178288631439209, 0.017750175476074218, 0.017603008270263672, 0.017618431091308593, 0.0175645751953125, 0.01767616081237793, 0.017583967208862305, 0.017475103378295897, 0.017498880386352538, 0.017457376480102538, 0.01745692825317383, 0.01738956832885742, 0.017435712814331053, 0.017555423736572266, 0.01782156753540039, 0.017715328216552733, 0.017786880493164063, 0.017851999282836914, 0.01825404739379883, 0.0177379207611084, 0.017544992446899416, 0.017857824325561523, 0.017617855072021484, 0.01741209602355957, 0.01755340766906738, 0.017463487625122072, 0.01743827247619629, 0.017524192810058594, 0.017521440505981447, 0.01740943908691406, 0.017411775588989258, 0.017408927917480468, 0.017470495223999023, 0.017417184829711913, 0.017778688430786133, 0.017399808883666993, 0.01755897521972656, 0.017658016204833985, 0.01747599983215332, 0.017343807220458984, 0.017496768951416015, 0.017754112243652344, 0.017502208709716797, 0.01750399971008301, 0.017625343322753905, 0.01754710388183594, 0.017445024490356446, 0.017326080322265625, 0.017686527252197267, 0.017426431655883787, 0.017373184204101562, 0.01741209602355957, 0.017678335189819337, 0.01760051155090332, 0.017633279800415038, 0.017555456161499023, 0.017421951293945314, 0.017483455657958984, 0.017480384826660155, 0.01744063949584961, 0.017469568252563475, 0.01743680000305176, 0.017491615295410157, 0.017532415390014648, 0.01747123146057129, 0.017382368087768555, 0.017450016021728517, 0.017482719421386718, 0.01740572738647461, 0.01772313690185547, 0.017648096084594726, 0.017496063232421876, 0.017649696350097655, 0.017549280166625977, 0.01740185546875, 0.0175861759185791, 0.017793088912963866, 0.0177108154296875, 0.017671167373657228, 0.017665760040283203, 0.01768876838684082, 0.017579967498779298, 0.01743449592590332, 0.017510271072387694, 0.017663808822631837, 0.017408607482910156, 0.017487520217895507, 0.017510175704956055, 0.017363519668579103, 
0.017938432693481447, 0.01738137626647949, 0.017594112396240234, 0.01758233642578125, 0.017735071182250976, 0.017515104293823244, 0.0175918083190918, 0.01751091194152832, 0.017383455276489258, 0.01741334342956543, 0.017521408081054686, 0.017295360565185547, 0.017350048065185548, 0.0175765438079834, 0.017350656509399414, 0.017250303268432618, 0.017391424179077148, 0.017344703674316408, 0.01737932777404785, 0.017514495849609374, 0.0172359676361084, 0.017194623947143554, 0.01753945541381836, 0.01721343994140625, 0.017362943649291994, 0.01742646408081055, 0.01733628845214844, 0.01743667221069336, 0.01743052864074707, 0.017338367462158204, 0.017269920349121094, 0.017285600662231445, 0.017516927719116213, 0.017384672164916994, 0.01734022331237793, 0.01762761688232422, 0.017420799255371093, 0.01739776039123535, 0.01740310478210449, 0.017281824111938477, 0.0173505916595459, 0.017333759307861327, 0.01735478401184082, 0.017279392242431642, 0.01733030319213867, 0.017557247161865235, 0.01757414436340332, 0.01756979179382324, 0.01761894416809082, 0.01749782371520996, 0.017537248611450194, 0.017709280014038088, 0.01768448066711426, 0.018282495498657226, 0.018556928634643553, 0.018386240005493163, 0.017918432235717773, 0.01763555145263672, 0.01749737548828125, 0.017595104217529297, 0.017647008895874023, 0.02306732749938965, 0.017468671798706054, 0.017514432907104492, 0.017639167785644533, 0.017621984481811525, 0.017694719314575197, 0.017542207717895508, 0.017718143463134765, 0.017481792449951173, 0.01759436798095703, 0.017544864654541015, 0.0175947208404541, 0.017917503356933595, 0.01760710334777832, 0.017696512222290038, 0.017782304763793947, 0.01787913513183594, 0.01795110321044922, 0.017899776458740236, 0.017829887390136717, 0.017612224578857423, 0.0177772159576416, 0.017649663925170898, 0.0176680965423584, 0.017589599609375, 0.017604448318481444, 0.01757279968261719, 0.017481599807739258, 0.017604608535766602, 0.017892383575439454, 0.017445856094360352, 0.017475584030151366, 0.017653663635253905, 0.01747977638244629, 0.01745305633544922, 0.017532928466796875, 0.017534719467163087, 0.01751475143432617, 0.01772870445251465, 0.017761087417602538, 0.017592416763305665, 0.017579839706420897, 0.01760598373413086, 0.01772336006164551, 0.01779996871948242, 0.017630783081054688, 0.017579647064208986, 0.017802047729492187, 0.018020448684692384, 0.017601951599121094, 0.01771776008605957, 0.017723392486572266, 0.01756563186645508, 0.017653663635253905, 0.018037120819091798, 0.01763302421569824, 0.017893152236938478, 0.01779555130004883, 0.017709056854248048, 0.01763052749633789, 0.017502239227294922, 0.017469856262207033, 0.017527040481567384, 0.017768447875976562, 0.017493152618408205, 0.01750716781616211, 0.01745929527282715, 0.01758608055114746, 0.017426143646240233, 0.017430816650390625, 0.017537023544311522, 0.01757699203491211, 0.017497055053710936, 0.0178701114654541, 0.01780099105834961, 0.017654272079467775, 0.017580480575561525, 0.01768448066711426, 0.017532928466796875, 0.017710975646972656, 0.017616479873657227, 0.017566240310668946, 0.01765171241760254, 0.017651487350463867, 0.017568992614746093, 0.017722368240356445, 0.017667295455932618, 0.017629535675048828, 0.017623071670532228, 0.017672607421875, 0.01761484718322754, 0.017550783157348634, 0.017700927734375, 0.017844640731811523, 0.017609952926635742, 0.01785683250427246, 0.01764819145202637, 0.018169599533081053, 0.017701087951660158, 0.017532928466796875, 0.017704736709594725, 0.017443071365356444, 0.017507776260375977, 0.01762486457824707, 
0.017492576599121092, 0.017542816162109374, 0.01760310363769531, 0.017612640380859374, 0.017538528442382812, 0.017483999252319336, 0.01758576011657715, 0.017876928329467773, 0.017652896881103514, 0.017649440765380858, 0.017702911376953127, 0.017514495849609374, 0.01971251106262207, 0.018228511810302734, 0.01810032081604004, 0.018188928604125975, 0.01776639938354492, 0.017760255813598632, 0.017952768325805665, 0.017688575744628905, 0.018028543472290038, 0.017670143127441407, 0.02002457618713379, 0.020568832397460938, 0.017804672241210937, 0.01752947235107422, 0.017587263107299803, 0.017445119857788086, 0.017508480072021486, 0.017973312377929686, 0.0179532470703125, 0.017846048355102537, 0.017533151626586915, 0.017537055969238283, 0.017606143951416017, 0.017936479568481444, 0.017531328201293946, 0.017588191986083985, 0.017532928466796875, 0.01775574493408203, 0.01749443244934082, 0.01760665512084961, 0.017479679107666016, 0.01741209602355957, 0.01770832061767578, 0.01759715270996094, 0.01743257522583008, 0.017489919662475584, 0.017688703536987305, 0.017520511627197265, 0.017579551696777343, 0.017617376327514648, 0.017487520217895507, 0.017494367599487304, 0.01743257522583008, 0.017515647888183595, 0.017535871505737304, 0.01743027114868164, 0.01765724754333496, 0.017468255996704103, 0.017270784378051757, 0.017364063262939454, 0.017398591995239257, 0.017340511322021485, 0.01725644874572754, 0.01725644874572754, 0.017346559524536134, 0.017391616821289063, 0.0174653434753418, 0.01743667221069336, 0.017522687911987304, 0.017657855987548828, 0.017517631530761718, 0.018043840408325195, 0.018311168670654295]",tokens/s,56.72521147462499,,, -4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -19706,7 +19706,7 @@ 
ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -19911,7 +19911,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -20008,7 +20008,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -20427,7 +20427,7 @@ AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4145.373184,5819.465728,0.0,5416.943616,4964.135424,s,1,10.955767578125,10.955767578125,0.0,10.955767578125,10.955767578125,10.955767578125,10.955767578125,[10.955767578125],,kWh,0.00011800446072913169,1.3009526382198153e-05,5.2163652841996644e-05,0.00018317763995332647,,MB,1424.990208,5853.02016,0.0,5435.817984,4562.7008,s,10,30.790093994140626,3.079009399414063,0.005732084637263689,3.07714697265625,3.086281884765625,3.086529736328125,3.086728017578125,"[3.06939111328125, 3.073928466796875, 3.0777529296875, 3.075769775390625, 3.074871337890625, 3.076541015625, 3.082999755859375, 3.086226806640625, 3.085835205078125, 3.086777587890625]",tokens/s,83.14362406581706,kWh,8.988687569749649e-05,9.9144799998973e-06,5.97359366776018e-05,0.00015953729237499557,tokens/kWh,1604640.4962061592,MB,1450.78272,5853.02016,0.0,5435.817984,4562.70336,s,10,28.087651367187497,2.80876513671875,0.37335336652681217,2.8037958984375,3.1879590576171877,3.2115250610351564,3.2303778637695313,"[3.18272216796875, 3.162566650390625, 3.164280517578125, 3.235091064453125, 3.163201904296875, 2.44177490234375, 2.434739501953125, 2.428841796875, 2.445025146484375, 2.42940771484375]",tokens/s,22.429785665026355,kWh,8.072805652625448e-05,8.904910431306665e-06,5.118226316799674e-05,0.00014081523012555788,tokens/kWh,447394.7877926703,,s,630,28.08534365463256,0.04457991056290884,0.0059767882948195015,0.04637816047668457,0.05064093551635742,0.05124616641998291,0.05402988220214844,"[0.05156476974487305, 0.05043996810913086, 0.050949886322021486, 0.05123712158203125, 0.05175868988037109, 0.050640705108642575, 0.05506313705444336, 0.05021491241455078, 0.05020889663696289, 0.05047286224365234, 0.050245216369628906, 0.05006988906860352, 0.050067455291748046, 0.05041337585449219, 0.050299072265625, 0.05015552139282226, 0.0510648307800293, 0.049979393005371096, 0.050034465789794924, 0.0500164794921875, 0.05021641540527344, 0.05032563018798828, 0.05038716888427734, 0.05059193420410156, 0.050119712829589845, 0.05010089492797851, 0.05014371109008789, 0.050042049407958984, 0.05027907180786133, 0.05058268737792969, 0.05055347061157227, 0.050448257446289065, 0.050407169342041015, 0.050350399017333985, 0.050417953491210935, 0.05059993743896484, 0.05048844909667969, 0.05144073486328125, 0.05042502212524414, 0.050184417724609375, 0.0503422737121582, 0.05008793640136719, 0.04993014526367188, 0.05047830581665039, 0.05019468688964844, 0.04970969772338867, 0.050229248046875, 0.049870849609375, 0.050361568450927735, 0.05063091278076172, 0.05068854522705078, 0.05016175842285156, 0.05037599945068359, 0.050190944671630856, 0.05013679885864258, 0.05000016021728516, 0.050531841278076174, 0.05012329483032227, 0.05024313735961914, 0.05014384078979492, 0.05349766540527344, 0.05086207962036133, 0.05066880035400391, 0.05151894378662109, 0.05020284652709961, 0.049923423767089845, 0.049925086975097656, 0.04989446258544922, 0.04998035049438477, 0.04977827072143555, 0.050081790924072264, 0.05047740936279297, 0.050047039031982425, 0.0497520637512207, 0.05016953659057617, 0.05018246459960937, 0.050042304992675785, 0.05016409683227539, 0.04992182540893555, 0.04993865585327149, 0.050049182891845706, 0.050138912200927734, 0.04985673522949219, 0.049813247680664065, 0.04976899337768555, 0.04979072189331055, 0.050099998474121096, 0.05040150451660156, 0.05070230484008789, 0.05049568176269531, 0.05010979080200195, 0.0504898567199707, 0.050296863555908206, 0.0501063346862793, 0.05031283187866211, 
0.053922176361083984, 0.05015081787109375, 0.050522113800048826, 0.050659488677978516, 0.05037635040283203, 0.0506366081237793, 0.050164703369140626, 0.04995072174072265, 0.049839649200439456, 0.05059836959838867, 0.05044790267944336, 0.050082271575927734, 0.04998912048339844, 0.049968929290771485, 0.04988956832885742, 0.04984451293945313, 0.04979727935791016, 0.0501288948059082, 0.04992214584350586, 0.050151424407958986, 0.049921279907226564, 0.04969539260864258, 0.05002239990234375, 0.050171489715576174, 0.049987201690673826, 0.050167774200439455, 0.05051065444946289, 0.04999750518798828, 0.05052633666992187, 0.049940673828125, 0.049926143646240234, 0.05179619216918945, 0.05049155044555664, 0.05022220611572266, 0.05021503829956055, 0.05025046539306641, 0.05014940643310547, 0.050449920654296876, 0.050196735382080075, 0.04993868637084961, 0.04985651016235351, 0.04978899383544922, 0.049782718658447266, 0.0501956787109375, 0.05059212875366211, 0.050259807586669925, 0.05028665542602539, 0.0500164794921875, 0.04997753524780273, 0.04986851119995117, 0.05019686508178711, 0.05043526458740234, 0.04993107223510742, 0.049893375396728515, 0.04983155059814453, 0.050022113800048826, 0.05341417694091797, 0.05070796966552735, 0.05008063888549805, 0.05013094329833984, 0.04996915054321289, 0.049884960174560546, 0.04969903945922852, 0.05002451324462891, 0.050104255676269534, 0.0528054084777832, 0.05016214370727539, 0.049769790649414065, 0.0497520637512207, 0.049852672576904296, 0.0497657585144043, 0.049883201599121095, 0.04986297607421875, 0.049996318817138674, 0.04995836639404297, 0.05000979232788086, 0.049819553375244144, 0.05036515045166016, 0.04990784072875976, 0.05003673553466797, 0.050255870819091795, 0.05008793640136719, 0.05030659103393555, 0.052308448791503905, 0.050716670989990234, 0.050010112762451174, 0.04985820770263672, 0.04983955383300781, 0.049934944152832034, 0.049790943145751956, 0.04985878372192383, 0.04992934417724609, 0.050135711669921874, 0.05039139175415039, 0.051890048980712894, 0.05035238265991211, 0.05022515106201172, 0.04988854217529297, 0.04992073440551758, 0.050088001251220704, 0.052643775939941406, 0.05403647994995117, 0.05463216018676758, 0.054767902374267576, 0.053766143798828124, 0.054013729095458984, 0.05321136093139649, 0.053301025390625, 0.053026817321777345, 0.05319891357421875, 0.052881568908691404, 0.052944896697998046, 0.05250838470458984, 0.05019881439208984, 0.05019647979736328, 0.05019852828979492, 0.05007769775390625, 0.05000806427001953, 0.0501923828125, 0.0500469741821289, 0.05053414535522461, 0.050796798706054684, 0.056780574798583984, 0.05103152084350586, 0.05535001754760742, 0.05087443161010742, 0.05072889709472656, 0.050671104431152345, 0.05048345565795898, 0.050484703063964846, 0.050688800811767576, 0.05046265411376953, 0.05064300918579102, 0.05048099136352539, 0.05121449661254883, 0.050929214477539064, 0.050762176513671875, 0.050925567626953126, 0.05092739105224609, 0.050771774291992186, 0.05110179138183594, 0.05125356674194336, 0.051195903778076174, 0.05081292724609375, 0.05052620697021484, 0.050314304351806644, 0.050027454376220706, 0.051871742248535156, 0.05113651275634765, 0.05046441650390625, 0.050246273040771484, 0.05022515106201172, 0.05022012710571289, 0.050315841674804684, 0.05001017761230469, 0.05135564804077149, 0.05002444839477539, 0.05207660675048828, 0.050817024230957034, 0.050708030700683596, 0.050753982543945315, 0.050520065307617185, 0.050446334838867186, 0.050116031646728516, 0.04999020767211914, 0.04976639938354492, 0.04980329513549805, 
0.05005923080444336, 0.05017599868774414, 0.05006950378417969, 0.0498809928894043, 0.049895038604736326, 0.0501743049621582, 0.050385089874267576, 0.05002796936035156, 0.05022364807128906, 0.05002374267578125, 0.049842208862304685, 0.04989513778686523, 0.04993526458740234, 0.05009612655639648, 0.05042524719238281, 0.05032815933227539, 0.04997347259521484, 0.049899009704589846, 0.0498928337097168, 0.0498853759765625, 0.04985651016235351, 0.04996944046020508, 0.05004528045654297, 0.04990367889404297, 0.049972446441650394, 0.05025046539306641, 0.050191551208496096, 0.050135265350341796, 0.04996156692504883, 0.0501288948059082, 0.05033932876586914, 0.05021235275268555, 0.05080883026123047, 0.05004556655883789, 0.05004326248168945, 0.050233345031738284, 0.04992825698852539, 0.05002799987792969, 0.04974435043334961, 0.05013734436035156, 0.049770240783691404, 0.04981472015380859, 0.04975484848022461, 0.05023535919189453, 0.0498034553527832, 0.04974585723876953, 0.04979897689819336, 0.05599776077270508, 0.050540702819824215, 0.05006819152832031, 0.04981481552124024, 0.04974256134033203, 0.04983385467529297, 0.04007699203491211, 0.03965481567382813, 0.038695838928222655, 0.03888947296142578, 0.0417196159362793, 0.03954297637939453, 0.03871744155883789, 0.038240383148193356, 0.03834406280517578, 0.04043183898925781, 0.03842486572265625, 0.038321727752685546, 0.03822227096557617, 0.03820943832397461, 0.039008350372314454, 0.0398328971862793, 0.03844985580444336, 0.03878297424316406, 0.03854716873168945, 0.03877507019042969, 0.03839945602416992, 0.039096927642822264, 0.038510528564453125, 0.03926220703125, 0.039163902282714845, 0.038485694885253906, 0.03845766448974609, 0.03835289764404297, 0.03837542343139649, 0.03834230422973633, 0.0384249267578125, 0.038547454833984376, 0.03870028686523438, 0.03843353652954101, 0.03815744018554688, 0.03857088088989258, 0.03848825454711914, 0.038833984375, 0.04034764862060547, 0.03878319931030273, 0.0386409912109375, 0.038537662506103514, 0.03863347244262695, 0.03839139175415039, 0.03863593673706055, 0.038694911956787106, 0.038408191680908206, 0.038088542938232425, 0.03886608123779297, 0.03879183959960938, 0.03844976043701172, 0.03818675231933594, 0.038414337158203124, 0.03831526565551758, 0.03868035125732422, 0.03862422561645508, 0.03880064010620117, 0.03852979278564453, 0.03872556686401367, 0.03855558395385742, 0.03821327972412109, 0.0384881591796875, 0.039263999938964844, 0.040136737823486326, 0.038743137359619144, 0.03871360015869141, 0.03876326370239258, 0.03864547348022461, 0.03876448059082031, 0.038531295776367186, 0.03839590454101562, 0.0383787841796875, 0.03893936157226562, 0.03848601531982422, 0.038469440460205076, 0.03848211288452148, 0.03848806381225586, 0.03848332977294922, 0.038650497436523434, 0.03865599822998047, 0.03897971343994141, 0.03868201446533203, 0.038744544982910155, 0.03847792053222656, 0.03871120071411133, 0.03891795349121094, 0.03881593704223633, 0.038752254486083985, 0.038580223083496096, 0.03841606521606445, 0.0384126091003418, 0.03824435043334961, 0.04111769485473633, 0.038649406433105465, 0.03848646545410156, 0.03861913681030273, 0.03841823959350586, 0.03840956878662109, 0.03868348693847656, 0.03837747192382813, 0.038422527313232424, 0.03866755294799805, 0.038550304412841796, 0.03836716842651367, 0.03851468658447266, 0.03855708694458008, 0.03855795288085938, 0.03852896118164063, 0.03851510238647461, 0.038594558715820314, 0.038553600311279294, 0.03853718566894531, 0.03849942398071289, 0.03858118438720703, 0.03840108871459961, 
0.039217823028564455, 0.038426910400390625, 0.038588417053222655, 0.038491615295410155, 0.038445953369140626, 0.039425121307373044, 0.0382305908203125, 0.03864345550537109, 0.038084865570068356, 0.038591552734375, 0.03830470275878906, 0.03978851318359375, 0.03864166259765625, 0.03846556854248047, 0.03852313613891602, 0.038338462829589845, 0.038545406341552735, 0.0382380485534668, 0.038241310119628905, 0.03830646514892578, 0.03889507293701172, 0.038472545623779296, 0.038305438995361325, 0.03817068862915039, 0.03842281723022461, 0.038332416534423826, 0.03827711868286133, 0.038430721282958984, 0.038182910919189454, 0.03845865631103516, 0.038290145874023435, 0.03840147018432617, 0.038343231201171876, 0.03839385604858398, 0.03836547088623047, 0.03833967971801758, 0.03845798492431641, 0.038340606689453126, 0.03856304168701172, 0.03855779266357422, 0.03858931350708008, 0.03843888092041016, 0.038442977905273436, 0.03822118377685547, 0.03830137634277344, 0.038242527008056644, 0.03850844955444336, 0.038274814605712894, 0.03869494247436524, 0.03973606491088867, 0.038995105743408205, 0.03866329574584961, 0.03863859176635742, 0.038343616485595707, 0.038465152740478514, 0.03906387329101563, 0.03876361465454101, 0.03865510559082031, 0.03852793502807617, 0.03839267349243164, 0.04114636611938476, 0.03862063980102539, 0.03888387298583985, 0.03853084945678711, 0.038510433197021486, 0.038371711730957034, 0.038529022216796875, 0.038489505767822264, 0.03839241409301758, 0.03856793594360351, 0.038348800659179685, 0.03857408142089844, 0.03833980941772461, 0.03826563262939453, 0.04021059036254883, 0.0391734733581543, 0.042865249633789064, 0.043060928344726565, 0.03910598373413086, 0.03853811264038086, 0.03852908706665039, 0.03845523071289063, 0.03831577682495117, 0.03833852767944336, 0.03835113525390625, 0.03857408142089844, 0.03837363052368164, 0.03832012939453125, 0.038459136962890626, 0.038266624450683594, 0.03843302536010742, 0.038365184783935545, 0.03844710540771484, 0.038434814453125, 0.03853311920166016, 0.03855769729614258, 0.038553600311279294, 0.03834470367431641, 0.038506145477294924, 0.0394172477722168, 0.03852361679077149, 0.0382957763671875, 0.038703361511230466, 0.03875404739379883, 0.039174144744873046, 0.03870515060424805, 0.0385167350769043, 0.03846697616577149, 0.03864022445678711, 0.03861091232299805, 0.03915779113769531, 0.038860767364501954, 0.03854716873168945, 0.03838540649414063, 0.038133888244628905, 0.03843267059326172, 0.03873606491088867, 0.038768543243408206, 0.03891449737548828, 0.03879296112060547, 0.03902899169921875, 0.038389759063720705, 0.038497791290283204, 0.0385577278137207, 0.03861756896972656, 0.038711296081542966, 0.03859462356567383, 0.038631359100341794, 0.038637569427490234, 0.038397953033447264, 0.038717025756835936, 0.038914688110351564, 0.038626976013183593, 0.03898988723754883, 0.039734912872314454, 0.039626625061035155, 0.03848041534423828, 0.03997727966308594, 0.0389222412109375, 0.038676830291748045, 0.038495903015136716, 0.03871539306640625, 0.03884751892089844, 0.03843731307983399, 0.038320289611816404, 0.03824031829833984, 0.038289695739746096, 0.03947484970092773, 0.03885641479492188, 0.038814369201660155, 0.038422527313232424, 0.03829145431518555, 0.03873791885375977, 0.03845478439331055, 0.03854387283325195, 0.03837897491455078, 0.0382408332824707, 0.03836310577392578, 0.03886447906494141, 0.039612831115722655, 0.03868876647949219, 0.03889766311645508, 0.03902790451049805, 0.0385274543762207, 0.038632926940917967, 0.03834969711303711, 0.038604286193847655, 
0.038459903717041014, 0.03855270385742188, 0.038382305145263675, 0.03850368118286133, 0.038437152862548826, 0.03818764877319336, 0.038242305755615234, 0.038544960021972656, 0.03829190444946289, 0.038520832061767575, 0.03821363067626953, 0.03831577682495117, 0.03816678237915039, 0.03854892730712891, 0.03833065414428711, 0.03825897598266602, 0.03869286346435547, 0.03813324737548828, 0.03880511856079102, 0.03875270462036133, 0.038951358795166015, 0.038323486328125, 0.038456031799316406, 0.03812931060791016, 0.03808905410766601, 0.03819664001464844, 0.03842057418823242, 0.038429183959960936, 0.039370750427246096, 0.038410175323486326, 0.038416446685791014, 0.038209537506103515, 0.03873788833618164]",tokens/s,22.43162867249032,,, 4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1031.5776,965.67296,0.0,570.425344,525.840896,s,1,8.07862255859375,8.07862255859375,0.0,8.07862255859375,8.07862255859375,8.07862255859375,8.07862255859375,[8.07862255859375],,kWh,3.357288517912214e-05,3.6961730086164217e-06,1.2070287433996185e-05,4.933934562173475e-05,,MB,1262.501888,1005.518848,0.0,597.68832,584.940544,s,10,1.3383642425537108,0.13383642425537107,0.0006236488037619517,0.1337007522583008,0.13432077026367187,0.1349051055908203,0.13537257385253906,"[0.13548944091796875, 0.1332783966064453, 0.13394464111328125, 0.13326112365722656, 0.13419091796875, 0.1338299560546875, 0.1333603515625, 0.13388128662109375, 0.13357154846191407, 0.13355657958984374]",tokens/s,1912.7827228223807,kWh,4.107531149247839e-06,4.5298668327437093e-07,2.7255191557223735e-06,7.2860369882445834e-06,tokens/kWh,35135698.65388204,MB,1296.502784,1020.198912,0.0,612.368384,597.290496,s,10,13.734800659179687,1.3734800659179687,0.01288839497495525,1.3706681518554689,1.3920146484375,1.3957022094726563,1.3986522583007812,"[1.37127294921875, 1.3993897705078124, 1.3911951904296875, 1.35689794921875, 1.380880126953125, 1.3673121337890626, 1.3628775634765624, 1.3700633544921874, 1.3601531982421875, 1.3747584228515626]",tokens/s,45.8688855872792,kWh,3.944550456283639e-05,4.350508324748603e-06,1.5790801212874922e-05,5.958681410045991e-05,tokens/kWh,1057280.892611336,,s,630,13.728747289657596,0.02179166236453586,0.0005926802038786021,0.021665040016174315,0.022311522483825685,0.02247623510360718,0.024017830162048347,"[0.021702592849731445, 0.021878816604614257, 0.02166275215148926, 0.021687616348266603, 0.021984960556030272, 0.021578912734985353, 0.02175062370300293, 0.02166771125793457, 0.02207084846496582, 0.022034048080444336, 0.022055200576782227, 0.022097984313964845, 0.021896095275878907, 0.021925920486450194, 0.02201795196533203, 0.021746719360351562, 0.021730016708374024, 0.021786880493164063, 0.0215467529296875, 0.02178236770629883, 0.021530176162719728, 0.021594623565673828, 0.021624927520751954, 0.02163523292541504, 0.021725120544433593, 0.021622079849243164, 0.021665760040283203, 
0.02189276885986328, 0.021909887313842774, 0.021782495498657226, 0.021547008514404296, 0.021490304946899415, 0.021520479202270508, 0.02208332824707031, 0.021560543060302733, 0.021574783325195312, 0.021687423706054688, 0.02184671974182129, 0.021788671493530275, 0.02183788871765137, 0.02208064079284668, 0.022030336380004883, 0.021713184356689452, 0.021602848052978515, 0.02157369613647461, 0.021520320892333984, 0.021427520751953123, 0.02154719924926758, 0.021602176666259767, 0.021635040283203125, 0.021746496200561523, 0.021525823593139648, 0.021412384033203124, 0.021428192138671875, 0.021442592620849608, 0.021647520065307617, 0.021646400451660157, 0.02192799949645996, 0.021881568908691407, 0.02209791946411133, 0.022228191375732422, 0.022180959701538085, 0.022189504623413087, 0.02251740837097168, 0.02215872001647949, 0.02196086311340332, 0.02212713623046875, 0.02216771125793457, 0.022167680740356445, 0.022293792724609376, 0.022042783737182617, 0.022104639053344727, 0.021972991943359374, 0.022137088775634764, 0.022148992538452147, 0.022161279678344727, 0.022689727783203124, 0.022320287704467773, 0.02234432029724121, 0.02226848030090332, 0.02253385543823242, 0.02214303970336914, 0.02224051284790039, 0.022389440536499022, 0.02219798469543457, 0.02225724792480469, 0.022399904251098633, 0.022361024856567383, 0.022383583068847655, 0.022339456558227538, 0.022437887191772463, 0.022374048233032226, 0.022333791732788086, 0.022386304855346678, 0.022335872650146485, 0.022401023864746093, 0.022107295989990235, 0.02224166488647461, 0.02240086364746094, 0.02238640022277832, 0.022272672653198242, 0.022290111541748047, 0.02229110336303711, 0.02220863914489746, 0.022261056900024414, 0.022250207901000976, 0.022134880065917968, 0.022046367645263673, 0.022053056716918946, 0.022072160720825195, 0.022250560760498046, 0.022071327209472656, 0.022026111602783203, 0.022214656829833986, 0.0219869441986084, 0.021733728408813477, 0.021549087524414062, 0.021560928344726563, 0.021816831588745117, 0.021932064056396486, 0.022317024230957033, 0.022172319412231446, 0.022048831939697266, 0.02206287956237793, 0.021989791870117188, 0.022829055786132812, 0.022126272201538087, 0.022487615585327147, 0.02252288055419922, 0.02257734489440918, 0.022480928421020507, 0.02247964859008789, 0.02232089614868164, 0.022429311752319336, 0.02242188835144043, 0.022504543304443358, 0.022623136520385743, 0.02257257652282715, 0.022532928466796876, 0.022433439254760743, 0.022069440841674805, 0.022092832565307616, 0.021948640823364257, 0.021979103088378905, 0.021967456817626952, 0.021934080123901366, 0.021824800491333007, 0.021609184265136718, 0.0214466552734375, 0.02166579246520996, 0.021884927749633788, 0.023183359146118163, 0.022454143524169922, 0.021768320083618165, 0.02170457649230957, 0.02168623924255371, 0.021787872314453127, 0.021869056701660155, 0.021882976531982422, 0.021916000366210938, 0.02190745544433594, 0.02315673637390137, 0.029048032760620117, 0.02218662452697754, 0.022224992752075196, 0.021939584732055664, 0.021932064056396486, 0.021545631408691406, 0.021518335342407227, 0.02147942352294922, 0.021503135681152342, 0.021646175384521484, 0.021743167877197267, 0.021652191162109376, 0.0216529598236084, 0.021575935363769533, 0.02153267288208008, 0.021415103912353517, 0.02144054412841797, 0.02143084716796875, 0.02143459129333496, 0.021527904510498047, 0.021469856262207033, 0.021635040283203125, 0.02154044723510742, 0.02142457580566406, 0.021702079772949218, 0.02213462448120117, 0.02201468849182129, 0.02181830406188965, 0.02204457664489746, 
0.02193116760253906, 0.021977407455444336, 0.021991968154907227, 0.021759008407592772, 0.021577823638916017, 0.021551103591918946, 0.021502559661865234, 0.021528863906860353, 0.021522432327270507, 0.021646976470947266, 0.021635295867919922, 0.021679712295532227, 0.021643199920654298, 0.0224586238861084, 0.021559680938720703, 0.02149171257019043, 0.021429855346679686, 0.021495967864990233, 0.02136294364929199, 0.021389375686645506, 0.021343360900878905, 0.02148547172546387, 0.021523296356201174, 0.021720191955566407, 0.02193212890625, 0.021789535522460938, 0.021488832473754882, 0.021361024856567382, 0.02126892852783203, 0.021317632675170898, 0.021340160369873046, 0.02146086311340332, 0.021521888732910156, 0.021400224685668944, 0.02136819267272949, 0.021422719955444335, 0.02159984016418457, 0.021445215225219725, 0.021520191192626954, 0.02136172866821289, 0.021476255416870118, 0.02134223937988281, 0.02130534362792969, 0.02128873634338379, 0.021713119506835937, 0.021321727752685548, 0.021235712051391603, 0.021247648239135743, 0.021389375686645506, 0.021274528503417968, 0.021299264907836915, 0.021408063888549805, 0.021352287292480468, 0.0213504638671875, 0.021667903900146484, 0.02183359909057617, 0.021438623428344728, 0.021362688064575194, 0.021378175735473633, 0.021601152420043946, 0.021433504104614257, 0.0213175048828125, 0.021526016235351563, 0.021439104080200194, 0.0212620792388916, 0.021495519638061525, 0.021406240463256836, 0.02153990364074707, 0.022257919311523437, 0.026421279907226564, 0.026288799285888672, 0.022142431259155274, 0.02246441650390625, 0.022035104751586914, 0.021995840072631837, 0.021937503814697265, 0.023060800552368164, 0.021854015350341798, 0.02177043151855469, 0.021626880645751953, 0.021522432327270507, 0.02141961669921875, 0.021419519424438475, 0.02145372772216797, 0.02163692855834961, 0.021555391311645508, 0.021440383911132812, 0.021353887557983398, 0.02162761688232422, 0.02409062385559082, 0.022184032440185547, 0.021687904357910157, 0.021862207412719728, 0.02162067222595215, 0.02166431999206543, 0.021594112396240234, 0.021573631286621094, 0.021780479431152345, 0.022183935165405275, 0.02387721633911133, 0.021660064697265623, 0.022101631164550783, 0.02465830421447754, 0.02158729553222656, 0.021487775802612304, 0.02139187240600586, 0.021417600631713867, 0.021430656433105467, 0.021319679260253906, 0.021555200576782226, 0.02135161590576172, 0.02143929672241211, 0.02133331108093262, 0.02139411163330078, 0.021384191513061524, 0.021436704635620116, 0.02138185691833496, 0.021420032501220702, 0.021888448715209962, 0.021533248901367187, 0.02147327995300293, 0.021407743453979493, 0.021549055099487305, 0.021398944854736326, 0.021373056411743165, 0.021515743255615234, 0.021507776260375977, 0.02141881561279297, 0.02145712089538574, 0.021477184295654296, 0.02146713638305664, 0.02146303939819336, 0.02192793655395508, 0.021510143280029297, 0.02147478485107422, 0.021602848052978515, 0.0215285758972168, 0.02148761558532715, 0.021372415542602538, 0.022249824523925783, 0.021416095733642577, 0.02141779136657715, 0.021572864532470704, 0.021654464721679687, 0.021766368865966796, 0.021690143585205077, 0.02177008056640625, 0.022055072784423826, 0.02206924819946289, 0.021929983139038087, 0.021942272186279296, 0.02198297691345215, 0.02184623908996582, 0.021725471496582032, 0.021806848526000976, 0.021556480407714844, 0.021584640502929686, 0.021392831802368163, 0.021752384185791014, 0.02187264060974121, 0.021738847732543944, 0.02169718360900879, 0.021997312545776367, 0.02132371139526367, 
0.021420095443725588, 0.021418239593505858, 0.021415327072143556, 0.021463487625122072, 0.02142633628845215, 0.021366239547729492, 0.021563936233520507, 0.021381120681762695, 0.021409791946411134, 0.021653600692749023, 0.02192095947265625, 0.02189753532409668, 0.021774751663208008, 0.02178767967224121, 0.021828575134277342, 0.02196678352355957, 0.021846208572387695, 0.021886240005493163, 0.021848672866821288, 0.021755903244018555, 0.02210028839111328, 0.023135135650634766, 0.02203126335144043, 0.021719039916992186, 0.02205695915222168, 0.021729183197021485, 0.02155945587158203, 0.021429695129394532, 0.02132633590698242, 0.02134864044189453, 0.021337823867797853, 0.02127462387084961, 0.02138444709777832, 0.02133011245727539, 0.021354143142700194, 0.022588319778442383, 0.021823200225830078, 0.02189651107788086, 0.021855199813842773, 0.021624767303466796, 0.021817407608032226, 0.021544639587402343, 0.02153081512451172, 0.0216060791015625, 0.021567264556884767, 0.02148198318481445, 0.021662111282348632, 0.021562751770019532, 0.021561088562011718, 0.02154969596862793, 0.021431488037109377, 0.021582015991210936, 0.02154355239868164, 0.02146665573120117, 0.021345760345458983, 0.021371488571166993, 0.021223072052001954, 0.021174495697021484, 0.021523040771484377, 0.021548160552978514, 0.021545791625976564, 0.021733312606811522, 0.021442432403564454, 0.02148953628540039, 0.02141209602355957, 0.02160214424133301, 0.021381343841552734, 0.021405696868896484, 0.02137196731567383, 0.021410207748413086, 0.02113795280456543, 0.02137276840209961, 0.021481632232666015, 0.021962080001831055, 0.022200159072875977, 0.022096832275390624, 0.021919904708862306, 0.021890111923217773, 0.02185487937927246, 0.021747711181640626, 0.022179840087890625, 0.021781856536865235, 0.02171356773376465, 0.022060895919799806, 0.02214076805114746, 0.02211187171936035, 0.02206230354309082, 0.022309759140014648, 0.022353279113769532, 0.02225619125366211, 0.022468576431274413, 0.02231091117858887, 0.022223007202148436, 0.022251264572143555, 0.022472063064575196, 0.02260479927062988, 0.022452064514160156, 0.022458240509033202, 0.022255104064941408, 0.022212287902832032, 0.022600255966186523, 0.022460800170898437, 0.022235231399536134, 0.022191904067993165, 0.021941984176635742, 0.02187708854675293, 0.0217640323638916, 0.021583520889282226, 0.021584096908569335, 0.021477504730224608, 0.021265632629394533, 0.021238304138183593, 0.021360895156860352, 0.021475360870361327, 0.021449823379516602, 0.0213239688873291, 0.021285568237304688, 0.02128611183166504, 0.02131817626953125, 0.021125375747680666, 0.021245023727416993, 0.021207967758178712, 0.021242944717407227, 0.02142464065551758, 0.021344703674316408, 0.02126848030090332, 0.021237024307250975, 0.021572032928466798, 0.0217458553314209, 0.021592159271240235, 0.02130454444885254, 0.021158239364624024, 0.021221824645996094, 0.021280096054077147, 0.021330591201782226, 0.021235712051391603, 0.021273664474487305, 0.021277631759643555, 0.021456127166748048, 0.02161347198486328, 0.021903263092041016, 0.021936063766479493, 0.02186240005493164, 0.02189004707336426, 0.02178755187988281, 0.02165155220031738, 0.021813247680664064, 0.02212563133239746, 0.021910207748413086, 0.02211043167114258, 0.02199692726135254, 0.021827583312988282, 0.02170911979675293, 0.021618879318237305, 0.021418367385864258, 0.02136819267272949, 0.02137766456604004, 0.021579776763916016, 0.021348352432250976, 0.02123097610473633, 0.02207916831970215, 0.021271488189697266, 0.021226879119873046, 0.021596799850463866, 
0.021733184814453126, 0.02163020706176758, 0.021308416366577147, 0.021261407852172853, 0.02114406394958496, 0.02109270477294922, 0.021130399703979494, 0.021230432510375978, 0.021208127975463866, 0.021429183959960938, 0.021269535064697264, 0.02134048080444336, 0.021324447631835938, 0.021481472015380858, 0.02127257537841797, 0.02157360076904297, 0.021927967071533203, 0.022042623519897463, 0.02186467170715332, 0.021880064010620117, 0.021869087219238283, 0.021760000228881835, 0.02184339141845703, 0.022709856033325194, 0.02205820846557617, 0.02190140724182129, 0.021976383209228515, 0.022002016067504883, 0.022052831649780273, 0.02197711944580078, 0.021712896347045898, 0.021544960021972655, 0.021401599884033205, 0.021405183792114257, 0.021868703842163086, 0.02148796844482422, 0.02171801567077637, 0.021696575164794923, 0.02139846420288086, 0.02128233528137207, 0.021367200851440428, 0.021368703842163085, 0.02129142379760742, 0.02128463935852051, 0.02125791931152344, 0.021264543533325197, 0.021666271209716797, 0.021560415267944336, 0.021874912261962892, 0.02232694435119629, 0.023127647399902345, 0.022128639221191407, 0.022046560287475585, 0.021928640365600587, 0.021840896606445313, 0.02188582420349121, 0.0218951358795166, 0.021994911193847656, 0.02191231918334961, 0.021745471954345702, 0.021575872421264648, 0.02407526397705078, 0.025135135650634764, 0.021981632232666016, 0.02176630401611328, 0.021655071258544923, 0.021474111557006837, 0.02142620849609375, 0.021397504806518555, 0.021338111877441408, 0.02122127914428711, 0.021424224853515625, 0.02128281593322754, 0.02134556770324707, 0.021357280731201172, 0.02142207908630371, 0.02139116859436035, 0.02132601547241211, 0.02228953552246094, 0.021865215301513672, 0.021739648818969726, 0.021379423141479493, 0.02167363166809082, 0.021344255447387696, 0.021331647872924804, 0.021600576400756837, 0.021398815155029297, 0.021350303649902345, 0.021289535522460937, 0.02183737564086914, 0.0230447998046875, 0.021299104690551757, 0.02127676773071289, 0.021215200424194336, 0.02127177619934082, 0.021342655181884766, 0.021270591735839842, 0.021283136367797852, 0.021348031997680664, 0.021360960006713867, 0.021540864944458008, 0.02153267288208008, 0.02186854362487793, 0.022068384170532227, 0.022172512054443358, 0.0221693115234375, 0.022091487884521484, 0.02220921516418457, 0.02226982307434082, 0.022742528915405274, 0.022331327438354493]",tokens/s,45.88911039790236,,, -4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): 
+4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -20610,7 +20610,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -20813,7 +20813,7 @@ AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,7166.230528,11446.124544,0.0,11043.602432,10644.85888,s,1,15.3945673828125,15.3945673828125,0.0,15.3945673828125,15.3945673828125,15.3945673828125,15.3945673828125,[15.3945673828125],,kWh,0.00024076620776249154,2.6550995485527102e-05,0.00010717841907598863,0.0003744956223240073,,MB,3125.383168,11464.998912,0.0,11047.796736,10193.762816,s,10,3.873770233154297,0.3873770233154297,0.0008017146505535117,0.38724441528320314,0.3881938079833984,0.3884663650512695,0.3886844107055664,"[0.3870435791015625, 0.3859915466308594, 0.38744525146484377, 0.386902099609375, 0.386473876953125, 0.3880574951171875, 0.38702590942382814, 0.38873892211914063, 0.38795831298828126, 0.38813323974609376]",tokens/s,660.8548896601612,kWh,1.1362643189422872e-05,1.2530906996021258e-06,7.572709049615256e-06,2.0188442938640255e-05,tokens/kWh,12680522.256128103,MB,3134.324736,11464.998912,0.0,11047.796736,10216.945152,s,10,35.00595971679687,3.5005959716796875,0.0070912096024240935,3.499769653320312,3.5076157958984373,3.510068273925781,3.512030256347656,"[3.506041259765625, 3.49793798828125, 3.49913525390625, 3.500029541015625, 3.485289306640625, 3.50707080078125, 3.503698974609375, 3.499509765625, 3.512520751953125, 3.49472607421875]",tokens/s,17.996935524601763,kWh,0.00010226989875307727,1.1279542750089423e-05,6.72559063687854e-05,0.00018080534787195206,tokens/kWh,348441.0209183478,,s,630,35.003313110351556,0.055560814460875484,0.0006759404369073328,0.055407808303833,0.05614957122802734,0.056722057914733885,0.058441114768981936,"[0.05706671905517578, 0.0566479377746582, 0.05593052673339844, 0.05570240020751953, 0.05591356658935547, 0.05569795227050781, 0.05528403091430664, 0.0554455680847168, 0.05533919906616211, 0.05525596618652344, 0.05510371017456055, 0.05513593673706055, 0.05490739059448242, 0.055091552734375, 0.05493734359741211, 0.05606646347045898, 0.054945793151855465, 0.05669820785522461, 0.058350303649902346, 0.0555478401184082, 0.05563158416748047, 0.055535903930664064, 0.0555146255493164, 0.05538595199584961, 0.05529462432861328, 0.05573775863647461, 0.055712352752685546, 0.05524684906005859, 0.055076862335205076, 0.05508233642578125, 0.05516559982299805, 0.05502361679077149, 0.055041950225830076, 0.055523422241210936, 0.055400577545166016, 0.055637889862060544, 0.0550748176574707, 0.05554995346069336, 0.05557763290405274, 0.05606643295288086, 0.0556069450378418, 0.05536857604980469, 0.05535136032104492, 0.05676582336425781, 0.05606054306030273, 0.055623680114746096, 0.05573126220703125, 0.055118431091308595, 0.05511958312988281, 0.05534755325317383, 0.055705696105957034, 0.05822585678100586, 0.05546649551391602, 0.05535337448120117, 0.05545353698730469, 0.055890560150146484, 0.05584220886230469, 0.05619363021850586, 0.05620121765136719, 0.055439006805419924, 0.055688865661621095, 0.05526393508911133, 
0.055629825592041014, 0.05660467147827149, 0.05637936019897461, 0.055859233856201174, 0.05538739013671875, 0.05564902496337891, 0.055431167602539064, 0.05577059173583984, 0.0553447036743164, 0.05579391860961914, 0.055210689544677734, 0.05537900924682617, 0.055024608612060544, 0.05820211029052735, 0.05573567962646484, 0.055767681121826174, 0.05603308868408203, 0.05536377716064453, 0.05552864074707031, 0.05528992080688477, 0.055398529052734374, 0.05756582260131836, 0.0554700813293457, 0.055926303863525394, 0.055638496398925784, 0.05551030349731445, 0.05549055862426758, 0.05518204879760742, 0.05496422576904297, 0.05525910568237305, 0.055543838500976564, 0.05542483139038086, 0.05528595352172851, 0.055209983825683595, 0.05556220626831055, 0.05566876983642578, 0.055314430236816405, 0.05519974517822265, 0.05594028854370117, 0.05563584136962891, 0.05578438568115234, 0.05580595016479492, 0.05549260711669922, 0.05509017562866211, 0.05520854568481445, 0.05526979064941406, 0.054968318939208984, 0.05501244735717774, 0.054876190185546875, 0.05519792175292969, 0.05496489715576172, 0.05505228805541992, 0.05615779113769531, 0.055204193115234376, 0.054858974456787106, 0.055806976318359375, 0.05531628799438477, 0.05502560043334961, 0.05494339370727539, 0.05490678405761719, 0.05492937469482422, 0.055403072357177736, 0.055744510650634765, 0.05569945526123047, 0.0567459831237793, 0.055259136199951174, 0.054963489532470704, 0.05601753616333008, 0.05546198272705078, 0.05546803283691406, 0.05507455825805664, 0.055177536010742184, 0.05496620941162109, 0.05487411117553711, 0.05535334396362305, 0.05546188735961914, 0.058049758911132815, 0.056232769012451174, 0.05560335922241211, 0.056409439086914065, 0.05535113525390625, 0.05605606460571289, 0.05596521759033203, 0.055554912567138674, 0.05561711883544922, 0.05529436874389648, 0.05515008163452149, 0.05508147048950195, 0.055449600219726565, 0.05581167984008789, 0.05564457702636719, 0.05511516952514649, 0.05483990478515625, 0.05543119812011719, 0.055682174682617186, 0.05514435195922852, 0.0553298225402832, 0.05528358459472656, 0.055445793151855466, 0.05583027267456055, 0.05551628875732422, 0.05508607864379883, 0.055209632873535155, 0.05592496109008789, 0.05604323196411133, 0.05559462356567383, 0.05559347152709961, 0.055476383209228514, 0.05528575897216797, 0.05511129760742187, 0.0549728012084961, 0.055063934326171876, 0.05579840087890625, 0.055570430755615234, 0.055445182800292966, 0.055322208404541016, 0.05518819046020508, 0.055205249786376955, 0.05607078552246094, 0.055424606323242184, 0.05580636978149414, 0.056248321533203124, 0.05698355102539063, 0.055334209442138675, 0.0551605110168457, 0.055126750946044925, 0.05509664154052735, 0.056720897674560546, 0.055374526977539064, 0.055836929321289065, 0.054925537109375, 0.054728256225585935, 0.05529212951660156, 0.05496201705932617, 0.05519184112548828, 0.05515171051025391, 0.05484828948974609, 0.054708225250244144, 0.05506467056274414, 0.05489788818359375, 0.05487891387939453, 0.055787521362304686, 0.055408641815185546, 0.056180286407470706, 0.05564051055908203, 0.05531619262695313, 0.05535567855834961, 0.05585715103149414, 0.05696259307861328, 0.05593753433227539, 0.05577724838256836, 0.055982078552246094, 0.055412353515625, 0.05525747299194336, 0.054976062774658205, 0.055349281311035156, 0.055298465728759766, 0.05518950271606445, 0.05506646347045899, 0.05500534439086914, 0.05492326354980469, 0.05546188735961914, 0.05526528167724609, 0.05499401473999024, 0.05509417724609375, 0.055605247497558595, 0.05540249633789063, 
0.055332862854003906, 0.055607040405273436, 0.055353408813476564, 0.05551683044433594, 0.05537401580810547, 0.055218528747558594, 0.055231807708740234, 0.055548480987548825, 0.055425086975097654, 0.05520374298095703, 0.05561964797973633, 0.05601219177246094, 0.059863422393798826, 0.05650201416015625, 0.05573689651489258, 0.055731231689453126, 0.05532547378540039, 0.055470401763916016, 0.055475902557373044, 0.05546780776977539, 0.05847820663452148, 0.055747360229492185, 0.055449600219726565, 0.05662332916259766, 0.05621475219726563, 0.05560940933227539, 0.054895008087158206, 0.05462844848632813, 0.055712799072265624, 0.05563027191162109, 0.055634464263916016, 0.05515856170654297, 0.05484956741333008, 0.05476512145996094, 0.05494847869873047, 0.054798336029052735, 0.05494169616699219, 0.05511756896972656, 0.05511193466186524, 0.055233631134033206, 0.0551693115234375, 0.055263870239257815, 0.055228416442871096, 0.05517926406860352, 0.055314430236816405, 0.055207263946533205, 0.05581206512451172, 0.055207744598388675, 0.05576179122924805, 0.05497232055664063, 0.05500688171386719, 0.05528416061401367, 0.05521104049682617, 0.055583518981933595, 0.0555145263671875, 0.05535996627807617, 0.055339134216308594, 0.05514873504638672, 0.05526732635498047, 0.055152641296386716, 0.05547622299194336, 0.05577318572998047, 0.05591980743408203, 0.055169857025146485, 0.055623519897460935, 0.055908512115478516, 0.056025089263916014, 0.05526095962524414, 0.05519177627563476, 0.05531167984008789, 0.05528646469116211, 0.055021568298339846, 0.054863872528076174, 0.05494195175170898, 0.05500083160400391, 0.05522127914428711, 0.05547123336791992, 0.055371807098388674, 0.05515145492553711, 0.05484864044189453, 0.05527657699584961, 0.05581286239624023, 0.05593299102783203, 0.05536153411865234, 0.05503171157836914, 0.05490441513061523, 0.0563721923828125, 0.05533033752441406, 0.05494012832641602, 0.056025089263916014, 0.055598369598388674, 0.05544319915771485, 0.056054302215576175, 0.05518297576904297, 0.056126270294189456, 0.055951358795166016, 0.055529247283935546, 0.055314430236816405, 0.05497468948364258, 0.055482368469238284, 0.055226367950439455, 0.0551014404296875, 0.05507609558105469, 0.05526796722412109, 0.055144577026367186, 0.055338977813720706, 0.05523625564575195, 0.05540697479248047, 0.05524070358276367, 0.054980609893798826, 0.055046142578125, 0.05522572708129883, 0.05577587127685547, 0.05610086441040039, 0.058312767028808596, 0.055914432525634765, 0.05548633575439453, 0.05542281723022461, 0.055232158660888674, 0.055341697692871096, 0.055160831451416016, 0.05523984146118164, 0.05571670532226562, 0.05554950332641601, 0.05539680099487305, 0.05572963333129883, 0.05532521438598633, 0.05489788818359375, 0.054952671051025394, 0.055842334747314454, 0.0560041618347168, 0.05586368179321289, 0.05548857498168945, 0.05520383834838867, 0.055002750396728514, 0.05508188629150391, 0.05523865509033203, 0.05575884628295898, 0.055812095642089846, 0.05574409484863281, 0.05998793411254883, 0.05604323196411133, 0.05575350570678711, 0.05602860641479492, 0.05583865737915039, 0.05582700729370117, 0.055475582122802736, 0.05523260879516602, 0.05937180709838867, 0.056723007202148436, 0.05545209503173828, 0.055244800567626956, 0.059635711669921876, 0.056403968811035154, 0.056524768829345706, 0.055821918487548826, 0.055502655029296875, 0.05552396774291992, 0.055093246459960936, 0.05566873550415039, 0.05614988708496094, 0.05538966369628906, 0.05560745620727539, 0.05526505661010742, 0.055126625061035155, 0.055400577545166016, 
0.05524057769775391, 0.055141918182373045, 0.054853313446044924, 0.05475526428222656, 0.057218017578125, 0.05689139175415039, 0.055756065368652345, 0.0550687370300293, 0.055083072662353516, 0.055179103851318356, 0.05512038421630859, 0.05531264114379883, 0.05634822463989258, 0.057859775543212894, 0.05616511917114258, 0.05584051132202148, 0.0553342399597168, 0.05532134246826172, 0.0564178237915039, 0.05514918518066406, 0.05542707061767578, 0.055499935150146486, 0.05533980941772461, 0.055729312896728514, 0.05564303970336914, 0.05530131149291992, 0.054958015441894534, 0.05491801452636719, 0.05496012878417969, 0.055646209716796874, 0.056627201080322265, 0.055954689025878905, 0.05524147033691406, 0.055260353088378907, 0.055148609161376955, 0.05549100875854492, 0.055484737396240234, 0.05509939193725586, 0.055067649841308595, 0.054998016357421874, 0.05514790344238281, 0.055502880096435545, 0.055315040588378904, 0.05520793533325195, 0.05488435363769531, 0.05502361679077149, 0.05592905426025391, 0.05538035202026367, 0.05531033706665039, 0.055261184692382816, 0.055160831451416016, 0.055954910278320315, 0.05534774398803711, 0.05539430236816406, 0.055349246978759765, 0.05520793533325195, 0.05517107009887695, 0.055054336547851565, 0.05551103973388672, 0.05678079986572265, 0.05630563354492187, 0.0561317138671875, 0.05532563018798828, 0.05582947158813477, 0.0559119987487793, 0.05561731338500977, 0.055314559936523434, 0.05574710464477539, 0.055547904968261716, 0.05558483123779297, 0.05542396926879883, 0.05575574493408203, 0.055623680114746096, 0.05614527893066406, 0.05574310302734375, 0.05516016006469727, 0.05520595169067383, 0.055763553619384766, 0.05576294326782227, 0.05519500732421875, 0.05566886520385742, 0.055132671356201174, 0.05591155242919922, 0.0558682861328125, 0.05544140625, 0.0554516487121582, 0.05508819198608399, 0.055296382904052734, 0.05507062530517578, 0.05647990417480469, 0.055034366607666016, 0.05494784164428711, 0.055259136199951174, 0.054848896026611325, 0.055109760284423825, 0.05580646514892578, 0.056840190887451174, 0.05558272171020508, 0.055662593841552734, 0.05563119888305664, 0.05515740966796875, 0.05535539245605469, 0.05522022247314453, 0.05527142333984375, 0.05626582336425781, 0.05565491104125977, 0.05517494583129883, 0.05524339294433594, 0.05584896087646484, 0.05678031921386719, 0.05609673690795899, 0.05603372955322266, 0.05664339065551758, 0.0555926399230957, 0.05566857528686524, 0.055231201171875, 0.055349246978759765, 0.05542502212524414, 0.05680070495605469, 0.05515913772583008, 0.055427486419677735, 0.0549128303527832, 0.05485567855834961, 0.05494937515258789, 0.05540083312988281, 0.055842113494873044, 0.05489952087402344, 0.05489254379272461, 0.05522249603271484, 0.055232288360595704, 0.0551357421875, 0.054984607696533204, 0.05521612930297851, 0.057003711700439455, 0.05569424057006836, 0.056068286895751954, 0.05607814407348633, 0.05517311859130859, 0.05516313552856445, 0.055742176055908206, 0.05606403350830078, 0.05771891021728515, 0.058752895355224606, 0.05676851272583008, 0.05634048080444336, 0.055744510650634765, 0.05536767959594727, 0.05568307113647461, 0.0561231689453125, 0.05564982223510742, 0.05540217590332031, 0.05554691314697266, 0.05528284835815429, 0.05565087890625, 0.05581951904296875, 0.05503481674194336, 0.05595296096801758, 0.055906112670898435, 0.05580012893676758, 0.055675262451171874, 0.0561495361328125, 0.05605628967285156, 0.05566463851928711, 0.055556095123291016, 0.055258174896240235, 0.05528364944458008, 0.05577542495727539, 0.05532140731811523, 
0.0556473274230957, 0.05547100830078125, 0.05520383834838867, 0.05789491271972656, 0.058880577087402346, 0.056213630676269534, 0.05619443130493164, 0.0554911994934082, 0.055312320709228514, 0.05562758255004883, 0.05529612731933594, 0.055279678344726565, 0.05556435012817383, 0.055801856994628904, 0.05547945785522461, 0.055470943450927734, 0.05499638366699219, 0.05520806503295898, 0.05496844863891601, 0.054780254364013674, 0.05528985595703125, 0.05589715194702148, 0.05701932907104492, 0.055478271484375, 0.05526732635498047, 0.05495603179931641, 0.0549354248046875, 0.05505651092529297, 0.0557088623046875, 0.055507774353027346, 0.05525222396850586, 0.055094017028808596, 0.05470207977294922, 0.055119873046875, 0.05482201766967774, 0.054932350158691405, 0.0548201904296875, 0.0547314224243164, 0.054988800048828126, 0.05505843353271484, 0.054863872528076174, 0.05551248168945312, 0.055734432220458985, 0.05572652816772461, 0.055235614776611326, 0.054970367431640625, 0.05554995346069336, 0.055710689544677734, 0.05543708801269531, 0.055835968017578126, 0.05574703979492188, 0.05623017501831055, 0.05530147171020508, 0.054913856506347655, 0.055422367095947264, 0.05523324966430664, 0.0549947509765625, 0.05554796981811523, 0.055012992858886715, 0.055644542694091796, 0.05546803283691406, 0.0551014404296875, 0.05513417434692383, 0.055167007446289065, 0.055531520843505856, 0.05592268753051758, 0.05832294464111328]",tokens/s,17.998296275951365,,, -4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -20858,12 +20858,12 @@ ChildProcessError: Traceback (most recent call last): self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( 
-torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 6.12 MiB is free. Process 107963 has 14.73 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 241.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 6.12 MiB is free. Process 106822 has 14.73 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 241.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,877.613056,615.448576,0.0,220.20096,205.438976,s,1,8.500099609375,8.500099609375,0.0,8.500099609375,8.500099609375,8.500099609375,8.500099609375,[8.500099609375],,kWh,2.8432755895839057e-05,3.1291399853608897e-06,8.50334013600268e-06,4.0065236017202625e-05,,MB,1265.762304,669.974528,0.0,260.046848,226.386944,s,17,0.21296153736114498,0.012527149256537942,0.0002239725954963185,0.01243660831451416,0.012815871810913086,0.012923238182067871,0.013149460105895997,"[0.01247321605682373, 0.012356096267700196, 0.012852543830871583, 0.012580448150634766, 0.012397248268127442, 0.013206015586853028, 0.012528351783752441, 0.012396703720092774, 0.012384832382202149, 0.012378560066223145, 0.012320768356323243, 0.012509632110595702, 0.012791423797607422, 0.01259267234802246, 0.01243660831451416, 0.012429247856140137, 0.012327168464660644]",tokens/s,20435.615059539035,kWh,3.6659520940118344e-07,4.0428975814141996e-08,2.1475729073366726e-07,6.217814759489927e-07,tokens/kWh,411720210.2383004,MB,1301.495808,684.654592,0.0,274.726912,226.389504,s,17,10.28993084716797,0.60529004983341,0.006259915020451108,0.6042811279296875,0.6105053955078125,0.6131617797851562,0.6216492016601562,"[0.6237710571289062, 0.60166943359375, 0.6040850830078125, 0.6031799926757813, 0.6103824462890625, 0.610502685546875, 0.6029651489257812, 0.6058961791992188, 0.5993203125, 0.5990332641601562, 0.6070938110351562, 0.6058821411132812, 0.606194580078125, 0.6105094604492187, 0.6042811279296875, 0.6007789306640625, 
0.5943851928710937]",tokens/s,104.08233212711671,kWh,1.7470339347216228e-05,1.9266556545660242e-06,6.675967176089875e-06,2.6072962177872127e-05,tokens/kWh,2416296.2217414444,,s,1071,10.281280707359333,0.009599701874285074,0.0002051811333571774,0.009560256004333497,0.009805824279785156,0.009942975997924805,0.010313395404815674,"[0.009171008110046387, 0.009627840042114259, 0.009655872344970702, 0.009693375587463379, 0.009803775787353516, 0.009846783638000489, 0.009953280448913575, 0.009963104248046875, 0.010200960159301758, 0.010041824340820312, 0.009928768157958984, 0.009994239807128906, 0.009959199905395507, 0.010000032424926758, 0.01023977565765381, 0.010158592224121094, 0.010066207885742187, 0.01001471996307373, 0.010053119659423827, 0.01003980827331543, 0.00995952033996582, 0.009994144439697266, 0.009936960220336914, 0.010014656066894532, 0.010170080184936524, 0.010049823760986329, 0.010022496223449707, 0.010047679901123046, 0.010000800132751465, 0.009991999626159667, 0.009944128036499024, 0.009989055633544922, 0.009928704261779785, 0.009945152282714843, 0.009989888191223144, 0.009937088012695312, 0.00991436767578125, 0.009963520050048828, 0.009947168350219726, 0.010061440467834472, 0.010079999923706055, 0.009996895790100097, 0.009924351692199707, 0.009973919868469239, 0.009924703598022461, 0.009928704261779785, 0.009844672203063965, 0.009871264457702637, 0.009721823692321777, 0.009646400451660157, 0.009768832206726074, 0.009744000434875489, 0.009773632049560547, 0.009792448043823242, 0.009788031578063966, 0.009751999855041504, 0.0097259521484375, 0.009770079612731934, 0.009736063957214356, 0.00963753604888916, 0.009565952301025391, 0.009583104133605956, 0.009512703895568847, 0.009132831573486329, 0.009488384246826171, 0.009463839530944824, 0.009680191993713378, 0.009472736358642578, 0.009467840194702148, 0.009449600219726562, 0.009438207626342773, 0.009354080200195312, 0.00944099235534668, 0.009469792366027832, 0.009453920364379883, 0.009395456314086914, 0.009422911643981933, 0.009462592124938964, 0.009458687782287598, 0.009466879844665528, 0.009483360290527343, 0.009440128326416015, 0.009436575889587403, 0.009460351943969726, 0.009475263595581054, 0.00959558391571045, 0.00969536018371582, 0.009668607711791993, 0.009870495796203613, 0.009701567649841309, 0.009704095840454102, 0.009498623847961426, 0.009460000038146973, 0.009428735733032226, 0.009385439872741699, 0.009327103614807129, 0.009369855880737304, 0.009355072021484376, 0.009361344337463379, 0.009365056037902832, 0.009372096061706543, 0.009408415794372559, 0.00946390438079834, 0.00951910400390625, 0.009660096168518066, 0.00961308765411377, 0.009667103767395019, 0.009676095962524413, 0.009593536376953125, 0.009637887954711915, 0.009713919639587402, 0.009682687759399414, 0.009721856117248535, 0.009625887870788575, 0.009573663711547852, 0.009586175918579102, 0.009679807662963866, 0.009644031524658203, 0.009625696182250976, 0.009793439865112304, 0.009809920310974121, 0.009721599578857421, 0.009717375755310058, 0.009685471534729003, 0.009660575866699219, 0.00963379192352295, 0.009323552131652832, 0.009723967552185058, 0.009725631713867188, 0.009746432304382324, 0.009736096382141114, 0.009617504119873046, 0.009623488426208497, 0.009539711952209472, 0.009451456069946289, 0.00952444839477539, 0.009515456199645996, 0.009551360130310058, 0.009421567916870117, 0.009523200035095216, 0.009402496337890625, 0.00951852798461914, 0.009396896362304688, 0.009446944236755371, 0.009421183586120605, 0.009555232048034669, 0.009522111892700195, 
0.00954054355621338, 0.009506912231445312, 0.009521951675415039, 0.009457280158996582, 0.009498527526855469, 0.0095217924118042, 0.009507712364196777, 0.009554240226745605, 0.00945206356048584, 0.009523263931274414, 0.009390080451965332, 0.00941055965423584, 0.009467904090881347, 0.009729887962341309, 0.009640031814575196, 0.010800640106201171, 0.009769023895263672, 0.009482751846313477, 0.009523200035095216, 0.009584671974182129, 0.0095600004196167, 0.009488224029541015, 0.009507231712341309, 0.009494303703308105, 0.00944495964050293, 0.009464223861694336, 0.009465855598449707, 0.009479488372802734, 0.009427647590637207, 0.009532416343688965, 0.009415264129638673, 0.009462176322937011, 0.009967616081237793, 0.00963804817199707, 0.009981792449951172, 0.009711615562438965, 0.009736319541931153, 0.00965824031829834, 0.009650176048278808, 0.009789440155029297, 0.00976300811767578, 0.00973958396911621, 0.009398367881774903, 0.009805824279785156, 0.009750687599182128, 0.009750528335571289, 0.00970355224609375, 0.00965839958190918, 0.009682175636291503, 0.00963753604888916, 0.009605152130126953, 0.009683456420898438, 0.00967910385131836, 0.009699007987976075, 0.009595264434814453, 0.009670880317687989, 0.0096212158203125, 0.009589887619018555, 0.00955072021484375, 0.009581791877746581, 0.009560864448547363, 0.00960870361328125, 0.009638400077819824, 0.009682656288146973, 0.0098306884765625, 0.009820159912109374, 0.0095862398147583, 0.009612031936645508, 0.009519840240478515, 0.009719008445739747, 0.010307040214538574, 0.00950819206237793, 0.009497407913208009, 0.009441408157348632, 0.009545120239257812, 0.009455583572387695, 0.00946457576751709, 0.009462944030761719, 0.009445695877075196, 0.009462176322937011, 0.00957148838043213, 0.009549759864807128, 0.00962172794342041, 0.009554304122924805, 0.009519200325012207, 0.009607647895812988, 0.009540575981140137, 0.009475040435791015, 0.009420607566833496, 0.009484383583068847, 0.00943446445465088, 0.00946233558654785, 0.009470080375671387, 0.009502592086791993, 0.009398271560668945, 0.009438719749450684, 0.009425408363342285, 0.009391743659973145, 0.009480256080627442, 0.009470175743103028, 0.009416159629821777, 0.009356096267700195, 0.009371359825134278, 0.00942255973815918, 0.009480575561523437, 0.00919539165496826, 0.009489055633544921, 0.009371808052062988, 0.009392127990722657, 0.009570143699645995, 0.00944099235534668, 0.009560256004333497, 0.009598464012145995, 0.00963046360015869, 0.009744319915771484, 0.00986950397491455, 0.009901951789855958, 0.009772255897521972, 0.009700127601623535, 0.009725824356079101, 0.00981824016571045, 0.009711615562438965, 0.009695199966430664, 0.009738112449645996, 0.009740544319152831, 0.009679776191711426, 0.009862272262573243, 0.009678303718566894, 0.00970748805999756, 0.009693535804748535, 0.00978166389465332, 0.00976863956451416, 0.00973414421081543, 0.009760767936706542, 0.009801888465881347, 0.009803296089172363, 0.009849120140075683, 0.009814271926879883, 0.009702752113342285, 0.00965062427520752, 0.00962764835357666, 0.009643872261047364, 0.009717920303344726, 0.009703424453735352, 0.009750656127929688, 0.009701248168945312, 0.00992198371887207, 0.009770848274230958, 0.009894240379333495, 0.009717791557312012, 0.01021561622619629, 0.009672127723693847, 0.009613632202148437, 0.009574879646301269, 0.009551712036132812, 0.009599072456359863, 0.009494560241699219, 0.009596799850463867, 0.009506912231445312, 0.009508447647094726, 0.009507231712341309, 0.009516480445861817, 0.009534015655517578, 
0.009778592109680176, 0.00960377597808838, 0.009641504287719727, 0.009687423706054688, 0.009850879669189454, 0.009324543952941895, 0.00964515209197998, 0.009589632034301758, 0.00955504035949707, 0.009611840248107911, 0.00963212776184082, 0.009727999687194825, 0.00969711971282959, 0.009580703735351562, 0.009620863914489746, 0.00960307216644287, 0.009740960121154786, 0.009661664009094238, 0.009672575950622559, 0.009761664390563965, 0.009670399665832519, 0.009658623695373535, 0.009602432250976562, 0.009578623771667481, 0.009638400077819824, 0.009733375549316406, 0.00976089572906494, 0.009736831665039063, 0.009725664138793945, 0.009684927940368652, 0.00978774356842041, 0.009649920463562011, 0.009681023597717286, 0.009743743896484375, 0.009789343833923339, 0.00978121566772461, 0.009813119888305664, 0.009731840133666993, 0.009657855987548827, 0.009503199577331542, 0.009594911575317383, 0.009537311553955078, 0.009617631912231445, 0.009587008476257325, 0.00969491195678711, 0.00971571159362793, 0.009670207977294922, 0.009662431716918945, 0.009676575660705566, 0.0098056640625, 0.009737055778503418, 0.009699071884155273, 0.009733440399169922, 0.00975318431854248, 0.009720000267028808, 0.009915871620178223, 0.009802592277526856, 0.010184032440185547, 0.00979196834564209, 0.009754655838012695, 0.00963276767730713, 0.009667584419250488, 0.00958182430267334, 0.00965494441986084, 0.009599072456359863, 0.009582688331604004, 0.009639840126037597, 0.00960921573638916, 0.00929792022705078, 0.009689087867736817, 0.009654080390930176, 0.009646271705627442, 0.009649503707885742, 0.009935263633728028, 0.009639231681823731, 0.009681056022644043, 0.009609248161315918, 0.009585408210754395, 0.009542783737182618, 0.009476991653442382, 0.009478143692016602, 0.009482272148132324, 0.00952950382232666, 0.009523008346557617, 0.009414655685424805, 0.009484224319458008, 0.00949459171295166, 0.009535488128662109, 0.009529343605041504, 0.009584832191467284, 0.009631039619445801, 0.009661984443664551, 0.009627967834472657, 0.009737055778503418, 0.009662336349487304, 0.009559328079223633, 0.009552000045776367, 0.009482591629028321, 0.009488032341003419, 0.009537823677062988, 0.009518336296081543, 0.009475071907043458, 0.009479680061340333, 0.009483039855957031, 0.009533151626586914, 0.00955907154083252, 0.009726943969726563, 0.00952905559539795, 0.009521439552307128, 0.009510687828063965, 0.009373920440673828, 0.009457152366638183, 0.009426560401916504, 0.009427840232849122, 0.009445376396179199, 0.009512479782104493, 0.009518912315368652, 0.009427616119384766, 0.00944057559967041, 0.009456000328063964, 0.009515328407287598, 0.010233759880065918, 0.00963980770111084, 0.009683168411254883, 0.009689087867736817, 0.009656319618225098, 0.009720864295959473, 0.009599967956542969, 0.00951103973388672, 0.009512928009033203, 0.009490336418151855, 0.009165056228637695, 0.009415936470031739, 0.00938265609741211, 0.009409536361694336, 0.00937241554260254, 0.00933027172088623, 0.009364447593688964, 0.009411935806274413, 0.009388383865356445, 0.009390080451965332, 0.009383487701416015, 0.009430815696716308, 0.00937564754486084, 0.009410304069519043, 0.009468928337097168, 0.009412863731384277, 0.00934876823425293, 0.009426464080810546, 0.009412384033203125, 0.009421183586120605, 0.009427103996276855, 0.009400896072387695, 0.009358112335205077, 0.009396479606628419, 0.009394944190979004, 0.009483776092529296, 0.009447392463684081, 0.009490079879760742, 0.00958240032196045, 0.009833120346069335, 0.009627776145935059, 0.00975481605529785, 
0.011046208381652833, 0.01058067226409912, 0.00982425594329834, 0.009805567741394043, 0.009711039543151856, 0.009804512023925781, 0.009725664138793945, 0.009687423706054688, 0.00982534408569336, 0.009741215705871583, 0.009658656120300292, 0.009708576202392578, 0.009941823959350586, 0.00987331199645996, 0.009747455596923828, 0.00971059226989746, 0.009843008041381836, 0.009844256401062011, 0.00981827163696289, 0.009767135620117188, 0.009688447952270508, 0.009711135864257812, 0.009737183570861816, 0.009630847930908203, 0.009642784118652343, 0.009627264022827148, 0.009607040405273437, 0.009544192314147949, 0.009564160346984863, 0.00953916835784912, 0.009517760276794433, 0.009058624267578126, 0.00949232006072998, 0.0094782075881958, 0.009377568244934083, 0.009380895614624024, 0.009429984092712403, 0.009412832260131836, 0.009476896286010741, 0.009476608276367187, 0.009590592384338379, 0.009569055557250976, 0.009490400314331056, 0.009492480278015136, 0.009602848052978516, 0.009642399787902833, 0.009704416275024413, 0.009544672012329102, 0.009545087814331055, 0.009611712455749512, 0.009443327903747559, 0.011451680183410645, 0.010408672332763672, 0.009653280258178711, 0.00951961612701416, 0.009482560157775878, 0.009478400230407715, 0.009477312088012696, 0.009429727554321289, 0.00946787166595459, 0.009475744247436524, 0.009490367889404296, 0.009535103797912598, 0.009470784187316894, 0.009422016143798829, 0.009470848083496094, 0.009419808387756348, 0.0094749755859375, 0.009498623847961426, 0.00941875171661377, 0.009441439628601073, 0.009448479652404786, 0.009462176322937011, 0.00950496006011963, 0.009538975715637207, 0.009491264343261719, 0.009500672340393066, 0.009441280364990234, 0.00942460823059082, 0.00943283176422119, 0.009398719787597657, 0.009399999618530273, 0.009406335830688476, 0.00933353614807129, 0.00936524772644043, 0.009364992141723634, 0.009334752082824707, 0.009383456230163574, 0.009349856376647949, 0.00933071994781494, 0.009394399642944336, 0.009390111923217773, 0.009396224021911622, 0.00944320011138916, 0.00910422420501709, 0.00940329647064209, 0.009640543937683106, 0.009717984199523926, 0.009580544471740723, 0.009473504066467285, 0.009511455535888672, 0.009469599723815917, 0.009451871871948242, 0.00944921588897705, 0.009576959609985352, 0.009397120475769042, 0.009390975952148438, 0.009457056045532226, 0.00946886444091797, 0.009419551849365234, 0.009382783889770508, 0.009373791694641113, 0.009432991981506348, 0.009418368339538574, 0.009483776092529296, 0.009499039649963379, 0.009457695960998535, 0.009476096153259277, 0.009492128372192384, 0.009571104049682618, 0.009510911941528321, 0.009511199951171875, 0.009462559700012207, 0.00970748805999756, 0.009495743751525879, 0.009486271858215331, 0.00948243236541748, 0.009498271942138672, 0.00945907211303711, 0.009527935981750488, 0.009486335754394531, 0.009486335754394531, 0.009474047660827637, 0.010303296089172364, 0.009507007598876953, 0.009545663833618163, 0.009566271781921386, 0.00951910400390625, 0.009592831611633301, 0.009515007972717286, 0.009516256332397461, 0.009462559700012207, 0.009488384246826171, 0.009514623641967774, 0.009496959686279297, 0.009467904090881347, 0.009456831932067871, 0.009416576385498046, 0.009629695892333985, 0.009510047912597656, 0.00952246379852295, 0.009447808265686036, 0.009457216262817383, 0.009454143524169923, 0.00950489616394043, 0.009461631774902344, 0.009506815910339356, 0.00906054401397705, 0.009447232246398925, 0.009499615669250488, 0.009506815910339356, 0.009447423934936524, 0.009434720039367676, 
0.009488832473754883, 0.009455583572387695, 0.009481951713562012, 0.00935478401184082, 0.009437472343444823, 0.00941104030609131, 0.009596223831176759, 0.00964406394958496, 0.010400287628173828, 0.010328224182128906, 0.009663519859313964, 0.009513248443603515, 0.009525919914245605, 0.009545087814331055, 0.009527935981750488, 0.009539039611816406, 0.009586655616760253, 0.009589311599731445, 0.009667776107788085, 0.009604991912841797, 0.009605759620666504, 0.00966483211517334, 0.009691136360168457, 0.009743935585021972, 0.009705920219421387, 0.009610879898071288, 0.009609919548034668, 0.009643168449401855, 0.009634336471557618, 0.009661503791809082, 0.00980678367614746, 0.009740511894226074, 0.009710880279541015, 0.009898624420166015, 0.009799391746520995, 0.009852928161621094, 0.009749759674072265, 0.00981004810333252, 0.009923359870910645, 0.009751615524291992, 0.00973027229309082, 0.009787615776062011, 0.009729824066162109, 0.009724255561828613, 0.009680255889892578, 0.00961023998260498, 0.00962559986114502, 0.009615360260009765, 0.009559776306152344, 0.009611552238464355, 0.009512160301208496, 0.009536288261413573, 0.009527296066284179, 0.009443327903747559, 0.009508864402770996, 0.009500127792358398, 0.009505311965942382, 0.009476703643798828, 0.009670399665832519, 0.009631744384765625, 0.009697279930114745, 0.00969859218597412, 0.009681632041931152, 0.00971951961517334, 0.0096014404296875, 0.00956332778930664, 0.009609184265136718, 0.009517760276794433, 0.00952950382232666, 0.009523008346557617, 0.009516192436218261, 0.009671392440795899, 0.009494720458984375, 0.00947993564605713, 0.009517312049865723, 0.009430944442749023, 0.009511008262634277, 0.009662431716918945, 0.00947612762451172, 0.009488320350646973, 0.009642144203186036, 0.009512864112854003, 0.009500672340393066, 0.009456704139709473, 0.009518143653869628, 0.009435232162475587, 0.009445152282714843, 0.00973414421081543, 0.009494784355163575, 0.009483488082885742, 0.009506367683410644, 0.009413663864135741, 0.00960095977783203, 0.009500415802001953, 0.009463775634765624, 0.009595168113708497, 0.009612544059753418, 0.009621312141418457, 0.009638431549072266, 0.009648192405700683, 0.009674592018127441, 0.009671199798583984, 0.009633631706237792, 0.009644160270690918, 0.009672191619873047, 0.009816384315490723, 0.009676095962524413, 0.00974118423461914, 0.009797216415405274, 0.009735615730285644, 0.009796575546264648, 0.009756735801696777, 0.009629023551940919, 0.009669216156005859, 0.009676223754882812, 0.009752832412719726, 0.00971951961517334, 0.009730112075805665, 0.00979203224182129, 0.00975603199005127, 0.009398431777954101, 0.009790783882141113, 0.009690143585205079, 0.009721759796142579, 0.009706912040710449, 0.009738559722900391, 0.009764512062072754, 0.009703807830810547, 0.00963584041595459, 0.009580256462097168, 0.009599264144897462, 0.00960307216644287, 0.009569536209106446, 0.009566783905029297, 0.00950496006011963, 0.009641759872436524, 0.009528927803039551, 0.0094684476852417, 0.009706656455993652, 0.009982912063598633, 0.009622655868530273, 0.009667136192321778, 0.009652159690856933, 0.009632191658020019, 0.00969529628753662, 0.009756544113159179, 0.009693183898925782, 0.009708864212036133, 0.00965401554107666, 0.009642208099365234, 0.00963798427581787, 0.009586560249328612, 0.009584799766540527, 0.00955452823638916, 0.009522527694702148, 0.00953321647644043, 0.009482368469238282, 0.009499263763427735, 0.009439359664916993, 0.009482239723205567, 0.009531519889831542, 0.00957049560546875, 0.009444543838500977, 
0.009574912071228027, 0.009498623847961426, 0.009477215766906738, 0.009527199745178223, 0.009548704147338867, 0.00949187183380127, 0.009691712379455566, 0.009611200332641602, 0.009554112434387207, 0.00957049560546875, 0.009463616371154785, 0.00952889633178711, 0.009559712409973144, 0.009497376441955566, 0.00961945629119873, 0.009672736167907715, 0.009834176063537597, 0.009779040336608886, 0.009763263702392578, 0.009883232116699218, 0.009333087921142577, 0.00965766429901123, 0.009728704452514648, 0.00973583984375, 0.009791839599609374, 0.010076160430908204, 0.009791487693786622, 0.009736448287963868, 0.009621312141418457, 0.009738176345825196, 0.00990822410583496, 0.009815199851989746, 0.009700127601623535, 0.009820223808288574, 0.009844736099243164, 0.009727999687194825, 0.009672767639160156, 0.009756223678588867, 0.009771391868591309, 0.009772992134094239, 0.009874784469604492, 0.009904864311218262, 0.009795743942260743, 0.009840767860412597, 0.009761631965637207, 0.009865856170654297, 0.009761216163635254, 0.009797087669372559, 0.009746784210205079, 0.009711615562438965, 0.00971776008605957, 0.009672287940979005, 0.009761343955993653, 0.009731936454772949, 0.009594047546386719, 0.009575231552124024, 0.009521056175231933, 0.00959273624420166, 0.009543264389038086, 0.009558815956115723, 0.009531455993652344, 0.009545472145080567, 0.009571423530578613, 0.009505696296691894, 0.009676256179809571, 0.009631808280944825, 0.00972060775756836, 0.009700480461120606, 0.009706048011779786, 0.009653599739074707, 0.009653120040893555, 0.009598112106323243, 0.009710207939147949, 0.009540736198425293, 0.009595775604248047, 0.009674752235412597, 0.009569279670715332, 0.00951097583770752, 0.009465056419372559, 0.009446720123291015, 0.009476511955261231, 0.009577983856201172, 0.009595392227172851, 0.009239904403686524, 0.009540255546569825, 0.009547200202941895, 0.009482815742492675, 0.00951427173614502, 0.009537728309631347, 0.009586496353149414, 0.009578495979309083, 0.009547967910766602, 0.009552160263061523, 0.009576383590698243, 0.009619839668273926, 0.009606911659240722, 0.009691328048706054, 0.009698719978332519, 0.009806336402893067, 0.00965340805053711, 0.009854144096374511, 0.009723744392395019, 0.009740351676940918, 0.009783072471618652, 0.00978313636779785, 0.009615584373474121, 0.009639936447143555, 0.009508864402770996, 0.009486335754394531, 0.009538592338562012, 0.010194111824035644, 0.009602784156799316, 0.010584416389465333, 0.009709280014038086, 0.010919103622436524, 0.009595775604248047, 0.009570240020751953, 0.009543680191040039, 0.009529343605041504, 0.009500672340393066, 0.009517215728759765, 0.009490592002868652, 0.00948192024230957, 0.009484416007995605, 0.009461600303649903, 0.009410592079162597, 0.009459712028503419, 0.009705216407775878, 0.009410431861877441, 0.009435296058654785, 0.009408672332763672, 0.009432576179504394, 0.009409279823303222, 0.009449440002441406, 0.009489343643188477, 0.009435263633728028, 0.009345088005065918, 0.009380576133728027, 0.009381855964660644, 0.00942905616760254, 0.009455360412597657, 0.009363679885864258, 0.009416704177856445, 0.009450943946838378, 0.009417344093322753, 0.009479583740234375, 0.009153792381286621, 0.009463871955871582, 0.009493247985839844, 0.009561823844909669, 0.009521439552307128, 0.009453568458557129, 0.009467904090881347, 0.009397503852844238, 0.009399040222167969, 0.009575679779052734, 0.00956492805480957, 0.01002905559539795, 0.010358943939208984, 0.009647968292236329, 0.009621631622314454, 0.009529215812683105, 
0.01051206398010254, 0.00967523193359375, 0.00961315155029297, 0.009598976135253906, 0.00970956802368164, 0.009591072082519532, 0.009576064109802246, 0.009640031814575196, 0.009443327903747559, 0.009504799842834472, 0.009521120071411133, 0.009453568458557129, 0.009465855598449707, 0.009466143608093261, 0.009469663619995117, 0.0094269437789917, 0.009799679756164551, 0.009549823760986328, 0.009744511604309083, 0.00953331184387207, 0.009433279991149903, 0.009445183753967286, 0.009465408325195313, 0.009503007888793946, 0.009449631690979003, 0.009445376396179199, 0.009414655685424805, 0.009391679763793945, 0.00943558406829834, 0.00941055965423584, 0.009531392097473144, 0.009400256156921387, 0.009432703971862792, 0.009427007675170898, 0.009468255996704101, 0.009432255744934082, 0.009419615745544434, 0.009453120231628418, 0.009413056373596192, 0.009462016105651855, 0.009608960151672364, 0.009394207954406738, 0.009431008338928223, 0.009375231742858887, 0.009355584144592284, 0.009371904373168946, 0.009400159835815429, 0.009089088439941407, 0.00941868782043457, 0.00937990379333496, 0.009520511627197265, 0.009367551803588867, 0.009364255905151367, 0.009434975624084473, 0.00942080020904541, 0.00941055965423584, 0.009392127990722657, 0.009465951919555664, 0.009379743576049804, 0.009383328437805176, 0.009396832466125488, 0.0094651517868042, 0.009476160049438477, 0.009392767906188966, 0.009380096435546876, 0.009408448219299316, 0.009378687858581543, 0.009448160171508789, 0.009445599555969238, 0.009660415649414063, 0.009490431785583496, 0.009516127586364746, 0.00942307186126709, 0.00946662425994873, 0.009393216133117676, 0.009413503646850585, 0.009426591873168945, 0.009359711647033692, 0.009447423934936524, 0.009539584159851074, 0.009402112007141113, 0.009417023658752442, 0.009500032424926757, 0.00936415958404541, 0.00945081615447998, 0.00944595241546631, 0.009459327697753907, 0.00941811180114746, 0.009458687782287598, 0.00949884796142578, 0.009457440376281738, 0.009502752304077149, 0.00949187183380127, 0.009359935760498046, 0.009407648086547852, 0.009374784469604493, 0.009389920234680176, 0.009488320350646973, 0.00941055965423584, 0.009414112091064453, 0.009381952285766602, 0.009384415626525878, 0.009381888389587402, 0.009561471939086914, 0.009379520416259765, 0.009413567543029785, 0.009455360412597657, 0.009412416458129883, 0.009456064224243164, 0.009404704093933106]",tokens/s,104.16990163816664,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1154.973696,1182.662656,0.0,780.140544,738.50112,s,1,8.7898916015625,8.7898916015625,0.0,8.7898916015625,8.7898916015625,8.7898916015625,8.7898916015625,[8.7898916015625],,kWh,3.179194462495616e-05,3.4995406943539667e-06,9.635007708008736e-06,4.4926493027318864e-05,,MB,1588.457472,1490.944,0.0,1073.741824,995.355648,s,10,0.2559623355865479,0.025596233558654785,0.00014572111156549938,0.02560910415649414,0.025685941696166992,0.025822266578674317,0.025931326484680176,"[0.025491424560546875, 0.025655647277832032, 0.025620479583740235, 0.0256396484375, 0.02546348762512207, 0.025396831512451173, 0.02551801681518555, 0.025598527908325196, 0.02595859146118164, 0.025619680404663087]",tokens/s,10001.471482644734,kWh,9.073257625907674e-07,1.0002028912212674e-07,5.588358507950333e-07,1.5661819025079275e-06,tokens/kWh,163454832.15587357,MB,1623.416832,1539.178496,0.0,1121.97632,995.358208,s,10,11.333508056640623,1.1333508056640622,0.004087949794408497,1.1326409301757812,1.138636828613281,1.1390207458496093,1.1393278796386719,"[1.137170166015625, 1.13261376953125, 1.1326009521484375, 1.1394046630859376, 1.1326680908203124, 1.1323131103515625, 1.1287606201171876, 1.1253172607421875, 1.138551513671875, 1.13410791015625]",tokens/s,55.58737831671326,kWh,4.131977156824092e-05,4.557201763101779e-06,1.7371261826603804e-05,6.32482351579465e-05,tokens/kWh,996075.223959584,,s,630,11.33036725616455,0.01798470993041992,0.00038515741050633114,0.01792252826690674,0.018155727767944337,0.018261038684844973,0.019342688789367676,"[0.017776447296142577, 0.01785260772705078, 0.017967103958129883, 0.017999872207641602, 0.01801215934753418, 0.01788313674926758, 0.017954591751098634, 0.01805238342285156, 0.018027456283569335, 0.01794812774658203, 0.018341920852661134, 0.01929267120361328, 0.01804287910461426, 0.01795686340332031, 0.01846886444091797, 0.017975072860717773, 0.018257535934448243, 0.017838272094726562, 0.017871551513671875, 0.01821251106262207, 0.01820582389831543, 0.017806272506713867, 0.017963008880615236, 0.01818009567260742, 0.017941951751708984, 0.01797177505493164, 0.017903455734252928, 0.017862752914428712, 0.017952831268310546, 0.01817795181274414, 0.017987743377685547, 0.01793132781982422, 0.018024991989135743, 0.017862239837646485, 0.01805571174621582, 0.019036384582519533, 0.01932099151611328, 0.018263904571533204, 0.017987583160400392, 0.017981184005737304, 0.01794278335571289, 0.018112512588500978, 0.018053119659423827, 0.017872991561889647, 0.018040672302246093, 0.01817923164367676, 0.017955455780029297, 0.01794272041320801, 0.018051168441772462, 0.017963071823120118, 0.01792608070373535, 0.017913856506347657, 0.017928192138671875, 0.017806400299072267, 0.0177828483581543, 0.017959808349609373, 0.017836032867431642, 0.017833984375, 0.01784012794494629, 0.017968576431274415, 0.01797587203979492, 0.017907007217407227, 0.017916608810424804, 0.017899999618530272, 0.017955167770385742, 0.017904703140258788, 0.018039072036743164, 0.017988256454467774, 0.01796505546569824, 0.01797711944580078, 0.017936607360839844, 0.01783318328857422, 0.017904415130615234, 0.01812611198425293, 0.01791049575805664, 0.017924095153808595, 0.017876991271972655, 0.017999872207641602, 0.017868799209594728, 0.017920000076293945, 0.018001407623291017, 0.017863168716430664, 0.017855615615844728, 0.017931135177612304, 0.018001920700073244, 0.018008415222167968, 0.01846281623840332, 0.018089632034301757, 0.018024255752563476, 0.017930784225463868, 0.01823513603210449, 
0.017946176528930664, 0.018157440185546873, 0.018135616302490234, 0.01793459129333496, 0.017946624755859376, 0.018112512588500978, 0.017987583160400392, 0.01794047927856445, 0.017976959228515624, 0.017820032119750976, 0.017833984375, 0.01798940849304199, 0.017932512283325194, 0.017920000076293945, 0.017897472381591797, 0.017792192459106446, 0.017914464950561523, 0.017847936630249025, 0.017846879959106447, 0.017864511489868163, 0.017850559234619142, 0.017883104324340822, 0.017936416625976562, 0.017829343795776366, 0.017953311920166016, 0.01798700714111328, 0.017899328231811524, 0.01781427192687988, 0.018239488601684572, 0.01799996757507324, 0.01821891212463379, 0.0179748477935791, 0.018245920181274414, 0.01809014320373535, 0.01812393569946289, 0.01782793617248535, 0.017954784393310545, 0.018192256927490234, 0.018038944244384767, 0.018030303955078125, 0.017878944396972657, 0.018053504943847658, 0.01781350326538086, 0.0179006404876709, 0.01780614471435547, 0.01785251235961914, 0.017920000076293945, 0.01861222457885742, 0.017912895202636718, 0.01792300796508789, 0.01796879959106445, 0.017924064636230468, 0.01805120086669922, 0.01812505531311035, 0.017967103958129883, 0.017901567459106444, 0.017613983154296874, 0.017777503967285155, 0.018001920700073244, 0.01803059196472168, 0.01786675262451172, 0.01803468894958496, 0.01801625633239746, 0.017876991271972655, 0.01775926399230957, 0.01778118324279785, 0.01806492805480957, 0.01779199981689453, 0.017835487365722658, 0.01786729621887207, 0.01781760025024414, 0.017925439834594728, 0.01782649612426758, 0.017876575469970703, 0.017938432693481447, 0.017801567077636717, 0.018039968490600584, 0.018019231796264648, 0.01844428825378418, 0.018132640838623048, 0.017862272262573243, 0.017897600173950194, 0.018112672805786132, 0.018051519393920898, 0.017807231903076173, 0.0179932804107666, 0.01836908721923828, 0.018046335220336916, 0.018220991134643556, 0.018047679901123048, 0.018165760040283203, 0.017970304489135742, 0.01794099235534668, 0.01795929527282715, 0.017981536865234377, 0.018082880020141603, 0.01800489616394043, 0.017967039108276368, 0.017869216918945312, 0.018026975631713866, 0.018040256500244142, 0.017926015853881837, 0.018008672714233398, 0.018515487670898438, 0.018338272094726563, 0.017971200942993162, 0.018118656158447266, 0.018495487213134765, 0.018182144165039063, 0.018279712677001955, 0.018027231216430663, 0.017942527770996093, 0.017933439254760743, 0.01806015968322754, 0.018187904357910158, 0.018016639709472655, 0.017971200942993162, 0.018058656692504883, 0.01808790397644043, 0.018031232833862303, 0.018089088439941406, 0.017912704467773436, 0.017901567459106444, 0.018006111145019533, 0.01797929573059082, 0.018028192520141602, 0.018065439224243165, 0.017913440704345703, 0.017885631561279296, 0.018102943420410158, 0.017927808761596678, 0.017963008880615236, 0.017895008087158205, 0.01806787109375, 0.02174127960205078, 0.018247200012207032, 0.018143999099731446, 0.017968704223632812, 0.01803104019165039, 0.0180316162109375, 0.018189311981201172, 0.01800387191772461, 0.018152671813964842, 0.018053695678710936, 0.01796659278869629, 0.017823616027832032, 0.017982175827026367, 0.017957088470458984, 0.017903615951538086, 0.017856735229492188, 0.017960735321044922, 0.017845375061035156, 0.017798015594482422, 0.017917312622070313, 0.018002016067504883, 0.018219104766845705, 0.018123199462890625, 0.017807104110717772, 0.017838336944580077, 0.017884832382202148, 0.01783024024963379, 0.017913728713989257, 0.01798566436767578, 0.018036735534667968, 
0.017898752212524415, 0.01787104034423828, 0.017854560852050783, 0.01790771293640137, 0.018190816879272462, 0.017960960388183594, 0.017984575271606445, 0.017918912887573243, 0.01784739112854004, 0.018040960311889648, 0.01805801582336426, 0.01803468894958496, 0.01802239990234375, 0.01784832000732422, 0.017924095153808595, 0.017862655639648437, 0.017918975830078124, 0.01815616035461426, 0.018175487518310548, 0.017983903884887697, 0.017885663986206054, 0.01801615905761719, 0.017905183792114258, 0.018301504135131836, 0.01805881690979004, 0.017945024490356447, 0.0180402889251709, 0.018136863708496095, 0.01788800048828125, 0.01800396728515625, 0.019195903778076173, 0.018155519485473632, 0.018124063491821288, 0.018033344268798827, 0.01797088050842285, 0.017782623291015626, 0.017859071731567384, 0.01781692886352539, 0.017980064392089844, 0.01794047927856445, 0.017879039764404296, 0.01778892707824707, 0.017909759521484374, 0.017958911895751953, 0.018231296539306642, 0.018026336669921875, 0.017949888229370117, 0.017817951202392577, 0.017879680633544923, 0.018350080490112306, 0.017758560180664063, 0.017884832382202148, 0.017887231826782226, 0.017870847702026366, 0.01780496025085449, 0.01776265525817871, 0.017827840805053712, 0.017701152801513673, 0.01774131202697754, 0.017895647048950195, 0.01776473617553711, 0.017760255813598632, 0.018071136474609374, 0.01785628890991211, 0.01789401626586914, 0.017616159439086915, 0.017799200057983397, 0.01774563217163086, 0.01770787239074707, 0.01783616065979004, 0.017887231826782226, 0.017692319869995116, 0.017965408325195314, 0.017769472122192383, 0.017714176177978515, 0.017754112243652344, 0.017735679626464843, 0.017786144256591797, 0.017830623626708984, 0.01775542449951172, 0.017776447296142577, 0.017803232192993165, 0.017703807830810547, 0.017753728866577147, 0.017783231735229492, 0.017729536056518554, 0.017688575744628905, 0.01790492820739746, 0.017842912673950197, 0.017946399688720704, 0.017856735229492188, 0.018004287719726564, 0.017868480682373046, 0.017707008361816406, 0.01769503974914551, 0.01769599914550781, 0.017821792602539063, 0.017680736541748048, 0.017704959869384765, 0.017844224929809572, 0.017808639526367187, 0.017670368194580077, 0.017801759719848632, 0.01776608085632324, 0.018934080123901367, 0.02215116882324219, 0.021310943603515625, 0.01806800079345703, 0.018046207427978515, 0.01785113525390625, 0.017923871994018556, 0.01782806396484375, 0.017889280319213868, 0.017985151290893554, 0.018088319778442382, 0.01805516815185547, 0.0179814395904541, 0.017820768356323242, 0.017927072525024415, 0.01777004814147949, 0.017961408615112303, 0.017876991271972655, 0.017751071929931642, 0.017913856506347657, 0.017887231826782226, 0.018011295318603515, 0.018041568756103514, 0.017875072479248046, 0.017855712890625, 0.01786140823364258, 0.017913856506347657, 0.017762304306030274, 0.017868480682373046, 0.017842496871948242, 0.017686111450195312, 0.01797699165344238, 0.017941247940063475, 0.017836032867431642, 0.017815423965454102, 0.017928319931030272, 0.0177674560546875, 0.018015199661254883, 0.017993728637695314, 0.01785980796813965, 0.017658016204833985, 0.017935327529907227, 0.0178187198638916, 0.017963584899902345, 0.017954816818237306, 0.017993728637695314, 0.01780940818786621, 0.017864704132080078, 0.017878879547119142, 0.017712799072265625, 0.017825983047485353, 0.017762624740600585, 0.017964576721191405, 0.017854656219482422, 0.01815567970275879, 0.01791187286376953, 0.017859712600708007, 0.01794476890563965, 0.018987775802612305, 0.018010112762451173, 
0.018075424194335936, 0.017860832214355468, 0.01792518424987793, 0.017820608139038085, 0.017909759521484374, 0.01806460762023926, 0.01782863998413086, 0.01782579231262207, 0.017864704132080078, 0.01798963165283203, 0.01782374382019043, 0.017770496368408203, 0.017956287384033202, 0.018037216186523437, 0.01791804885864258, 0.017780736923217775, 0.017876991271972655, 0.017801055908203123, 0.017766559600830078, 0.017893119812011717, 0.018024639129638673, 0.01810233688354492, 0.01781350326538086, 0.017960927963256837, 0.01795689582824707, 0.017916959762573244, 0.017746912002563477, 0.017730783462524415, 0.017754911422729492, 0.01779484748840332, 0.017913951873779296, 0.017751392364501954, 0.017938880920410155, 0.017886783599853514, 0.017741952896118164, 0.017867424011230468, 0.017919071197509767, 0.017797183990478516, 0.017738367080688478, 0.017880767822265626, 0.01780169677734375, 0.017791040420532228, 0.017827840805053712, 0.018005823135375975, 0.017899391174316406, 0.017913728713989257, 0.017949119567871093, 0.017966815948486328, 0.01785475158691406, 0.017829887390136717, 0.01866547203063965, 0.0179814395904541, 0.017890687942504882, 0.01780374336242676, 0.017731327056884766, 0.017740192413330077, 0.017731231689453127, 0.01780352020263672, 0.017862752914428712, 0.017876991271972655, 0.017829887390136717, 0.01783135986328125, 0.017816127777099608, 0.01777257537841797, 0.017797088623046874, 0.017880096435546874, 0.017795135498046875, 0.017752992630004884, 0.0178606071472168, 0.017944576263427735, 0.01781760025024414, 0.017724895477294923, 0.017621536254882813, 0.017764095306396485, 0.01775846481323242, 0.01785036849975586, 0.0178155517578125, 0.017887231826782226, 0.017913791656494142, 0.017817663192749023, 0.017960960388183594, 0.017885183334350584, 0.018008064270019532, 0.017952768325805665, 0.017906784057617187, 0.018125247955322266, 0.019351551055908203, 0.01812819290161133, 0.018086591720581056, 0.017913856506347657, 0.017958911895751953, 0.0178606071472168, 0.01775763130187988, 0.017774784088134765, 0.017678720474243164, 0.017819648742675782, 0.01780873680114746, 0.01777116775512695, 0.017657855987548828, 0.01769094467163086, 0.017741504669189452, 0.020137983322143553, 0.018218591690063478, 0.01827471923828125, 0.018255872726440428, 0.01783718490600586, 0.017895807266235353, 0.018217376708984375, 0.018077600479125978, 0.017856704711914063, 0.01779622459411621, 0.017633216857910156, 0.01783635139465332, 0.017793664932250975, 0.017758207321166994, 0.01769862365722656, 0.017724767684936523, 0.017804128646850586, 0.01787017631530762, 0.01807427215576172, 0.018005088806152345, 0.017767328262329102, 0.01789132881164551, 0.017915552139282226, 0.01781907272338867, 0.017834112167358397, 0.017895200729370116, 0.017777183532714843, 0.017633760452270508, 0.017836032867431642, 0.01784832000732422, 0.017836032867431642, 0.018141183853149414, 0.01795680046081543, 0.018010175704956055, 0.01808793640136719, 0.02124595260620117, 0.0209421443939209, 0.01799193572998047, 0.018289087295532226, 0.018163711547851562, 0.01787494468688965, 0.017917951583862304, 0.018083839416503905, 0.017922048568725587, 0.01789695930480957, 0.018020383834838866, 0.01776265525817871, 0.018179935455322267, 0.017915584564208983, 0.017943519592285156, 0.017843488693237305, 0.01814159965515137, 0.017902015686035156, 0.017843456268310548, 0.017864992141723633, 0.017951072692871092, 0.01827020835876465, 0.017928192138671875, 0.019091455459594727, 0.019060640335083007, 0.01799337577819824, 0.018213312149047853, 0.01811827278137207, 
0.017930335998535156, 0.017872255325317384, 0.017937248229980468, 0.017958272933959962, 0.01791043281555176, 0.0178176326751709, 0.017780736923217775, 0.017820991516113282, 0.01787516784667969, 0.017856992721557618, 0.017902944564819338, 0.017790719985961913, 0.01782467269897461, 0.017833984375, 0.01797222328186035, 0.01795097541809082, 0.017988351821899413, 0.017956832885742187, 0.01793948745727539, 0.017943328857421875, 0.017907903671264647, 0.018012191772460936, 0.01799772834777832, 0.017904800415039064, 0.017879999160766602, 0.017895263671875, 0.018043039321899414, 0.018058303833007813, 0.01792505645751953, 0.018229248046875, 0.018190336227416993, 0.01786835289001465, 0.017983488082885742, 0.018006048202514648, 0.01799193572998047, 0.018036895751953125, 0.017993728637695314, 0.017999872207641602, 0.01810588836669922, 0.017895904541015625, 0.0178768310546875, 0.017928352355957033, 0.018072608947753907, 0.01791689682006836, 0.018017824172973634, 0.017911712646484376, 0.017989791870117188]",tokens/s,55.60278724921594,,, -4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -21009,7 +21009,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU 
@ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -21216,7 +21216,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -21307,7 +21307,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
-4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -21726,7 +21726,7 @@ AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4142.239744,5959.974912,0.0,5557.4528,5102.547968,s,1,11.3579736328125,11.3579736328125,0.0,11.3579736328125,11.3579736328125,11.3579736328125,11.3579736328125,[11.3579736328125],,kWh,0.0001232327817875178,1.3586307343211346e-05,5.5161710796003405e-05,0.00019198079992673256,,MB,1471.340544,5993.529344,0.0,5576.327168,4701.113344,s,10,2.2378789062499997,0.22378789062499999,0.0002189927741978738,0.22382008361816408,0.22403562774658203,0.2240372688293457,0.22403858169555663,"[0.22368531799316407, 0.22403890991210937, 0.22381356811523437, 0.22393299865722657, 0.22403526306152344, 0.22382659912109376, 0.22357075500488283, 0.22367919921875, 0.2233137664794922, 
0.22398252868652344]",tokens/s,1143.940359261787,kWh,6.600594317592031e-06,7.275468419877332e-07,4.385892397599604e-06,1.1714033557179369e-05,tokens/kWh,21854128.9599688,MB,1500.033024,5993.529344,0.0,5576.327168,4701.115904,s,10,32.09952001953125,3.2099520019531247,0.006455107516177166,3.2110751953125,3.216955102539062,3.2182889526367187,3.219356032714844,"[3.219622802734375, 3.212086669921875, 3.210063720703125, 3.2137177734375, 3.216244140625, 3.204347900390625, 3.20202587890625, 3.20470263671875, 3.2000498046875, 3.21665869140625]",tokens/s,19.626461692158347,kWh,8.13811449386552e-05,8.976775670583635e-06,4.489475813800094e-05,0.0001352526787472398,tokens/kWh,465794.84105992754,,s,630,32.097109970092745,0.05094779360332186,0.0006652971880553079,0.05079310417175293,0.05134559288024902,0.051735566139221194,0.054517566986083996,"[0.051076671600341794, 0.05143222427368164, 0.050884033203125, 0.05075392150878906, 0.05105635070800781, 0.05120582580566406, 0.05112313461303711, 0.05107843017578125, 0.05123555374145508, 0.05083750534057617, 0.05180329513549805, 0.051153759002685546, 0.050904193878173826, 0.05084454345703125, 0.050841598510742186, 0.050814304351806644, 0.050842304229736325, 0.05072803115844727, 0.05090188980102539, 0.05148876953125, 0.051253246307373046, 0.05142323303222656, 0.05083276748657226, 0.050821758270263674, 0.050816062927246095, 0.05079545593261719, 0.05090428924560547, 0.050845985412597654, 0.05090764617919922, 0.05152758407592774, 0.051319168090820315, 0.05120995330810547, 0.053542911529541014, 0.053224609375, 0.050989921569824216, 0.051165184020996096, 0.0511420783996582, 0.05124563217163086, 0.051023872375488284, 0.05088236618041992, 0.05105478286743164, 0.051133438110351564, 0.05120745468139649, 0.051229633331298825, 0.05109430313110352, 0.051081214904785156, 0.050904640197753905, 0.051122177124023435, 0.051106239318847654, 0.050955455780029295, 0.05080966567993164, 0.05092665481567383, 0.051082176208496095, 0.05093497467041016, 0.050850624084472655, 0.05064089584350586, 0.05071644973754883, 0.05104252624511719, 0.050834686279296874, 0.05084393692016602, 0.05087014389038086, 0.05088521575927735, 0.05114291381835938, 0.05411430358886719, 0.05107097625732422, 0.050871391296386716, 0.05067663955688476, 0.05067366409301758, 0.05088665771484375, 0.05070412826538086, 0.050847808837890626, 0.05111648178100586, 0.050958080291748045, 0.05087631988525391, 0.05099529647827149, 0.051162784576416015, 0.05097507095336914, 0.050948223114013674, 0.05157612609863281, 0.05126364898681641, 0.050862495422363284, 0.050733055114746094, 0.05117279815673828, 0.050716705322265625, 0.0514381103515625, 0.050982559204101566, 0.05093791961669922, 0.050772254943847656, 0.05094332885742187, 0.05081974411010742, 0.05186511993408203, 0.05072310256958008, 0.05083564758300781, 0.05069823837280273, 0.05058956909179688, 0.050542881011962894, 0.05173641586303711, 0.05098086547851562, 0.05099260711669922, 0.05061904144287109, 0.0507632942199707, 0.05065718460083008, 0.0509376335144043, 0.050705055236816406, 0.05157273483276367, 0.05071462249755859, 0.05067078399658203, 0.05069667053222656, 0.05079280090332031, 0.050775230407714846, 0.050661598205566406, 0.05052876663208008, 0.0507105598449707, 0.05068601608276367, 0.05099929428100586, 0.050819072723388675, 0.05068556976318359, 0.050599521636962894, 0.05066831970214844, 0.05219747161865235, 0.0506932487487793, 0.05179475021362305, 0.051431102752685545, 0.0507470703125, 0.05102447891235352, 0.05064470291137695, 0.0512174072265625, 0.050893505096435546, 
0.05096006393432617, 0.05557059097290039, 0.05131280136108399, 0.05095167922973633, 0.050783775329589845, 0.05084463882446289, 0.050679649353027344, 0.050706367492675784, 0.05059196853637695, 0.05398732757568359, 0.05195980834960937, 0.05135273742675781, 0.05074393463134766, 0.05056451034545899, 0.050629440307617186, 0.05044976043701172, 0.05066819381713867, 0.05050777435302734, 0.05043199920654297, 0.050710529327392576, 0.05053440093994141, 0.05083932876586914, 0.05068412780761719, 0.05069823837280273, 0.0505398063659668, 0.05101232147216797, 0.0506695671081543, 0.05173452758789063, 0.05102985763549805, 0.05090934371948242, 0.05110147094726562, 0.0508449592590332, 0.050764511108398434, 0.05107120132446289, 0.05079654312133789, 0.050724864959716794, 0.05084979248046875, 0.0511016960144043, 0.051133567810058594, 0.05052406311035156, 0.05112931060791016, 0.05079228973388672, 0.05078796768188477, 0.050614814758300784, 0.050644992828369144, 0.05044367980957031, 0.05062883377075195, 0.050618751525878906, 0.05065052795410156, 0.05058176040649414, 0.050921375274658204, 0.05121068954467774, 0.05122867202758789, 0.05089279937744141, 0.050855934143066404, 0.05076377487182617, 0.05058969497680664, 0.05068912124633789, 0.05053737640380859, 0.05051801681518555, 0.05064704132080078, 0.05114857482910156, 0.0506864013671875, 0.05107680130004883, 0.05107356643676758, 0.05119388961791992, 0.050617664337158204, 0.05071270370483399, 0.05070249557495117, 0.05082287979125977, 0.05061443328857422, 0.05051408004760742, 0.05060240173339844, 0.051214336395263675, 0.05111983871459961, 0.0509884147644043, 0.05132380676269531, 0.051229759216308596, 0.050893470764160155, 0.0506104621887207, 0.050828544616699216, 0.05097468948364258, 0.050678558349609375, 0.050616222381591795, 0.05073632049560547, 0.05089782333374023, 0.05066342544555664, 0.050544639587402344, 0.05649407958984375, 0.051033344268798825, 0.0507850227355957, 0.05070342254638672, 0.050793407440185546, 0.051329025268554686, 0.05075276947021484, 0.05078678512573242, 0.050815265655517576, 0.05062223815917969, 0.050847072601318356, 0.050772865295410155, 0.050677761077880856, 0.05062451171875, 0.051129886627197266, 0.05098105621337891, 0.05091107177734375, 0.05130438232421875, 0.05149747085571289, 0.05111360168457031, 0.050913345336914065, 0.0508787841796875, 0.050937278747558594, 0.05063942337036133, 0.05083955383300781, 0.05086207962036133, 0.050939071655273435, 0.05088441467285156, 0.050936832427978515, 0.05095126342773437, 0.05097564697265625, 0.05092681503295898, 0.05080758285522461, 0.05081683349609375, 0.05229792022705078, 0.05180416107177734, 0.05123750305175781, 0.051073024749755856, 0.05084073638916015, 0.05115683364868164, 0.05096255874633789, 0.05111273574829102, 0.05199241638183594, 0.0508930549621582, 0.05079561614990234, 0.05065987014770508, 0.05065119934082031, 0.05058591842651367, 0.050887935638427736, 0.0509304313659668, 0.05065878295898438, 0.050950687408447264, 0.050907135009765625, 0.050695808410644534, 0.05072524642944336, 0.050685951232910156, 0.05057024002075195, 0.050810943603515624, 0.054608222961425784, 0.05160966491699219, 0.05123945617675781, 0.05098495864868164, 0.050735103607177735, 0.05205811309814453, 0.05100953674316406, 0.050661376953125, 0.0505643196105957, 0.05049628829956055, 0.050513313293457034, 0.05150371170043945, 0.050794559478759764, 0.051313697814941404, 0.05086025619506836, 0.050950847625732425, 0.05085184097290039, 0.05082643127441406, 0.05092147064208984, 0.0510206069946289, 0.05114380645751953, 
0.05107392120361328, 0.05090256118774414, 0.05128220748901367, 0.05324614334106445, 0.05090508651733398, 0.05070800018310547, 0.050737632751464846, 0.05067161560058594, 0.050966304779052736, 0.051405025482177735, 0.050867584228515624, 0.0508355827331543, 0.050967041015625, 0.05050982284545898, 0.05083340835571289, 0.050670814514160153, 0.0507481918334961, 0.0510720329284668, 0.05099004745483399, 0.05210281753540039, 0.051689952850341794, 0.05105395126342773, 0.05075212860107422, 0.0506163215637207, 0.050566177368164066, 0.054764385223388674, 0.051368064880371093, 0.0515909423828125, 0.050972576141357424, 0.050993473052978515, 0.05065932846069336, 0.05069193649291992, 0.05073936080932617, 0.05066336059570312, 0.050745407104492185, 0.05106687927246094, 0.05060812759399414, 0.05082521438598633, 0.050773662567138673, 0.050530654907226566, 0.05046886444091797, 0.050423809051513675, 0.050528255462646485, 0.050718719482421876, 0.05097987365722656, 0.05050057601928711, 0.05057126235961914, 0.051078975677490236, 0.050466625213623044, 0.05086825561523438, 0.05062595367431641, 0.050428768157958985, 0.05045052719116211, 0.05062246322631836, 0.05209632110595703, 0.05138079833984375, 0.05161075210571289, 0.05142630386352539, 0.050972671508789064, 0.05087593460083008, 0.050618846893310546, 0.050950145721435545, 0.05094521713256836, 0.050647361755371094, 0.05053459167480469, 0.050639041900634764, 0.05120975875854492, 0.05063238525390625, 0.05045340728759766, 0.05071993637084961, 0.0507523193359375, 0.050560192108154295, 0.05060076904296875, 0.05055692672729492, 0.05058355331420898, 0.05055606460571289, 0.05108111953735352, 0.05071270370483399, 0.05050067138671875, 0.05063577651977539, 0.050498302459716794, 0.05053401565551758, 0.050428321838378906, 0.051515296936035154, 0.05082944107055664, 0.05050320053100586, 0.05051772689819336, 0.0506929931640625, 0.05076553726196289, 0.050646305084228516, 0.050609153747558595, 0.0505999984741211, 0.051172767639160156, 0.05079024124145508, 0.050786880493164065, 0.050702465057373046, 0.05063884735107422, 0.050391040802001956, 0.05034598541259765, 0.05039839935302735, 0.05039187240600586, 0.05087798309326172, 0.05076588821411133, 0.05073516845703125, 0.05100374221801758, 0.05356748962402344, 0.05048524856567383, 0.050392097473144534, 0.050791393280029296, 0.050816158294677734, 0.05045129776000976, 0.050810657501220706, 0.0507465934753418, 0.05090611267089844, 0.051476478576660156, 0.05095219039916992, 0.05061014556884766, 0.05031945419311523, 0.051344799041748046, 0.05066729736328125, 0.050649856567382814, 0.05073651123046875, 0.0510142707824707, 0.051700992584228514, 0.05090361785888672, 0.050581695556640625, 0.05034950256347656, 0.05037318420410156, 0.05079001617431641, 0.05078825759887695, 0.05050614547729492, 0.0504730224609375, 0.05063065719604492, 0.050724864959716794, 0.05048320007324219, 0.05044140625, 0.05069702529907227, 0.050527679443359376, 0.051272254943847656, 0.05329305648803711, 0.05074703979492187, 0.050861534118652345, 0.051424129486083985, 0.050761985778808597, 0.05049932861328125, 0.05054873657226563, 0.0512239990234375, 0.05091702270507813, 0.05072579193115234, 0.05077811050415039, 0.05039875030517578, 0.05047872161865234, 0.05048201751708985, 0.05082931137084961, 0.05070438385009766, 0.050462718963623046, 0.05046393585205078, 0.051001792907714845, 0.050548095703125, 0.050961406707763675, 0.05050982284545898, 0.05065523147583008, 0.05060771179199219, 0.050591552734375, 0.050461280822753904, 0.05055487823486328, 0.050918655395507814, 
0.05085465621948242, 0.05179593658447266, 0.05103788757324219, 0.050784255981445314, 0.05068809509277344, 0.050759937286376955, 0.05064908981323242, 0.050579456329345705, 0.05082668685913086, 0.05122304153442383, 0.051471710205078125, 0.050782943725585936, 0.050759681701660155, 0.05121023941040039, 0.05086800003051758, 0.05052390289306641, 0.05105894470214844, 0.05076192092895508, 0.05058972930908203, 0.050898944854736325, 0.050533409118652346, 0.05082572937011719, 0.05119228744506836, 0.05089033508300781, 0.05071299362182617, 0.050726558685302736, 0.05089724731445312, 0.05082112121582031, 0.050595584869384765, 0.050651039123535156, 0.05099555206298828, 0.05072832107543945, 0.05053299331665039, 0.05041971206665039, 0.05143756866455078, 0.050730239868164065, 0.05141142272949219, 0.053226974487304686, 0.052398910522460936, 0.05113375854492187, 0.05075833511352539, 0.05045043182373047, 0.055003135681152344, 0.05104230499267578, 0.050700286865234374, 0.050634750366210936, 0.05051391983032227, 0.050487297058105465, 0.05051801681518555, 0.05059318542480469, 0.05074367904663086, 0.050528480529785154, 0.050391040802001956, 0.05049958419799805, 0.0513331184387207, 0.05119180679321289, 0.05075062561035156, 0.05055065536499023, 0.05106710433959961, 0.05112704086303711, 0.051179424285888675, 0.050748863220214845, 0.050541088104248046, 0.05067129516601562, 0.05058195114135742, 0.05069823837280273, 0.050638721466064456, 0.05058982467651367, 0.05054374313354492, 0.05044518280029297, 0.05086617660522461, 0.05113232040405274, 0.05089295959472656, 0.05070431900024414, 0.05054572677612305, 0.05045548629760742, 0.05061654281616211, 0.050450206756591794, 0.050603038787841795, 0.050789344787597654, 0.05088774490356445, 0.0505241584777832, 0.05035712051391601, 0.05066473770141602, 0.0504409294128418, 0.05111609649658203, 0.05079040145874023, 0.05066547012329101, 0.050826847076416014, 0.05059625625610351, 0.050683902740478515, 0.05060940933227539, 0.05049827194213867, 0.05112323379516601, 0.05110271835327149, 0.050743297576904295, 0.05068377685546875, 0.050812160491943356, 0.050723712921142576, 0.050783424377441405, 0.05133180618286133, 0.05065532684326172, 0.05059174346923828, 0.05072281646728516, 0.05052137756347656, 0.05080873489379883, 0.05138236618041992, 0.05060831832885742, 0.05053766250610352, 0.05055142211914063, 0.05303046417236328, 0.05129260635375977, 0.050840576171875, 0.050653888702392576, 0.05073078536987305, 0.051409503936767575, 0.05122655868530274, 0.05078015899658203, 0.050767871856689455, 0.05078835296630859, 0.051105792999267576, 0.05076496124267578, 0.0537026252746582, 0.05076873779296875, 0.05090307235717773, 0.05090284729003906, 0.05078790283203125, 0.05072342300415039, 0.05089436721801758, 0.05088275146484375, 0.05059932708740234, 0.050469375610351565, 0.05055702209472656, 0.05050556945800781, 0.05075606536865234, 0.05488601684570313, 0.050772350311279295, 0.05044182586669922, 0.05054265594482422, 0.050778465270996095, 0.05103411102294922, 0.050617408752441403, 0.05080313491821289, 0.05049135971069336, 0.05065577697753906, 0.0510750732421875, 0.05061427307128906, 0.050563201904296876, 0.05061344146728516, 0.05078713607788086, 0.050908798217773436, 0.05057279968261719, 0.05068057632446289, 0.05150067138671875, 0.05163251113891602, 0.054295616149902345, 0.05091628646850586, 0.05555801773071289, 0.05055910491943359, 0.0506262092590332, 0.05077164840698242, 0.05053916931152344, 0.05047865676879883, 0.05055327987670898, 0.05079040145874023, 0.050415584564208984, 0.050354209899902344, 
0.05087343978881836]",tokens/s,19.627935368231505,,, 4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1048.969216,965.67296,0.0,570.425344,536.326656,s,1,8.42441015625,8.42441015625,0.0,8.42441015625,8.42441015625,8.42441015625,8.42441015625,[8.42441015625],,kWh,3.914825401667864e-05,4.310742464555275e-06,1.281612136400101e-05,5.6275117845234926e-05,,MB,1284.702208,1034.878976,0.0,624.951296,594.377728,s,10,0.26368310546875,0.026368310546875003,0.0002944813357621705,0.026420432090759276,0.026643379402160643,0.026729434108734133,0.026798277873992922,"[0.026592159271240236, 0.026053504943847658, 0.026624256134033204, 0.02681548881530762, 0.026454591751098634, 0.02652511978149414, 0.02638627243041992, 0.025766719818115236, 0.026129184722900392, 0.02633580780029297]",tokens/s,9708.623521590745,kWh,7.829544049486437e-07,8.630932426148413e-08,5.197487029598338e-07,1.3890124321699618e-06,tokens/kWh,184303605.98001865,MB,1320.767488,1049.55904,0.0,639.63136,607.71072,s,10,14.155046142578126,1.4155046142578125,0.020044638068717466,1.4226021728515623,1.4353281616210938,1.439636065673828,1.4430823889160156,"[1.4244794921875, 1.4072734375, 1.4439439697265626, 1.4047330322265625, 1.428071533203125, 1.4275911865234374, 1.3831318359375, 1.3807259521484374, 1.434370849609375, 1.420724853515625]",tokens/s,44.50709617293096,kWh,4.129228820421656e-05,4.554160190913781e-06,1.5726347627237077e-05,6.157279602236742e-05,tokens/kWh,1023179.1321789923,,s,630,14.148588958740227,0.022458077712286084,0.0006456810369411583,0.022481712341308593,0.022870140266418457,0.023023983955383298,0.024744324131011963,"[0.022230239868164064, 0.022661535263061524, 0.022646303176879882, 0.022819456100463868, 0.022765184402465822, 0.022737503051757812, 0.022618112564086915, 0.022514879226684572, 0.02265990447998047, 0.02262015914916992, 0.022624256134033204, 0.02262835121154785, 0.02246451187133789, 0.02240121650695801, 0.022331104278564454, 0.02237654495239258, 0.022538368225097655, 0.02249235153198242, 0.022801088333129882, 0.02239244842529297, 0.022370687484741213, 0.022194271087646485, 0.02226371192932129, 0.022259616851806642, 0.02264179229736328, 0.02233238410949707, 0.022403072357177735, 0.022494400024414062, 0.02242639923095703, 0.02242767906188965, 0.022456352233886718, 0.022386655807495118, 0.022379871368408202, 0.022436511993408202, 0.022386528015136718, 0.02252012825012207, 0.02275926399230957, 0.022765663146972655, 0.022750463485717774, 0.02276790428161621, 0.02277619171142578, 0.022676992416381835, 0.02260799980163574, 0.023404928207397462, 0.02253647994995117, 0.02274070358276367, 0.02272870445251465, 0.022656351089477538, 0.02271913528442383, 0.025694400787353515, 0.023480127334594727, 0.02301923179626465, 0.02263478469848633, 0.022355583190917967, 0.02245804786682129, 0.02247478485107422, 0.022327455520629882, 0.022335487365722655, 0.02236262321472168, 
0.02229471969604492, 0.022413408279418946, 0.02223865509033203, 0.022210847854614257, 0.021541919708251953, 0.021699552536010743, 0.02169593620300293, 0.021744480133056642, 0.02181292724609375, 0.021600288391113283, 0.02167398452758789, 0.02247270393371582, 0.022636640548706056, 0.021931936264038086, 0.02292870330810547, 0.022119007110595702, 0.021884416580200194, 0.021832544326782225, 0.021957536697387696, 0.022623071670532225, 0.02195193672180176, 0.021917247772216798, 0.02222591972351074, 0.022218751907348632, 0.022341184616088867, 0.022106559753417968, 0.022046720504760742, 0.022157087326049804, 0.02312390327453613, 0.02233100891113281, 0.02221232032775879, 0.021914560317993163, 0.022068384170532227, 0.022149759292602537, 0.02252207946777344, 0.02199955177307129, 0.022005823135375975, 0.02215936088562012, 0.02231881523132324, 0.02212278366088867, 0.022176767349243166, 0.02241958427429199, 0.02247283172607422, 0.02234601593017578, 0.02240764808654785, 0.02254643249511719, 0.02271843147277832, 0.022761856079101563, 0.022601343154907225, 0.022374591827392577, 0.02256675148010254, 0.02273855972290039, 0.02259721565246582, 0.022784799575805665, 0.022550527572631835, 0.022378496170043945, 0.02249113655090332, 0.022565088272094726, 0.02289641571044922, 0.022597984313964845, 0.022608575820922853, 0.022694879531860352, 0.022681503295898436, 0.022941919326782228, 0.022915103912353515, 0.022800479888916016, 0.022905664443969728, 0.022567487716674803, 0.023592384338378906, 0.02298486328125, 0.022836864471435545, 0.022844383239746094, 0.022733055114746093, 0.02266048049926758, 0.022923616409301757, 0.02274723243713379, 0.022918815612792968, 0.022958335876464845, 0.02289664077758789, 0.022748479843139647, 0.022710975646972657, 0.02279814338684082, 0.022712511062622072, 0.02285468864440918, 0.022933887481689452, 0.022849952697753906, 0.022847679138183592, 0.022763263702392577, 0.022997247695922853, 0.02292051124572754, 0.022944448471069336, 0.02304739189147949, 0.02282192039489746, 0.022801216125488282, 0.022708480834960937, 0.022758079528808595, 0.0226910400390625, 0.02277238464355469, 0.02323244857788086, 0.02267568016052246, 0.022734207153320314, 0.022651487350463868, 0.02277299118041992, 0.022581792831420897, 0.025107807159423828, 0.023477119445800783, 0.02338323211669922, 0.0227061767578125, 0.022720447540283205, 0.0229303035736084, 0.022675455093383787, 0.022935232162475585, 0.022612415313720702, 0.02292076873779297, 0.022589759826660158, 0.022910688400268556, 0.022802719116210936, 0.022703712463378906, 0.022647392272949218, 0.02275641632080078, 0.022675743103027345, 0.02266364860534668, 0.02272991943359375, 0.023200639724731444, 0.02551171112060547, 0.02286150360107422, 0.022741472244262696, 0.02254204750061035, 0.02268307113647461, 0.02280022430419922, 0.022249343872070313, 0.022563072204589845, 0.023061952590942382, 0.022554527282714842, 0.022545120239257813, 0.02247091293334961, 0.022449024200439455, 0.02235251235961914, 0.022627904891967775, 0.02249951934814453, 0.022523967742919922, 0.02254800033569336, 0.022458368301391602, 0.02219059181213379, 0.022142751693725586, 0.02208620834350586, 0.022031423568725585, 0.022264448165893555, 0.022249792098999025, 0.02225315284729004, 0.022200735092163085, 0.0219931526184082, 0.02188038444519043, 0.021945119857788086, 0.021933759689331055, 0.021831008911132814, 0.0241591682434082, 0.022920831680297852, 0.022227071762084962, 0.022195903778076172, 0.022137407302856446, 0.02189926338195801, 0.022084703445434572, 0.022037408828735353, 
0.022169151306152345, 0.021963327407836915, 0.022085184097290038, 0.022168960571289063, 0.022094112396240234, 0.022012256622314454, 0.02210032081604004, 0.022177183151245117, 0.02250809669494629, 0.022284000396728516, 0.022129056930541992, 0.022218656539916993, 0.022446048736572265, 0.022401023864746093, 0.022374176025390626, 0.022182111740112306, 0.022257728576660155, 0.022124479293823242, 0.022063104629516602, 0.022116352081298828, 0.022044607162475586, 0.021835199356079103, 0.021891712188720703, 0.022025760650634767, 0.02226019287109375, 0.022475839614868164, 0.022703039169311524, 0.02260361671447754, 0.02277187156677246, 0.022630048751831056, 0.02281827163696289, 0.022805120468139647, 0.022776063919067384, 0.02276259231567383, 0.022795167922973633, 0.02275958442687988, 0.022668256759643554, 0.022815616607666015, 0.0228701114654541, 0.022869951248168947, 0.022789823532104493, 0.022687999725341797, 0.022605087280273436, 0.022616832733154298, 0.022611520767211915, 0.022647232055664063, 0.02268956756591797, 0.022579519271850586, 0.022534048080444336, 0.022419679641723634, 0.022355743408203125, 0.022331424713134766, 0.022402624130249023, 0.022618528366088866, 0.02252150344848633, 0.022464063644409178, 0.023083808898925782, 0.02253113555908203, 0.022402015686035157, 0.022338752746582032, 0.02244588851928711, 0.022416351318359375, 0.02265910339355469, 0.022857887268066406, 0.022740800857543944, 0.022626304626464845, 0.022585567474365235, 0.022735967636108398, 0.02264950370788574, 0.022586784362792968, 0.022610048294067382, 0.022679391860961913, 0.022612640380859375, 0.022902431488037108, 0.02264713668823242, 0.02268707275390625, 0.022573728561401368, 0.02277484893798828, 0.02296703910827637, 0.023137887954711913, 0.022805248260498047, 0.022820512771606447, 0.022620351791381835, 0.0225316162109375, 0.022723039627075194, 0.02264678382873535, 0.02272774314880371, 0.022524864196777343, 0.022482751846313476, 0.02257734489440918, 0.022591487884521484, 0.02263382339477539, 0.022296575546264647, 0.02247270393371582, 0.022562816619873048, 0.023068672180175782, 0.02245193672180176, 0.02238902473449707, 0.02245449638366699, 0.022599584579467775, 0.0226507511138916, 0.022616064071655274, 0.022597696304321287, 0.022534080505371094, 0.022494815826416017, 0.02284160041809082, 0.022648000717163087, 0.022466527938842774, 0.022537023544311523, 0.022449695587158203, 0.022506143569946287, 0.02247478485107422, 0.022665088653564452, 0.022691904067993166, 0.022765727996826173, 0.02272233581542969, 0.022800479888916016, 0.022888511657714845, 0.022847423553466795, 0.022665023803710938, 0.022679391860961913, 0.022782304763793945, 0.022734943389892577, 0.022713504791259765, 0.02268227195739746, 0.022478527069091796, 0.02260419273376465, 0.022769664764404295, 0.02276483154296875, 0.022657024383544923, 0.022596384048461916, 0.02263238334655762, 0.022760831832885742, 0.02263104057312012, 0.02273689651489258, 0.022572927474975586, 0.022982847213745116, 0.02248067283630371, 0.022706239700317384, 0.022696287155151366, 0.022655040740966796, 0.022545568466186522, 0.022585887908935547, 0.02415407943725586, 0.022650943756103516, 0.022558496475219725, 0.02252783966064453, 0.0222291202545166, 0.02223459243774414, 0.022532863616943358, 0.022775423049926757, 0.022685279846191408, 0.022871936798095703, 0.022520223617553712, 0.022563488006591796, 0.02228000068664551, 0.022599231719970702, 0.02259868812561035, 0.02249510383605957, 0.022646432876586915, 0.02288627243041992, 0.022534624099731445, 0.022411264419555665, 
0.022458240509033202, 0.022691999435424805, 0.022903871536254884, 0.02255459213256836, 0.022387744903564454, 0.022339487075805665, 0.022369440078735352, 0.02219424057006836, 0.02207414436340332, 0.02189926338195801, 0.021794591903686523, 0.021588191986083985, 0.02152448081970215, 0.021470399856567384, 0.021520511627197265, 0.02165158462524414, 0.021717567443847657, 0.021770240783691407, 0.021890432357788085, 0.02168230438232422, 0.02177689552307129, 0.021614912033081055, 0.02165318489074707, 0.021642303466796874, 0.02165171241760254, 0.021748416900634764, 0.02195590400695801, 0.0218221435546875, 0.0216760311126709, 0.021702783584594727, 0.02170252799987793, 0.02203251266479492, 0.021796735763549804, 0.02178483200073242, 0.021927679061889648, 0.021696512222290038, 0.02175699234008789, 0.02161961555480957, 0.021741600036621095, 0.021705823898315428, 0.021734304428100586, 0.02166169548034668, 0.0216712646484375, 0.02199344062805176, 0.02184671974182129, 0.021614591598510743, 0.021962751388549806, 0.021805055618286134, 0.021679519653320312, 0.021831392288208008, 0.021601152420043946, 0.021540864944458008, 0.0220897274017334, 0.021940223693847655, 0.021694784164428712, 0.021395456314086913, 0.021712896347045898, 0.021675775527954102, 0.02169481658935547, 0.02166543960571289, 0.021649663925170898, 0.021673887252807618, 0.021743520736694336, 0.021651168823242188, 0.021694015502929688, 0.02165443229675293, 0.02172857666015625, 0.021656255722045898, 0.02170572853088379, 0.022193248748779298, 0.021867488861083983, 0.02181999969482422, 0.021706111907958986, 0.02170537567138672, 0.021672256469726564, 0.022026336669921875, 0.021776287078857422, 0.021911808013916016, 0.021849279403686524, 0.021915712356567384, 0.021823999404907226, 0.021964736938476562, 0.02200966453552246, 0.021903167724609374, 0.02172915267944336, 0.021859039306640626, 0.021864288330078124, 0.021890239715576174, 0.022182016372680663, 0.0219965763092041, 0.021898080825805664, 0.022161439895629884, 0.021939231872558595, 0.021934112548828124, 0.02176518440246582, 0.021797536849975586, 0.021700607299804688, 0.021745664596557617, 0.02184774398803711, 0.021850431442260742, 0.021765920639038087, 0.024104991912841798, 0.022753408432006836, 0.02234579277038574, 0.02225948715209961, 0.02225161552429199, 0.021964927673339844, 0.0218787841796875, 0.021749759674072267, 0.02194780731201172, 0.022061471939086915, 0.022114463806152344, 0.02198121643066406, 0.021916799545288086, 0.021775232315063477, 0.021831615447998047, 0.021843103408813475, 0.021963552474975587, 0.02179478454589844, 0.021985631942749023, 0.022177791595458983, 0.02188083267211914, 0.022029920578002928, 0.023253408432006836, 0.029998912811279296, 0.02225606346130371, 0.022163200378417968, 0.022011808395385742, 0.021987743377685547, 0.022122175216674804, 0.022115520477294922, 0.02216659164428711, 0.02209078407287598, 0.022051551818847655, 0.021993471145629884, 0.022378528594970703, 0.02201913642883301, 0.02197555160522461, 0.022140735626220702, 0.02336409568786621, 0.022612064361572266, 0.02259744071960449, 0.02255891227722168, 0.022618080139160158, 0.022609376907348634, 0.022784576416015626, 0.022644287109375, 0.02262063980102539, 0.022716384887695312, 0.022800479888916016, 0.022617279052734376, 0.022631135940551758, 0.022548479080200197, 0.022757343292236328, 0.02272591972351074, 0.02285385513305664, 0.02288092803955078, 0.02329737663269043, 0.022691743850708008, 0.02257369613647461, 0.022509567260742186, 0.02253363227844238, 0.022486623764038087, 0.0224736328125, 
0.02253385543823242, 0.022399168014526367, 0.02242799949645996, 0.022594879150390625, 0.022448575973510743, 0.022367904663085938, 0.022295167922973633, 0.02226736068725586, 0.022261247634887696, 0.022811391830444335, 0.023198783874511717, 0.02328876876831055, 0.023117824554443358, 0.0228818244934082, 0.029682144165039063, 0.024685888290405272, 0.02232569694519043, 0.022047103881835936, 0.022380640029907226, 0.022466560363769532, 0.022769535064697265, 0.022400447845458984, 0.02231158447265625, 0.022340991973876952, 0.022120672225952147, 0.022204927444458008, 0.02229987144470215, 0.02232579231262207, 0.0221976318359375, 0.022252351760864257, 0.02232729530334473, 0.022468671798706055, 0.022394304275512696, 0.02230678367614746, 0.022261920928955077, 0.02233283233642578, 0.022385215759277342, 0.022448543548583985, 0.022361728668212892, 0.022400800704956054, 0.022436447143554687, 0.02249728012084961, 0.022548511505126954, 0.022783327102661132, 0.022585760116577147, 0.022823135375976564, 0.022595199584960937, 0.02256934356689453, 0.02257030487060547, 0.022589792251586915, 0.02250489616394043, 0.024867744445800782, 0.02332876777648926, 0.02274518394470215, 0.022632064819335936, 0.022769567489624023, 0.022870399475097655, 0.022716415405273437, 0.023000511169433593, 0.02267350387573242, 0.022567392349243164, 0.022333120346069334, 0.02231715202331543, 0.02249545669555664, 0.022681312561035158, 0.022849279403686522, 0.022771360397338868, 0.022710752487182618, 0.022309024810791014, 0.02228268814086914, 0.023027872085571287, 0.024768192291259764, 0.022228511810302734, 0.022177600860595705, 0.02204640007019043, 0.021866592407226562, 0.021965919494628908, 0.022081567764282228, 0.022271711349487303, 0.022380544662475587]",tokens/s,44.52740848131149,,, -4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -22074,7 +22074,7 @@ AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,7172.022272,10246.5536,0.0,9860.808704,9797.323264,s,1,13.2211689453125,13.2211689453125,0.0,13.2211689453125,13.2211689453125,13.2211689453125,13.2211689453125,[13.2211689453125],,kWh,0.00016171045942918073,1.782812838696042e-05,5.287448674400064e-05,0.00023241307456014178,,MB,3114.426368,10626.138112,0.0,10211.033088,10096.966144,s,10,7.538050537109375,0.7538050537109375,0.00793216900750495,0.7540419616699219,0.7613850219726562,0.7631519531250001,0.764565498046875,"[0.7330369873046875, 0.7515281982421875, 0.7533756713867188, 0.7528856201171875, 0.7609923706054688, 0.75665185546875, 0.7649188842773438, 0.7565770263671875, 0.7534981079101563, 0.7545858154296875]",tokens/s,339.61035249064355,kWh,2.1851729629166053e-05,2.4097313488246706e-06,1.442136074342826e-05,3.8682821721418984e-05,tokens/kWh,6617924.665465931,MB,3130.580992,10630.332416,0.0,10213.13024,10096.968704,s,10,37.540674560546876,3.7540674560546874,0.010010934544368294,3.7587991943359373,3.762990649414063,3.7641482788085936,3.7650743823242188,"[3.7627333984375, 3.765305908203125, 3.742988525390625, 3.737947265625, 3.760489013671875, 3.737583984375, 3.754548828125, 3.757109375, 3.7611796875, 3.76078857421875]",tokens/s,16.781797540263018,kWh,0.0001096356925441675,1.2092824354417807e-05,7.289840355677225e-05,0.00019462692045535754,tokens/kWh,323696.2279041485,,s,630,37.537324813842794,0.059583055260067894,0.0007962526358102235,0.05942601585388184,0.060120168304443354,0.06054462490081787,0.0634981608581543,"[0.06065737533569336, 0.05999235153198242, 0.06016950225830078, 0.059509471893310545, 0.05974371337890625, 0.059731998443603516, 0.05959084701538086, 0.059748863220214846, 0.059141056060791015, 0.059362144470214845, 0.05948624038696289, 0.06010675048828125, 0.06020249557495117, 0.05968134307861328, 0.05916393661499023, 0.05951308822631836, 0.05976473617553711, 0.05929616165161133, 0.05954307174682617, 0.059138526916503904, 0.06032588958740234, 0.059428863525390625, 0.0596049919128418, 0.05930188751220703, 0.05929779052734375, 0.05939795303344726, 0.06055039978027344, 0.059765697479248044, 0.05965619277954102, 0.05993881607055664, 0.05990371322631836, 0.0595843505859375, 0.06011910247802734, 0.059646015167236326, 0.05935340881347656, 0.05989699172973633, 0.060920673370361327, 0.05990316772460937, 0.05910611343383789, 0.059703296661376956, 0.059396095275878906, 0.05964799880981445, 0.05927731323242187, 0.06034377670288086, 0.06092854309082031, 0.05942272186279297, 0.059994304656982425, 0.05944268798828125, 0.05943340682983399, 0.059470878601074216, 0.05944924926757812, 0.05960800170898437, 0.05956809616088867, 0.059598880767822264, 0.061886463165283206, 0.059338752746582034, 
0.05937561416625976, 0.05897216033935547, 0.06000147247314453, 0.05987356948852539, 0.059550048828125, 0.05963983917236328, 0.05924844741821289, 0.06027465438842773, 0.059658271789550785, 0.0598364143371582, 0.0593422737121582, 0.05936595153808594, 0.059348033905029296, 0.05915948867797852, 0.05963919830322266, 0.05900348663330078, 0.059205631256103515, 0.0592097282409668, 0.05945548629760742, 0.05922169494628906, 0.059257152557373044, 0.05928889465332031, 0.05931488037109375, 0.059598209381103516, 0.05979199981689453, 0.05959267044067383, 0.06006377410888672, 0.059719680786132816, 0.059379711151123046, 0.059501983642578124, 0.059622047424316406, 0.05987526321411133, 0.059332447052001955, 0.05968707275390625, 0.05975376129150391, 0.06466223907470703, 0.05996495819091797, 0.05947216033935547, 0.059944766998291016, 0.059656574249267576, 0.05967424011230469, 0.05964617538452149, 0.059491775512695313, 0.06014771270751953, 0.060201694488525394, 0.05989580917358398, 0.059754432678222655, 0.05946988677978516, 0.05955744171142578, 0.059625247955322265, 0.0598125114440918, 0.060471454620361326, 0.05979119873046875, 0.05977468872070312, 0.05982441711425781, 0.06040576171875, 0.06021231842041016, 0.05975958251953125, 0.06008211135864258, 0.05959823989868164, 0.05996604919433594, 0.059778881072998044, 0.05972377777099609, 0.05985670471191406, 0.060039295196533206, 0.05957043075561524, 0.059641857147216794, 0.059668479919433595, 0.059580352783203124, 0.05973408126831055, 0.060549247741699216, 0.059433761596679686, 0.059285663604736326, 0.05965119934082031, 0.0597081298828125, 0.05965331268310547, 0.05891113662719726, 0.059912479400634766, 0.060211326599121096, 0.059666431427001954, 0.059799072265625, 0.05957270431518555, 0.05981388854980469, 0.059117408752441404, 0.05931955337524414, 0.06079171371459961, 0.05928345489501953, 0.059665855407714845, 0.0590239372253418, 0.05951631927490234, 0.05900348663330078, 0.05893033599853516, 0.05871529769897461, 0.05912905502319336, 0.059093441009521484, 0.05924192047119141, 0.05907721710205078, 0.05889023971557617, 0.059898113250732424, 0.05943014526367187, 0.05941708755493164, 0.0590799674987793, 0.05978595352172852, 0.05892300796508789, 0.059410079956054684, 0.05939849472045899, 0.05871206283569336, 0.05964384078979492, 0.05950239944458008, 0.05909939193725586, 0.059114944458007815, 0.05909561538696289, 0.05929369735717773, 0.058832225799560545, 0.05865539169311523, 0.05965187072753906, 0.059205856323242184, 0.05894553756713867, 0.05885337448120117, 0.059875328063964846, 0.05867504119873047, 0.0591769905090332, 0.059824256896972655, 0.06074758529663086, 0.059596992492675784, 0.05933603286743164, 0.059139873504638674, 0.05909183883666992, 0.05931827163696289, 0.059006271362304685, 0.060917919158935546, 0.05938623809814453, 0.05965430450439453, 0.060598785400390626, 0.058934593200683595, 0.05892121505737305, 0.05927756881713867, 0.05965238571166992, 0.05945487976074219, 0.059171329498291014, 0.05897011184692383, 0.05870143890380859, 0.05887392044067383, 0.058915199279785155, 0.059517921447753905, 0.058966144561767575, 0.059197887420654294, 0.059300254821777344, 0.05932137680053711, 0.05938275146484375, 0.05953126525878906, 0.05938380813598633, 0.05894259262084961, 0.059160545349121095, 0.05980220794677735, 0.05927052688598633, 0.059345375061035155, 0.05957475280761719, 0.059324127197265625, 0.059845024108886716, 0.059120735168457034, 0.0601297607421875, 0.05925680160522461, 0.05932271957397461, 0.05933055877685547, 0.05943910217285156, 0.05935078430175781, 
0.05956771087646484, 0.05927388763427734, 0.059161918640136715, 0.059283935546875, 0.059864574432373044, 0.059943328857421874, 0.059223934173583986, 0.05905657577514648, 0.059150337219238285, 0.05926812744140625, 0.06005382537841797, 0.05954012680053711, 0.06006505584716797, 0.0591756477355957, 0.05900912094116211, 0.05956361770629883, 0.059341121673583984, 0.059448448181152344, 0.05909718322753906, 0.05909932708740234, 0.05918780899047851, 0.06002035140991211, 0.05912409591674805, 0.05906022262573242, 0.05907455825805664, 0.05886918258666992, 0.05877612686157226, 0.05870796966552734, 0.05931827163696289, 0.059991649627685543, 0.05885788726806641, 0.05880595016479492, 0.058954048156738284, 0.05905433654785156, 0.05926886367797852, 0.06720719909667969, 0.05939606475830078, 0.05950054550170898, 0.06342646408081054, 0.059868766784667966, 0.05981622314453125, 0.05944956970214844, 0.0600186882019043, 0.0592619857788086, 0.058806686401367186, 0.05891142272949219, 0.05886348724365234, 0.05893081665039063, 0.058982784271240235, 0.060063743591308595, 0.05917484664916992, 0.0588862075805664, 0.058990367889404295, 0.05953926467895508, 0.05989827346801758, 0.05978889465332031, 0.05937184143066406, 0.0592446403503418, 0.058887294769287106, 0.05934908676147461, 0.05970819091796875, 0.06125360107421875, 0.059493824005126955, 0.0592239990234375, 0.05892572784423828, 0.05893545532226562, 0.05884297561645508, 0.061052928924560546, 0.05901676940917969, 0.05939859390258789, 0.05932015991210938, 0.05948172760009766, 0.060166782379150394, 0.05970524978637695, 0.059840511322021485, 0.05881468963623047, 0.05936912155151367, 0.059206016540527345, 0.05907004928588867, 0.060158111572265624, 0.06347289657592774, 0.05959347152709961, 0.05978275299072266, 0.05898591995239258, 0.0594106559753418, 0.05904019165039062, 0.05991455841064453, 0.059006912231445316, 0.0607314567565918, 0.059709312438964844, 0.05957440185546875, 0.05938336181640625, 0.059996990203857424, 0.06092620849609375, 0.05948390579223633, 0.05916377639770508, 0.05927411270141601, 0.06024367904663086, 0.05946531295776367, 0.059224769592285155, 0.059305984497070315, 0.059080513000488284, 0.05919353485107422, 0.05934080123901367, 0.05897145462036133, 0.059090686798095704, 0.05925766372680664, 0.05960512161254883, 0.05939603042602539, 0.05932243347167969, 0.059361438751220706, 0.059350879669189456, 0.05957868957519531, 0.059094718933105465, 0.05942012786865234, 0.058999168395996095, 0.058826366424560544, 0.059951648712158204, 0.05927526473999024, 0.06005904006958008, 0.05950729751586914, 0.05878988647460937, 0.059172863006591796, 0.05946726226806641, 0.059419136047363284, 0.05921996688842773, 0.059262977600097654, 0.05913385772705078, 0.058977439880371095, 0.05926598358154297, 0.05888614273071289, 0.059169921875, 0.05911859130859375, 0.05915430450439453, 0.05898137664794922, 0.05903462219238281, 0.05977299118041992, 0.05940339279174805, 0.05931500625610352, 0.059188766479492186, 0.05964438247680664, 0.059369377136230465, 0.05934499359130859, 0.05920767974853516, 0.05933465576171875, 0.058865215301513674, 0.059728321075439454, 0.05918310546875, 0.05930527877807617, 0.059099838256835936, 0.05911347198486328, 0.05917718505859375, 0.0592852783203125, 0.05903974533081055, 0.05902950286865234, 0.059873630523681644, 0.05905408096313477, 0.059033599853515625, 0.058824222564697264, 0.05927779388427734, 0.059184864044189454, 0.0586794548034668, 0.058843265533447264, 0.05961423873901367, 0.059234977722167965, 0.05969891357421875, 0.05905059051513672, 
0.058984447479248046, 0.05943910217285156, 0.059098751068115234, 0.05941856002807617, 0.05881625747680664, 0.05908755111694336, 0.05909292984008789, 0.05906438446044922, 0.05897177505493164, 0.05897663879394531, 0.05928345489501953, 0.06350848007202148, 0.05941843032836914, 0.05980707168579102, 0.06319558334350586, 0.05925724792480469, 0.06087200164794922, 0.059326400756835936, 0.05906614303588867, 0.05982905578613281, 0.06003235244750976, 0.0589238395690918, 0.05914214324951172, 0.06002454376220703, 0.059351329803466794, 0.059510784149169924, 0.059931968688964846, 0.05960559844970703, 0.05965628814697266, 0.059907455444335934, 0.06048012924194336, 0.05916831970214844, 0.059859390258789065, 0.05943910217285156, 0.05955136108398437, 0.05923417663574219, 0.059867008209228516, 0.06016883087158203, 0.05998387145996094, 0.060088321685791014, 0.059579391479492184, 0.059251712799072265, 0.05935500717163086, 0.0596890869140625, 0.05970105743408203, 0.059813438415527345, 0.05938035202026367, 0.059254783630371094, 0.05909612655639648, 0.06053897476196289, 0.05974921417236328, 0.06115913772583008, 0.05965238571166992, 0.059588191986083984, 0.059515296936035154, 0.05980364990234375, 0.05924863815307617, 0.05929312133789062, 0.06048006439208985, 0.05937881469726562, 0.05932121658325195, 0.06419865417480469, 0.059346240997314455, 0.06007593536376953, 0.059447391510009766, 0.059620033264160155, 0.059710590362548825, 0.05988399887084961, 0.059277694702148435, 0.05994704055786133, 0.059170814514160154, 0.05922719955444336, 0.0589683837890625, 0.059625537872314456, 0.05897808074951172, 0.06016899108886719, 0.05961497497558594, 0.059232513427734376, 0.05921551895141602, 0.059133567810058595, 0.059049758911132816, 0.05893331146240234, 0.05912051010131836, 0.06044672012329102, 0.06020044708251953, 0.059294208526611325, 0.059511966705322265, 0.05964009475708008, 0.05924448013305664, 0.059273311614990234, 0.059394046783447264, 0.059140224456787106, 0.05961078262329102, 0.05983868789672851, 0.05978140640258789, 0.059830047607421874, 0.05968073654174805, 0.059256927490234375, 0.059275680541992185, 0.05908415985107422, 0.06061270523071289, 0.05947856140136719, 0.05959993743896484, 0.05981209564208984, 0.05940646362304688, 0.05925881576538086, 0.05889257431030273, 0.059009376525878905, 0.05912371063232422, 0.06032793426513672, 0.060297183990478516, 0.05949161529541016, 0.059965663909912106, 0.05959939193725586, 0.061423583984375, 0.06417359924316406, 0.05934486389160156, 0.05937203216552735, 0.058928672790527346, 0.05921548843383789, 0.05917776107788086, 0.05969107055664063, 0.05965024185180664, 0.05952902221679687, 0.059289600372314455, 0.05993267059326172, 0.05960908889770508, 0.059312126159667966, 0.0592911376953125, 0.059095455169677735, 0.05997129440307617, 0.059940673828125, 0.05964857482910156, 0.059703296661376956, 0.05944297790527344, 0.059197662353515625, 0.05924249649047852, 0.05918105697631836, 0.05900697708129883, 0.06000230407714844, 0.05940963363647461, 0.059593406677246094, 0.0598590087890625, 0.05984195327758789, 0.05939878463745117, 0.05907030487060547, 0.05950419235229492, 0.06024790573120117, 0.05985667037963867, 0.059229152679443356, 0.059184478759765624, 0.059312736511230466, 0.05911529541015625, 0.06037478256225586, 0.06271235275268555, 0.05909711837768555, 0.06417404937744141, 0.05943471908569336, 0.060436767578125, 0.05953740692138672, 0.05896806335449219, 0.05966377639770508, 0.059265087127685544, 0.059781665802001956, 0.060118144989013675, 0.05900979232788086, 0.05896764755249023, 
0.05927936172485351, 0.059234367370605466, 0.05964540863037109, 0.059358207702636716, 0.05917283248901367, 0.05966019058227539, 0.05919961547851563, 0.05894508743286133, 0.05974879837036133, 0.05907046508789063, 0.06613801574707032, 0.05981811141967774, 0.058851329803466794, 0.05878524780273438, 0.0587264633178711, 0.05893286514282227, 0.059065185546875, 0.058746177673339846, 0.0597982063293457, 0.05965548706054687, 0.05988832092285156, 0.059205631256103515, 0.05921811294555664, 0.05923001480102539, 0.0591278076171875, 0.05905340957641601, 0.0595851821899414, 0.059663455963134764, 0.05916969680786133, 0.05974016189575195, 0.059140094757080076, 0.05904793548583984, 0.05907660675048828, 0.0603658561706543, 0.059460575103759766, 0.06002880096435547, 0.06129471969604492, 0.060112895965576174, 0.05948543930053711, 0.059290176391601564, 0.059224254608154295, 0.05907251358032226, 0.05915036773681641, 0.05968278503417969, 0.06175350570678711, 0.05981100845336914, 0.06014028930664062, 0.05951059341430664, 0.05972956848144531, 0.059423168182373046, 0.05941862487792969, 0.059756542205810545, 0.06046281433105469, 0.06015327835083008, 0.06096080017089844, 0.05932505416870117, 0.05932582473754883, 0.05950099182128906, 0.05949683380126953, 0.05960639953613281, 0.06008623886108398, 0.059787071228027344, 0.05948825454711914, 0.059582496643066404, 0.059379936218261715, 0.059429473876953125, 0.05952716827392578, 0.05977907180786133, 0.05992038345336914, 0.059940864562988284, 0.059815937042236325, 0.05924454498291016, 0.059254783630371094]",tokens/s,16.78329511024911,,, -4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -22119,12 +22119,12 @@ ChildProcessError: Traceback (most recent call last): self._buffers[key] = fn(buf) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 6.12 MiB is free. Process 106446 has 14.73 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 241.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 6.12 MiB is free. Process 105325 has 14.73 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 241.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,872.189952,556.72832,0.0,178.25792,176.190464,s,1,7.68558203125,7.68558203125,0.0,7.68558203125,7.68558203125,7.68558203125,7.68558203125,[7.68558203125],,kWh,2.0834734899998844e-05,2.2905349177520978e-06,6.4622273920067475e-06,2.958749720975769e-05,,MB,1168.748544,661.58592,0.0,253.755392,220.750336,s,12,0.183656286239624,0.01530469051996867,0.00019884486284842023,0.015267151832580567,0.015476128292083741,0.015652350568771362,0.015812454061508177,"[0.015237055778503417, 0.015360063552856445, 0.015313632011413574, 0.01585247993469238, 0.015135199546813964, 0.015297247886657714, 0.015488608360290528, 0.015225055694580079, 0.015363807678222657, 0.015139295578002929, 0.01512771224975586, 0.015116127967834472]",tokens/s,16726.8981797434,kWh,4.6199605640822427e-07,5.0950060289420976e-08,3.079489981170813e-07,8.208951148147265e-07,tokens/kWh,311854700.2899127,MB,1202.958336,676.265984,0.0,268.435456,220.752896,s,12,10.052539978027344,0.837711664835612,0.008306164220725462,0.8395189514160156,0.8449926452636718,0.8473671356201171,0.8496844134521484,"[0.8449517211914063, 0.8366310424804687, 0.84122119140625, 0.8502637329101562, 0.8379326782226563, 0.841105224609375, 0.829469482421875, 0.8449971923828125, 0.8243952026367187, 0.8356583251953125, 0.821886474609375, 0.8440277099609375]",tokens/s,75.20487375851783,kWh,2.4577269508523106e-05,2.7104509462638155e-06,9.10293816021661e-06,3.6390658615003534e-05,tokens/kWh,1731213.514614041,,s,756,10.046389430999769,0.013288874908729837,0.0003385731659422532,0.013247408390045165,0.01360647964477539,0.01367526388168335,0.014316770839691167,"[0.012775808334350587, 0.01313587188720703, 0.013276576042175293, 
0.013025823593139649, 0.013002816200256348, 0.012983519554138184, 0.012978976249694824, 0.013080351829528809, 0.013222111701965331, 0.013008895874023438, 0.01298588752746582, 0.012978655815124512, 0.012959744453430176, 0.012958815574645996, 0.013015968322753906, 0.013074432373046875, 0.013033727645874023, 0.013094464302062988, 0.013131967544555664, 0.013303808212280274, 0.013328384399414063, 0.013404159545898438, 0.013449024200439454, 0.013556991577148438, 0.01385478401184082, 0.013627424240112304, 0.013599424362182618, 0.013629311561584473, 0.013701343536376953, 0.013651424407958985, 0.013652576446533202, 0.013563936233520507, 0.01362723159790039, 0.013964703559875488, 0.013586688041687011, 0.013632160186767578, 0.013633343696594238, 0.013619199752807617, 0.013561856269836426, 0.013598719596862792, 0.013543231964111328, 0.013719167709350586, 0.0142741756439209, 0.013514880180358887, 0.013534015655517578, 0.01349555206298828, 0.013529855728149414, 0.01353932762145996, 0.013518848419189454, 0.013431072235107422, 0.013487199783325195, 0.013378175735473632, 0.013453311920166015, 0.013424639701843261, 0.01339187240600586, 0.013451264381408692, 0.013471903800964356, 0.01358240032196045, 0.013481760025024414, 0.013641728401184081, 0.013444735527038574, 0.013451647758483887, 0.013399231910705566, 0.013090944290161133, 0.013522687911987305, 0.013469696044921875, 0.013444671630859375, 0.013418944358825683, 0.013432479858398438, 0.014053728103637695, 0.014391039848327637, 0.013562111854553223, 0.013399200439453125, 0.013398112297058106, 0.013384063720703126, 0.013654175758361817, 0.013381535530090333, 0.013387999534606933, 0.013353055953979492, 0.013238271713256837, 0.01323209571838379, 0.013024479866027831, 0.013020064353942871, 0.01313980770111084, 0.013219903945922852, 0.01313587188720703, 0.013254015922546387, 0.01299078369140625, 0.013058367729187012, 0.01305190372467041, 0.013246015548706054, 0.013338784217834472, 0.014383392333984375, 0.013547616004943848, 0.0130600004196167, 0.013125727653503418, 0.012982175827026368, 0.013030688285827636, 0.01408073616027832, 0.01304371166229248, 0.013029024124145508, 0.013369695663452148, 0.013107168197631835, 0.013086784362792968, 0.013041631698608398, 0.013000639915466308, 0.013066304206848145, 0.013372703552246094, 0.013110239982604981, 0.013144864082336425, 0.012975071907043457, 0.01293875217437744, 0.0129684476852417, 0.012979455947875976, 0.012888992309570312, 0.012938816070556641, 0.013052191734313965, 0.013059616088867188, 0.013057696342468261, 0.01307875156402588, 0.013123744010925293, 0.013519295692443848, 0.013383071899414062, 0.013422623634338379, 0.013400927543640136, 0.013414143562316894, 0.013178879737854005, 0.013540351867675781, 0.013551520347595216, 0.013635680198669434, 0.013719008445739747, 0.01344313621520996, 0.013518655776977539, 0.01355020809173584, 0.013637120246887208, 0.0135316801071167, 0.013554816246032714, 0.013605759620666505, 0.013637439727783203, 0.01346726417541504, 0.013435680389404297, 0.01341004753112793, 0.013452447891235352, 0.013441408157348633, 0.013347264289855956, 0.013545536041259765, 0.013455360412597657, 0.013506239891052246, 0.013443391799926757, 0.013434816360473632, 0.013482048034667968, 0.013410304069519043, 0.013249919891357423, 0.013355392456054688, 0.013310208320617675, 0.013357376098632813, 0.013292863845825195, 0.013318528175354004, 0.01324614429473877, 0.013295743942260742, 0.01365830421447754, 0.013312000274658203, 0.013303808212280274, 0.013228032112121582, 0.013467647552490235, 
0.013735263824462891, 0.013191840171813966, 0.013254783630371093, 0.013221504211425782, 0.013078656196594238, 0.013119615554809571, 0.01309017562866211, 0.01309887981414795, 0.013019904136657715, 0.013038880348205566, 0.012986592292785645, 0.013083135604858399, 0.013008607864379882, 0.01300211238861084, 0.013060959815979004, 0.013028639793395996, 0.013062687873840332, 0.013109567642211915, 0.013148287773132324, 0.013252351760864258, 0.013328351974487306, 0.013588576316833497, 0.013392928123474122, 0.013422911643981933, 0.013099616050720214, 0.01356060791015625, 0.013556639671325683, 0.013639552116394043, 0.013594752311706542, 0.013704575538635255, 0.013659839630126952, 0.013688960075378419, 0.013689727783203124, 0.013621536254882813, 0.013638943672180175, 0.01359648036956787, 0.013672351837158203, 0.013607680320739746, 0.013729727745056153, 0.013684000015258789, 0.013652000427246094, 0.013623680114746094, 0.013607199668884278, 0.013639039993286133, 0.013522848129272461, 0.013559871673583985, 0.01358505630493164, 0.013529088020324707, 0.013469696044921875, 0.01357430362701416, 0.013541215896606446, 0.013555328369140624, 0.01348646354675293, 0.013510656356811524, 0.013590527534484862, 0.013602144241333007, 0.013531807899475098, 0.013479552268981933, 0.013541343688964844, 0.013576383590698243, 0.013537504196166993, 0.01350169563293457, 0.013512479782104491, 0.013497311592102051, 0.0135863037109375, 0.013506015777587891, 0.013515423774719239, 0.013438752174377441, 0.013316351890563965, 0.014213088035583497, 0.01339187240600586, 0.013337984085083007, 0.013292415618896485, 0.013389887809753418, 0.01361411190032959, 0.013421567916870117, 0.013330080032348634, 0.013252608299255371, 0.013444512367248536, 0.013367136001586915, 0.013283295631408691, 0.0131878719329834, 0.0130600004196167, 0.01296985626220703, 0.01298044776916504, 0.012924032211303711, 0.01299135971069336, 0.013043775558471679, 0.01372332763671875, 0.013128543853759765, 0.013276896476745605, 0.013135744094848632, 0.013136320114135742, 0.013123519897460937, 0.013042079925537109, 0.013112607955932618, 0.013081151962280274, 0.014055328369140625, 0.014036543846130371, 0.01401039981842041, 0.01385923194885254, 0.013256640434265136, 0.013236288070678712, 0.013432607650756836, 0.013301759719848634, 0.013058143615722656, 0.013056127548217774, 0.013057760238647461, 0.01303171157836914, 0.012904255867004395, 0.013010463714599609, 0.013003423690795898, 0.013154399871826173, 0.013152159690856934, 0.013103296279907226, 0.01310700798034668, 0.013109248161315918, 0.013091103553771972, 0.013102815628051758, 0.013098272323608398, 0.012952287673950196, 0.013042943954467773, 0.012874496459960938, 0.012936223983764648, 0.012901344299316407, 0.012996512413024902, 0.013021280288696289, 0.013238143920898438, 0.013226112365722657, 0.013223039627075195, 0.013339455604553224, 0.014368831634521484, 0.01600307273864746, 0.013165984153747558, 0.014186431884765625, 0.013122048377990723, 0.013144224166870118, 0.013037599563598633, 0.01313100814819336, 0.013218624114990234, 0.013565855979919434, 0.013295904159545898, 0.013117152214050293, 0.01318057632446289, 0.013017375946044922, 0.013080351829528809, 0.01295798397064209, 0.013065279960632325, 0.013257823944091796, 0.014761183738708496, 0.012696864128112793, 0.013700063705444337, 0.01628927993774414, 0.014596416473388671, 0.013072447776794434, 0.013100864410400391, 0.013045887947082519, 0.01311574363708496, 0.013001952171325683, 0.012971776008605957, 0.012985055923461913, 0.012948703765869141, 0.012935263633728027, 
0.012966303825378419, 0.01298185634613037, 0.013535008430480958, 0.013042559623718262, 0.012975584030151368, 0.013168224334716797, 0.01330681610107422, 0.013307904243469238, 0.013342720031738281, 0.013447168350219727, 0.013436351776123047, 0.013498751640319825, 0.013446784019470215, 0.013426495552062987, 0.013443424224853515, 0.013433279991149903, 0.013428640365600587, 0.013531200408935547, 0.013430879592895508, 0.013437855720520019, 0.013576640129089355, 0.013459936141967773, 0.013510751724243163, 0.013418496131896973, 0.013414719581604003, 0.013301440238952637, 0.013277503967285156, 0.01336473560333252, 0.013508864402770996, 0.013315327644348144, 0.013300415992736816, 0.013370656013488769, 0.013508576393127442, 0.013384448051452637, 0.013393695831298829, 0.0133820161819458, 0.013213312149047852, 0.013289695739746093, 0.013191167831420898, 0.01313372802734375, 0.013123680114746093, 0.013141311645507813, 0.013171392440795899, 0.013220159530639649, 0.013184351921081543, 0.013175168037414552, 0.013191136360168457, 0.01344921588897705, 0.013359359741210938, 0.013215680122375489, 0.0129518404006958, 0.013181280136108399, 0.013251839637756347, 0.013316512107849121, 0.013230463981628419, 0.013196576118469238, 0.013205408096313476, 0.013213536262512208, 0.0132424955368042, 0.01334768009185791, 0.013469696044921875, 0.013423999786376954, 0.013202207565307617, 0.013109087944030762, 0.013391263961791992, 0.01301302433013916, 0.013046336174011231, 0.013072640419006347, 0.013067839622497558, 0.013266847610473633, 0.013244064331054687, 0.013224224090576171, 0.013160896301269532, 0.013161760330200195, 0.013095552444458008, 0.013076031684875488, 0.01302950382232666, 0.013029696464538575, 0.012976127624511719, 0.013025152206420898, 0.013187199592590333, 0.013217791557312012, 0.013191167831420898, 0.01315782356262207, 0.01318057632446289, 0.013073311805725098, 0.013010944366455078, 0.012978400230407715, 0.01306595230102539, 0.013051039695739746, 0.013203680038452148, 0.013238975524902344, 0.013189120292663574, 0.013148127555847168, 0.0132892484664917, 0.013407839775085448, 0.01337388801574707, 0.013316320419311524, 0.013172736167907715, 0.013091936111450195, 0.013142815589904785, 0.013054047584533691, 0.013008288383483887, 0.01303382396697998, 0.013017375946044922, 0.013047807693481446, 0.0131146240234375, 0.013152064323425293, 0.013123871803283692, 0.013154815673828125, 0.013125791549682617, 0.013078720092773437, 0.013147616386413574, 0.01319974422454834, 0.01366972827911377, 0.013640064239501953, 0.013629728317260743, 0.01358233642578125, 0.013567551612854004, 0.013612480163574218, 0.013717696189880372, 0.013714048385620117, 0.013611200332641602, 0.013584159851074218, 0.01359488010406494, 0.013660223960876464, 0.013567904472351074, 0.013658111572265624, 0.013539232254028321, 0.0135578556060791, 0.01349955177307129, 0.013773344039916991, 0.013476384162902833, 0.013501728057861328, 0.013446751594543458, 0.013444095611572266, 0.01346127986907959, 0.013399359703063964, 0.01347049617767334, 0.013471776008605958, 0.013428640365600587, 0.013381695747375489, 0.013285759925842286, 0.01363491153717041, 0.01330735969543457, 0.01323209571838379, 0.013169792175292969, 0.013254176139831543, 0.013246432304382324, 0.013162752151489258, 0.013115391731262208, 0.013130784034729004, 0.013097951889038086, 0.013119615554809571, 0.013100768089294433, 0.013135711669921874, 0.013054047584533691, 0.013061440467834473, 0.013092896461486817, 0.013046976089477538, 0.012936800003051759, 0.013055904388427735, 0.01332038402557373, 
0.013328384399414063, 0.013315296173095703, 0.0133721923828125, 0.013420063972473145, 0.0133853120803833, 0.013626239776611327, 0.013479935646057128, 0.013478976249694824, 0.013825087547302247, 0.013451295852661133, 0.013438816070556641, 0.013457375526428223, 0.013441151618957519, 0.013008864402770996, 0.013377568244934083, 0.01333801555633545, 0.013374048233032227, 0.013344896316528321, 0.013323583602905274, 0.013248384475708008, 0.013365951538085938, 0.013294624328613282, 0.013278240203857422, 0.013303359985351563, 0.013391231536865235, 0.013433088302612304, 0.013424960136413575, 0.013269375801086426, 0.013115391731262208, 0.013107263565063476, 0.013172032356262207, 0.013052096366882324, 0.013165056228637695, 0.013103103637695313, 0.01307852840423584, 0.013033760070800782, 0.01305519962310791, 0.013036031723022461, 0.013031328201293945, 0.012977663993835448, 0.012951807975769043, 0.012978528022766113, 0.012940735816955566, 0.012908384323120118, 0.01291324806213379, 0.012900639533996582, 0.012933247566223145, 0.013004511833190917, 0.012951423645019531, 0.013006560325622559, 0.012990880012512206, 0.012972031593322754, 0.012965696334838867, 0.012949503898620606, 0.012906304359436035, 0.01294979190826416, 0.012903743743896485, 0.012951807975769043, 0.012976672172546387, 0.012953599929809571, 0.012982272148132324, 0.01315225601196289, 0.012998656272888183, 0.013061951637268067, 0.013019328117370605, 0.013032671928405761, 0.013007648468017578, 0.013020544052124024, 0.013020992279052734, 0.01300153636932373, 0.012976127624511719, 0.01303337574005127, 0.012959839820861817, 0.012939040184020995, 0.012947456359863281, 0.013062687873840332, 0.012665087699890137, 0.013041152000427245, 0.012989695549011231, 0.013134847640991211, 0.013049023628234863, 0.013032256126403808, 0.012985952377319336, 0.012988096237182617, 0.013060383796691895, 0.01297862434387207, 0.012959839820861817, 0.012944767951965332, 0.012968640327453613, 0.012922719955444336, 0.012992511749267579, 0.01290060806274414, 0.01308249568939209, 0.012953472137451173, 0.012959039688110352, 0.013012672424316406, 0.012915712356567383, 0.013067584037780761, 0.013324992179870605, 0.013424768447875976, 0.01343609619140625, 0.013506367683410644, 0.013501343727111816, 0.01358028793334961, 0.013457056045532226, 0.013500384330749512, 0.01356326389312744, 0.013537823677062988, 0.013566240310668946, 0.01350496006011963, 0.013650912284851074, 0.013570624351501465, 0.013552127838134765, 0.013460607528686523, 0.013437503814697266, 0.013432127952575684, 0.013512543678283691, 0.013541631698608399, 0.013657983779907226, 0.013447392463684081, 0.013396479606628419, 0.013478079795837402, 0.013446975708007812, 0.013500415802001953, 0.013493824005126952, 0.013725664138793945, 0.013451807975769043, 0.0133602237701416, 0.01326576042175293, 0.013274368286132812, 0.013194144248962402, 0.013201248168945313, 0.013153440475463867, 0.013146976470947265, 0.013091872215270996, 0.013138912200927734, 0.013082431793212891, 0.012992704391479492, 0.013029376029968261, 0.012685312271118163, 0.012984319686889649, 0.012913760185241699, 0.012929951667785645, 0.01289241600036621, 0.012965215682983398, 0.012930784225463867, 0.012958047866821289, 0.012879743576049804, 0.01287548828125, 0.01296678352355957, 0.012991968154907227, 0.012927136421203612, 0.012953824043273925, 0.013031359672546386, 0.012949567794799804, 0.012954784393310546, 0.01296895980834961, 0.013039487838745117, 0.012909695625305176, 0.012917183876037597, 0.01309331226348877, 0.012857343673706055, 0.01287168025970459, 
0.012928031921386718, 0.013063136100769044, 0.012881024360656738, 0.012860447883605956, 0.012859552383422852, 0.012958880424499511, 0.012847647666931153, 0.012918784141540527, 0.012890144348144532, 0.01291808032989502, 0.012965727806091308, 0.01294159984588623, 0.012964127540588379, 0.013041919708251953, 0.012985631942749024, 0.012945728302001953, 0.012939680099487304, 0.012907903671264648, 0.01315062427520752, 0.016723712921142577, 0.013594304084777832, 0.013100000381469726, 0.013029151916503907, 0.013027711868286133, 0.012992159843444824, 0.012984064102172852, 0.013037376403808594, 0.012985856056213378, 0.012948415756225585, 0.013020511627197265, 0.012959648132324218, 0.01297596836090088, 0.012946335792541504, 0.012969792366027832, 0.012975647926330566, 0.013073408126831054, 0.01328707218170166, 0.013286815643310548, 0.01304035186767578, 0.01278771209716797, 0.013139967918395995, 0.013100607872009278, 0.013107711791992188, 0.01305299186706543, 0.013032320022583008, 0.013086112022399902, 0.013121824264526367, 0.013062463760375977, 0.013013312339782715, 0.012998335838317871, 0.013000191688537598, 0.01297049617767334, 0.0129617919921875, 0.013021183967590331, 0.01306601619720459, 0.013153504371643067, 0.013424960136413575, 0.0134519681930542, 0.013462719917297363, 0.013384511947631836, 0.01349836826324463, 0.013414400100708008, 0.013413439750671387, 0.013446047782897949, 0.013586496353149414, 0.013549535751342774, 0.01358243179321289, 0.013538623809814453, 0.013546303749084472, 0.013778719902038574, 0.013981696128845214, 0.013533184051513672, 0.013594688415527344, 0.013522879600524902, 0.013528512001037598, 0.013468416213989257, 0.013442879676818847, 0.013491264343261719, 0.013448127746582031, 0.0134782075881958, 0.013442272186279297, 0.013449695587158203, 0.013514431953430175, 0.013476160049438477, 0.013610527992248536, 0.013522720336914063, 0.013611488342285157, 0.013588704109191895, 0.013389823913574218, 0.01338368034362793, 0.013334527969360351, 0.013316096305847168, 0.01351593589782715, 0.013978495597839355, 0.013553631782531739, 0.013499391555786134, 0.013511679649353027, 0.013540512084960937, 0.013478752136230468, 0.013484031677246093, 0.013532608032226563, 0.013480031967163086]",tokens/s,75.25091528576823,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1138.520064,1096.679424,0.0,710.934528,686.03904,s,1,8.581947265625,8.581947265625,0.0,8.581947265625,8.581947265625,8.581947265625,8.581947265625,[8.581947265625],,kWh,3.028218689998236e-05,3.3329752637880467e-06,9.33750747000861e-06,4.2952669633779014e-05,,MB,1498.996736,1436.418048,0.0,1021.313024,985.00096,s,10,0.3625167045593263,0.03625167045593262,0.00021326722920203412,0.036177934646606444,0.03636599769592285,0.03661200733184815,0.03680881504058838,"[0.03685801696777344, 0.036311328887939455, 0.036157150268554684, 0.0361987190246582, 0.036135711669921876, 0.036092254638671876, 
0.03629014587402344, 0.036156257629394534, 0.03610809707641602, 0.03620902252197265]",tokens/s,7061.743549478431,kWh,1.181011162987339e-06,1.3024354844658021e-07,7.819809123226835e-07,2.0932356237566027e-06,tokens/kWh,122298702.11198315,MB,1531.551744,1486.749696,0.0,1071.644672,985.00352,s,10,16.115920166015627,1.6115920166015623,0.00446747942641449,1.6106911010742189,1.6192678588867186,1.6195976135253904,1.6198614172363281,"[1.619194580078125, 1.6054296875, 1.60847412109375, 1.6074649658203124, 1.6096724853515625, 1.610689697265625, 1.6199273681640625, 1.613071044921875, 1.6106925048828125, 1.6113037109375]",tokens/s,39.091779650814466,kWh,5.74596066828467e-05,6.33701027013548e-06,2.3074621506275535e-05,8.687123845925773e-05,tokens/kWh,725211.2565374183,,s,630,16.112701669693006,0.025575716936020624,0.0007607594888319328,0.025405376434326173,0.025761101150512694,0.026643320751190185,0.0302019294166565,"[0.02538960075378418, 0.025730976104736326, 0.025276544570922852, 0.025794559478759766, 0.025511232376098633, 0.025393440246582032, 0.025514400482177735, 0.026634239196777345, 0.02555084800720215, 0.02524083137512207, 0.025289472579956056, 0.025196544647216795, 0.02536396789550781, 0.0252359676361084, 0.025220928192138673, 0.025292640686035157, 0.025471328735351562, 0.025306848526000975, 0.025440351486206055, 0.03029216003417969, 0.025413631439208984, 0.02549519920349121, 0.02528291130065918, 0.025307455062866212, 0.02519321632385254, 0.02547808074951172, 0.025497055053710936, 0.025245920181274414, 0.025414207458496093, 0.02529199981689453, 0.025639455795288087, 0.02540675163269043, 0.025299007415771485, 0.02538470458984375, 0.025348320007324218, 0.025520544052124023, 0.025368799209594728, 0.02537478446960449, 0.02556643295288086, 0.025448768615722657, 0.025782751083374023, 0.025386367797851563, 0.025471616744995117, 0.027412479400634765, 0.026916864395141602, 0.02597887992858887, 0.025438207626342774, 0.025609823226928712, 0.02721014404296875, 0.0257126407623291, 0.025362239837646485, 0.025362335205078124, 0.025332000732421874, 0.02527427291870117, 0.025331039428710938, 0.025301984786987305, 0.02539289665222168, 0.025303071975708007, 0.02540060806274414, 0.027908832550048827, 0.02774630355834961, 0.02668070411682129, 0.025434112548828124, 0.02535433578491211, 0.02567804718017578, 0.025534208297729493, 0.02559391975402832, 0.025491552352905275, 0.025431968688964843, 0.02539948844909668, 0.025487360000610353, 0.025214208602905273, 0.025329631805419923, 0.02540764808654785, 0.0252926082611084, 0.02530339241027832, 0.02569264030456543, 0.025440095901489258, 0.025319263458251952, 0.025362688064575194, 0.025689216613769533, 0.02531219291687012, 0.02531328010559082, 0.02550579261779785, 0.025380767822265626, 0.025145439147949217, 0.025302047729492187, 0.025350303649902345, 0.025172800064086915, 0.025401344299316408, 0.025210880279541017, 0.025195648193359375, 0.025213632583618164, 0.026295551300048826, 0.02550409507751465, 0.02531337547302246, 0.025892959594726563, 0.025688127517700197, 0.027909536361694336, 0.02751584053039551, 0.025171743392944337, 0.02533193588256836, 0.025304895401000976, 0.025526464462280272, 0.025379903793334962, 0.02546988868713379, 0.025462783813476563, 0.025290752410888673, 0.02546281623840332, 0.025398784637451172, 0.02544483184814453, 0.02535321617126465, 0.025225311279296874, 0.025440448760986327, 0.02539388847351074, 0.025182111740112305, 0.02535638427734375, 0.025546432495117188, 0.025293119430541994, 0.025454591751098633, 0.02535974311828613, 0.02533030319213867, 
0.025386592864990235, 0.025343711853027345, 0.025288639068603517, 0.02528108787536621, 0.025408384323120117, 0.025372064590454102, 0.025352800369262695, 0.02537811279296875, 0.02522118377685547, 0.02523814392089844, 0.025349920272827148, 0.0252677116394043, 0.025416416168212892, 0.025409536361694338, 0.025450496673583983, 0.025663488388061522, 0.028508031845092773, 0.025587839126586916, 0.0252105598449707, 0.025438528060913086, 0.02531532859802246, 0.025529600143432616, 0.025281280517578126, 0.02535424041748047, 0.025281919479370116, 0.025287296295166014, 0.025366527557373047, 0.025281696319580077, 0.025281375885009765, 0.02530303955078125, 0.02536038398742676, 0.025288703918457032, 0.025264127731323242, 0.02551945686340332, 0.0254366397857666, 0.025458879470825195, 0.025231359481811523, 0.02556492805480957, 0.02575334358215332, 0.02545270347595215, 0.025280704498291017, 0.025349599838256836, 0.02534809684753418, 0.02524188804626465, 0.02541200065612793, 0.025476896286010742, 0.025266399383544923, 0.0252391357421875, 0.025342367172241212, 0.025416799545288086, 0.0253155517578125, 0.025778528213500976, 0.025570816040039062, 0.02578054428100586, 0.025524511337280273, 0.02563088035583496, 0.029537439346313477, 0.025364992141723632, 0.025409311294555665, 0.025418399810791015, 0.025441919326782227, 0.02560652732849121, 0.025523872375488282, 0.025405792236328124, 0.02565017509460449, 0.02545257568359375, 0.02546575927734375, 0.025385536193847657, 0.0254421443939209, 0.02552841567993164, 0.025569183349609375, 0.025397151947021485, 0.02555683135986328, 0.025735231399536134, 0.02561859130859375, 0.025413759231567384, 0.025194175720214845, 0.025392799377441405, 0.02530534362792969, 0.025546367645263673, 0.025434944152832033, 0.02535580825805664, 0.025342496871948242, 0.025495296478271486, 0.025353919982910155, 0.02528927993774414, 0.025196544647216795, 0.025337087631225587, 0.0253570556640625, 0.02531123161315918, 0.02550409507751465, 0.02537763214111328, 0.02542207908630371, 0.02539571189880371, 0.025407392501831053, 0.027948671340942383, 0.02761782455444336, 0.02531491279602051, 0.025541023254394533, 0.02553856086730957, 0.025231359481811523, 0.025183551788330077, 0.02545254325866699, 0.025297599792480467, 0.025464704513549805, 0.0266507511138916, 0.02547302436828613, 0.025288415908813477, 0.02520012855529785, 0.025419967651367188, 0.02562886428833008, 0.025334175109863282, 0.02552217674255371, 0.025450496673583983, 0.026025312423706055, 0.02556716728210449, 0.02543280029296875, 0.025460416793823243, 0.02541804885864258, 0.025463968276977538, 0.02545136070251465, 0.02540889549255371, 0.025479808807373047, 0.025394847869873047, 0.025141599655151368, 0.025483135223388673, 0.025386175155639647, 0.025127872467041016, 0.02537676811218262, 0.025304319381713868, 0.02531670379638672, 0.02520521545410156, 0.02542425537109375, 0.02523750305175781, 0.02783807945251465, 0.027811328887939454, 0.025944799423217774, 0.02514672088623047, 0.026006336212158202, 0.025183551788330077, 0.025217727661132814, 0.02530031967163086, 0.025270912170410158, 0.02525187110900879, 0.025227264404296876, 0.02518556785583496, 0.025291488647460936, 0.025268224716186522, 0.025300479888916014, 0.025467391967773437, 0.025161727905273438, 0.0252620792388916, 0.025362432479858397, 0.025286048889160157, 0.02522534370422363, 0.025208927154541014, 0.025313983917236327, 0.02534163284301758, 0.025212928771972655, 0.025425247192382813, 0.02536057662963867, 0.025188159942626954, 0.025268543243408204, 0.02541971206665039, 0.025332128524780274, 
0.025239551544189453, 0.025903104782104492, 0.02650931167602539, 0.02569011116027832, 0.025616384506225585, 0.025377119064331054, 0.025843360900878905, 0.025769983291625977, 0.02536038398742676, 0.025591808319091795, 0.030459903717041017, 0.025247488021850586, 0.02508201599121094, 0.025597248077392578, 0.02534684753417969, 0.0251429443359375, 0.025416032791137695, 0.025210880279541017, 0.025407487869262696, 0.02543180847167969, 0.025245504379272463, 0.025300575256347657, 0.025439071655273437, 0.025260128021240235, 0.025284191131591797, 0.025444671630859374, 0.025540607452392578, 0.025335615158081054, 0.02518239974975586, 0.025341951370239257, 0.025235328674316406, 0.025323616027832032, 0.025159135818481445, 0.02513523292541504, 0.02510310363769531, 0.025378143310546875, 0.02508755111694336, 0.025105119705200196, 0.025688383102416994, 0.025321151733398436, 0.025161920547485353, 0.025194368362426757, 0.025081247329711915, 0.025164384841918946, 0.02548240089416504, 0.025305376052856446, 0.025278432846069336, 0.025746015548706053, 0.025386016845703126, 0.027921375274658204, 0.02730188751220703, 0.025401023864746092, 0.025133279800415038, 0.025473119735717774, 0.025249792098999024, 0.025212928771972655, 0.025404960632324218, 0.02528713607788086, 0.025163360595703125, 0.025511775970458984, 0.025342111587524415, 0.02523360061645508, 0.025956575393676757, 0.025406911849975587, 0.025569856643676756, 0.02537833595275879, 0.02545097541809082, 0.025242879867553712, 0.02536729621887207, 0.025538368225097655, 0.02534623908996582, 0.025640960693359374, 0.025345407485961913, 0.025422464370727538, 0.02532863998413086, 0.025267200469970705, 0.025358335494995117, 0.0251146240234375, 0.025198591232299804, 0.026243072509765625, 0.025500703811645507, 0.02541257667541504, 0.025650848388671876, 0.028608543395996094, 0.02541804885864258, 0.025437503814697265, 0.025251935958862305, 0.02519273567199707, 0.025276735305786134, 0.025595903396606445, 0.030346431732177735, 0.025577472686767577, 0.025585023880004883, 0.025344640731811523, 0.025487360000610353, 0.025417119979858398, 0.025382688522338867, 0.025299264907836915, 0.025807039260864258, 0.025463104248046875, 0.027776800155639648, 0.02593302345275879, 0.027532512664794923, 0.02529020881652832, 0.025411775588989258, 0.025167999267578126, 0.025511936187744142, 0.025337568283081056, 0.025194528579711915, 0.025522432327270507, 0.025347360610961912, 0.025332128524780274, 0.025379039764404296, 0.02523129653930664, 0.025394336700439453, 0.025523199081420898, 0.025323200225830077, 0.025588031768798827, 0.02529596710205078, 0.025303968429565428, 0.025577152252197265, 0.025424192428588867, 0.025366336822509765, 0.025256128311157228, 0.02527027130126953, 0.02527129554748535, 0.026149728775024413, 0.025479328155517577, 0.025640960693359374, 0.030248960494995116, 0.025398399353027342, 0.025406335830688475, 0.025413408279418945, 0.025520351409912108, 0.02532307243347168, 0.025465280532836913, 0.025845760345458983, 0.025409536361694338, 0.02572083282470703, 0.031352640151977536, 0.02562886428833008, 0.02582304000854492, 0.025897151947021486, 0.02532124710083008, 0.02537196731567383, 0.025594783782958985, 0.02533340835571289, 0.0252890567779541, 0.025447872161865233, 0.025508415222167968, 0.025251840591430662, 0.025761024475097656, 0.025565183639526368, 0.025510656356811524, 0.02567945671081543, 0.0277390079498291, 0.026003103256225586, 0.025495904922485352, 0.02527027130126953, 0.02574745559692383, 0.025384000778198242, 0.025477184295654296, 0.025410367965698243, 
0.0254683837890625, 0.02564156723022461, 0.025297056198120116, 0.025476959228515624, 0.028081888198852538, 0.027920671463012695, 0.025290719985961913, 0.02528873634338379, 0.025487360000610353, 0.02537478446960449, 0.0254749755859375, 0.025354272842407228, 0.025257568359375, 0.025463199615478514, 0.02553788757324219, 0.02551465606689453, 0.02536038398742676, 0.025440576553344727, 0.025402528762817383, 0.02546463966369629, 0.025112928390502928, 0.025345727920532225, 0.025254112243652344, 0.02510691261291504, 0.02532275199890137, 0.025277183532714843, 0.02555084800720215, 0.025411584854125976, 0.025608192443847655, 0.02554265594482422, 0.025268224716186522, 0.02542521667480469, 0.02523014450073242, 0.025171232223510743, 0.02544291114807129, 0.025372352600097656, 0.025114912033081055, 0.02567967987060547, 0.025434335708618163, 0.02530940818786621, 0.025200416564941406, 0.025380863189697265, 0.025253023147583008, 0.025316192626953126, 0.02555084800720215, 0.030373888015747072, 0.025361919403076173, 0.025575551986694336, 0.025179712295532227, 0.025387775421142577, 0.025473087310791016, 0.025223167419433593, 0.02535580825805664, 0.02533030319213867, 0.025432416915893555, 0.025470239639282227, 0.025266912460327147, 0.025417728424072264, 0.025430015563964844, 0.025289951324462892, 0.02526902389526367, 0.025333759307861328, 0.025347295761108397, 0.02535094451904297, 0.025173759460449217, 0.025352575302124022, 0.02525347137451172, 0.025487199783325195, 0.025288671493530274, 0.025298944473266603, 0.025326047897338867, 0.02528665542602539, 0.025409568786621095, 0.025442272186279296, 0.02574505615234375, 0.025536863327026368, 0.025393152236938478, 0.025417728424072264, 0.026308671951293945, 0.025382848739624025, 0.025550432205200195, 0.02537104034423828, 0.025503744125366212, 0.02562156867980957, 0.030008256912231444, 0.025561088562011718, 0.02537788772583008, 0.02550876808166504, 0.025382911682128906, 0.02551379203796387, 0.02575971221923828, 0.0254998722076416, 0.02583750343322754, 0.02571494483947754, 0.025597759246826172, 0.02553446388244629, 0.025437280654907225, 0.025624704360961915, 0.025444799423217774, 0.025852256774902344, 0.025374719619750977, 0.025540416717529296, 0.025479360580444334, 0.025600000381469725, 0.025450496673583983, 0.026195167541503906, 0.025283327102661134, 0.025448480606079103, 0.025275968551635743, 0.025868736267089843, 0.025341951370239257, 0.025475072860717773, 0.025483264923095703, 0.02568191909790039, 0.025495424270629882, 0.025395328521728516, 0.025552192687988282, 0.02534844779968262, 0.025626752853393556, 0.025604095458984375, 0.02553241539001465, 0.025456640243530275, 0.025683231353759765, 0.030489120483398437, 0.025712831497192383, 0.025455839157104494, 0.025510688781738282, 0.025388383865356447, 0.025655967712402344, 0.025282560348510744, 0.02541312026977539, 0.025307647705078123, 0.02525951957702637, 0.025516544342041016, 0.025839391708374022, 0.02547443199157715, 0.025465696334838868, 0.02540947151184082, 0.025419839859008787, 0.02548940849304199, 0.025456640243530275, 0.025430015563964844, 0.02541263961791992, 0.025293567657470702, 0.025355680465698242, 0.025372575759887696, 0.02520284843444824, 0.02537548828125, 0.025298944473266603, 0.025455936431884766, 0.025494047164916992, 0.025413791656494142, 0.02531532859802246, 0.025761791229248047, 0.025411584854125976, 0.025202688217163087, 0.02533785629272461, 0.025253599166870117, 0.025149728775024412, 0.02527663993835449, 0.025347936630249025, 0.025321184158325197, 0.0253603515625, 0.02545894432067871, 
0.030086784362792968, 0.025512287139892578, 0.02519862365722656, 0.025621503829956056, 0.025455392837524415, 0.025453983306884767, 0.02525254440307617, 0.02539491271972656, 0.02517852783203125, 0.025536096572875977, 0.02521913528442383, 0.025432096481323243, 0.025421567916870117, 0.02524012756347656, 0.025370624542236327, 0.025503103256225585]",tokens/s,39.09958819537952,,, -4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -22268,7 +22268,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): 
+4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -22478,7 +22478,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -22575,7 +22575,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
-4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -22968,7 +22968,7 @@ AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4147.081216,4758.306816,0.0,4372.56192,4292.521984,s,1,10.4303134765625,10.4303134765625,0.0,10.4303134765625,10.4303134765625,10.4303134765625,10.4303134765625,[10.4303134765625],,kWh,8.556503264168971e-05,9.430852056552683e-06,2.722641066998932e-05,0.00012222229536823173,,MB,1416.237056,5045.61664,0.0,4628.414464,4562.7008,s,10,3.9022229614257813,0.39022229614257814,0.001694630852617215,0.3903907012939453,0.3919876220703125,0.39240007629394535,0.3927300396728516,"[0.3869678649902344, 0.3893675537109375, 0.38851165771484375, 0.38892413330078124, 0.39027044677734374, 0.3915806579589844, 0.3918959655761719, 0.3913811950683594, 0.3905109558105469, 
0.39281253051757814]",tokens/s,656.0363221953457,kWh,1.1916911465667604e-05,1.3133233537120736e-06,7.913039663760002e-06,2.114327448313968e-05,tokens/kWh,12107869.110063465,MB,1442.070528,5045.61664,0.0,4628.414464,4562.70336,s,10,38.4860888671875,3.8486088867187505,0.011653275781762331,3.8463636474609375,3.8654144287109373,3.867638610839844,3.8694179565429687,"[3.841705322265625, 3.842807861328125, 3.83989892578125, 3.836185791015625, 3.84991943359375, 3.832818359375, 3.85083642578125, 3.86986279296875, 3.864920166015625, 3.8571337890625]",tokens/s,16.3695511428579,kWh,8.764843768682992e-05,9.66871983711143e-06,5.46024825708391e-05,0.00015191964009478044,tokens/kWh,414692.92555389955,,s,630,38.48315692138673,0.06108437606569321,0.0008951420687282461,0.06085835266113281,0.06147976913452148,0.06254073619842529,0.06547078186035157,"[0.06219388961791992, 0.061213920593261716, 0.06080182266235352, 0.060795967102050784, 0.060545696258544925, 0.06098972702026367, 0.06104883193969726, 0.06073667144775391, 0.06139580917358398, 0.06636479949951171, 0.060861057281494144, 0.06076361465454101, 0.06085686492919922, 0.061118465423583984, 0.06068569564819336, 0.060522750854492186, 0.060770687103271485, 0.06210464096069336, 0.06105593490600586, 0.06075187301635742, 0.06061056137084961, 0.06066368103027344, 0.06429494476318359, 0.06096086502075195, 0.06078668975830078, 0.06109798431396484, 0.06051203155517578, 0.060512191772460935, 0.060776737213134764, 0.06072115325927734, 0.060560993194580075, 0.060552833557128906, 0.06048438262939453, 0.06093123245239258, 0.061181983947753905, 0.06060502243041992, 0.06091750335693359, 0.06090800094604492, 0.06089628982543945, 0.06094496154785156, 0.061030815124511716, 0.060827648162841794, 0.06075187301635742, 0.06062694549560547, 0.06058979034423828, 0.06042038345336914, 0.06093315124511719, 0.06098223876953125, 0.06062483215332031, 0.06068844985961914, 0.060945919036865234, 0.06078105545043945, 0.06073734283447266, 0.06045280075073242, 0.060827457427978515, 0.06089712142944336, 0.060899646759033206, 0.060687744140625, 0.060570049285888675, 0.06081158447265625, 0.06054851150512695, 0.060654335021972657, 0.06061590576171875, 0.06177382278442383, 0.060999614715576175, 0.0610544319152832, 0.06049033737182617, 0.06092172622680664, 0.06094655990600586, 0.060708446502685545, 0.060722591400146485, 0.06118297576904297, 0.06104608154296875, 0.06096345520019531, 0.06111238479614258, 0.06068576049804687, 0.06049635314941406, 0.06102569580078125, 0.06121542358398437, 0.061023841857910155, 0.06061673736572266, 0.06099353790283203, 0.06077644729614258, 0.06151152038574219, 0.06197068786621094, 0.060479774475097656, 0.06068835067749023, 0.0608746223449707, 0.06068598556518555, 0.06073715209960937, 0.06069747161865234, 0.06071209716796875, 0.060693313598632816, 0.060741600036621095, 0.060702785491943356, 0.0608554573059082, 0.060893791198730465, 0.060813568115234376, 0.06099296188354492, 0.060655487060546874, 0.06103542327880859, 0.06076313781738281, 0.060709663391113285, 0.061020160675048826, 0.060921855926513675, 0.06123276901245117, 0.06146732711791992, 0.061004798889160154, 0.06109049606323242, 0.061061119079589846, 0.06084812927246094, 0.061102081298828125, 0.06105702209472656, 0.06099308776855469, 0.061241790771484374, 0.0607127685546875, 0.06376432037353516, 0.06117820739746094, 0.06051225662231445, 0.060802974700927735, 0.06058329772949219, 0.060569759368896484, 0.06054969787597656, 0.06305356979370118, 0.06122623825073242, 0.06055014419555664, 0.06203596878051758, 
0.06084198379516602, 0.06063622283935547, 0.06029385757446289, 0.0609917106628418, 0.060641281127929686, 0.06107328033447266, 0.06091584014892578, 0.06049587249755859, 0.06086812973022461, 0.060526336669921875, 0.0606231689453125, 0.060755359649658204, 0.0610431022644043, 0.06070742416381836, 0.0611328010559082, 0.06168547058105469, 0.06070915222167969, 0.06078464126586914, 0.06096691131591797, 0.060830879211425784, 0.06080803298950195, 0.0605296630859375, 0.06070316696166992, 0.061071937561035156, 0.06043648147583008, 0.06106521606445312, 0.06137651062011719, 0.060674049377441405, 0.060561153411865236, 0.060641536712646486, 0.060886943817138675, 0.06109193420410156, 0.06134483337402344, 0.060953536987304685, 0.06497004699707032, 0.06119289779663086, 0.06053187179565429, 0.060408096313476566, 0.06114976119995117, 0.06083993530273438, 0.060835201263427736, 0.060789344787597656, 0.06098313522338867, 0.06074079895019531, 0.06084812927246094, 0.060902400970458986, 0.061446144104003904, 0.06126729583740234, 0.06088566589355469, 0.06084934234619141, 0.06081324768066406, 0.06113983917236328, 0.060684383392333986, 0.06101375961303711, 0.06105718231201172, 0.061056865692138675, 0.06089334487915039, 0.06085126495361328, 0.06092044830322266, 0.06089344024658203, 0.06051027297973633, 0.060422080993652344, 0.06159769439697266, 0.06114281463623047, 0.06144179153442383, 0.06090185546875, 0.06061670303344727, 0.06071862411499023, 0.06156521606445312, 0.060891326904296876, 0.060731391906738284, 0.060569217681884766, 0.06068454360961914, 0.06107926559448242, 0.060735904693603515, 0.060714622497558594, 0.06069900894165039, 0.06079897689819336, 0.060590080261230465, 0.06071091079711914, 0.06051820755004883, 0.06254390335083007, 0.0611223030090332, 0.061338016510009766, 0.06066329574584961, 0.060779006958007815, 0.06071091079711914, 0.06037094497680664, 0.06066175842285156, 0.06078464126586914, 0.06078822326660156, 0.06085068893432617, 0.06068966293334961, 0.06063779067993164, 0.060383392333984376, 0.06054912185668945, 0.06078464126586914, 0.06107686233520508, 0.06063539123535156, 0.060628673553466794, 0.06048531341552734, 0.0605821762084961, 0.06047510528564453, 0.06071795272827148, 0.0607720947265625, 0.06069491195678711, 0.060683425903320314, 0.06071516799926758, 0.06238483047485351, 0.060706817626953125, 0.06062080001831055, 0.06070393753051758, 0.06099641418457031, 0.06094345474243164, 0.061434398651123046, 0.060983680725097654, 0.06093743896484375, 0.0607977294921875, 0.06122086334228516, 0.06101606369018555, 0.060911617279052734, 0.06122409439086914, 0.06088739013671875, 0.060910079956054686, 0.06137855911254883, 0.062004001617431644, 0.06076825714111328, 0.060729248046875, 0.060593441009521486, 0.061072193145751956, 0.060979072570800784, 0.06095065689086914, 0.06068204879760742, 0.06136441421508789, 0.06226473617553711, 0.0610118408203125, 0.061190879821777344, 0.06075334548950195, 0.06106547164916992, 0.06114131164550781, 0.0610918083190918, 0.062536865234375, 0.061063262939453126, 0.06087145614624023, 0.06101196670532227, 0.06073344039916992, 0.06095811080932617, 0.06076403045654297, 0.0610629768371582, 0.06122723388671875, 0.06115603256225586, 0.061217952728271484, 0.06125980758666992, 0.0610948486328125, 0.06102521514892578, 0.06090028762817383, 0.06159769439697266, 0.06183731079101563, 0.06127206420898437, 0.06098329544067383, 0.060796928405761716, 0.06109183883666992, 0.060835006713867185, 0.06064112091064453, 0.060895263671875, 0.061518688201904294, 0.06086614227294922, 0.06129510498046875, 
0.06104064178466797, 0.061183998107910156, 0.061259777069091796, 0.06074982452392578, 0.0607081298828125, 0.0611250228881836, 0.06102457427978516, 0.06072524642944336, 0.060818431854248046, 0.06102860641479492, 0.06070963287353515, 0.06084198379516602, 0.060903423309326174, 0.061208576202392576, 0.06077635192871094, 0.06113238525390625, 0.06084659194946289, 0.063833984375, 0.060886302947998044, 0.06065401458740234, 0.062013439178466793, 0.06038323211669922, 0.06073942565917969, 0.06098883056640625, 0.060891902923583985, 0.06072431945800781, 0.060986270904541014, 0.06060851287841797, 0.06044672012329102, 0.06108345413208008, 0.060602336883544924, 0.06079919815063477, 0.0605816650390625, 0.060749664306640624, 0.060539264678955075, 0.06105404663085937, 0.0605943374633789, 0.060668670654296875, 0.06040694427490234, 0.06047011184692383, 0.06070476913452148, 0.06076399993896484, 0.06121590423583984, 0.060757152557373045, 0.06057353591918945, 0.0607960319519043, 0.0611453742980957, 0.06073632049560547, 0.06090518569946289, 0.06085228729248047, 0.06064323043823242, 0.06067334365844727, 0.06066255950927734, 0.06067814254760742, 0.06038937759399414, 0.06074163055419922, 0.06064726257324219, 0.060784961700439455, 0.06063087844848633, 0.06095977783203125, 0.06055830383300781, 0.06073775863647461, 0.06076617431640625, 0.06071686553955078, 0.061628414154052735, 0.061558368682861325, 0.061149600982666016, 0.060792320251464846, 0.06069475173950195, 0.060516223907470704, 0.060730945587158205, 0.06082236862182617, 0.06081254577636719, 0.06124006271362305, 0.060723201751708984, 0.06262169647216796, 0.060857856750488284, 0.06065558242797851, 0.06083433532714844, 0.06076969528198242, 0.06105155181884766, 0.06107100677490234, 0.060604705810546874, 0.062389984130859374, 0.061130943298339846, 0.06262515258789063, 0.06085833740234375, 0.06088508987426758, 0.060717632293701175, 0.060638656616210936, 0.0609202880859375, 0.06069462585449219, 0.06130444717407227, 0.06075020980834961, 0.060534080505371096, 0.06114287948608398, 0.06083164978027344, 0.06073644638061523, 0.06145024108886719, 0.0614205436706543, 0.06081228637695312, 0.06133542251586914, 0.06108992004394531, 0.061001728057861325, 0.06073705673217773, 0.060714881896972654, 0.060365310668945314, 0.061071456909179686, 0.06110822296142578, 0.060784225463867185, 0.060752288818359375, 0.0611220817565918, 0.06060393524169922, 0.06068016052246094, 0.060513248443603514, 0.06093619155883789, 0.06086556625366211, 0.06389763259887696, 0.06114390563964844, 0.060714881896972654, 0.06082579040527344, 0.060540798187255856, 0.06067420959472656, 0.060872703552246096, 0.06773078155517578, 0.06091856002807617, 0.060649345397949216, 0.06042745590209961, 0.06098723220825195, 0.06072339248657226, 0.060482177734375, 0.06073155212402344, 0.060861984252929685, 0.060547550201416014, 0.06098710250854492, 0.060629280090332034, 0.060739585876464844, 0.06096281433105469, 0.0607088623046875, 0.06096236801147461, 0.06082400131225586, 0.06503218841552734, 0.06080716705322266, 0.06078569412231445, 0.060644222259521485, 0.061214046478271486, 0.06211008071899414, 0.061044960021972655, 0.06074748611450195, 0.06073686218261719, 0.060910526275634765, 0.06339763259887696, 0.06293660736083985, 0.06105324935913086, 0.06106889724731445, 0.06097798538208008, 0.06172393417358398, 0.06055120086669922, 0.06127280044555664, 0.060620128631591795, 0.06156150436401367, 0.06105680084228516, 0.06469776153564454, 0.061748001098632814, 0.06085836791992188, 0.060409854888916016, 0.06175932693481445, 
0.06091097640991211, 0.0653048324584961, 0.06125209426879883, 0.06174070358276367, 0.06103638458251953, 0.06063507080078125, 0.06132796859741211, 0.06090719985961914, 0.06105875015258789, 0.06128499221801758, 0.06351667022705078, 0.06114918518066406, 0.06100889587402344, 0.06069523239135742, 0.06082992172241211, 0.06137046432495117, 0.06064316940307617, 0.06080118560791015, 0.06550323486328125, 0.06074367904663086, 0.06094150543212891, 0.0606440315246582, 0.060904991149902346, 0.06115619277954101, 0.06132096099853516, 0.060862464904785155, 0.06118195343017578, 0.06081740951538086, 0.06149324798583984, 0.06088284683227539, 0.06134710311889648, 0.06118278503417969, 0.06116761779785156, 0.060849536895751954, 0.06075660705566406, 0.06539878082275391, 0.06093619155883789, 0.060962047576904294, 0.06096563339233398, 0.060930049896240235, 0.06101606369018555, 0.06089468765258789, 0.06329977416992187, 0.06317910385131836, 0.06308995056152343, 0.06511068725585938, 0.06109968185424805, 0.060725662231445314, 0.06079283142089844, 0.06060201644897461, 0.061185920715332034, 0.06076259231567383, 0.061209632873535154, 0.06095356750488281, 0.06099110412597656, 0.06068876647949219, 0.06074771118164062, 0.06108339309692383, 0.06096723175048828, 0.06048767852783203, 0.060499969482421874, 0.060788734436035156, 0.06550732421875, 0.060892288208007815, 0.060703712463378905, 0.06050806427001953, 0.06092800140380859, 0.06158095932006836, 0.06232025527954101, 0.060711647033691404, 0.06095894241333008, 0.06099148941040039, 0.0604824333190918, 0.060645664215087894, 0.06069926452636719, 0.06073331069946289, 0.06059196853637695, 0.06059347152709961, 0.06111276626586914, 0.06514332580566407, 0.06077199935913086, 0.06060038375854492, 0.06067023849487305, 0.0607782096862793, 0.061073696136474606, 0.060896896362304685, 0.06085055923461914, 0.06077788925170898, 0.060794784545898435, 0.06043308639526367, 0.061074718475341794, 0.06042396926879883, 0.060588768005371094, 0.060889312744140625, 0.06110819244384766, 0.0608903694152832, 0.06553206634521484, 0.06084806442260742, 0.061619998931884766, 0.06087756729125977, 0.0649115219116211, 0.06126182556152344, 0.06121446228027344, 0.06126617431640625, 0.061104129791259766, 0.06246649551391602, 0.06550019073486328, 0.06097110366821289, 0.06054182434082031, 0.060837535858154296, 0.06065187072753906, 0.06136217498779297, 0.060638622283935545, 0.06053744125366211, 0.06141097640991211, 0.06145878219604492, 0.061052734375, 0.06097903823852539, 0.06077679824829101, 0.060665855407714846, 0.06093967819213867, 0.06110678482055664, 0.061478271484375, 0.06529702758789062, 0.060641376495361325, 0.06061372756958008, 0.060838462829589844, 0.060604320526123044, 0.06069305419921875, 0.060913440704345706, 0.06145775985717773, 0.060875423431396486, 0.060609664916992184, 0.06069132614135742, 0.060604129791259766, 0.06063951873779297, 0.06051839828491211, 0.060647361755371096, 0.06075321578979492, 0.060848224639892576, 0.06517826843261719, 0.06092390441894531, 0.06060441589355469, 0.061084735870361326, 0.060650432586669925, 0.06051132965087891, 0.06413609313964844, 0.061009696960449215, 0.061478145599365236, 0.0611005744934082, 0.06113119888305664, 0.06047129440307617, 0.06081740951538086, 0.06061862564086914, 0.06100806427001953, 0.06059001541137695, 0.060665760040283206, 0.06611567687988282, 0.060970752716064454, 0.060571903228759764, 0.06058393478393555, 0.06072524642944336, 0.06059417724609375, 0.061017887115478515, 0.06083123016357422, 0.060942718505859375, 0.06104217529296875, 
0.06084489440917969]",tokens/s,16.3707983024096,,, 4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1015.31648,867.106816,0.0,488.636416,482.553856,s,1,8.1101240234375,8.1101240234375,0.0,8.1101240234375,8.1101240234375,8.1101240234375,8.1101240234375,[8.1101240234375],,kWh,2.895301289996344e-05,3.1790054850173666e-06,8.45528454196387e-06,4.0587302926944674e-05,,MB,1269.141504,1024.393216,0.0,616.562688,581.925888,s,10,0.3571811180114745,0.03571811180114746,0.00024128084346798723,0.03568595123291016,0.0359390510559082,0.03610627670288086,0.036240057220458984,"[0.03627350234985351, 0.035744129180908205, 0.03564166259765625, 0.035899871826171874, 0.03551369476318359, 0.035539169311523434, 0.035438591003417966, 0.035730239868164065, 0.03549836730957031, 0.03590188980102539]",tokens/s,7167.232171320319,kWh,1.1306078822072933e-06,1.246858942938572e-07,7.525145445251156e-07,2.007808321026266e-06,tokens/kWh,127502210.9028559,MB,1302.872064,1039.07328,0.0,631.242752,597.192192,s,10,17.16676220703125,1.7166762207031248,0.010543409282530449,1.7162196655273436,1.7303883056640625,1.73304150390625,1.7351640625,"[1.7196602783203125, 1.7171500244140625, 1.706823974609375, 1.715289306640625, 1.6963521728515625, 1.7125345458984376, 1.7297987060546876, 1.7356947021484375, 1.71229150390625, 1.7211669921875]",tokens/s,36.69882488044026,kWh,5.007109421654183e-05,5.522389019015717e-06,1.920022363987546e-05,7.479370687543301e-05,tokens/kWh,842316.8556804501,,s,630,17.161206449508658,0.027240010237315346,0.0005188733701594881,0.02709940814971924,0.02779759616851807,0.02792737331390381,0.029234470233917238,"[0.027675647735595704, 0.027025407791137695, 0.0269354248046875, 0.026990463256835937, 0.026808319091796876, 0.02679110336303711, 0.027115488052368165, 0.02669843292236328, 0.027004255294799804, 0.026748863220214844, 0.02677235221862793, 0.026746879577636717, 0.02672230339050293, 0.02672960090637207, 0.026765727996826173, 0.026963808059692382, 0.026674816131591797, 0.026856447219848634, 0.02701692771911621, 0.027678272247314454, 0.02742755126953125, 0.027883487701416014, 0.02728940773010254, 0.027338783264160157, 0.026982175827026368, 0.026990655899047852, 0.027154399871826173, 0.027189151763916015, 0.027142623901367187, 0.027060224533081056, 0.027894880294799803, 0.027333696365356444, 0.02734467124938965, 0.02705824089050293, 0.027176895141601563, 0.027256128311157226, 0.027377792358398437, 0.02716476821899414, 0.02755638313293457, 0.027098592758178712, 0.027162815093994142, 0.02699299240112305, 0.02683679962158203, 0.027074880599975586, 0.026856416702270507, 0.027110111236572264, 0.027228511810302735, 0.027453279495239256, 0.028520448684692383, 0.02765945625305176, 0.02781279945373535, 0.027895360946655273, 0.027851295471191407, 0.02776006317138672, 0.02769875144958496, 0.027867712020874024, 0.02777110481262207, 0.027824447631835936, 0.02787705612182617, 
0.028055168151855468, 0.02783475112915039, 0.02780521583557129, 0.02767305564880371, 0.027300031661987304, 0.027410240173339845, 0.027450559616088867, 0.026913600921630858, 0.026771455764770507, 0.026738687515258788, 0.02669068717956543, 0.026907520294189455, 0.027125759124755858, 0.02925542449951172, 0.027385696411132813, 0.026886528015136718, 0.026765344619750976, 0.026617855072021485, 0.026617855072021485, 0.026732255935668945, 0.02668367958068848, 0.026648832321166993, 0.026676095962524415, 0.02670476722717285, 0.02675302314758301, 0.02669935989379883, 0.02660598373413086, 0.02654207992553711, 0.030242816925048828, 0.02679311943054199, 0.026712352752685548, 0.026828704833984376, 0.027150976181030274, 0.027025440216064452, 0.02676736068725586, 0.026937280654907227, 0.02674643135070801, 0.026735103607177735, 0.026891807556152343, 0.029122848510742188, 0.028404224395751954, 0.027829023361206056, 0.027661216735839843, 0.02795315170288086, 0.02754256057739258, 0.027666559219360353, 0.02746985626220703, 0.027693056106567384, 0.02756211280822754, 0.027625696182250976, 0.02750326347351074, 0.02740947151184082, 0.027459455490112306, 0.027582847595214843, 0.027478527069091797, 0.027495744705200196, 0.02770207977294922, 0.02740559959411621, 0.027372127532958986, 0.027262975692749023, 0.027067935943603516, 0.02691440010070801, 0.027276159286499024, 0.026869760513305665, 0.02711507225036621, 0.027474111557006835, 0.027023616790771483, 0.02651817512512207, 0.02729759979248047, 0.028847360610961915, 0.02705299186706543, 0.027455039978027344, 0.02675059127807617, 0.02670675277709961, 0.026613759994506835, 0.026705919265747072, 0.026695680618286134, 0.02674844741821289, 0.026632671356201173, 0.02663417625427246, 0.0267194881439209, 0.026714912414550783, 0.026703903198242188, 0.026847232818603517, 0.02680169677734375, 0.02704636764526367, 0.02693120002746582, 0.026697727203369142, 0.026681343078613282, 0.02672982406616211, 0.026655391693115236, 0.02672640037536621, 0.02676940727233887, 0.026771455764770507, 0.026730495452880858, 0.027097215652465822, 0.02721900749206543, 0.027067071914672853, 0.026871583938598634, 0.02689039993286133, 0.02694588851928711, 0.026729791641235352, 0.02679043197631836, 0.02678374481201172, 0.027236320495605468, 0.027318336486816405, 0.027922655105590822, 0.02765817642211914, 0.02728563117980957, 0.026850368499755858, 0.02679772758483887, 0.026897119522094726, 0.02688640022277832, 0.0269434871673584, 0.02719875144958496, 0.0270097599029541, 0.027060224533081056, 0.027112480163574218, 0.027169759750366212, 0.02699673652648926, 0.027122976303100586, 0.027654111862182616, 0.027797567367553712, 0.027767488479614258, 0.02777292823791504, 0.027893024444580077, 0.027717472076416016, 0.027693952560424805, 0.027822080612182616, 0.028004352569580077, 0.02742425537109375, 0.027900415420532225, 0.02795644760131836, 0.028986143112182616, 0.028463104248046874, 0.02780963134765625, 0.02746384048461914, 0.027288959503173827, 0.027361343383789063, 0.02714681625366211, 0.026908672332763672, 0.02733260726928711, 0.0269550724029541, 0.026812864303588868, 0.026800384521484377, 0.026720256805419923, 0.026617984771728515, 0.026877824783325194, 0.026742271423339844, 0.028252447128295898, 0.027893695831298828, 0.026915103912353515, 0.026877952575683595, 0.026728288650512695, 0.026975519180297853, 0.026800512313842773, 0.02674265670776367, 0.026993280410766603, 0.027254783630371093, 0.027046176910400392, 0.02710905647277832, 0.027419872283935547, 0.027667264938354492, 0.027735359191894533, 
0.027765439987182616, 0.02755075263977051, 0.02761440086364746, 0.027372928619384767, 0.027461151123046874, 0.027072608947753905, 0.0268953914642334, 0.02672368049621582, 0.026970527648925782, 0.02714419174194336, 0.02699673652648926, 0.02691196823120117, 0.026907424926757812, 0.026807872772216797, 0.028145183563232423, 0.02776678466796875, 0.027022239685058593, 0.026944831848144533, 0.026700031280517577, 0.027038143157958983, 0.02727071952819824, 0.027234752655029296, 0.027164512634277344, 0.026893728256225585, 0.026655487060546875, 0.026953727722167968, 0.026986495971679687, 0.02686502456665039, 0.026943552017211915, 0.026728448867797853, 0.02694704055786133, 0.026892831802368164, 0.026705919265747072, 0.026816511154174806, 0.026591232299804687, 0.02667241668701172, 0.02681248092651367, 0.026739360809326172, 0.026641759872436523, 0.026762079238891602, 0.02673206329345703, 0.02664886474609375, 0.026695680618286134, 0.02654003143310547, 0.026660863876342773, 0.02664019203186035, 0.026763456344604492, 0.02702035140991211, 0.02685638427734375, 0.026736640930175783, 0.026818559646606444, 0.026828351974487304, 0.026823104858398436, 0.0268155517578125, 0.02691516876220703, 0.027329120635986328, 0.02701312065124512, 0.027061824798583985, 0.026874271392822266, 0.02689846420288086, 0.027017215728759765, 0.027691007614135742, 0.027550783157348633, 0.027323328018188476, 0.02734694480895996, 0.027467967987060547, 0.02806937599182129, 0.02718137550354004, 0.027092992782592775, 0.027209728240966798, 0.026801376342773436, 0.026874975204467775, 0.027419551849365235, 0.02711222457885742, 0.026906272888183595, 0.026806623458862304, 0.026763263702392577, 0.026881311416625978, 0.02697075271606445, 0.027066463470458983, 0.027076608657836915, 0.026849023818969725, 0.026689056396484376, 0.0265960636138916, 0.02690662384033203, 0.026607328414916993, 0.026753183364868163, 0.02665894317626953, 0.026884096145629883, 0.02680143928527832, 0.026740671157836914, 0.02675996780395508, 0.026572799682617186, 0.02689023971557617, 0.02685923194885254, 0.026876192092895507, 0.026771455764770507, 0.026799488067626952, 0.026780288696289064, 0.026832767486572266, 0.026984575271606446, 0.02704150390625, 0.02704207992553711, 0.027140096664428712, 0.027099136352539063, 0.027183103561401366, 0.027020896911621094, 0.027090431213378906, 0.027034528732299806, 0.02702739143371582, 0.026974271774291993, 0.02688204765319824, 0.026955360412597655, 0.027198911666870117, 0.0270284481048584, 0.02695599937438965, 0.027191072463989257, 0.02724870491027832, 0.02709907150268555, 0.02693939208984375, 0.027181055068969725, 0.027170976638793944, 0.02739616012573242, 0.02768160057067871, 0.02782512092590332, 0.02775859260559082, 0.027550848007202148, 0.027509248733520508, 0.02762326431274414, 0.02776915168762207, 0.0277926082611084, 0.0275382080078125, 0.027506879806518555, 0.027514911651611327, 0.027639455795288086, 0.027627647399902342, 0.02737993621826172, 0.02717081642150879, 0.02733670425415039, 0.02750828742980957, 0.027148767471313475, 0.027076576232910158, 0.027256799697875977, 0.02745756721496582, 0.02735923194885254, 0.027586559295654296, 0.027131168365478516, 0.026825439453125, 0.026828800201416016, 0.026736480712890625, 0.02685148811340332, 0.026959871292114256, 0.026947519302368165, 0.027002784729003908, 0.026846752166748047, 0.026652063369750977, 0.027099679946899415, 0.02711155128479004, 0.02717750358581543, 0.026966976165771483, 0.02694003105163574, 0.026951776504516602, 0.02674518394470215, 0.027111072540283204, 0.027107328414916993, 
0.026789888381958008, 0.026836448669433594, 0.026787679672241212, 0.026834911346435546, 0.02684364891052246, 0.02695395278930664, 0.027218975067138673, 0.027685855865478514, 0.027789024353027342, 0.027781408309936525, 0.02851430320739746, 0.029511680603027345, 0.030965856552124024, 0.027887840270996094, 0.027801279067993165, 0.027985664367675783, 0.02776835250854492, 0.027732032775878906, 0.02792723274230957, 0.02804230308532715, 0.027927488327026368, 0.027728927612304687, 0.0275567684173584, 0.02750048065185547, 0.02722831916809082, 0.027073600769042968, 0.02691600036621094, 0.027563680648803712, 0.0268657283782959, 0.026861568450927735, 0.027056127548217773, 0.026986431121826172, 0.0273918399810791, 0.027475872039794923, 0.027519296646118165, 0.02794495964050293, 0.02756387138366699, 0.027594911575317384, 0.02753657531738281, 0.027451616287231445, 0.027570783615112306, 0.027578464508056642, 0.02762656021118164, 0.027341663360595705, 0.02739200019836426, 0.02794495964050293, 0.02750771141052246, 0.027405311584472656, 0.027289600372314454, 0.02737299156188965, 0.02699673652648926, 0.026806272506713868, 0.02713043212890625, 0.0267509765625, 0.027082752227783204, 0.029183168411254883, 0.0270601921081543, 0.027363616943359374, 0.0274366397857666, 0.02719228744506836, 0.029930944442749022, 0.02762588882446289, 0.02756537628173828, 0.02751148796081543, 0.027473440170288087, 0.02741926383972168, 0.027438432693481445, 0.027830944061279297, 0.027522079467773436, 0.027829216003417968, 0.03010918426513672, 0.027225631713867188, 0.027374080657958984, 0.027316640853881836, 0.027043903350830078, 0.02692131233215332, 0.026938175201416014, 0.027058944702148438, 0.027060096740722656, 0.027314207077026368, 0.0273635196685791, 0.027230207443237304, 0.027236255645751953, 0.0275313606262207, 0.027582592010498046, 0.02736729621887207, 0.027313695907592774, 0.027101984024047853, 0.026998464584350585, 0.02705161666870117, 0.027558559417724608, 0.02771718406677246, 0.027769023895263673, 0.02766819190979004, 0.027689407348632813, 0.027718591690063476, 0.027810720443725585, 0.027717695236206055, 0.027776960372924805, 0.027758848190307616, 0.027612415313720703, 0.029039295196533203, 0.027840320587158202, 0.02787705612182617, 0.028334400177001954, 0.027736320495605468, 0.028142784118652345, 0.027413055419921874, 0.02750048065185547, 0.02742073631286621, 0.02735923194885254, 0.027064096450805663, 0.026884319305419922, 0.026790176391601563, 0.0267238712310791, 0.02691004753112793, 0.02657148742675781, 0.026826751708984374, 0.026949567794799803, 0.02719526481628418, 0.027017375946044923, 0.027109632492065428, 0.027217695236206055, 0.027428863525390625, 0.02742032051086426, 0.02733296012878418, 0.027085056304931642, 0.026963104248046876, 0.026813024520874022, 0.027033599853515625, 0.027056127548217773, 0.027170047760009766, 0.026650623321533205, 0.0268374080657959, 0.026980703353881835, 0.02696396827697754, 0.02694144058227539, 0.026957151412963867, 0.027073183059692384, 0.027066368103027344, 0.0276081600189209, 0.02755471992492676, 0.027613183975219727, 0.027027231216430664, 0.02705135917663574, 0.02694009590148926, 0.026818016052246093, 0.026875776290893556, 0.027091392517089842, 0.026954143524169923, 0.02703385543823242, 0.02754979133605957, 0.02772777557373047, 0.02759561538696289, 0.02744403266906738, 0.027399904251098634, 0.02721232032775879, 0.02703548812866211, 0.027013023376464843, 0.02701683235168457, 0.027056608200073242, 0.02689580726623535, 0.02703971290588379, 0.026884864807128907, 0.027742271423339845, 
0.0286176643371582, 0.027697792053222658, 0.02693734359741211, 0.0267675838470459, 0.02695577621459961, 0.02735103988647461, 0.027060159683227537, 0.027136064529418944, 0.02730803108215332, 0.02735206413269043, 0.027282560348510742, 0.027322240829467773, 0.02780486488342285, 0.027283807754516602, 0.02771011161804199, 0.029391136169433594, 0.027764799118041993, 0.0275742073059082, 0.027507743835449218, 0.027846656799316406, 0.02774457550048828, 0.02774700736999512, 0.02823779106140137, 0.027706432342529296, 0.027570783615112306, 0.027691423416137697, 0.02768492889404297, 0.027774911880493164, 0.027725759506225585, 0.027731327056884764, 0.027709440231323244, 0.02770800018310547, 0.02782316780090332, 0.027797855377197266, 0.027777727127075196, 0.02766431999206543, 0.027555200576782228, 0.02740902328491211, 0.027363391876220704, 0.027571231842041015, 0.027827104568481444, 0.02751804733276367, 0.027197856903076172, 0.026838592529296875, 0.026887104034423827, 0.026804224014282226, 0.026740480422973632, 0.027191551208496093, 0.02691276741027832, 0.0271646728515625, 0.02663827133178711, 0.026941024780273437, 0.02692553520202637, 0.026850688934326173, 0.027027168273925782, 0.02703657531738281, 0.026921247482299803, 0.026982112884521483, 0.02694265556335449, 0.027019968032836916, 0.02691494369506836, 0.02677299118041992, 0.026746368408203124, 0.026895360946655275, 0.026952863693237305, 0.02690287971496582, 0.02679612731933594, 0.026759103775024416, 0.02676140785217285, 0.026656063079833984, 0.027648992538452148, 0.027062271118164064, 0.027082752227783204, 0.02720774459838867, 0.026980287551879884, 0.026990591049194337, 0.027250303268432616]",tokens/s,36.710705733514274,,, -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -23041,7 +23041,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -23191,7 +23191,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): 
+4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -23281,7 +23281,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -23497,7 +23497,7 @@ ChildProcessError: Traceback (most recent call last): ValueError: RecurrentGemmaForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpnu7l5whr/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -23704,7 +23704,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -23749,10 +23749,10 @@ ChildProcessError: Traceback (most recent call last): self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 6.12 MiB is free. Process 112010 has 14.73 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 241.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 6.12 MiB is free. Process 110804 has 14.73 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 241.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -23856,7 +23856,7 @@ ChildProcessError: Traceback (most recent call last): ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp2w614cxh/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -23947,7 +23947,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): 
+4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -24070,7 +24070,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -24196,7 +24196,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: q_weight and gptq_scales have incompatible shapes " 
-4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -24259,7 +24259,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -24322,7 +24322,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -24367,10 +24367,10 @@ ChildProcessError: Traceback (most recent call last): self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 22.12 MiB is free. Process 26701 has 14.72 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 193.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 22.12 MiB is free. Process 26500 has 14.72 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 193.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -24503,7 +24503,7 @@ ChildProcessError: Traceback (most recent call last): ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpp18bxefd/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -24593,7 +24593,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): 
+4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -24872,7 +24872,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -24989,7 +24989,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
-4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -25205,7 +25205,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -25268,7 +25268,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -25484,7 +25484,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -25556,7 +25556,7 @@ RuntimeError: FlashAttention only supports Ampere GPUs or newer. raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -25619,7 +25619,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -25682,7 +25682,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -26099,7 +26099,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -26224,7 +26224,7 @@ ChildProcessError: Traceback (most recent call last): ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpfvmzarta/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " -4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -28749,7 +28749,7 @@ ChildProcessError: Traceback (most recent call last): ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` " -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -28820,7 +28820,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -28970,7 +28970,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -29240,7 +29240,7 @@ ChildProcessError: Traceback (most recent call last): ValueError: RecurrentGemmaForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpb54f2zm9/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -29447,7 +29447,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -29492,10 +29492,10 @@ ChildProcessError: Traceback (most recent call last): self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 6.12 MiB is free. Process 111535 has 14.73 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 241.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 6.12 MiB is free. Process 110339 has 14.73 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 241.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -29599,7 +29599,7 @@ ChildProcessError: Traceback (most recent call last): ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpd5b7wzyr/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -29688,7 +29688,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): 
+4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -29811,7 +29811,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -29935,7 +29935,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: scales and qweight have incompatible shapes " 
-4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -29998,7 +29998,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -30061,7 +30061,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -30106,10 +30106,10 @@ ChildProcessError: Traceback (most recent call last): self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 22.12 MiB is free. Process 26204 has 14.72 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 193.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 22.12 MiB is free. Process 26007 has 14.72 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 193.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -30242,7 +30242,7 @@ ChildProcessError: Traceback (most recent call last): ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp870ld_h3/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -30332,7 +30332,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): 
+4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -30617,7 +30617,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -30734,7 +30734,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
-4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -30950,7 +30950,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -31013,7 +31013,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -31229,7 +31229,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -31301,7 +31301,7 @@ RuntimeError: FlashAttention only supports Ampere GPUs or newer. raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -31364,7 +31364,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -31427,7 +31427,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -31844,7 +31844,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -31969,7 +31969,7 @@ ChildProcessError: Traceback (most recent call last): ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp57p77u1h/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " -4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -32292,7 +32292,7 @@ RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,831.893504,666.763264,0.0,281.018368,267.897856,s,1,10.0142451171875,10.0142451171875,0.0,10.0142451171875,10.0142451171875,10.0142451171875,10.0142451171875,[10.0142451171875],,kWh,1.8046305170833912e-05,1.9833146055481723e-06,5.168615245999819e-06,2.5198235022381905e-05,,MB,1251.4304,775.815168,0.0,360.710144,344.082944,s,21,0.19524713706970212,0.009297482717604864,0.0001770074854136784,0.00928108787536621,0.009428192138671875,0.009685600280761719,0.009716268920898437,"[0.009685600280761719, 0.009387392044067383, 0.009384320259094239, 0.009723936080932617, 0.009428192138671875, 0.009395199775695801, 0.009289024353027343, 0.00928108787536621, 0.009147968292236328, 0.009244319915771484, 0.009195584297180176, 0.00915561580657959, 0.009373536109924316, 0.009367775917053223, 0.009072128295898438, 0.009268927574157714, 0.008988832473754883, 0.009237759590148926, 0.0093187837600708, 0.009256416320800782, 0.009044735908508302]",tokens/s,27534.334590938448,kWh,2.7363691433145896e-07,3.0176325469129224e-08,1.431725606485474e-07,4.4698580044913553e-07,tokens/kWh,572725128.5002092,MB,1297.793024,798.88384,0.0,383.778816,344.085504,s,21,10.226491394042966,0.48697578066871267,0.008746852984113134,0.4841940612792969,0.4983719482421875,0.49846926879882814,0.5027245422363281,"[0.5037883605957031, 0.4943537292480469, 0.4961960144042969, 0.4983719482421875, 0.49660528564453127, 0.49846926879882814, 0.4933099975585937, 0.49139312744140623, 0.471669921875, 0.48066192626953125, 0.474512939453125, 0.4807530822753906, 0.4843514709472656, 0.4908294677734375, 0.48114617919921876, 0.48362307739257815, 0.4837484436035156, 0.4841940612792969, 0.4837935791015625, 0.4806653442382812, 0.4740541687011719]",tokens/s,129.36988347446913,kWh,1.4108932130112977e-05,1.5559505434870885e-06,5.579940483446673e-06,2.1244823157046742e-05,tokens/kWh,2965428.3085478824,,s,1323,10.213597888946534,0.007720028638659512,0.0003448043095214563,0.007695263862609864,0.00792732162475586,0.008045948696136474,0.008787071018218993,"[0.00760214376449585, 0.007857279777526855, 0.0078405442237854, 0.007827455997467042, 0.007878367900848388, 0.007872352123260498, 0.007935776233673095, 0.007940639972686767, 0.007827712059020995, 0.007833471775054932, 0.007781888008117676, 0.007907519817352295, 0.007841824054718017, 0.007990848064422608, 0.007852767944335938, 0.007806879997253418, 0.007872767925262451, 0.009319744110107421, 0.010504735946655273, 0.01048697566986084, 0.008555071830749512, 0.008086943626403808, 0.0078439040184021, 0.0078057279586791995, 0.007845536231994629, 0.00790118408203125, 0.00820464038848877, 0.007878560066223145, 0.007894879817962646, 0.007829760074615479, 0.007882207870483398, 0.007893792152404785, 0.007860032081604004, 0.007780288219451904, 0.008220383644104004, 0.007790688037872314, 0.007762239933013916, 0.00788431978225708, 
0.0077686400413513185, 0.007735072135925293, 0.007647232055664062, 0.007730688095092773, 0.008059040069580078, 0.007781983852386475, 0.007746623992919922, 0.008462016105651855, 0.007811039924621582, 0.007795775890350342, 0.007692927837371826, 0.007780096054077148, 0.00812876796722412, 0.007975296020507812, 0.007810239791870117, 0.007881760120391846, 0.007981152057647704, 0.007884448051452636, 0.007845888137817383, 0.007770112037658691, 0.007812255859375, 0.007734111785888672, 0.007753727912902832, 0.008034303665161132, 0.00782972812652588, 0.007544928073883057, 0.00786636781692505, 0.00784819221496582, 0.007710048198699951, 0.00813212776184082, 0.007830399990081787, 0.00789692783355713, 0.007859551906585693, 0.0078080959320068356, 0.0077636799812316895, 0.00778982400894165, 0.007981760025024414, 0.0077926721572875975, 0.007759200096130371, 0.007640960216522216, 0.007574079990386963, 0.0076495361328125, 0.007823359966278077, 0.007649600028991699, 0.007689727783203125, 0.007771391868591309, 0.007737279891967774, 0.007721983909606934, 0.007673183917999268, 0.007764639854431152, 0.00770579195022583, 0.007760159969329834, 0.007731743812561035, 0.007710432052612305, 0.007761792182922363, 0.007715231895446777, 0.0077844481468200685, 0.007732384204864502, 0.007719776153564453, 0.008011039733886718, 0.007744224071502686, 0.007832575798034667, 0.007779583930969238, 0.007788383960723877, 0.007733248233795166, 0.007751584053039551, 0.007675903797149658, 0.007755775928497314, 0.007847040176391602, 0.00782047986984253, 0.00786191987991333, 0.007782144069671631, 0.007833920001983642, 0.007738815784454346, 0.00772764778137207, 0.007999199867248535, 0.00806281566619873, 0.007983232021331787, 0.007739712238311767, 0.007902463912963868, 0.007940127849578857, 0.00794271993637085, 0.007892352104187011, 0.009161503791809082, 0.007948287963867188, 0.007735360145568848, 0.007980991840362548, 0.008793984413146973, 0.007521344184875488, 0.007832352161407471, 0.007923711776733398, 0.007830912113189698, 0.007830048084259033, 0.0078009281158447265, 0.007830687999725342, 0.007826176166534424, 0.007958623886108398, 0.00876255989074707, 0.007856639862060547, 0.007829408168792724, 0.007753471851348877, 0.007745376110076904, 0.007987264156341553, 0.007883520126342773, 0.007920767784118652, 0.007787199974060058, 0.007820543766021728, 0.007830336093902587, 0.007800960063934327, 0.00783785581588745, 0.007718751907348633, 0.007759871959686279, 0.007856383800506591, 0.007856063842773438, 0.00785097599029541, 0.007875360012054444, 0.008046496391296386, 0.007858335971832275, 0.007771455764770508, 0.0077933440208435055, 0.007890944004058837, 0.008003552436828614, 0.007943808078765869, 0.0077398080825805664, 0.007800127983093262, 0.007799039840698243, 0.007776127815246582, 0.007700607776641845, 0.007698847770690918, 0.007895071983337403, 0.0077760319709777835, 0.007783967971801758, 0.007723936080932617, 0.007747360229492187, 0.007795775890350342, 0.007932511806488038, 0.007706528186798096, 0.007721407890319824, 0.007729152202606201, 0.008089599609375, 0.00801587200164795, 0.007816256046295166, 0.008004768371582031, 0.007860159873962403, 0.007857952117919922, 0.007841599941253661, 0.007780608177185059, 0.007820320129394532, 0.00803059196472168, 0.007807136058807373, 0.008829376220703126, 0.007377664089202881, 0.00788646411895752, 0.00788108777999878, 0.0077209601402282715, 0.0077835841178894045, 0.0077625918388366695, 0.007719295978546143, 0.007958335876464844, 0.007897088050842285, 0.007955872058868408, 0.00812502384185791, 
0.008134336471557617, 0.007902783870697021, 0.00785484790802002, 0.007891071796417235, 0.007824543952941895, 0.007815807819366456, 0.00809993553161621, 0.007837088108062744, 0.00779529619216919, 0.007837696075439453, 0.007905280113220215, 0.007956543922424316, 0.008120320320129394, 0.007959775924682617, 0.007801343917846679, 0.007841375827789307, 0.00783955192565918, 0.007879487991333008, 0.007849184036254883, 0.007844639778137206, 0.007845888137817383, 0.007970816135406494, 0.008181759834289551, 0.007964511871337891, 0.007929215908050537, 0.007967616081237793, 0.007851647853851318, 0.007829792022705078, 0.007872159957885743, 0.007913536071777343, 0.007915616035461426, 0.007882175922393798, 0.008043328285217284, 0.008114239692687989, 0.007929183959960938, 0.007983744144439697, 0.007760064125061035, 0.00787824010848999, 0.007829599857330323, 0.007832608222961425, 0.007803616046905518, 0.00776582384109497, 0.00807369613647461, 0.008011232376098633, 0.008063296318054199, 0.007860447883605957, 0.007901055812835693, 0.007909088134765626, 0.007928224086761474, 0.007780352115631104, 0.008015680313110351, 0.008032320022583007, 0.007629471778869629, 0.007837440013885498, 0.0078438401222229, 0.007885119915008544, 0.00784991979598999, 0.007810783863067627, 0.007883391857147216, 0.00786191987991333, 0.007730559825897217, 0.007928415775299073, 0.007898623943328858, 0.007760767936706543, 0.007820543766021728, 0.007815584182739258, 0.007771967887878418, 0.007781760215759277, 0.0078117442131042485, 0.00781328010559082, 0.008007904052734376, 0.007841567993164062, 0.007855455875396728, 0.0077151360511779786, 0.007708384037017822, 0.007889440059661865, 0.007794655799865722, 0.00772108793258667, 0.007757696151733398, 0.007768288135528565, 0.008714143753051757, 0.00798422384262085, 0.007920544147491455, 0.007696447849273681, 0.008281888008117676, 0.007759391784667969, 0.007825503826141358, 0.007836192131042481, 0.008059167861938476, 0.007812640190124513, 0.00781769609451294, 0.0077610878944396975, 0.007719456195831299, 0.007827263832092285, 0.00787443208694458, 0.007826848030090332, 0.007815648078918457, 0.00773363208770752, 0.007993343830108643, 0.0077927041053771975, 0.007714943885803223, 0.007728256225585937, 0.008088352203369141, 0.007868288040161132, 0.008042112350463867, 0.008126943588256837, 0.007895040035247802, 0.007911424160003662, 0.007880703926086426, 0.007919007778167725, 0.007899104118347168, 0.007932479858398438, 0.008267840385437012, 0.008015040397644044, 0.007834144115447998, 0.007445375919342041, 0.00810912036895752, 0.007805568218231201, 0.007964320182800293, 0.0077108159065246585, 0.0077069120407104496, 0.00772870397567749, 0.007682496070861816, 0.007854080200195313, 0.007727168083190918, 0.00782860803604126, 0.007730016231536865, 0.007776256084442139, 0.007788544178009033, 0.007784607887268066, 0.008017727851867676, 0.0077538881301879884, 0.007747424125671387, 0.007817215919494629, 0.007725312232971192, 0.007669119834899902, 0.007723616123199463, 0.007883679866790772, 0.0077402877807617185, 0.007725056171417236, 0.007655360221862793, 0.007692351818084717, 0.007686143875122071, 0.007763391971588135, 0.007651904106140137, 0.007985151767730713, 0.007695551872253418, 0.007662399768829346, 0.007627007961273194, 0.007626783847808838, 0.007631616115570068, 0.007723999977111816, 0.007948224067687987, 0.007753087997436524, 0.00776032018661499, 0.007782656192779541, 0.007844128131866456, 0.007688960075378418, 0.007638112068176269, 0.007949696063995362, 0.007887360095977783, 0.007747583866119385, 
0.007662623882293701, 0.007549503803253174, 0.009116064071655274, 0.009774880409240723, 0.01297430419921875, 0.00923635196685791, 0.007665791988372803, 0.007628191947937011, 0.0075976958274841305, 0.007535583972930908, 0.0075855679512023926, 0.007870816230773925, 0.0076696319580078125, 0.007618559837341309, 0.00765337610244751, 0.007796735763549805, 0.007318719863891602, 0.007619391918182373, 0.007648640155792237, 0.007765696048736572, 0.007707104206085205, 0.007731808185577393, 0.007759583950042725, 0.007778463840484619, 0.0077578239440917966, 0.00780079984664917, 0.007801087856292724, 0.007870592117309571, 0.008046239852905273, 0.007849696159362794, 0.007784128189086914, 0.007916224002838135, 0.007830656051635743, 0.0077770562171936035, 0.007731071949005127, 0.007876959800720215, 0.007825183868408202, 0.007858335971832275, 0.007665472030639648, 0.007639423847198486, 0.007882400035858154, 0.007770016193389893, 0.007723104000091552, 0.007755775928497314, 0.007804927825927735, 0.007671807765960693, 0.007614463806152344, 0.008060928344726562, 0.007696383953094482, 0.007548031806945801, 0.0076780481338500975, 0.01010972785949707, 0.010175647735595704, 0.007887712001800537, 0.007745888233184815, 0.007850751876831055, 0.007783328056335449, 0.007688000202178955, 0.007646944046020508, 0.007585951805114746, 0.007622943878173828, 0.007680160045623779, 0.007769599914550781, 0.00772054386138916, 0.00758681583404541, 0.007673823833465576, 0.008126272201538085, 0.007848127841949463, 0.007650591850280762, 0.0076943678855896, 0.007665728092193604, 0.007616511821746826, 0.007821824073791504, 0.0077263998985290525, 0.007695263862609864, 0.007546016216278076, 0.007776832103729248, 0.0076574721336364745, 0.007789792060852051, 0.007358463764190673, 0.008105216026306152, 0.007660255908966064, 0.007632927894592285, 0.00762172794342041, 0.0075857281684875485, 0.007686719894409179, 0.007741439819335938, 0.007753151893615723, 0.007676544189453125, 0.00767574405670166, 0.007510176181793213, 0.007487840175628662, 0.007479231834411621, 0.007532544136047363, 0.007728479862213135, 0.007853792190551758, 0.007774784088134766, 0.007651040077209473, 0.007973599910736083, 0.0077274560928344725, 0.007691904067993164, 0.007831583976745606, 0.007770048141479492, 0.007573535919189453, 0.007436319828033448, 0.007355711936950684, 0.00793068790435791, 0.007637152194976807, 0.007505504131317139, 0.007497056007385254, 0.007543200016021728, 0.0076455678939819334, 0.007716032028198242, 0.007752511978149414, 0.008292351722717285, 0.01295577621459961, 0.010304384231567383, 0.007874783992767333, 0.007633696079254151, 0.007763199806213379, 0.007686719894409179, 0.007815360069274902, 0.007731328010559082, 0.007776127815246582, 0.007718783855438232, 0.007684224128723145, 0.007743391990661621, 0.007755104064941406, 0.0074934401512145995, 0.007527359962463379, 0.0075345921516418455, 0.007641088008880615, 0.007700255870819092, 0.00759830379486084, 0.007583583831787109, 0.007802591800689697, 0.00756166410446167, 0.007526400089263916, 0.007513376235961914, 0.007518400192260742, 0.007520319938659668, 0.00743881607055664, 0.007124767780303955, 0.007530720233917237, 0.007510015964508057, 0.007524767875671387, 0.007429728031158447, 0.007362239837646485, 0.0074651198387146, 0.007497888088226318, 0.007485792160034179, 0.007580607891082764, 0.007625440120697022, 0.007624703884124756, 0.007682047843933106, 0.007675903797149658, 0.007616479873657226, 0.007552127838134765, 0.0077446079254150394, 0.007521503925323486, 0.0075203518867492675, 
0.007549215793609619, 0.007524576187133789, 0.007708831787109375, 0.00761840009689331, 0.007501823902130127, 0.00739737606048584, 0.0073705921173095705, 0.007415775775909424, 0.007393152236938477, 0.007341663837432861, 0.007420608043670654, 0.007464352130889892, 0.007406208038330078, 0.007611519813537597, 0.007428415775299072, 0.0074000639915466305, 0.007323584079742432, 0.007375904083251953, 0.007343071937561035, 0.007393280029296875, 0.00745472002029419, 0.007483104228973389, 0.007378528118133545, 0.007298751831054688, 0.0073482880592346195, 0.007369664192199707, 0.007464128017425537, 0.007467840194702148, 0.00758128023147583, 0.007440800189971924, 0.007415904045104981, 0.00743836784362793, 0.007407487869262696, 0.007452672004699707, 0.007472256183624268, 0.007565408229827881, 0.00743503999710083, 0.007403456211090088, 0.007460671901702881, 0.007393695831298828, 0.007417695999145508, 0.007522016048431396, 0.007630144119262695, 0.00770470380783081, 0.007451104164123535, 0.0077324481010437015, 0.00816646385192871, 0.0075764799118041995, 0.007521088123321533, 0.0076145920753479006, 0.007578879833221436, 0.007437088012695312, 0.0076260800361633305, 0.00763750410079956, 0.007532767772674561, 0.007571231842041015, 0.0075345921516418455, 0.0075833601951599125, 0.007535232067108154, 0.007615583896636963, 0.0075978879928588865, 0.007701344013214111, 0.0077489280700683595, 0.007750592231750488, 0.007794112205505371, 0.007708831787109375, 0.007947936058044433, 0.008283904075622558, 0.007987008094787598, 0.00785913610458374, 0.00775596809387207, 0.007679647922515869, 0.0077415680885314945, 0.007743423938751221, 0.00787401580810547, 0.007881343841552734, 0.007702784061431885, 0.007659264087677002, 0.007627071857452393, 0.0076481919288635255, 0.007660287857055664, 0.007687839984893799, 0.007604288101196289, 0.007632991790771484, 0.007608223915100098, 0.007567008018493652, 0.007619200229644775, 0.007768223762512207, 0.007661087989807129, 0.007975232124328614, 0.00786636781692505, 0.007753376007080078, 0.007651552200317383, 0.007513472080230713, 0.007601119995117187, 0.007525792121887207, 0.007422431945800782, 0.007339935779571533, 0.007284095764160156, 0.007230144023895263, 0.007182271957397461, 0.007192575931549072, 0.007208159923553467, 0.007313536167144775, 0.00730998420715332, 0.0072679038047790525, 0.00727510404586792, 0.007065599918365479, 0.007546847820281983, 0.007571104049682617, 0.007544256210327148, 0.007541696071624756, 0.007457824230194092, 0.007412928104400635, 0.007438111782073975, 0.0074670081138610836, 0.007712096214294434, 0.007617184162139893, 0.007472959995269776, 0.007426239967346191, 0.0073854079246521, 0.007365632057189942, 0.00728764820098877, 0.007257599830627442, 0.007209311962127685, 0.007263743877410888, 0.0074134721755981445, 0.007553247928619384, 0.007440959930419922, 0.007416959762573242, 0.007414559841156006, 0.007501952171325684, 0.0075467839241027835, 0.007485504150390625, 0.007599423885345459, 0.007516032218933106, 0.0073788480758666996, 0.007496607780456543, 0.007458816051483155, 0.007374847888946533, 0.0073434882164001465, 0.007404160022735595, 0.007450623989105225, 0.007702432155609131, 0.007594079971313477, 0.007660672187805175, 0.007838592052459717, 0.007571455955505371, 0.007385087966918945, 0.007387296199798584, 0.0073810238838195805, 0.0074158720970153805, 0.007321343898773193, 0.007411520004272461, 0.007483583927154541, 0.007632895946502686, 0.007683328151702881, 0.0077331199645996095, 0.0077218561172485356, 0.007796224117279053, 0.007715487957000732, 
0.007697247982025147, 0.007658495903015137, 0.007571616172790527, 0.007641151905059814, 0.007878431797027588, 0.007816383838653565, 0.007871359825134277, 0.007831679821014405, 0.007649184226989746, 0.0072371201515197756, 0.007869344234466552, 0.007571455955505371, 0.007513919830322266, 0.00759007978439331, 0.00758784008026123, 0.0075673599243164065, 0.007532767772674561, 0.00758351993560791, 0.007426239967346191, 0.007357952117919922, 0.00733625602722168, 0.007339935779571533, 0.007519807815551758, 0.007529344081878662, 0.0075608320236206054, 0.007783936023712158, 0.007604832172393799, 0.007605855941772461, 0.0076884799003601074, 0.007614528179168701, 0.007561312198638916, 0.0075259838104248045, 0.007565216064453125, 0.0075504322052001955, 0.007697343826293945, 0.007616384029388428, 0.007524159908294678, 0.007610688209533692, 0.007542784214019775, 0.007478591918945313, 0.007523007869720459, 0.0075690560340881344, 0.007741663932800293, 0.007597311973571778, 0.007517183780670166, 0.007443327903747559, 0.007572415828704834, 0.007559232234954834, 0.007554687976837158, 0.007696767807006836, 0.007729119777679443, 0.007754879951477051, 0.0077341761589050295, 0.007723008155822754, 0.007684095859527588, 0.007808703899383545, 0.00789737606048584, 0.008004863739013672, 0.0078504319190979, 0.007806687831878662, 0.00777180814743042, 0.007805920124053955, 0.0077209601402282715, 0.007706624031066894, 0.007648767948150635, 0.007668223857879639, 0.007710944175720215, 0.0076900157928466795, 0.007737343788146973, 0.0076037440299987795, 0.00756163215637207, 0.0076689600944519045, 0.007369408130645752, 0.007791935920715332, 0.00775875186920166, 0.007800479888916015, 0.007744256019592285, 0.00771449613571167, 0.00776643180847168, 0.0077430720329284665, 0.007678112030029297, 0.007628191947937011, 0.007653215885162354, 0.007551040172576904, 0.007631103992462158, 0.007931263923645019, 0.007732128143310547, 0.007686143875122071, 0.007669888019561768, 0.00757747220993042, 0.0075038719177246095, 0.007483551979064942, 0.007587679862976074, 0.007532544136047363, 0.007583744049072265, 0.007659776210784912, 0.007866015911102295, 0.007872608184814453, 0.007766335964202881, 0.007624703884124756, 0.007548384189605713, 0.007638783931732178, 0.007614943981170654, 0.007585343837738037, 0.0076745920181274414, 0.007665664196014404, 0.007591551780700684, 0.007602272033691407, 0.007585792064666748, 0.00757916784286499, 0.007573823928833007, 0.008021568298339844, 0.007776256084442139, 0.007793248176574707, 0.00783792018890381, 0.007656511783599854, 0.007602911949157715, 0.007640384197235108, 0.007649663925170898, 0.00777235221862793, 0.007607935905456543, 0.007672319889068604, 0.007702527999877929, 0.007771840095520019, 0.007764287948608398, 0.007600192070007324, 0.007638976097106933, 0.007823552131652832, 0.007621471881866455, 0.007578591823577881, 0.007673344135284424, 0.007729119777679443, 0.007759520053863525, 0.007738495826721191, 0.007715648174285889, 0.007298912048339844, 0.007635104179382324, 0.007617887973785401, 0.00768067216873169, 0.007696256160736084, 0.007684224128723145, 0.007596096038818359, 0.007839104175567628, 0.008012351989746093, 0.0077285442352294925, 0.0076724162101745605, 0.007686143875122071, 0.007609983921051025, 0.007827839851379394, 0.007727200031280517, 0.0076973757743835445, 0.007716832160949707, 0.007709023952484131, 0.007654143810272217, 0.007673312187194825, 0.007578015804290772, 0.007848095893859864, 0.007606112003326416, 0.007589888095855713, 0.007681568145751953, 0.007704736232757569, 
0.0076854400634765625, 0.008231936454772949, 0.00774348783493042, 0.00777843189239502, 0.007844863891601562, 0.007758975982666016, 0.007792384147644043, 0.007903232097625732, 0.007720863819122315, 0.007702623844146729, 0.007710720062255859, 0.007814976215362548, 0.007852223873138428, 0.0077636799812316895, 0.007737631797790527, 0.008036319732666015, 0.00781932783126831, 0.007788512229919433, 0.0077762241363525395, 0.007770207881927491, 0.007722784042358399, 0.007709983825683594, 0.008739456176757813, 0.007895103931427002, 0.008284352302551269, 0.008699295997619629, 0.007711328029632568, 0.007661888122558594, 0.00767299222946167, 0.008063520431518555, 0.0077003841400146485, 0.007639391899108887, 0.007761663913726806, 0.007947968006134034, 0.007779679775238037, 0.007709280014038086, 0.00753107213973999, 0.007247871875762939, 0.007706399917602539, 0.007629024028778076, 0.007609920024871826, 0.00755683183670044, 0.007528863906860352, 0.00755344009399414, 0.007517920017242431, 0.00760646390914917, 0.007614463806152344, 0.007708672046661377, 0.007700223922729493, 0.007727712154388428, 0.007855072021484374, 0.0076819839477539065, 0.007778304100036621, 0.00783616018295288, 0.008025504112243653, 0.007790527820587158, 0.0076902399063110355, 0.007678400039672852, 0.007677567958831787, 0.00767471981048584, 0.00770630407333374, 0.007663424015045166, 0.0076349759101867675, 0.007642943859100342, 0.007526495933532715, 0.00759987211227417, 0.007649824142456054, 0.007540736198425293, 0.007500288009643555, 0.00745356798171997, 0.007844768047332763, 0.007548927783966064, 0.007523392200469971, 0.00755398416519165, 0.007571040153503418, 0.007495200157165528, 0.007523200035095215, 0.007684095859527588, 0.007450623989105225, 0.007595935821533203, 0.00754256010055542, 0.007463391780853272, 0.0074709439277648925, 0.007482560157775879, 0.007562047958374023, 0.0076267518997192385, 0.007886943817138671, 0.007772064208984375, 0.0076984319686889645, 0.007675903797149658, 0.007597472190856934, 0.007691199779510498, 0.007648928165435791, 0.00766476821899414, 0.0076251840591430664, 0.007588352203369141, 0.007587744235992432, 0.007712800025939942, 0.007620319843292236, 0.007552288055419922, 0.0075615358352661135, 0.007734975814819336, 0.007669760227203369, 0.007618752002716065, 0.007696191787719727, 0.007676928043365478, 0.007558495998382569, 0.007605728149414063, 0.007703775882720947, 0.007846879959106445, 0.007860223770141601, 0.007772480010986328, 0.007798463821411133, 0.007915775775909423, 0.00790617609024048, 0.00790822410583496, 0.007859839916229247, 0.007854464054107667, 0.007898399829864502, 0.007792384147644043, 0.007852799892425537, 0.007655519962310791, 0.007726304054260254, 0.007655968189239502, 0.007672160148620605, 0.007567584037780762, 0.00785158395767212, 0.007590047836303711, 0.007701727867126465, 0.007691135883331299, 0.00756489610671997, 0.0076455998420715335, 0.007500895977020264, 0.0074388480186462404, 0.0074141759872436526, 0.00738099193572998, 0.007465184211730957, 0.0074217281341552735, 0.007411712169647216, 0.007622623920440674, 0.007540800094604492, 0.007536608219146729, 0.007565311908721924, 0.0075972480773925785, 0.007662591934204102, 0.007771520137786865, 0.0075903358459472655, 0.0076162881851196285, 0.007655648231506348, 0.007669760227203369, 0.008271200180053711, 0.008106687545776366, 0.008050656318664551, 0.008664896011352539, 0.007587168216705322, 0.007531040191650391, 0.007380383968353272, 0.0073532481193542484, 0.0073134078979492185, 0.007358335971832276, 0.007446464061737061, 
0.007303071975708008, 0.007465248107910156, 0.007284224033355713, 0.007672160148620605, 0.00763315200805664, 0.007627999782562256, 0.007516799926757812, 0.00748748779296875, 0.007342080116271973, 0.007422207832336426, 0.007448319911956787, 0.0074934720993041996, 0.007401375770568847, 0.007401919841766357, 0.00743609619140625, 0.008453184127807618, 0.007683008193969726, 0.007651552200317383, 0.007519552230834961, 0.0075103998184204105, 0.0073500161170959475, 0.007383232116699219, 0.007477344036102295, 0.007695712089538574, 0.007547711849212647, 0.007421855926513672, 0.00734771203994751, 0.007363071918487549, 0.00743123197555542, 0.007449535846710205, 0.007618815898895264, 0.007644447803497314, 0.007632800102233887, 0.007686719894409179, 0.008171520233154296, 0.007798079967498779, 0.007740096092224121, 0.007734784126281738, 0.007750400066375732, 0.007749375820159912, 0.007845888137817383, 0.007681151866912842, 0.0078037757873535155, 0.007948287963867188, 0.008177087783813477, 0.007784575939178467, 0.0077881598472595214, 0.007877439975738525, 0.008007871627807616, 0.0077617278099060055, 0.007790592193603516, 0.007704895973205566, 0.0076735677719116215, 0.007648575782775879, 0.007749407768249512, 0.007664127826690674, 0.007713280200958252, 0.007784512042999268, 0.007687200069427491, 0.00767193603515625, 0.0077216320037841795, 0.00783680009841919, 0.00808131217956543, 0.00783846378326416, 0.007874783992767333, 0.007350272178649903, 0.007791935920715332, 0.007800896167755127, 0.007657855987548828, 0.007651584148406982, 0.007671807765960693, 0.0076943359375, 0.007575679779052735, 0.0077331199645996095, 0.007671232223510742, 0.007684832096099854, 0.008099391937255859, 0.00771014404296875, 0.007595039844512939, 0.007654399871826172, 0.007686944007873535, 0.007696288108825684, 0.007780479907989502, 0.007585792064666748, 0.00748748779296875, 0.007475200176239013, 0.007460864067077637, 0.007483391761779785, 0.007408736228942871, 0.0075047359466552735, 0.007479487895965576, 0.007480447769165039, 0.007478015899658203, 0.007550975799560547, 0.0075138239860534665, 0.0076495680809021, 0.00759113597869873, 0.007649663925170898, 0.00761897611618042, 0.007837696075439453, 0.00759603214263916, 0.007470719814300537, 0.007391615867614746, 0.007568704128265381, 0.007862239837646485, 0.008208127975463867, 0.008481568336486816, 0.007809184074401856, 0.008235039710998536, 0.007948480129241943, 0.007604032039642334, 0.007612415790557861, 0.007687647819519043, 0.007718431949615479, 0.007727807998657227, 0.008079680442810058, 0.007893248081207276, 0.007751552104949952, 0.007680031776428222, 0.007676864147186279, 0.0076697921752929684, 0.0076640000343322755, 0.007565567970275879, 0.007536896228790283, 0.007921216011047363, 0.007502175807952881, 0.0074834880828857425, 0.007466464042663574, 0.007388671875, 0.007569983959197998, 0.007708032131195069, 0.007723487854003906, 0.007711071968078614, 0.007712672233581543, 0.007657599925994873, 0.0076813120841979984, 0.007610976219177246, 0.007629951953887939, 0.007687392234802246, 0.007656576156616211, 0.007568064212799072, 0.007544672012329102, 0.007779327869415284, 0.007569536209106446, 0.007555647850036621, 0.007549248218536377, 0.007675615787506104, 0.007801439762115479, 0.007737023830413818, 0.007710271835327148, 0.007794464111328125, 0.007670432090759277, 0.007761407852172852, 0.007772672176361084, 0.007808703899383545, 0.00793836784362793, 0.008080544471740723, 0.007901919841766357, 0.007764287948608398, 0.007715712070465088, 0.007592095851898194, 0.007676191806793213, 
0.007550816059112549, 0.0075270719528198245, 0.007483391761779785, 0.0075304961204528805, 0.007556992053985596, 0.00760588788986206, 0.0076087360382080076, 0.007833695888519288, 0.00760422420501709, 0.007657855987548828, 0.007745151996612549, 0.007731200218200684, 0.0075345921516418455, 0.007579648017883301, 0.007674943923950195, 0.0076648640632629396, 0.007675615787506104, 0.007636864185333252, 0.007589087963104248, 0.007521183967590332, 0.007745632171630859, 0.0076789441108703616, 0.007607135772705078, 0.007614528179168701, 0.007643136024475097, 0.007642655849456787, 0.007943967819213867, 0.007651487827301026, 0.007643712043762207, 0.007362112045288086, 0.007768511772155762, 0.007718912124633789, 0.007778272151947021, 0.007780447959899902, 0.00771827220916748, 0.00767852783203125, 0.007639039993286132, 0.007759232044219971, 0.007688896179199219, 0.007661503791809082, 0.00782972812652588, 0.007692063808441162, 0.007524703979492188, 0.007476895809173584, 0.007403264045715332, 0.007460192203521728, 0.007349184036254883, 0.007370880126953125, 0.007376287937164307, 0.007573376178741455, 0.007566080093383789, 0.007860127925872804, 0.007847424030303956, 0.007811679840087891, 0.007868351936340333, 0.007765888214111328, 0.007783743858337402, 0.007756447792053223, 0.007749663829803467, 0.007755775928497314, 0.007661568164825439, 0.007608575820922852, 0.0077719039916992185, 0.007744991779327392, 0.007535136222839355, 0.0075038719177246095, 0.007640384197235108, 0.00769052791595459, 0.007632512092590332, 0.007596543788909912, 0.007528063774108887, 0.007459487915039063, 0.0076483840942382815, 0.007607391834259033, 0.007573631763458252, 0.007589183807373047, 0.007590240001678467, 0.007542240142822266, 0.007461408138275147, 0.007403295993804931, 0.007446752071380615, 0.007514143943786621, 0.007724095821380615, 0.0076397438049316405, 0.007528287887573242, 0.007599520206451416, 0.00770576000213623, 0.0076811199188232425, 0.007551360130310059, 0.007485792160034179, 0.007476863861083985, 0.007555456161499024, 0.007080512046813965, 0.00742137622833252, 0.007617472171783447, 0.007497407913208008, 0.007446400165557861, 0.007368832111358642, 0.007354015827178955, 0.007315008163452148, 0.007515135765075683, 0.00743507194519043, 0.007377888202667236, 0.007511839866638183, 0.007628223896026611, 0.007578495979309082, 0.007605535984039307, 0.007561791896820069, 0.007607552051544189, 0.007660672187805175, 0.007661312103271484, 0.0076836800575256345, 0.0076637759208679195, 0.007767551898956298, 0.007643743991851807, 0.007783743858337402, 0.007525152206420899, 0.007505919933319092, 0.007437439918518066, 0.007482240200042725, 0.007749631881713868, 0.00750822401046753, 0.007513792037963867, 0.007485824108123779, 0.007538368225097656, 0.0075447998046875, 0.0075502400398254396, 0.007437056064605713, 0.007484416007995606, 0.00745136022567749, 0.007553664207458496, 0.007497312068939209, 0.007622719764709473, 0.007680287837982178, 0.0076039361953735354, 0.0075731201171875, 0.0075838398933410645, 0.007612383842468261, 0.007478784084320069, 0.007545216083526611, 0.007438560009002685, 0.007438560009002685, 0.007438464164733887, 0.007534783840179443, 0.007572480201721191, 0.007533247947692871, 0.007409664154052734, 0.007628384113311767, 0.007449215888977051, 0.007365856170654297, 0.0073151359558105465, 0.007236480236053467, 0.007359968185424805, 0.007543327808380127, 0.007500864028930664]",tokens/s,129.53319823093787,,, 
4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,829.616128,666.763264,0.0,281.018368,267.897856,s,1,9.8784375,9.8784375,0.0,9.8784375,9.8784375,9.8784375,9.8784375,[9.8784375],,kWh,1.590189947499899e-05,1.7466632093362058e-06,4.294170101999933e-06,2.1942732786335125e-05,,MB,1281.08544,775.815168,0.0,360.710144,344.082944,s,20,0.18771663951873777,0.00938583197593689,0.00015274587235024058,0.00936188793182373,0.009490463638305665,0.009540688276290893,0.00983568968772888,"[0.009909440040588378, 0.009521280288696289, 0.009374336242675781, 0.009246912002563477, 0.009212703704833984, 0.009309568405151368, 0.00946998405456543, 0.009449567794799805, 0.009325728416442872, 0.009487039566040039, 0.009413439750671387, 0.00921555233001709, 0.00934943962097168, 0.009271903991699219, 0.009478079795837402, 0.009443039894104005, 0.009347999572753906, 0.009375967979431153, 0.009215519905090333, 0.0092991361618042]",tokens/s,27275.15266162073,kWh,2.7081202673160223e-07,2.986575457215874e-08,1.4224990765677167e-07,4.4292768896053265e-07,tokens/kWh,577972446.4749167,MB,1327.988736,800.980992,0.0,385.875968,344.085504,s,20,9.926833679199218,0.4963416839599609,0.004407894203706206,0.4968547210693359,0.5014775085449219,0.5034424209594727,0.504056233215332,"[0.4982658996582031, 0.49839389038085935, 0.498930908203125, 0.501263671875, 0.49628701782226564, 0.49851321411132815, 0.5034020385742187, 0.49742242431640626, 0.49560894775390624, 0.5007698974609375, 0.49753250122070314, 0.4907862548828125, 0.4941471252441406, 0.4898678894042969, 0.4952970886230469, 0.5042096862792969, 0.49067034912109375, 0.4947938232421875, 0.48678912353515624, 0.49388192749023435]",tokens/s,126.92869052900684,kWh,1.3951180819101697e-05,1.5385681687541659e-06,5.466180505009894e-06,2.0955929492865754e-05,tokens/kWh,3006309.026829268,,s,1260,9.914113731861116,0.007868344231635805,0.00020790832087812502,0.007851855993270873,0.008027135562896729,0.008127570915222168,0.008633991632461552,"[0.007504223823547363, 0.00797814416885376, 0.00799625587463379, 0.00794547176361084, 0.007926527976989745, 0.007898943901062012, 0.007807328224182129, 0.007831391811370849, 0.008006784439086914, 0.007857024192810058, 0.007978047847747803, 0.00799180793762207, 0.007950560092926025, 0.008166655540466309, 0.008168416023254394, 0.008042495727539062, 0.008023776054382325, 0.007979040145874023, 0.008038528442382812, 0.007934080123901368, 0.007931903839111328, 0.008079680442810058, 0.007949312210083008, 0.008135392189025878, 0.007997407913208007, 0.008535200119018555, 0.007858367919921875, 0.007866399765014648, 0.007795328140258789, 0.007749184131622315, 0.007770688056945801, 0.007837247848510743, 0.007768383979797363, 0.007833439826965332, 0.007901343822479248, 0.007927616119384766, 0.00786246395111084, 0.00782697582244873, 0.007805376052856445, 0.007784480094909668, 0.007708064079284668, 0.0076698241233825685, 0.00767145586013794, 
0.007760767936706543, 0.007783679962158203, 0.007940576076507569, 0.007776095867156983, 0.007901631832122802, 0.007733248233795166, 0.007811039924621582, 0.007851808071136475, 0.00784329605102539, 0.007870463848114014, 0.007790431976318359, 0.00790828800201416, 0.00793017578125, 0.008012543678283692, 0.008057600021362304, 0.007845856189727784, 0.008109984397888183, 0.007792960166931153, 0.007790592193603516, 0.007830880165100097, 0.00772051191329956, 0.007920063972473145, 0.00789638376235962, 0.007862048149108887, 0.007831583976745606, 0.007747712135314941, 0.007811935901641845, 0.007810976028442383, 0.007932096004486085, 0.007771423816680908, 0.007725599765777588, 0.007809023857116699, 0.007890463829040528, 0.007820767879486084, 0.007812096118927002, 0.0078537278175354, 0.007809375762939453, 0.007875807762145996, 0.007776127815246582, 0.008004511833190918, 0.007878367900848388, 0.007878687858581543, 0.007769792079925537, 0.007804480075836182, 0.00795136022567749, 0.0077948799133300785, 0.007804736137390137, 0.007862271785736084, 0.007902431964874267, 0.007869247913360595, 0.007831552028656007, 0.008116288185119629, 0.007884704113006591, 0.007968768119812012, 0.007898975849151611, 0.00789084815979004, 0.007873023986816406, 0.008115967750549317, 0.008458239555358887, 0.007970367908477783, 0.007839360237121583, 0.00790006399154663, 0.007933440208435059, 0.008499872207641602, 0.007988224029541016, 0.007950592041015624, 0.007936511993408203, 0.007904640197753906, 0.007924511909484863, 0.007906816005706786, 0.008008031845092774, 0.007871488094329833, 0.007861248016357422, 0.007821087837219238, 0.00783996820449829, 0.007865439891815186, 0.008027039527893066, 0.007833600044250488, 0.007905375957489014, 0.0079616961479187, 0.007877439975738525, 0.00798476791381836, 0.007916160106658935, 0.007544832229614258, 0.007948448181152343, 0.007918560028076171, 0.0079269437789917, 0.007962143898010254, 0.008021599769592285, 0.00797756814956665, 0.008136704444885253, 0.007996575832366943, 0.008005503654479981, 0.008014816284179688, 0.007964672088623047, 0.007929024219512939, 0.00795740795135498, 0.007898752212524413, 0.007878399848937989, 0.008231455802917481, 0.008157183647155761, 0.008004735946655274, 0.008039360046386718, 0.007954368114471436, 0.007903232097625732, 0.007928095817565917, 0.007992127895355224, 0.007838272094726562, 0.007864672183990479, 0.007880703926086426, 0.007938240051269531, 0.007834911823272705, 0.007864480018615722, 0.007948512077331544, 0.007820703983306886, 0.007992063999176026, 0.007725056171417236, 0.007743391990661621, 0.008275168418884277, 0.00793497610092163, 0.007712255954742431, 0.007696447849273681, 0.007958847999572754, 0.007765088081359863, 0.007777184009552002, 0.007933951854705811, 0.0079300799369812, 0.007826655864715577, 0.007894944190979004, 0.007766079902648926, 0.00799724817276001, 0.007793439865112305, 0.007724031925201416, 0.007830527782440186, 0.00783564805984497, 0.007804927825927735, 0.007759871959686279, 0.007816256046295166, 0.007887807846069336, 0.008112031936645508, 0.007899231910705566, 0.007919616222381591, 0.007864319801330566, 0.007955679893493653, 0.008008735656738281, 0.00787225580215454, 0.00755014419555664, 0.008220576286315917, 0.008227744102478028, 0.007975135803222656, 0.008042271614074708, 0.007852287769317626, 0.008184864044189452, 0.007852384090423584, 0.007849408149719237, 0.007842912197113036, 0.007823296070098877, 0.007857632160186768, 0.007964352130889892, 0.008067520141601562, 0.007890336036682129, 0.007950335979461669, 
0.007969056129455566, 0.007850560188293457, 0.007876768112182617, 0.00797049617767334, 0.007882976055145263, 0.007862271785736084, 0.007849984169006348, 0.007781983852386475, 0.007901599884033203, 0.007881984233856202, 0.008139616012573242, 0.007964511871337891, 0.007931968212127686, 0.007927807807922363, 0.007826623916625976, 0.007754271984100342, 0.00789961576461792, 0.007805823802947998, 0.007793600082397461, 0.007725056171417236, 0.007714816093444824, 0.007810272216796875, 0.007922463893890381, 0.007763999938964844, 0.007819231986999511, 0.008067071914672852, 0.00803388786315918, 0.007868512153625488, 0.007913951873779296, 0.007866464138031005, 0.007878399848937989, 0.007870368003845215, 0.00936355209350586, 0.008167455673217774, 0.007968192100524902, 0.007915808200836181, 0.007913343906402588, 0.007993728160858153, 0.008207615852355956, 0.009161184310913086, 0.007798687934875488, 0.007798175811767578, 0.007810239791870117, 0.007810880184173584, 0.007935232162475585, 0.007768799781799316, 0.007760128021240234, 0.007374783992767334, 0.007812032222747803, 0.007790592193603516, 0.00806713581085205, 0.007947648048400879, 0.007779168128967285, 0.00782102394104004, 0.007857503890991211, 0.007825215816497803, 0.007936575889587402, 0.007858367919921875, 0.007757343769073486, 0.007729728221893311, 0.007774208068847656, 0.007860256195068359, 0.007874527931213378, 0.007931519985198974, 0.007880928039550782, 0.007812479972839356, 0.007907648086547852, 0.007799263954162598, 0.007866144180297852, 0.007870463848114014, 0.007776159763336182, 0.007817056179046631, 0.007822976112365723, 0.00782204818725586, 0.007876736164093017, 0.008126463890075684, 0.008044384002685546, 0.007907487869262696, 0.007872447967529297, 0.007867455959320068, 0.007730239868164062, 0.007808127880096436, 0.007881472110748291, 0.007825471878051759, 0.007919392108917237, 0.007834976196289062, 0.007850880146026612, 0.007829152107238769, 0.00780025577545166, 0.007785376071929932, 0.007862271785736084, 0.007926815986633301, 0.007982048034667968, 0.007861440181732177, 0.007850304126739501, 0.007848447799682617, 0.00784774398803711, 0.007872128009796142, 0.007843776226043701, 0.007895199775695801, 0.008248991966247558, 0.007885568141937256, 0.007866432189941406, 0.00787446403503418, 0.007978847980499268, 0.007910880088806152, 0.007875264167785645, 0.007987264156341553, 0.008038432121276855, 0.007864319801330566, 0.007448575973510742, 0.0077619199752807615, 0.007765279769897461, 0.007817567825317383, 0.007887231826782226, 0.007823296070098877, 0.007831615924835205, 0.00784716796875, 0.007885568141937256, 0.007823359966278077, 0.007860352039337158, 0.007857376098632813, 0.00786729621887207, 0.007964416027069092, 0.00791155195236206, 0.007901055812835693, 0.007900320053100585, 0.008419424057006837, 0.008028927803039551, 0.007966720104217529, 0.00811520004272461, 0.007891392230987549, 0.007802752017974854, 0.007800640106201172, 0.00788159990310669, 0.007784736156463623, 0.007947999954223633, 0.007849855899810791, 0.007836991786956788, 0.007853951930999756, 0.007795904159545898, 0.007843584060668944, 0.00784716796875, 0.007920383930206299, 0.008184032440185547, 0.007927264213562012, 0.007940415859222411, 0.007919616222381591, 0.007940192222595215, 0.008029760360717774, 0.00819200038909912, 0.007978943824768067, 0.007917888164520263, 0.007878687858581543, 0.007863711833953858, 0.007914144039154053, 0.007880095958709716, 0.007848544120788574, 0.007847040176391602, 0.007922560214996337, 0.008040448188781739, 0.008019040107727051, 
0.008028127670288087, 0.007972832202911378, 0.007914015769958496, 0.007948607921600341, 0.007896255970001221, 0.007890944004058837, 0.007846208095550537, 0.007827104091644287, 0.007908383846282958, 0.007847807884216308, 0.007812479972839356, 0.007600128173828125, 0.007952383995056152, 0.008337056159973145, 0.008012127876281739, 0.008077312469482421, 0.007974271774291991, 0.007999872207641602, 0.007921919822692872, 0.007794688224792481, 0.007864543914794922, 0.007875743865966798, 0.00790822410583496, 0.007988992214202881, 0.008036383628845215, 0.007994527816772462, 0.007926432132720948, 0.007869696140289307, 0.007862847805023193, 0.007960927963256837, 0.007860000133514405, 0.007868639945983887, 0.007947936058044433, 0.007941887855529786, 0.007918176174163818, 0.007890944004058837, 0.007919616222381591, 0.007843616008758545, 0.00820035171508789, 0.007948351860046386, 0.008032416343688965, 0.007964447975158691, 0.00809993553161621, 0.0079585599899292, 0.007859903812408447, 0.007837503910064698, 0.007804800033569336, 0.00781766414642334, 0.007831424236297608, 0.00787395191192627, 0.007770944118499756, 0.008007552146911621, 0.007891104221343995, 0.007857855796813964, 0.007854112148284912, 0.007849408149719237, 0.0077933440208435055, 0.00798531198501587, 0.007813119888305664, 0.007824639797210693, 0.007928575992584229, 0.007938015937805175, 0.007917600154876709, 0.008048288345336914, 0.007807328224182129, 0.00802406406402588, 0.00784339189529419, 0.007868192195892333, 0.007958720207214356, 0.008937055587768555, 0.009333632469177245, 0.008792063713073731, 0.008433024406433105, 0.007920447826385499, 0.007503104209899902, 0.007869184017181396, 0.007892864227294922, 0.007925695896148681, 0.00785427188873291, 0.007823616027832032, 0.007860000133514405, 0.007761888027191162, 0.007751808166503906, 0.007772031784057617, 0.007862271785736084, 0.007862271785736084, 0.007742847919464111, 0.007809663772583008, 0.007761631965637207, 0.007819071769714355, 0.008011327743530274, 0.007924863815307617, 0.00776313591003418, 0.007860640048980712, 0.007852384090423584, 0.007933792114257812, 0.0077656002044677735, 0.007748095989227295, 0.007869887828826904, 0.00787446403503418, 0.007874720096588135, 0.007825823783874511, 0.00789299201965332, 0.00783785581588745, 0.007838848114013672, 0.007922399997711181, 0.008095744132995606, 0.007902400016784668, 0.007883008003234864, 0.008151488304138184, 0.008512991905212403, 0.008004256248474121, 0.007854080200195313, 0.007868415832519531, 0.00814303970336914, 0.007992159843444825, 0.007875552177429199, 0.007958528041839599, 0.008129759788513184, 0.007818016052246093, 0.007886655807495117, 0.007909567832946777, 0.007890783786773681, 0.008318880081176757, 0.007853504180908202, 0.007822368144989014, 0.007812096118927002, 0.007911488056182862, 0.007870272159576416, 0.007803455829620361, 0.007686495780944824, 0.007882175922393798, 0.00787660789489746, 0.007844480037689209, 0.007886655807495117, 0.007807072162628174, 0.007863455772399903, 0.00744159984588623, 0.007821695804595947, 0.007798816204071045, 0.007774623870849609, 0.007929152011871337, 0.00787724781036377, 0.007830719947814941, 0.007812032222747803, 0.0079584641456604, 0.008076576232910157, 0.007882719993591308, 0.007783071994781494, 0.007818784236907959, 0.007844704151153565, 0.008799967765808105, 0.008064640045166016, 0.007960031986236573, 0.007811583995819092, 0.008076959609985352, 0.007875328063964843, 0.007845888137817383, 0.007862271785736084, 0.007794688224792481, 0.007837120056152344, 0.007729248046875, 
0.007788640022277832, 0.007747968196868896, 0.007814752101898194, 0.0078032960891723634, 0.007872223854064941, 0.007838047981262208, 0.0078000321388244625, 0.007832575798034667, 0.007853792190551758, 0.007862271785736084, 0.008054783821105957, 0.007907328128814697, 0.007753183841705322, 0.007805600166320801, 0.007802175998687744, 0.007732992172241211, 0.007867199897766113, 0.007880703926086426, 0.007862048149108887, 0.007784671783447265, 0.0077430720329284665, 0.007708447933197022, 0.0077926721572875975, 0.007770463943481445, 0.007779583930969238, 0.007830016136169434, 0.008241312026977539, 0.007876800060272218, 0.007745984077453613, 0.007735199928283691, 0.007819071769714355, 0.00777942419052124, 0.007904096126556397, 0.007788735866546631, 0.007818751811981202, 0.007893375873565674, 0.007862592220306396, 0.007892672061920167, 0.007620607852935791, 0.007927552223205566, 0.008063232421875, 0.00783680009841919, 0.008046527862548829, 0.008082592010498046, 0.007900191783905029, 0.007950975894927978, 0.007983359813690185, 0.007985087871551513, 0.007982912063598633, 0.0078951678276062, 0.008058879852294922, 0.007952383995056152, 0.008150976181030273, 0.008068191528320312, 0.008033568382263184, 0.007900032043457031, 0.007908160209655762, 0.007933311939239502, 0.007963263988494873, 0.007896128177642823, 0.007938432216644286, 0.007848512172698975, 0.007972864151000977, 0.00786636781692505, 0.007925568103790283, 0.007869664192199708, 0.008127455711364745, 0.007946239948272706, 0.007904736042022706, 0.007899775981903076, 0.007847839832305909, 0.007913568019866944, 0.007948383808135987, 0.008013855934143067, 0.008207200050354004, 0.007996352195739746, 0.007908576011657715, 0.00787663984298706, 0.007953152179718018, 0.008146783828735352, 0.007964672088623047, 0.008157343864440917, 0.007845888137817383, 0.007871615886688232, 0.007906208038330078, 0.008016927719116211, 0.007858560085296631, 0.008013631820678711, 0.007891871929168702, 0.0079268798828125, 0.007869184017181396, 0.00780083179473877, 0.008094783782958984, 0.007876992225646973, 0.007826015949249268, 0.007796768188476562, 0.007799967765808106, 0.007815968036651612, 0.007872511863708496, 0.007790592193603516, 0.007844031810760498, 0.007538976192474366, 0.00795417594909668, 0.007990655899047851, 0.00793673610687256, 0.00786732816696167, 0.007866496086120606, 0.007840576171875, 0.007822559833526612, 0.007744480133056641, 0.00781824016571045, 0.007794591903686523, 0.008050944328308106, 0.008392736434936524, 0.007921631813049317, 0.007881375789642334, 0.007913472175598145, 0.007886112213134766, 0.00782204818725586, 0.007899104118347168, 0.007890016078948974, 0.0078058881759643555, 0.007866528034210205, 0.008068960189819336, 0.008017919540405273, 0.007949312210083008, 0.007910399913787843, 0.007823359966278077, 0.007751679897308349, 0.007798399925231934, 0.007852416038513183, 0.007854080200195313, 0.007884992122650146, 0.007919424057006836, 0.008062975883483887, 0.007970208168029786, 0.00787507200241089, 0.007975008010864258, 0.007987199783325195, 0.007870592117309571, 0.007873600006103515, 0.007768864154815674, 0.007764224052429199, 0.007810848236083984, 0.00801734447479248, 0.007869056224822998, 0.007818848133087157, 0.007834112167358399, 0.008027999877929688, 0.007833600044250488, 0.007790431976318359, 0.007812255859375, 0.00775270414352417, 0.007804927825927735, 0.007976191997528076, 0.007866144180297852, 0.007825727939605714, 0.00784607982635498, 0.008272480010986329, 0.007833471775054932, 0.00789292812347412, 0.007853472232818603, 
0.007736095905303955, 0.007903103828430175, 0.00734822416305542, 0.007786496162414551, 0.007814464092254638, 0.007880640029907227, 0.007867231845855712, 0.007833439826965332, 0.007784512042999268, 0.007738463878631592, 0.007838624000549316, 0.007823359966278077, 0.007766304016113282, 0.007786208152770996, 0.007677951812744141, 0.007770080089569092, 0.00779475212097168, 0.007856095790863037, 0.00787663984298706, 0.007740384101867676, 0.008037471771240234, 0.007845727920532227, 0.007771679878234863, 0.007772223949432373, 0.007859903812408447, 0.007734047889709473, 0.007743680000305176, 0.00785200023651123, 0.0077116479873657225, 0.007687104225158691, 0.007719103813171387, 0.007710527896881103, 0.007751679897308349, 0.007788064002990723, 0.00782697582244873, 0.007840703964233399, 0.008103903770446777, 0.00784764814376831, 0.008134528160095215, 0.00792742395401001, 0.007854911804199219, 0.007807168006896972, 0.007883999824523926, 0.007735455989837646, 0.0076845440864562985, 0.007682015895843506, 0.0076638078689575196, 0.00763375997543335, 0.007797760009765625, 0.007624192237854004, 0.0077127361297607425, 0.00772160005569458, 0.007751584053039551, 0.007644544124603272, 0.007637856006622315, 0.007893951892852783, 0.007717599868774414, 0.007669119834899902, 0.007674623966217041, 0.007704671859741211, 0.007815072059631348, 0.007858176231384278, 0.007711904048919678, 0.007750495910644532, 0.007876480102539062, 0.007448192119598389, 0.007740128040313721, 0.007836544036865234, 0.007877600193023682, 0.007897088050842285, 0.007868383884429932, 0.007850016117095947, 0.007814720153808593, 0.007857823848724365, 0.007762656211853027, 0.007685311794281006, 0.007727551937103272, 0.007753151893615723, 0.0077727680206298826, 0.00784015989303589, 0.00815120029449463, 0.008009056091308593, 0.00781763219833374, 0.00784764814376831, 0.007882719993591308, 0.007878880023956298, 0.007932096004486085, 0.00791756820678711, 0.007723231792449951, 0.007703487873077393, 0.007704895973205566, 0.007770431995391846, 0.007866591930389404, 0.00782047986984253, 0.0077543997764587405, 0.007894591808319092, 0.007831168174743653, 0.0077465281486511234, 0.007755775928497314, 0.007817215919494629, 0.007737343788146973, 0.007747935771942139, 0.007797632217407226, 0.007819263935089112, 0.007798687934875488, 0.007908383846282958, 0.007841631889343262, 0.007712768077850342, 0.007745728015899658, 0.008046560287475586, 0.00791100788116455, 0.008129856109619141, 0.007961535930633545, 0.00790937614440918, 0.007935808181762696, 0.007866591930389404, 0.0078123841285705565, 0.007772863864898682, 0.007827455997467042, 0.007839744091033935, 0.007824895858764648, 0.007842304229736329, 0.008016032218933105, 0.007837535858154297, 0.007786496162414551, 0.007956352233886719, 0.00785145616531372, 0.007736095905303955, 0.007405087947845459, 0.007787072181701661, 0.007788544178009033, 0.007798783779144287, 0.007770080089569092, 0.0077292799949646, 0.007798463821411133, 0.007762432098388672, 0.007810783863067627, 0.007710720062255859, 0.00782528018951416, 0.007990623950958252, 0.008059167861938476, 0.00793446397781372, 0.007923327922821044, 0.007878880023956298, 0.007798912048339844, 0.007675168037414551, 0.007778495788574219, 0.007753376007080078, 0.007730016231536865, 0.007671872138977051, 0.007751679897308349, 0.007726687908172608, 0.007960000038146973, 0.007699423789978027, 0.00752566385269165, 0.007525055885314941, 0.007544864177703858, 0.007568480014801026, 0.007590816020965576, 0.007597631931304931, 0.007670207977294922, 0.007741439819335938, 
0.007714047908782959, 0.007887712001800537, 0.007761824131011963, 0.008017919540405273, 0.007781568050384521, 0.007834208011627198, 0.00821065616607666, 0.007835968017578125, 0.0077777280807495115, 0.007731455802917481, 0.007716959953308106, 0.007682144165039062, 0.00773305606842041, 0.007868224143981934, 0.007725247859954834, 0.007638463973999024, 0.007845695972442626, 0.007799680233001709, 0.007686016082763672, 0.007949696063995362, 0.007742080211639404, 0.007712768077850342, 0.0077926721572875975, 0.007869696140289307, 0.007682784080505371, 0.007673439979553223, 0.00785868787765503, 0.007772064208984375, 0.007666816234588623, 0.007342847824096679, 0.007700160026550293, 0.007823359966278077, 0.0077916159629821775, 0.007788832187652588, 0.007819488048553466, 0.007760384082794189, 0.0077578239440917966, 0.007722239971160888, 0.007709440231323242, 0.00780511999130249, 0.007669119834899902, 0.007852735996246338, 0.007764063835144043, 0.007827136039733886, 0.007921504020690919, 0.008564767837524414, 0.007917664051055907, 0.008093024253845215, 0.007893663883209228, 0.007790624141693115, 0.007730751991271973, 0.007737855911254882, 0.007751584053039551, 0.007862271785736084, 0.007898560047149658, 0.007868991851806641, 0.007946400165557862, 0.007847616195678711, 0.007784607887268066, 0.007746912002563476, 0.007770783901214599, 0.007780416011810303, 0.007769375801086426, 0.00784880018234253, 0.007862080097198486, 0.008109472274780273, 0.008100223541259766, 0.007859551906585693, 0.007893887996673584, 0.00788479995727539, 0.007846911907196046, 0.00786297607421875, 0.00784015989303589, 0.007887104034423827, 0.00787830400466919, 0.007853407859802247, 0.00791593599319458, 0.007861631870269776, 0.007895391941070556, 0.007930399894714356, 0.007874559879302979, 0.00786636781692505, 0.008078592300415039, 0.00793062400817871, 0.007788832187652588, 0.008096896171569824, 0.007764607906341553, 0.007735007762908935, 0.007769408226013183, 0.007897312164306641, 0.007851903915405273, 0.007842336177825928, 0.007569439888000488, 0.007991360187530518, 0.007911392211914062, 0.007910560131072998, 0.00787446403503418, 0.007846271991729737, 0.007887360095977783, 0.007818272113800049, 0.007862592220306396, 0.007865024089813233, 0.00784774398803711, 0.007841951847076416, 0.007884479999542237, 0.007881023883819581, 0.007833504199981689, 0.008116095542907716, 0.007901663780212403, 0.007849984169006348, 0.00794598388671875, 0.007913343906402588, 0.007932032108306885, 0.007872543811798095, 0.007839295864105224, 0.007840447902679444, 0.007925471782684327, 0.007899136066436767, 0.00793724822998047, 0.007953279972076416, 0.007908895969390869, 0.007917952060699463, 0.00812217617034912, 0.00800812816619873, 0.008545760154724122, 0.009316767692565918, 0.011201472282409667, 0.008290687561035156, 0.007871039867401123, 0.007915584087371827, 0.008103872299194336, 0.007878655910491944, 0.00787279987335205, 0.007843776226043701, 0.007882527828216553, 0.007819263935089112, 0.00826153564453125, 0.007923232078552247, 0.00785260820388794, 0.007824607849121094, 0.007801439762115479, 0.007796927928924561, 0.007897088050842285, 0.00800767993927002, 0.007968768119812012, 0.007897088050842285, 0.00781654405593872, 0.007906976222991944, 0.007834688186645507, 0.007794112205505371, 0.00815385627746582, 0.007955615997314452, 0.007926527976989745, 0.007848063945770263, 0.007818975925445556, 0.007473152160644531, 0.007839744091033935, 0.007881728172302246, 0.007860799789428712, 0.007919936180114747, 0.00790335988998413, 0.007964288234710694, 
0.007951935768127442, 0.007890048027038575, 0.007943647861480713, 0.007864223957061768, 0.007884607791900635, 0.008178175926208496, 0.007842048168182373, 0.00798854398727417, 0.007892960071563721, 0.007848415851593018, 0.007767807960510254, 0.007733759880065918, 0.00787395191192627, 0.007790688037872314, 0.007859551906585693, 0.007754464149475098, 0.007701951980590821, 0.00792784023284912, 0.007753983974456787, 0.007758175849914551, 0.007758111953735352, 0.007714655876159668, 0.0077292160987854006, 0.007706560134887695, 0.007642208099365234, 0.007676928043365478, 0.007629759788513183, 0.007721951961517334, 0.007771423816680908, 0.007661344051361084, 0.0075887999534606934, 0.0077209601402282715, 0.007708672046661377, 0.007694176197052002, 0.007704671859741211, 0.007833663940429687, 0.007786496162414551, 0.007708384037017822, 0.007772607803344727, 0.007958208084106445, 0.0077890558242797855, 0.0077247037887573245, 0.007597824096679687, 0.007760128021240234, 0.007747583866119385, 0.007710527896881103, 0.0077131838798522945, 0.00769977617263794, 0.007599743843078613, 0.007758687973022461, 0.007651328086853027, 0.0077142720222473146, 0.007727327823638916, 0.007790304183959961, 0.007680607795715332, 0.007813375949859619, 0.007430592060089111, 0.00782044792175293, 0.007815904140472412, 0.007804927825927735, 0.007770112037658691, 0.007780416011810303, 0.007784383773803711, 0.007718463897705078, 0.007696095943450928, 0.007714784145355225, 0.007725376129150391, 0.007716832160949707, 0.007710944175720215, 0.007854080200195313, 0.007840320110321045, 0.007761600017547607, 0.007806655883789063, 0.007708672046661377, 0.007765535831451416, 0.00798799991607666, 0.007826784133911132, 0.007813439846038819, 0.007750048160552979, 0.007665631771087647, 0.0077142400741577145, 0.007702591896057129, 0.007758304119110108, 0.007659840106964111, 0.007684031963348389, 0.007740799903869629, 0.00775548791885376, 0.008061599731445313, 0.00820751953125, 0.009003328323364259, 0.008608223915100098, 0.007802944183349609, 0.007810592174530029, 0.007918047904968261, 0.007972864151000977, 0.007819263935089112, 0.007639039993286132, 0.007673984050750733, 0.007679999828338623, 0.007730400085449219, 0.00766428804397583, 0.007763967990875244, 0.007681471824645996, 0.007733823776245118, 0.008302495956420899, 0.009394271850585938, 0.008951040267944336, 0.007870304107666015, 0.007781280040740967, 0.00769155216217041, 0.007660255908966064, 0.008093152046203613, 0.007631392002105713, 0.007569632053375244, 0.007647007942199707, 0.007663616180419922, 0.007635295867919922, 0.007622303962707519, 0.007632895946502686, 0.007284639835357666, 0.007592031955718994, 0.00755238389968872, 0.007570047855377197, 0.00757696008682251, 0.007605088233947754, 0.007613408088684082, 0.0077090878486633305, 0.0076306557655334475, 0.007600736141204834, 0.007687359809875488, 0.007630720138549805, 0.007612607955932617, 0.007702303886413574, 0.007737792015075683, 0.007897632122039795, 0.007736800193786621, 0.007645472049713135, 0.007703968048095703, 0.00768291187286377, 0.0077495999336242676, 0.007751711845397949, 0.007694047927856445, 0.007730495929718017, 0.007788703918457031, 0.007842175960540772, 0.007809184074401856, 0.007771711826324463, 0.007748320102691651, 0.007772160053253174, 0.007776095867156983, 0.008108192443847657, 0.007876480102539062, 0.007750976085662842, 0.00772156810760498, 0.007737567901611328, 0.0077610559463500975, 0.007737984180450439, 0.007632544040679931, 0.007649727821350098, 0.007669919967651367, 0.00775164794921875, 
0.007751679897308349, 0.007714816093444824, 0.007765984058380127, 0.007943552017211914, 0.007723167896270752, 0.007657983779907227, 0.007585792064666748, 0.007600128173828125, 0.0076018238067626955, 0.007719264030456543, 0.007700064182281494, 0.007709440231323242, 0.007675551891326905, 0.007939104080200196, 0.0076500802040100095, 0.0076414718627929685, 0.0076839041709899905, 0.007861279964447021, 0.008108736038208008, 0.007725344181060791, 0.0077901120185852055, 0.007383200168609619, 0.00787436819076538, 0.00772435188293457, 0.0078056640625, 0.0077209601402282715, 0.007747712135314941, 0.00772051191329956, 0.007655007839202881, 0.007641119956970215, 0.007645887851715088, 0.007747583866119385, 0.007835360050201415, 0.007946752071380615, 0.00781440019607544, 0.007888671875, 0.007845759868621826, 0.00787936019897461, 0.008048831939697266, 0.008804351806640624, 0.007868415832519531, 0.007844863891601562, 0.007783711910247803, 0.008091679573059082, 0.008189472198486328, 0.007819104194641113, 0.008029919624328613, 0.007866015911102295, 0.007785408020019531, 0.007821311950683594, 0.007913472175598145, 0.007884736061096192, 0.007930016040802002, 0.008144800186157226, 0.007868031978607177, 0.007877024173736572, 0.007868383884429932, 0.00780083179473877, 0.008671072006225586, 0.007874720096588135, 0.00788479995727539, 0.007841792106628418, 0.007856128215789794, 0.007802527904510498, 0.007784959793090821, 0.007847616195678711, 0.008024224281311035, 0.007728672027587891, 0.007727231979370117, 0.007890655994415283, 0.007807968139648437, 0.0077816638946533205, 0.007696320056915283, 0.00771888017654419, 0.0075977277755737305, 0.00751855993270874, 0.007645823955535889, 0.007665408134460449, 0.007684192180633545, 0.007614463806152344, 0.007700479984283447, 0.0076574721336364745, 0.007560224056243897, 0.0075747199058532715]",tokens/s,127.09154182392741,,, 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,829.468672,666.763264,0.0,281.018368,267.897856,s,1,9.6287314453125,9.6287314453125,0.0,9.6287314453125,9.6287314453125,9.6287314453125,9.6287314453125,[9.6287314453125],,kWh,1.5180897170833417e-05,1.6670602500036167e-06,5.1775041419997204e-06,2.2025461562836755e-05,,MB,1245.851648,775.815168,0.0,360.710144,344.082944,s,20,0.19236044788360598,0.009618022394180298,0.0006343652001617168,0.009499328136444092,0.00973256597518921,0.009938745546340944,0.01181761132240295,"[0.00972339153289795, 0.009383296012878418, 0.009815135955810546, 0.009310144424438477, 0.012287327766418457, 0.009464415550231933, 0.009556639671325683, 0.00952787208557129, 0.00953542423248291, 0.009400128364562988, 0.009319071769714355, 0.00955673599243164, 0.009577312469482423, 0.009176992416381835, 0.009283679962158203, 0.009197855949401855, 0.009470784187316894, 0.00964476776123047, 0.009682784080505371, 
0.009446687698364258]",tokens/s,26616.698267920572,kWh,2.7255625332243883e-07,3.00581677804388e-08,1.4423784752007436e-07,4.46852268622952e-07,tokens/kWh,572896274.6209294,MB,1292.484608,800.980992,0.0,385.875968,344.085504,s,20,9.904938964843751,0.49524694824218746,0.005486446484585594,0.4955020294189453,0.5022049835205079,0.5030523483276367,0.5069318319702149,"[0.49549691772460935, 0.4981065368652344, 0.4927342529296875, 0.5006013488769532, 0.5021391906738282, 0.502797119140625, 0.4972896728515625, 0.49019790649414063, 0.5079017028808593, 0.4962594909667969, 0.4973736572265625, 0.49428482055664064, 0.4963424072265625, 0.4940211181640625, 0.4881112365722656, 0.4848570556640625, 0.48676266479492186, 0.4906120300292969, 0.49354269409179685, 0.49550714111328126]",tokens/s,127.20926443587395,kWh,1.3767582652233182e-05,1.5183167232338346e-06,5.375621065718039e-06,2.0661520441185052e-05,tokens/kWh,3049146.3674871065,,s,1260,9.892112445831302,0.007850882893516903,0.0003436467944562197,0.007813823938369751,0.008005893993377686,0.008149097681045532,0.010008320159912116,"[0.007663072109222412, 0.0079202880859375, 0.007790592193603516, 0.007812767982482911, 0.007841824054718017, 0.007864672183990479, 0.007780096054077148, 0.007780576229095459, 0.00788809585571289, 0.007764768123626709, 0.007952256202697754, 0.007852159976959228, 0.007825407981872558, 0.007861311912536622, 0.007940576076507569, 0.009740768432617187, 0.010504192352294921, 0.010383359909057617, 0.007989408016204834, 0.007757887840270996, 0.007806655883789063, 0.007806464195251465, 0.007788447856903076, 0.007648096084594726, 0.007827295780181884, 0.0077578239440917966, 0.0077471680641174314, 0.007622655868530274, 0.007686560153961182, 0.007620607852935791, 0.007585504055023193, 0.007772448062896728, 0.0077619199752807615, 0.007673855781555176, 0.007617728233337402, 0.007543551921844483, 0.007465023994445801, 0.007481344223022461, 0.007667712211608887, 0.007907328128814697, 0.00749948787689209, 0.007455039978027344, 0.007833407878875733, 0.007561376094818115, 0.007528448104858398, 0.007585279941558838, 0.0077053442001342775, 0.00799887990951538, 0.007666016101837158, 0.007903232097625732, 0.007792064189910889, 0.007567935943603515, 0.007573728084564209, 0.007737120151519775, 0.0078063678741455075, 0.007698272228240967, 0.007719679832458496, 0.007708159923553467, 0.007830143928527832, 0.0077064957618713376, 0.007757760047912597, 0.007789951801300049, 0.0077617278099060055, 0.007423935890197754, 0.007898367881774903, 0.007826144218444825, 0.0078787522315979, 0.007958528041839599, 0.00783564805984497, 0.007885151863098145, 0.007972256183624267, 0.00795468807220459, 0.00789299201965332, 0.007929855823516846, 0.008148672103881836, 0.00783785581588745, 0.00782537603378296, 0.007917952060699463, 0.00791487979888916, 0.007901631832122802, 0.00786243200302124, 0.00789692783355713, 0.007886847972869874, 0.008230239868164062, 0.008174240112304687, 0.007844031810760498, 0.007929855823516846, 0.007909440040588378, 0.007994656085968017, 0.007814879894256593, 0.007798943996429443, 0.00782806396484375, 0.00781660795211792, 0.007763775825500488, 0.007812064170837402, 0.007837503910064698, 0.007903232097625732, 0.007878655910491944, 0.007800672054290771, 0.007765471935272217, 0.007736000061035156, 0.007785920143127441, 0.007766592025756836, 0.008085503578186035, 0.007929599761962891, 0.007786399841308593, 0.007800576210021973, 0.007756447792053223, 0.007779551982879639, 0.007873248100280762, 0.007999167919158936, 0.00794598388671875, 0.007872223854064941, 
0.007929728031158448, 0.00796291208267212, 0.007828447818756103, 0.007892704010009766, 0.007806975841522217, 0.007755775928497314, 0.0077844481468200685, 0.0077552638053894046, 0.008049152374267577, 0.007860320091247559, 0.007918784141540527, 0.008383296012878417, 0.008642592430114747, 0.007518239974975586, 0.007935808181762696, 0.007946144104003907, 0.008196415901184083, 0.00794211196899414, 0.007860223770141601, 0.007899136066436767, 0.007996575832366943, 0.007928671836853028, 0.007860223770141601, 0.007860000133514405, 0.007942368030548095, 0.007872511863708496, 0.007831776142120361, 0.00806275177001953, 0.007842944145202637, 0.008012703895568848, 0.007955520153045655, 0.007845056056976319, 0.007820256233215332, 0.007857920169830323, 0.007838592052459717, 0.007835775852203368, 0.007790592193603516, 0.007915679931640624, 0.00796892786026001, 0.007697247982025147, 0.007734111785888672, 0.007839744091033935, 0.007755136013031006, 0.007733888149261474, 0.007765632152557373, 0.007817759990692138, 0.008005472183227539, 0.007842976093292237, 0.00783839988708496, 0.007812704086303711, 0.007844287872314453, 0.007769919872283935, 0.007788864135742188, 0.007867904186248779, 0.007791103839874268, 0.00769593620300293, 0.007719071865081787, 0.007689792156219483, 0.0075803837776184085, 0.0075632638931274416, 0.007582880020141601, 0.007616576194763184, 0.007942944049835206, 0.007625919818878174, 0.007709504127502442, 0.00769977617263794, 0.007628543853759766, 0.007705535888671875, 0.007745535850524903, 0.0077099518775939945, 0.007631616115570068, 0.007648447990417481, 0.007772448062896728, 0.00798364782333374, 0.007818880081176758, 0.007802624225616455, 0.007413760185241699, 0.008410335540771485, 0.007942143917083741, 0.007934751987457276, 0.007968768119812012, 0.007806431770324707, 0.0077972798347473145, 0.008257535934448243, 0.008079360008239746, 0.007899231910705566, 0.007820864200592042, 0.00781660795211792, 0.007826367855072021, 0.008010848045349121, 0.007842720031738281, 0.007910592079162598, 0.007825727939605714, 0.007789184093475341, 0.007763455867767334, 0.007762303829193115, 0.007804128170013427, 0.007815072059631348, 0.00784496021270752, 0.007808544158935547, 0.007978464126586914, 0.007869376182556152, 0.007986656188964844, 0.007941664218902588, 0.007975776195526123, 0.00783564805984497, 0.007841983795166015, 0.00785584020614624, 0.007751776218414307, 0.007745120048522949, 0.007731616020202637, 0.0082423677444458, 0.008134976387023925, 0.007930335998535156, 0.007939199924468994, 0.008029088020324707, 0.007920832157135009, 0.007988031864166259, 0.007809184074401856, 0.007786335945129394, 0.007806975841522217, 0.00780083179473877, 0.007773439884185791, 0.007856287956237794, 0.007780672073364258, 0.007818560123443603, 0.00783846378326416, 0.008024127960205078, 0.007831711769104004, 0.007913472175598145, 0.007996543884277344, 0.007881472110748291, 0.007770112037658691, 0.00790057611465454, 0.008255776405334472, 0.008163776397705078, 0.007946112155914307, 0.008925408363342284, 0.00921177577972412, 0.00997158432006836, 0.008089216232299804, 0.007853600025177002, 0.007870368003845215, 0.007983200073242188, 0.007811935901641845, 0.00791756820678711, 0.007882559776306152, 0.008065216064453125, 0.00783561611175537, 0.007764224052429199, 0.007816991806030273, 0.00835590362548828, 0.007826879978179931, 0.008279647827148438, 0.007875232219696045, 0.007758304119110108, 0.007812448024749756, 0.007823808193206787, 0.00783571195602417, 0.007890880107879639, 0.008160639762878417, 0.008014464378356933, 
0.007860544204711915, 0.008009407997131348, 0.007800608158111572, 0.007995935916900634, 0.007890624046325683, 0.00783519983291626, 0.00786681604385376, 0.007890495777130127, 0.007876895904541015, 0.00796073579788208, 0.007924895763397216, 0.007920767784118652, 0.007917183876037598, 0.007890143871307372, 0.007859263896942139, 0.007935935974121094, 0.007913343906402588, 0.007937151908874512, 0.00811302375793457, 0.007900896072387696, 0.007870751857757569, 0.007952288150787353, 0.007954783916473388, 0.00794707202911377, 0.007975200176239013, 0.007938560009002685, 0.00785529613494873, 0.007881855964660645, 0.007880064010620117, 0.007886559963226318, 0.007862271785736084, 0.007899424076080323, 0.007929855823516846, 0.007944704055786133, 0.007934207916259766, 0.007866015911102295, 0.007903295993804932, 0.008218751907348634, 0.007968639850616455, 0.008011391639709473, 0.007602176189422607, 0.007878655910491944, 0.007886847972869874, 0.00786246395111084, 0.007948095798492432, 0.007950560092926025, 0.007907104015350342, 0.007854080200195313, 0.0081080322265625, 0.00790937614440918, 0.007913472175598145, 0.007795839786529541, 0.007807199954986572, 0.007864992141723632, 0.007927807807922363, 0.007855135917663575, 0.008332256317138673, 0.007907328128814697, 0.008265727996826172, 0.008025343894958497, 0.007993216037750245, 0.008024191856384278, 0.007910143852233887, 0.008469856262207032, 0.007992159843444825, 0.007886208057403565, 0.008005855560302734, 0.007852255821228027, 0.007831456184387207, 0.007906879901885986, 0.00786624002456665, 0.007806719779968262, 0.007787583827972412, 0.008007519721984864, 0.00790067195892334, 0.008091648101806641, 0.007988800048828125, 0.008003775596618651, 0.008006239891052246, 0.00792294406890869, 0.007890944004058837, 0.007994048118591309, 0.007956416130065917, 0.008447999954223634, 0.008157183647155761, 0.008470815658569336, 0.00777129602432251, 0.008364895820617675, 0.008409024238586426, 0.007983168125152589, 0.007933792114257812, 0.00809785556793213, 0.007944096088409423, 0.007954016208648681, 0.007983391761779784, 0.007889472007751464, 0.007912767887115478, 0.007862720012664795, 0.007867648124694824, 0.007815072059631348, 0.007696352005004883, 0.007658207893371582, 0.008100000381469727, 0.010228511810302734, 0.010290623664855957, 0.007780159950256348, 0.007861023902893066, 0.0078089919090271, 0.007818367958068848, 0.007875807762145996, 0.007853824138641358, 0.007840832233428956, 0.008162240028381347, 0.007821216106414794, 0.007925055980682372, 0.007751423835754395, 0.00770249605178833, 0.007902527809143066, 0.00786620807647705, 0.00788979196548462, 0.007794847965240478, 0.007786687850952149, 0.007733280181884765, 0.007813695907592774, 0.007737664222717286, 0.0078089919090271, 0.00782099199295044, 0.007898591995239257, 0.007742303848266602, 0.00775980806350708, 0.007816991806030273, 0.00786236810684204, 0.007747488021850586, 0.007731200218200684, 0.007769728183746338, 0.007706399917602539, 0.007728896141052246, 0.007742303848266602, 0.007655424118041992, 0.00760972785949707, 0.007815807819366456, 0.007727104187011719, 0.007731296062469482, 0.007779327869415284, 0.007723584175109864, 0.007989535808563233, 0.0078439040184021, 0.00785203218460083, 0.007852287769317626, 0.00777132797241211, 0.007755807876586914, 0.007713312149047851, 0.00774348783493042, 0.007862271785736084, 0.007686143875122071, 0.007755775928497314, 0.007749631881713868, 0.007860223770141601, 0.007967967987060547, 0.007900224208831788, 0.007829216003417969, 0.007794688224792481, 0.007738848209381103, 
0.007868095874786377, 0.007785568237304688, 0.007895040035247802, 0.0073324480056762694, 0.007654560089111328, 0.0075929279327392575, 0.007681951999664306, 0.007693535804748535, 0.0077909440994262695, 0.007614304065704345, 0.007678112030029297, 0.007695072174072266, 0.007737120151519775, 0.0076165437698364254, 0.007875648021697998, 0.00795472002029419, 0.0077584958076477055, 0.00781932783126831, 0.007776351928710937, 0.007847775936126709, 0.0078067522048950195, 0.007836959838867187, 0.007862336158752441, 0.007827455997467042, 0.007752927780151367, 0.007960224151611328, 0.007863935947418212, 0.007913856029510499, 0.008103103637695312, 0.007940991878509522, 0.007917535781860352, 0.007831039905548096, 0.007836128234863281, 0.008034463882446288, 0.007818751811981202, 0.007798175811767578, 0.007815839767456055, 0.00768233585357666, 0.0075504322052001955, 0.007611167907714844, 0.0077760000228881835, 0.007710464000701904, 0.007608575820922852, 0.007571104049682617, 0.007524703979492188, 0.007532095909118653, 0.007715040206909179, 0.007700543880462646, 0.007600287914276123, 0.007634943962097168, 0.007628799915313721, 0.0077578239440917966, 0.007602176189422607, 0.007708864212036133, 0.007855040073394775, 0.0076624641418457035, 0.00795420789718628, 0.007950560092926025, 0.007888895988464355, 0.00794985580444336, 0.007834176063537597, 0.007907104015350342, 0.007880832195281982, 0.007855199813842773, 0.007868415832519531, 0.007994559764862061, 0.007500095844268799, 0.008073408126831055, 0.007999263763427734, 0.007971136093139649, 0.010618559837341309, 0.010504192352294921, 0.00799129581451416, 0.007960576057434082, 0.008517536163330078, 0.007997536182403564, 0.00793782377243042, 0.008644831657409669, 0.008068479537963868, 0.00802086353302002, 0.008007424354553223, 0.0079268798828125, 0.007994272232055665, 0.007929535865783691, 0.008173439979553223, 0.007997536182403564, 0.00795084810256958, 0.007954271793365478, 0.007921311855316162, 0.008087936401367188, 0.0079584641456604, 0.00792784023284912, 0.007964672088623047, 0.007907328128814697, 0.007910880088806152, 0.007928351879119873, 0.007922815799713134, 0.007945087909698487, 0.00788479995727539, 0.00789904022216797, 0.007925856113433837, 0.008017184257507325, 0.00830742359161377, 0.00800153636932373, 0.008023903846740723, 0.007999648094177246, 0.008010784149169921, 0.007994624137878418, 0.007924928188323974, 0.007891488075256348, 0.00786191987991333, 0.007890719890594483, 0.007930431842803955, 0.007898240089416505, 0.00792416000366211, 0.00793235206604004, 0.00783564805984497, 0.00785644817352295, 0.007933536052703857, 0.008082655906677247, 0.007968863964080811, 0.007926559925079346, 0.007907328128814697, 0.007879776000976562, 0.00787779188156128, 0.007883679866790772, 0.00790835189819336, 0.00784284782409668, 0.007813951969146728, 0.007405375957489014, 0.007835999965667724, 0.008142815589904786, 0.007894752025604248, 0.00786198377609253, 0.007805088043212891, 0.007929855823516846, 0.007884384155273438, 0.007882719993591308, 0.00791868782043457, 0.008069024085998536, 0.007810304164886475, 0.008187871932983398, 0.007899807929992677, 0.007882495880126954, 0.008255743980407714, 0.007976960182189942, 0.007835072040557861, 0.007776991844177246, 0.007804927825927735, 0.007778143882751465, 0.007823359966278077, 0.007865856170654297, 0.00783571195602417, 0.007838175773620606, 0.00782755184173584, 0.007880576133728027, 0.007891104221343995, 0.007972832202911378, 0.007822400093078613, 0.008010687828063965, 0.007958399772644042, 0.00787062406539917, 
0.007886559963226318, 0.00783903980255127, 0.008100671768188477, 0.007882207870483398, 0.007869311809539795, 0.007897823810577393, 0.007964960098266602, 0.007955103874206544, 0.007885024070739747, 0.00785590410232544, 0.007851520061492919, 0.008036864280700684, 0.0078438401222229, 0.007868192195892333, 0.007864543914794922, 0.007858176231384278, 0.007753568172454834, 0.007726880073547363, 0.0077209601402282715, 0.007747968196868896, 0.007753791809082031, 0.008316191673278808, 0.007766687870025634, 0.0076687679290771485, 0.007762911796569824, 0.007648672103881836, 0.007789279937744141, 0.0077597441673278806, 0.007685664176940918, 0.007657824039459228, 0.007416863918304443, 0.0077424321174621585, 0.008713631629943848, 0.008604255676269532, 0.007800640106201172, 0.007798655986785889, 0.010604864120483398, 0.010345727920532226, 0.00774015998840332, 0.0077036161422729494, 0.007779263973236084, 0.007768064022064209, 0.007989247798919678, 0.00783516788482666, 0.007768544197082519, 0.0078438401222229, 0.007811071872711181, 0.007849760055541992, 0.00779695987701416, 0.007882751941680909, 0.007749887943267822, 0.007751423835754395, 0.007839295864105224, 0.007727551937103272, 0.007956543922424316, 0.00774345588684082, 0.0076771841049194335, 0.007759871959686279, 0.007639008045196533, 0.0076193280220031735, 0.007620607852935791, 0.0076871681213378906, 0.007641536235809326, 0.007628799915313721, 0.007608160018920899, 0.007713727951049805, 0.007583424091339112, 0.007567455768585205, 0.007679872035980225, 0.007716224193572998, 0.007697152137756348, 0.007740575790405274, 0.007795008182525634, 0.007825503826141358, 0.007876063823699951, 0.007885280132293702, 0.00809443187713623, 0.007911200046539307, 0.007802879810333252, 0.007788544178009033, 0.007872064113616944, 0.007825632095336914, 0.00776358413696289, 0.007761792182922363, 0.007778783798217773, 0.008085760116577148, 0.007895103931427002, 0.007749184131622315, 0.007786240100860596, 0.0077872958183288574, 0.007818751811981202, 0.0077348160743713375, 0.007764351844787598, 0.007409120082855224, 0.007708864212036133, 0.0076457920074462895, 0.007625984191894531, 0.007644095897674561, 0.007625823974609375, 0.00766537618637085, 0.007643648147583008, 0.007678463935852051, 0.007606272220611572, 0.00756499195098877, 0.007643455982208252, 0.007530335903167725, 0.0076464319229125974, 0.007610655784606933, 0.007631328105926514, 0.00762883186340332, 0.007678112030029297, 0.007664991855621338, 0.007930528163909913, 0.007794688224792481, 0.007735167980194092, 0.009314432144165039, 0.007906720161437989, 0.007797344207763672, 0.008218624114990235, 0.008138303756713866, 0.007950784206390381, 0.007968063831329347, 0.007819744110107421, 0.007801055908203125, 0.008112256050109862, 0.00788262414932251, 0.0078063678741455075, 0.00776416015625, 0.00784835195541382, 0.007812960147857666, 0.007747744083404541, 0.008130559921264649, 0.007884448051452636, 0.007827231884002686, 0.007876800060272218, 0.007842175960540772, 0.007809023857116699, 0.007803135871887207, 0.007824543952941895, 0.007791200160980225, 0.007785920143127441, 0.007889599800109863, 0.007892896175384521, 0.00790934419631958, 0.007972864151000977, 0.007940095901489258, 0.007812543869018555, 0.00788643217086792, 0.00785097599029541, 0.008171072006225586, 0.007940896034240722, 0.007941792011260987, 0.008008992195129395, 0.007918303966522216, 0.007847424030303956, 0.007842463970184326, 0.0074973440170288085, 0.007815552234649659, 0.007813119888305664, 0.007840767860412597, 0.007865344047546387, 0.008202239990234375, 
0.007886079788208008, 0.00787497615814209, 0.007847551822662353, 0.01065443229675293, 0.010317631721496582, 0.007870272159576416, 0.007819647789001466, 0.007829504013061523, 0.007790815830230713, 0.0077285442352294925, 0.007944575786590576, 0.007763391971588135, 0.007737184047698974, 0.007779039859771728, 0.007811071872711181, 0.007789984226226807, 0.0077707200050354, 0.007651072025299072, 0.00762713623046875, 0.00763862419128418, 0.007587423801422119, 0.007648128032684326, 0.0076817598342895504, 0.0077227201461792, 0.007713247776031494, 0.007580992221832276, 0.007798848152160645, 0.00754307222366333, 0.007614528179168701, 0.007745728015899658, 0.00871014404296875, 0.008159232139587403, 0.007895040035247802, 0.007792160034179687, 0.00783788776397705, 0.007723296165466308, 0.007825503826141358, 0.007712063789367676, 0.007715424060821533, 0.007818336009979249, 0.007650207996368408, 0.008738816261291504, 0.007712768077850342, 0.007687583923339843, 0.0076204161643981935, 0.007699391841888428, 0.0076696000099182125, 0.007686143875122071, 0.007755775928497314, 0.007741439819335938, 0.007692287921905518, 0.007673439979553223, 0.007600543975830078, 0.007602015972137451, 0.007819424152374268, 0.0077405118942260745, 0.00765225601196289, 0.007348415851593018, 0.007876416206359864, 0.0075998401641845705, 0.007616991996765137, 0.007608320236206055, 0.007501823902130127, 0.007542208194732666, 0.007500351905822754, 0.007467360019683838, 0.008111647605895996, 0.007546144008636475, 0.0077528319358825684, 0.007601215839385987, 0.007781248092651367, 0.0076100797653198245, 0.007540800094604492, 0.007636127948760986, 0.007563488006591797, 0.0076130561828613285, 0.007581056118011475, 0.007504511833190918, 0.007600031852722168, 0.007566592216491699, 0.008112128257751466, 0.007596896171569824, 0.00759603214263916, 0.007834784030914307, 0.007805600166320801, 0.007772255897521973, 0.007661695957183838, 0.007800479888916015, 0.0076776638031005855, 0.0076416640281677245, 0.007536672115325928, 0.007605696201324463, 0.007789120197296142, 0.007571455955505371, 0.0075632638931274416, 0.007705920219421387, 0.007920127868652344, 0.009857215881347657, 0.009547807693481446, 0.008760831832885741, 0.007659999847412109, 0.007700479984283447, 0.007683072090148926, 0.007627359867095947, 0.007669407844543457, 0.007750592231750488, 0.007655231952667236, 0.0076902399063110355, 0.007716864109039307, 0.007966720104217529, 0.007788544178009033, 0.007772384166717529, 0.007705535888671875, 0.00772105598449707, 0.0077803201675415035, 0.007936800003051758, 0.010061183929443359, 0.009454208374023438, 0.007743616104125977, 0.007902080059051514, 0.0073045120239257815, 0.0076154241561889645, 0.007605696201324463, 0.007606847763061524, 0.007945343971252442, 0.007562272071838379, 0.007563104152679443, 0.007507967948913574, 0.007713056087493896, 0.0076953921318054196, 0.007901023864746094, 0.00777507209777832, 0.00789254379272461, 0.01063980770111084, 0.0102456636428833, 0.007802527904510498, 0.007738175868988037, 0.007794112205505371, 0.007740992069244385, 0.0076912641525268555, 0.008126560211181641, 0.008095647811889648, 0.007735648155212402, 0.007740128040313721, 0.00769324779510498, 0.0076267518997192385, 0.007591455936431885, 0.007578271865844727, 0.007643167972564697, 0.007601952075958252, 0.007646975994110107, 0.007608799934387207, 0.007591775894165039, 0.007631968021392823, 0.00763808012008667, 0.007606272220611572, 0.007694111824035644, 0.007491583824157715, 0.007563488006591797, 0.00797379207611084, 0.0076830401420593265, 
0.007516064167022705, 0.007573503971099854, 0.007466591835021972, 0.007456480026245117, 0.007459584236145019, 0.0074930558204650876, 0.007628543853759766, 0.007584191799163818, 0.007577919960021972, 0.007597407817840576, 0.007533215999603272, 0.007505248069763184, 0.007559391975402832, 0.007528895854949951, 0.00752019214630127, 0.00747321605682373, 0.007743616104125977, 0.0077064957618713376, 0.007682047843933106, 0.007771520137786865, 0.007625343799591064, 0.007604479789733887, 0.007567488193511963, 0.007731200218200684, 0.007787903785705566, 0.007749472141265869, 0.00774838399887085, 0.007729152202606201, 0.007735104084014893, 0.007766079902648926, 0.00781283187866211, 0.007708479881286621, 0.007684800148010254, 0.007752895832061768, 0.007885536193847657, 0.007786496162414551, 0.0078106880187988285, 0.007747424125671387, 0.007851967811584473, 0.007634687900543213, 0.007578559875488281, 0.007550784111022949, 0.007682144165039062, 0.007641088008880615, 0.007584959983825684, 0.007639711856842041, 0.007704192161560058, 0.007688064098358155, 0.007707263946533203, 0.007671679973602295, 0.007687839984893799, 0.007721471786499024, 0.007659135818481445, 0.007644896030426025, 0.007887519836425782, 0.0076902079582214355, 0.0075838398933410645, 0.007589216232299805, 0.007544447898864746, 0.007582687854766846, 0.007548927783966064, 0.007623712062835694, 0.0075222721099853515, 0.007488192081451416, 0.007428512096405029, 0.007489439964294434, 0.007620672225952149, 0.007616447925567627, 0.007566463947296142, 0.007757887840270996, 0.007818048000335693, 0.007771967887878418, 0.0076672320365905764, 0.007609151840209961, 0.0075630397796630855, 0.007612480163574219, 0.007646944046020508, 0.0077560958862304685, 0.007720608234405518, 0.007712831974029541, 0.007721216201782227, 0.00799126386642456, 0.007780288219451904, 0.007733344078063965, 0.007941247940063477, 0.007664544105529785, 0.008185055732727051, 0.008608768463134766, 0.008070752143859864, 0.007770207881927491, 0.007992832183837891, 0.00767855978012085, 0.007902207851409913, 0.0078057279586791995, 0.007734687805175782, 0.007709504127502442, 0.00766374397277832, 0.007657567977905274, 0.007714911937713623, 0.00769708776473999, 0.007688511848449707, 0.007832032203674316, 0.007757279872894287, 0.007684512138366699, 0.007723296165466308, 0.007725344181060791, 0.007652703762054443, 0.007717440128326416, 0.00785209608078003, 0.007991327762603759, 0.007967648029327392, 0.007712831974029541, 0.007692543983459472, 0.0077192320823669435, 0.007708928108215332, 0.007538015842437744, 0.007570047855377197, 0.007401440143585205, 0.007450784206390381, 0.007743391990661621, 0.007693759918212891, 0.007668511867523193, 0.0075568962097167965, 0.007614463806152344, 0.0075980801582336424, 0.00759603214263916, 0.007624224185943603, 0.007636544227600098, 0.007654304027557373, 0.007970816135406494, 0.00760211181640625, 0.007915584087371827, 0.007562655925750733, 0.007576511859893799, 0.007482048034667969, 0.007412703990936279, 0.007349728107452392, 0.007346720218658447, 0.007522304058074952, 0.00759603214263916, 0.007723008155822754, 0.007622655868530274, 0.00774348783493042, 0.007780704021453857, 0.007786143779754639, 0.007924992084503174, 0.007771935939788819, 0.007812064170837402, 0.0073786239624023435, 0.007806911945343018, 0.007731488227844238, 0.00768995189666748, 0.007665503978729248, 0.00756547212600708, 0.007640960216522216, 0.007684224128723145, 0.0076186881065368655, 0.007569536209106446, 0.0078023681640625, 0.007702816009521485, 0.007598048210144043, 
0.007600128173828125, 0.007771359920501709, 0.007680799961090088, 0.007746848106384277, 0.0077506561279296875, 0.007714144229888916, 0.007778719902038574, 0.00828822422027588, 0.007937632083892822, 0.007917695999145508, 0.0079137601852417, 0.007907135963439942, 0.007839935779571533, 0.007833631992340087, 0.007850272178649902, 0.00786198377609253, 0.007740543842315674, 0.007805791854858399, 0.007933152198791505, 0.007856480121612549, 0.007835135936737061, 0.007730112075805664, 0.007825056076049804, 0.007809375762939453, 0.00774124813079834, 0.00786236810684204, 0.007751584053039551, 0.008145088195800782, 0.007864319801330566, 0.007837503910064698, 0.007917600154876709, 0.007728447914123535, 0.007756671905517578, 0.007655392169952393, 0.007620607852935791, 0.0077003521919250485, 0.007737343788146973, 0.007839295864105224, 0.007815423965454102, 0.007822944164276122, 0.007749375820159912, 0.007715807914733887, 0.007798399925231934, 0.007692639827728272, 0.007659552097320557, 0.0076771841049194335, 0.008061696052551269, 0.00794758415222168, 0.007779007911682129, 0.007732895851135254, 0.007422080039978027, 0.007862783908843995, 0.007956639766693115, 0.007886591911315919, 0.007833759784698486, 0.007769567966461182, 0.007797056198120117, 0.0077907519340515135, 0.0076876158714294434, 0.007785215854644776, 0.007710527896881103, 0.00773529577255249, 0.007783904075622559, 0.007788735866546631, 0.007893343925476075, 0.007995391845703125, 0.007926976203918457, 0.008134752273559571, 0.00798790407180786, 0.00796777582168579, 0.007875135898590088, 0.007941952228546142, 0.007793280124664306, 0.007921152114868164, 0.00793398380279541, 0.007866847991943359, 0.00782697582244873, 0.007872032165527345, 0.007734208106994629, 0.00785103988647461, 0.007805920124053955, 0.007842912197113036, 0.007822144031524658, 0.007802976131439209, 0.008071071624755859, 0.007884223937988282, 0.00787113618850708, 0.007833695888519288, 0.007802783966064453, 0.0077305278778076175, 0.007688543796539307, 0.0076817917823791505, 0.007692512035369873, 0.007805280208587647, 0.007817215919494629, 0.007747712135314941, 0.007832736015319823, 0.007850719928741454, 0.007802879810333252, 0.007749631881713868, 0.008091039657592773, 0.00788646411895752, 0.007725088119506836, 0.007742656230926514, 0.00772435188293457, 0.007747647762298584, 0.007739327907562256, 0.007716991901397705, 0.007754047870635986, 0.007696383953094482, 0.008067071914672852, 0.007771840095520019, 0.0077760319709777835, 0.007587520122528077, 0.007936607837677002, 0.007925504207611084, 0.007878111839294434, 0.007809023857116699, 0.00786246395111084, 0.007834112167358399, 0.00785584020614624, 0.007844223976135253, 0.007874303817749024, 0.007915296077728272, 0.007866975784301757, 0.0078089280128479, 0.007856095790863037, 0.008140543937683105, 0.008014080047607421, 0.007815167903900147, 0.00785423994064331, 0.007806816101074219, 0.007768256187438965, 0.007781472206115723, 0.007729887962341309, 0.00786959981918335, 0.007815872192382813, 0.007895199775695801, 0.007870592117309571, 0.007870336055755616, 0.00787772798538208, 0.00811680030822754, 0.008073568344116211, 0.007833600044250488, 0.007804927825927735, 0.0077023677825927735, 0.007673823833465576, 0.007675648212432861, 0.0077216320037841795, 0.007764800071716309, 0.007676896095275879, 0.007671999931335449, 0.00774124813079834, 0.007667712211608887, 0.007956480026245117, 0.007888512134552002, 0.007913536071777343, 0.007897215843200684, 0.007887392044067383, 0.007881440162658692, 0.007932064056396484, 0.007930655956268311, 
0.007895040035247802, 0.007979008197784423, 0.00796447992324829, 0.007917471885681152, 0.00797648000717163, 0.008012831687927246, 0.007820608139038086, 0.007678368091583252, 0.0077578239440917966, 0.00782863998413086, 0.007826272010803223, 0.007888895988464355, 0.007874783992767333, 0.008054559707641601]",tokens/s,127.37420918936128,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -32391,7 +32391,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): 
+4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -32535,7 +32535,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -32723,7 +32723,7 @@ ChildProcessError: Traceback (most recent call last): ValueError: RecurrentGemmaForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. 
If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -32840,7 +32840,7 @@ AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,7165.108224,11446.124544,0.0,11043.602432,10644.85888,s,1,15.7751533203125,15.7751533203125,0.0,15.7751533203125,15.7751533203125,15.7751533203125,15.7751533203125,[15.7751533203125],,kWh,0.00024206430987082586,2.669443745340421e-05,0.00010852758682200481,0.0003772863341462349,,MB,3060.89984,11464.998912,0.0,11047.796736,10193.631744,s,10,3.767375183105468,0.37673751831054686,0.0007211279943865331,0.37685900878906253,0.3774028839111328,0.3775787612915039,0.37771946319580074,"[0.37504647827148435, 0.37696453857421874, 0.3762663269042969, 0.37660128784179686, 0.3770697021484375, 
0.37728143310546874, 0.37675347900390627, 0.37736380004882814, 0.37627349853515624, 0.377754638671875]",tokens/s,679.5181991642727,kWh,1.1012407687036822e-05,1.2142990624317782e-06,7.313740418888799e-06,1.95404471683574e-05,tokens/kWh,13101030.78984552,MB,3064.385536,11464.998912,0.0,11047.796736,10216.945152,s,10,31.02382958984375,3.102382958984375,0.005613867344000379,3.1036630859375,3.1080973876953126,3.1085290405273436,3.1088743627929687,"[3.102741943359375, 3.093324462890625, 3.103872314453125, 3.0974462890625, 3.10800146484375, 3.106240966796875, 3.092713134765625, 3.107074462890625, 3.103453857421875, 3.108960693359375]",tokens/s,20.306970748905954,kWh,7.523066486671181e-05,8.298192851494044e-06,4.991572203131186e-05,0.0001334445797495177,tokens/kWh,472106.0991630699,,s,630,31.021249328613287,0.04924007829938616,0.0008027221667489253,0.04911606407165527,0.04976451644897461,0.05023721599578858,0.052640840911865244,"[0.05024038314819336, 0.04905779266357422, 0.048584705352783204, 0.04890828704833984, 0.04904547119140625, 0.04886937713623047, 0.04877519989013672, 0.04880384063720703, 0.04896768188476563, 0.04900044631958008, 0.04938150405883789, 0.049030208587646486, 0.049476417541503906, 0.050016254425048826, 0.04987289428710937, 0.049554656982421875, 0.04975392150878906, 0.050340225219726566, 0.04948847961425781, 0.0495753288269043, 0.04948233413696289, 0.04878745651245117, 0.048791553497314455, 0.04864419174194336, 0.04878921508789062, 0.04889728164672852, 0.048765888214111326, 0.04887756729125976, 0.05234483337402344, 0.04929740905761719, 0.04937039947509766, 0.049175262451171875, 0.04952473449707031, 0.049246177673339844, 0.05009555053710937, 0.04971990585327148, 0.049512001037597654, 0.049303966522216795, 0.049231105804443356, 0.04979779052734375, 0.04951571273803711, 0.0493076171875, 0.04907926559448242, 0.04895539093017578, 0.04894425582885742, 0.04868185424804688, 0.048791553497314455, 0.04906732940673828, 0.0488897590637207, 0.04874934387207031, 0.04868710327148437, 0.049255809783935546, 0.04968902587890625, 0.04926268768310547, 0.04879801559448242, 0.04938310241699219, 0.04891247940063476, 0.0487014389038086, 0.04957782363891602, 0.048680702209472654, 0.049118846893310544, 0.04926748657226562, 0.04879564666748047, 0.049883777618408204, 0.049315391540527345, 0.04951408004760742, 0.04905660629272461, 0.04916403198242188, 0.04918422317504883, 0.048971904754638675, 0.04901955032348633, 0.05396275329589844, 0.04940390396118164, 0.05386579132080078, 0.048767646789550784, 0.048672447204589846, 0.04938582229614258, 0.049240062713623044, 0.048901599884033205, 0.048974048614501955, 0.04921990585327148, 0.04903936004638672, 0.048844799041748044, 0.04919094467163086, 0.0492441291809082, 0.04924934387207031, 0.04945948791503906, 0.04889667129516601, 0.04871097564697266, 0.048833217620849606, 0.04887551879882813, 0.049040607452392575, 0.04862179183959961, 0.0484890251159668, 0.048541694641113284, 0.04860243225097656, 0.04854179382324219, 0.04856668853759766, 0.04830227279663086, 0.048464897155761716, 0.04867974472045898, 0.04851116943359375, 0.0487374382019043, 0.04891324615478516, 0.04849868774414062, 0.048613086700439456, 0.049067615509033206, 0.04874924850463867, 0.048575809478759766, 0.04894704055786133, 0.0488702392578125, 0.0490332145690918, 0.04926259231567383, 0.04937318420410156, 0.048965633392333986, 0.048801631927490235, 0.048543712615966794, 0.04864223861694336, 0.04841062545776367, 0.04854988861083984, 0.04860704040527344, 0.048660671234130856, 0.049608062744140625, 
0.04972812652587891, 0.049313728332519534, 0.04935440063476562, 0.05031731033325195, 0.04991727828979492, 0.04918339157104492, 0.0488787841796875, 0.0487083854675293, 0.04862515258789062, 0.04865209579467773, 0.04852134323120117, 0.04856659317016602, 0.04872582244873047, 0.0516426887512207, 0.049987743377685544, 0.04913913726806641, 0.049467967987060546, 0.048905632019042966, 0.04859145736694336, 0.04847206497192383, 0.049036479949951174, 0.048501567840576174, 0.05274755096435547, 0.04948054504394531, 0.04923747253417969, 0.04924662399291992, 0.04898739242553711, 0.04867488098144531, 0.04876544189453125, 0.04929145431518555, 0.059176319122314455, 0.04935465621948242, 0.04902166366577149, 0.048904129028320316, 0.049883201599121095, 0.04915135955810547, 0.0490563850402832, 0.04899580764770508, 0.04883305740356445, 0.048852481842041016, 0.049637889862060545, 0.049154048919677736, 0.04901683044433594, 0.04976025772094726, 0.049168384552001954, 0.048721214294433594, 0.04869337463378906, 0.04854022216796875, 0.04859270477294922, 0.048447681427001954, 0.048332607269287106, 0.04883065414428711, 0.04851286315917969, 0.04845792007446289, 0.0487276496887207, 0.04977606582641601, 0.049540031433105466, 0.04920070266723633, 0.04924870300292969, 0.04895948791503906, 0.04876438522338867, 0.048605342864990235, 0.0488513298034668, 0.04877212905883789, 0.04913046264648437, 0.048683006286621096, 0.05006099319458008, 0.04924857711791992, 0.04892671966552734, 0.04877721786499024, 0.049317886352539066, 0.04899020767211914, 0.0493568000793457, 0.04933631896972656, 0.04990156936645508, 0.04955305480957031, 0.049146240234375, 0.04889596939086914, 0.04940521621704102, 0.04927139282226563, 0.049495521545410155, 0.049001087188720704, 0.048686241149902346, 0.04872211074829102, 0.04876972961425781, 0.04882947158813476, 0.04870652770996094, 0.04893036651611328, 0.049393985748291014, 0.04902659225463867, 0.049111038208007815, 0.04917049789428711, 0.04881856155395508, 0.04894940948486328, 0.04897788619995117, 0.04904758453369141, 0.04915971374511719, 0.04911718368530273, 0.049277408599853516, 0.05003868865966797, 0.05005526351928711, 0.04907350540161133, 0.04858492660522461, 0.04870598220825195, 0.04879087829589844, 0.048758785247802736, 0.04888643264770508, 0.048922622680664066, 0.04985164642333984, 0.04883942413330078, 0.048945056915283204, 0.0497553596496582, 0.04885593414306641, 0.04909056091308594, 0.049476673126220706, 0.04926537704467773, 0.0489813117980957, 0.048933502197265624, 0.0486976318359375, 0.04904719924926758, 0.04938313674926758, 0.04918102264404297, 0.049324321746826175, 0.049186206817626955, 0.04919766235351562, 0.04936908721923828, 0.0496800651550293, 0.04966022491455078, 0.04927840042114258, 0.052379585266113284, 0.04981356811523437, 0.050337791442871094, 0.04917862319946289, 0.049219070434570314, 0.0497628173828125, 0.04880342483520508, 0.048761249542236325, 0.04895318222045898, 0.04866057586669922, 0.048662593841552734, 0.048486400604248046, 0.04905779266357422, 0.04879753494262695, 0.04909616088867187, 0.04935750579833984, 0.04918272018432617, 0.04955692672729492, 0.04900268936157227, 0.050382465362548826, 0.049172286987304685, 0.04869011306762695, 0.0486583366394043, 0.04847420883178711, 0.048793182373046876, 0.05048556900024414, 0.04910704040527344, 0.04887478256225586, 0.04907843017578125, 0.055575103759765626, 0.050028545379638675, 0.051509246826171876, 0.04925439834594727, 0.048914432525634766, 0.04897702407836914, 0.04931878280639648, 0.050496673583984374, 0.04901113510131836, 
0.04887798309326172, 0.049161888122558596, 0.05083580780029297, 0.04923577499389648, 0.04952492904663086, 0.04863974380493164, 0.04887356948852539, 0.04893689727783203, 0.04909065628051758, 0.0487213134765625, 0.04879433441162109, 0.04881331253051758, 0.04905558395385742, 0.04884492874145508, 0.04866438293457031, 0.04927791976928711, 0.050233345031738284, 0.04866048049926758, 0.048684894561767576, 0.04861148834228515, 0.04870095825195313, 0.04856217575073242, 0.04921392059326172, 0.04873769760131836, 0.0491209602355957, 0.05044224166870117, 0.05360639953613281, 0.04993212890625, 0.04921155166625977, 0.0489881591796875, 0.04925798416137695, 0.04962128067016602, 0.049255905151367185, 0.04902169418334961, 0.049137664794921876, 0.052910079956054686, 0.04929724884033203, 0.049275039672851566, 0.04902092742919922, 0.04896112060546875, 0.049162113189697265, 0.04915980911254883, 0.04976323318481445, 0.04933631896972656, 0.05172633743286133, 0.0494202880859375, 0.04907417678833008, 0.0490588493347168, 0.04947353744506836, 0.04943743896484375, 0.049350879669189454, 0.04966195297241211, 0.049293312072753906, 0.04930559921264648, 0.04936240005493164, 0.04890671920776367, 0.048793567657470706, 0.04868310546875, 0.04856175994873047, 0.048679328918457034, 0.04864527893066406, 0.04862432098388672, 0.04847737503051758, 0.04878550338745117, 0.04856419372558594, 0.049228702545166016, 0.049288543701171875, 0.04894287872314453, 0.04888051223754883, 0.04926054382324219, 0.0492564468383789, 0.04921654510498047, 0.0495052490234375, 0.04974591827392578, 0.04914694213867187, 0.049197822570800784, 0.049104640960693356, 0.04878790283203125, 0.04927644729614258, 0.04914838409423828, 0.049079807281494144, 0.04885964965820312, 0.04860470581054688, 0.04879206466674805, 0.04880121612548828, 0.04905014419555664, 0.04890419387817383, 0.04867071914672851, 0.05045616149902344, 0.04965961456298828, 0.049380321502685544, 0.049358463287353514, 0.04940428924560547, 0.049014751434326174, 0.04897999954223633, 0.04872806549072266, 0.0487116813659668, 0.048745918273925784, 0.04884307098388672, 0.048629310607910155, 0.048398399353027345, 0.04834921646118164, 0.04848495864868164, 0.0486316146850586, 0.04885023880004883, 0.049200000762939455, 0.04917452621459961, 0.04902844619750977, 0.04867139053344727, 0.048797695159912106, 0.04849868774414062, 0.048584320068359374, 0.04852159881591797, 0.04910847854614258, 0.04910476684570313, 0.04934873580932617, 0.04916044616699219, 0.04916016006469726, 0.04909904098510742, 0.049307647705078124, 0.0490618896484375, 0.05021897506713867, 0.049084449768066404, 0.048906238555908206, 0.04914790344238281, 0.048996353149414064, 0.04886528015136719, 0.04869254302978516, 0.0489315185546875, 0.04877926254272461, 0.04845977783203125, 0.04836761474609375, 0.04848166275024414, 0.04849676895141602, 0.04876339340209961, 0.04916979217529297, 0.04955609512329102, 0.04923392105102539, 0.04899785614013672, 0.04902556610107422, 0.050114559173583983, 0.04934656143188477, 0.049637374877929685, 0.04954662322998047, 0.04939772796630859, 0.049189537048339844, 0.049108894348144534, 0.04914339065551758, 0.04903923034667969, 0.05156099319458008, 0.04971699142456055, 0.050459743499755856, 0.04937308883666992, 0.049277374267578125, 0.04960678482055664, 0.04911967849731445, 0.04913151931762695, 0.0489117431640625, 0.049095104217529294, 0.049141216278076175, 0.04908224105834961, 0.05016169738769531, 0.04959494400024414, 0.05002880096435547, 0.04968447875976562, 0.04957388687133789, 0.049532928466796876, 0.050051071166992187, 
0.04957523345947266, 0.04962783813476562, 0.04937843322753906, 0.04939209747314453, 0.04944527816772461, 0.05181644821166992, 0.04980275344848633, 0.04931020736694336, 0.04918067169189453, 0.049170433044433595, 0.04942006301879883, 0.049492191314697266, 0.04965372848510742, 0.04935641479492187, 0.04910745620727539, 0.04890537643432617, 0.04907855987548828, 0.04888825607299805, 0.04878134536743164, 0.04919500732421875, 0.049142879486083986, 0.048992767333984374, 0.04878976058959961, 0.048553600311279296, 0.04866457748413086, 0.048678462982177734, 0.04879849624633789, 0.04935084915161133, 0.048846847534179685, 0.04866252899169922, 0.04871372985839844, 0.049135616302490234, 0.049249599456787106, 0.04934931182861328, 0.04948976135253906, 0.04950147247314453, 0.04921433639526367, 0.04905331039428711, 0.04888614273071289, 0.04923324966430664, 0.049619617462158205, 0.04933539199829102, 0.04920739364624024, 0.049310527801513675, 0.04939756774902344, 0.049229183197021485, 0.050276065826416014, 0.05043024063110352, 0.05038819122314453, 0.04954329681396484, 0.04952105712890625, 0.04923622512817383, 0.04931484985351563, 0.0493199348449707, 0.0496258544921875, 0.04868438339233398, 0.05087731170654297, 0.049827136993408204, 0.04867961502075195, 0.04856422424316406, 0.04865228652954102, 0.04924399948120117, 0.049473697662353516, 0.04926668930053711, 0.048899326324462894, 0.04926950454711914, 0.04922777557373047, 0.049278209686279294, 0.048783199310302734, 0.04868495941162109, 0.048856128692626954, 0.04860886383056641, 0.048701793670654296, 0.048979839324951174, 0.04928438568115234, 0.04895209503173828, 0.04895126342773438, 0.04901855850219727, 0.04901315307617188, 0.04885830307006836, 0.049223743438720706, 0.04920703887939453, 0.04938444900512695, 0.04923187255859375, 0.04905574417114258, 0.04911494445800781, 0.049197086334228514, 0.04912470245361328, 0.04955424118041992, 0.04965785598754883, 0.0493834228515625, 0.049403873443603516, 0.04920556640625, 0.04933776092529297, 0.049250080108642576, 0.04927337646484375, 0.04934633636474609, 0.04943689727783203, 0.049446910858154294, 0.04906393432617188, 0.049797119140625, 0.04929536056518555, 0.049239967346191404, 0.049040512084960936, 0.049189281463623044, 0.04902511978149414, 0.048989662170410155, 0.049337345123291014, 0.04907212829589844, 0.05012688064575195, 0.04905923080444336, 0.048947681427001954, 0.04867289733886719, 0.05107712173461914, 0.04885504150390625, 0.04936492919921875, 0.049159934997558594, 0.04962518310546875, 0.04907030487060547, 0.04877238464355469, 0.049232608795166014, 0.04947763061523437, 0.049167808532714845, 0.0488289909362793, 0.04887756729125976, 0.04877449417114258, 0.048707775115966793, 0.049037792205810546, 0.04940185546875, 0.04909465789794922, 0.04915526580810547, 0.049279743194580075, 0.049027137756347657, 0.04953664016723633, 0.04960908889770508, 0.04953011322021485, 0.049566017150878904, 0.04896137619018555, 0.04945161437988281, 0.04911494445800781, 0.049672382354736325, 0.05024563217163086, 0.0494219856262207, 0.049025089263916015, 0.049000736236572265, 0.04926464080810547, 0.05017184066772461, 0.04940982437133789, 0.04916662216186524, 0.04910076904296875, 0.04924419021606445, 0.049459201812744144, 0.049430526733398435, 0.049860607147216796, 0.04900044631958008, 0.049269920349121095, 0.049382240295410156, 0.04905574417114258, 0.04906198501586914, 0.04964956665039062, 0.04956470489501953, 0.05219631958007812, 0.04972544097900391, 0.05018009567260742, 0.04894425582885742, 0.04874911880493164, 0.04874886322021484, 
0.04891638565063477, 0.049082176208496094, 0.04928745651245117, 0.04905574417114258, 0.049805313110351565]",tokens/s,20.30865982624699,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -32885,10 +32885,10 @@ ChildProcessError: Traceback (most recent call last): self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 6.12 MiB is free. Process 110006 has 14.73 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 241.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 6.12 MiB is free. Process 108790 has 14.73 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 241.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -32958,7 +32958,7 @@ ChildProcessError: Traceback (most recent call last): ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -33049,7 +33049,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -33138,7 +33138,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -33253,7 +33253,7 @@ RuntimeError: q_weight and gptq_scales have incompatible shapes " 4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1558.757376,1861.156864,0.0,1465.909248,1358.169088,s,1,8.9897099609375,8.9897099609375,0.0,8.9897099609375,8.9897099609375,8.9897099609375,8.9897099609375,[8.9897099609375],,kWh,5.637756384169279e-05,6.211118228914528e-06,2.0644460960006494e-05,8.323314303061381e-05,,MB,1643.012096,1882.128384,0.0,1472.200704,1356.544512,s,10,0.4452473640441894,0.04452473640441894,0.000230934573435603,0.044446897506713864,0.04470375595092774,0.04491909484863281,0.04509136596679687,"[0.04513443374633789, 0.04442534255981445, 0.044324703216552734, 0.044585472106933595, 0.04443088150024414, 0.04446291351318359, 0.04465590286254883, 0.04453984069824219, 0.04426470565795899, 0.044423168182373046]",tokens/s,5749.612926952506,kWh,1.3550279394869336e-06,1.4942703200066107e-07,8.961992766296999e-07,2.4006542481172944e-06,tokens/kWh,106637596.89708221,MB,1651.46624,1882.128384,0.0,1472.200704,1409.94816,s,10,11.271697265625,1.1271697265624998,0.018579239538430867,1.1324287109375,1.15225517578125,1.153508386230469,1.1545109545898438,"[1.1547615966796876, 1.111167724609375, 1.1320802001953125, 1.1327772216796874, 1.1519766845703125, 1.1377652587890625, 1.1368804931640626, 1.1102855224609376, 1.1004166259765624, 1.1035859375]",tokens/s,55.89220373415232,kWh,3.237725702801701e-05,3.5706078520038297e-06,1.5798369634570826e-05,5.174623451459166e-05,tokens/kWh,1217479.8918409212,,s,630,11.268769048690773,0.017886934997921898,0.0004886429251878175,0.017771887779235843,0.01840783042907715,0.01857043237686157,0.018941482048034673,"[0.018433664321899416, 0.018131328582763673, 0.018302112579345702, 0.01831942367553711, 0.018147775650024414, 0.018092384338378908, 0.018077247619628905, 0.0179238395690918, 0.0179168643951416, 0.017909503936767577, 0.018465791702270508, 0.018035711288452147, 0.01802239990234375, 0.01822719955444336, 0.018233343124389647, 0.018263200759887695, 0.018498271942138673, 0.01828860855102539, 0.01826812744140625, 0.018292928695678713, 0.018290687561035156, 0.018284543991088868, 0.018455808639526367, 0.018359039306640627, 0.01818204879760742, 0.018145376205444336, 0.01825382423400879, 0.018358272552490236, 0.018330848693847657, 0.018278911590576173, 0.01834832000732422, 0.018462656021118164, 0.01850783920288086, 0.018591583251953123, 0.01856492805480957, 0.018301279067993163, 0.018294271469116212, 0.01831065559387207, 0.018435007095336915, 0.01837868881225586, 0.018342016220092773, 0.018309120178222657, 0.01821286392211914, 0.01837808036804199, 0.018365087509155272, 0.018550783157348632, 0.018481151580810547, 0.018245311737060548, 0.018372928619384766, 0.018571296691894532, 0.01852207946777344, 0.018329599380493163, 0.01840742492675781, 0.018327552795410155, 0.01836841583251953, 0.0184116153717041, 0.020241504669189454, 0.018825759887695314, 0.01843849563598633, 0.017876031875610352, 0.017907743453979493, 0.017949567794799805, 0.018101472854614258, 0.019706239700317384, 0.017653087615966796, 0.01745167922973633, 0.017554719924926757, 0.017468128204345703, 0.017345792770385744, 0.017400575637817384, 0.017555456161499023, 0.01745510482788086, 0.017375232696533204, 0.017441856384277345, 0.017454015731811524, 0.01746124839782715, 0.01737094306945801, 0.017497695922851563, 0.018227807998657225, 0.01897881507873535, 0.01772297668457031, 0.01759859275817871, 0.0174881591796875, 0.01740185546875, 0.017373184204101562, 0.017508352279663086, 0.017590272903442384, 0.017483232498168945, 0.017393760681152344, 0.017585599899291992, 0.017953792572021485, 
0.017864479064941406, 0.01753264045715332, 0.01751705551147461, 0.0174400634765625, 0.01744879913330078, 0.01737603187561035, 0.017469375610351563, 0.017360671997070313, 0.017690975189208983, 0.01762099266052246, 0.017502208709716797, 0.017467391967773437, 0.017485824584960938, 0.017831199645996092, 0.01754185676574707, 0.017559551239013673, 0.0175119686126709, 0.017581632614135742, 0.017893312454223632, 0.01783875274658203, 0.017813823699951173, 0.01775152015686035, 0.017729536056518554, 0.01768502426147461, 0.017740896224975586, 0.01778361511230469, 0.017735679626464843, 0.01764156723022461, 0.01778838348388672, 0.01762505531311035, 0.01751206398010254, 0.017428735733032226, 0.017519296646118163, 0.017484832763671875, 0.017649951934814452, 0.017761983871459962, 0.017817375183105468, 0.017662496566772462, 0.01787215995788574, 0.017810144424438477, 0.0175914249420166, 0.017557952880859377, 0.01784876823425293, 0.017641471862792968, 0.017542623519897462, 0.01753273582458496, 0.017749984741210936, 0.0175882568359375, 0.017563648223876953, 0.017638111114501955, 0.017489919662475584, 0.0174202880859375, 0.017505407333374023, 0.017604768753051756, 0.017684415817260744, 0.017590848922729493, 0.017475807189941406, 0.017456703186035157, 0.0175927677154541, 0.017880640029907226, 0.01823583984375, 0.0183474235534668, 0.018221664428710937, 0.018705440521240235, 0.018252351760864257, 0.0183525447845459, 0.018416704177856444, 0.018613183975219726, 0.01829203224182129, 0.018320064544677734, 0.018134912490844726, 0.01815283203125, 0.01830988883972168, 0.018139135360717772, 0.01807535934448242, 0.018112127304077148, 0.018117279052734376, 0.01822105598449707, 0.018089984893798827, 0.01801420783996582, 0.01798921585083008, 0.01802684783935547, 0.018038848876953124, 0.018018207550048827, 0.017981536865234377, 0.018114303588867186, 0.018036991119384765, 0.017897056579589843, 0.017856927871704103, 0.01793769645690918, 0.017805023193359373, 0.01789187240600586, 0.018076128005981445, 0.018232351303100587, 0.018244192123413085, 0.01835865592956543, 0.018327232360839843, 0.018790719985961914, 0.018585887908935547, 0.01828233528137207, 0.018249887466430664, 0.01824358367919922, 0.01825382423400879, 0.018388736724853517, 0.018350175857543945, 0.018569375991821287, 0.01884297561645508, 0.01866819190979004, 0.018275903701782226, 0.01829478454589844, 0.01826041603088379, 0.018229248046875, 0.018318944931030274, 0.01838688087463379, 0.018430431365966796, 0.018650848388671874, 0.018850080490112303, 0.01822105598449707, 0.018206720352172853, 0.018203840255737305, 0.017910591125488283, 0.017886655807495117, 0.017885759353637697, 0.01776416015625, 0.017709247589111327, 0.017762304306030274, 0.0177458553314209, 0.017657920837402342, 0.01765171241760254, 0.01781350326538086, 0.0181712646484375, 0.018135679244995116, 0.017913856506347657, 0.01782921600341797, 0.017889951705932616, 0.01773695945739746, 0.018146047592163084, 0.017671743392944337, 0.01846259117126465, 0.017652288436889648, 0.018015520095825195, 0.017611200332641602, 0.01765318489074707, 0.017382240295410155, 0.017532928466796875, 0.017483135223388672, 0.01807833671569824, 0.017571647644042968, 0.01748601531982422, 0.017460224151611328, 0.017521663665771483, 0.017452768325805664, 0.017518367767333985, 0.018015743255615235, 0.017642080307006838, 0.017667680740356444, 0.017762144088745116, 0.01751139259338379, 0.017573888778686524, 0.017639039993286133, 0.017713535308837892, 0.01848646354675293, 0.018223743438720703, 0.021685823440551758, 0.018537088394165038, 
0.018241535186767577, 0.018126848220825196, 0.0182061767578125, 0.01810259246826172, 0.018261856079101562, 0.01879599952697754, 0.018278656005859376, 0.018562911987304687, 0.01877689552307129, 0.018374176025390626, 0.018432384490966798, 0.018344032287597657, 0.018319360733032225, 0.01840947151184082, 0.018406463623046876, 0.01825267219543457, 0.018075712203979494, 0.018110368728637697, 0.018276447296142577, 0.01839321517944336, 0.01823321533203125, 0.0183306884765625, 0.018526880264282227, 0.018126848220825196, 0.018122495651245116, 0.017789695739746095, 0.017766176223754884, 0.017709056854248048, 0.017751712799072266, 0.017727296829223634, 0.01787673568725586, 0.01772991943359375, 0.017666463851928712, 0.017655487060546874, 0.01764793586730957, 0.017977344512939454, 0.01820979118347168, 0.018195327758789064, 0.01833718490600586, 0.018291423797607422, 0.018280448913574218, 0.01831888008117676, 0.018177663803100586, 0.018223968505859375, 0.018305023193359374, 0.01819647979736328, 0.01840742492675781, 0.01832979202270508, 0.01820038414001465, 0.018339839935302735, 0.018751520156860352, 0.018386911392211915, 0.019165184020996092, 0.01835612869262695, 0.018217056274414063, 0.018187295913696288, 0.018202688217163084, 0.018258848190307618, 0.01803468894958496, 0.018173824310302733, 0.018450559616088866, 0.017936384201049805, 0.01781724739074707, 0.017903968811035155, 0.018753536224365236, 0.017752063751220702, 0.017750015258789064, 0.01763705635070801, 0.01749839973449707, 0.017688608169555663, 0.01768841552734375, 0.017879199981689454, 0.017739776611328126, 0.01779427146911621, 0.017916032791137695, 0.01768649673461914, 0.01768684768676758, 0.017669952392578125, 0.017506879806518556, 0.017498111724853514, 0.017479232788085938, 0.017520511627197265, 0.017436256408691408, 0.017578975677490234, 0.017426431655883787, 0.01763532829284668, 0.017524736404418945, 0.017661951065063478, 0.017672191619873046, 0.0177475528717041, 0.01771356773376465, 0.01756159973144531, 0.01764352035522461, 0.01798963165283203, 0.01803059196472168, 0.01809542465209961, 0.01816032028198242, 0.018155519485473632, 0.018025632858276366, 0.018137727737426758, 0.018221376419067382, 0.018190143585205078, 0.018151519775390625, 0.018310495376586914, 0.018437856674194335, 0.01814214324951172, 0.022026239395141603, 0.01843564796447754, 0.01823711967468262, 0.018156288146972656, 0.018277408599853516, 0.01849350357055664, 0.018315296173095703, 0.018279296875, 0.018765888214111327, 0.018593727111816408, 0.01884364891052246, 0.018498752593994142, 0.018327455520629882, 0.01840764808654785, 0.018301376342773436, 0.018281728744506835, 0.018490560531616212, 0.01826259231567383, 0.01831484794616699, 0.01823299217224121, 0.018136064529418947, 0.018075647354125975, 0.018150720596313476, 0.018121408462524413, 0.018382848739624022, 0.01849750328063965, 0.018589727401733397, 0.018357696533203124, 0.0184202880859375, 0.018241535186767577, 0.018124799728393554, 0.017897472381591797, 0.01786675262451172, 0.01783955192565918, 0.017818208694458007, 0.017838048934936523, 0.017804351806640625, 0.017725439071655275, 0.017660863876342775, 0.017958560943603517, 0.017626880645751953, 0.017711711883544923, 0.0176312313079834, 0.01772537612915039, 0.017649280548095704, 0.017521087646484374, 0.01800115203857422, 0.01811315155029297, 0.018142623901367186, 0.01861222457885742, 0.018684288024902344, 0.01820483207702637, 0.018243776321411134, 0.018142847061157225, 0.01812905693054199, 0.018156991958618165, 0.018180896759033203, 0.018077823638916017, 
0.01808572769165039, 0.01799171257019043, 0.01803004837036133, 0.018307615280151367, 0.018182144165039063, 0.01840742492675781, 0.018261119842529296, 0.018074495315551758, 0.01815705680847168, 0.01811916732788086, 0.017901439666748047, 0.01779520034790039, 0.017795072555541993, 0.017838079452514647, 0.017706207275390625, 0.01803651237487793, 0.017799392700195312, 0.017890079498291016, 0.017682207107543944, 0.017682655334472656, 0.017532352447509766, 0.01745542335510254, 0.017581056594848633, 0.017377439498901366, 0.017377056121826173, 0.017406719207763672, 0.017283071517944337, 0.017319936752319336, 0.01791542434692383, 0.018665952682495116, 0.017520639419555666, 0.0174653434753418, 0.017661951065063478, 0.017550559997558595, 0.0174354248046875, 0.017764352798461915, 0.017429920196533204, 0.017422943115234374, 0.017506303787231444, 0.017496063232421876, 0.017591999053955077, 0.01748204803466797, 0.01744428825378418, 0.017465919494628907, 0.017569215774536132, 0.01744748878479004, 0.01746329689025879, 0.01860745620727539, 0.017494688034057616, 0.017421375274658202, 0.01743929672241211, 0.01742448043823242, 0.017359136581420898, 0.01740563201904297, 0.017487232208251952, 0.017674367904663087, 0.017715423583984376, 0.0176595516204834, 0.01822153663635254, 0.018229408264160155, 0.017934431076049806, 0.017761695861816407, 0.017701696395874024, 0.017780479431152345, 0.017764608383178712, 0.017794111251831054, 0.017777599334716798, 0.01789129638671875, 0.017690656661987304, 0.017732767105102538, 0.018058080673217774, 0.01761894416809082, 0.017561151504516603, 0.017580352783203124, 0.01741632080078125, 0.01755267143249512, 0.017545951843261718, 0.01760870361328125, 0.01764659118652344, 0.017443296432495117, 0.017406496047973632, 0.017514495849609374, 0.017573888778686524, 0.01746086311340332, 0.017567743301391603, 0.017529056549072265, 0.017409824371337892, 0.017345535278320313, 0.01741721534729004, 0.017330175399780275, 0.017441823959350587, 0.017777631759643555, 0.01781488037109375, 0.017603231430053712, 0.017489599227905273, 0.017426624298095703, 0.017418367385864258, 0.01738751983642578, 0.01740390396118164, 0.017518592834472657, 0.01762918472290039, 0.017333343505859376, 0.017375808715820312, 0.01739401626586914, 0.017327360153198242, 0.01739379119873047, 0.017336959838867187, 0.017331743240356447, 0.017359071731567383, 0.01734681510925293, 0.017354751586914064, 0.01737913513183594, 0.017451200485229492, 0.018071296691894532, 0.01765350341796875, 0.017499776840209962, 0.017516576766967773, 0.017371295928955078, 0.01744054412841797, 0.017371583938598632, 0.017609184265136718, 0.01739151954650879, 0.017494112014770507, 0.017469440460205078, 0.01737932777404785, 0.017372928619384765, 0.0175229434967041, 0.017482847213745118, 0.017582815170288087, 0.017360639572143555, 0.017449407577514647, 0.017494016647338868, 0.01742959976196289, 0.0175645751953125, 0.01741619110107422, 0.017369087219238282, 0.01743212890625, 0.017510015487670897, 0.017482559204101564, 0.017479167938232423, 0.01738598442077637, 0.017451007843017577, 0.017464895248413086, 0.01745052719116211, 0.017403871536254882, 0.017465599060058595, 0.017448671340942384, 0.01761484718322754, 0.017519968032836914, 0.017423007965087892, 0.017364320755004884, 0.017498687744140626, 0.017373311996459962, 0.017350528717041017, 0.017426528930664063, 0.01737932777404785, 0.017340063095092773, 0.017350400924682617, 0.017375839233398437, 0.017391263961791994, 0.017305696487426758, 0.017253984451293947, 0.017402240753173828, 0.017335840225219726, 
0.01732275199890137, 0.01739571189880371, 0.01741004753112793, 0.017391616821289063, 0.017393760681152344, 0.01742019271850586, 0.017422143936157226, 0.017413951873779296, 0.017375616073608397, 0.017530879974365234, 0.01754521560668945, 0.02065577507019043, 0.01744316864013672, 0.017487871170043946, 0.017467391967773437, 0.017451007843017577, 0.017356800079345702, 0.017384672164916994, 0.01740060806274414, 0.01741414451599121, 0.017383424758911133, 0.017395679473876952, 0.017338399887084962, 0.017364992141723632, 0.017348159790039064, 0.017337791442871092, 0.01737126350402832, 0.017355648040771485, 0.017284223556518555, 0.017331071853637695, 0.017358400344848632, 0.01739107131958008, 0.017340639114379882, 0.01740444755554199, 0.018694368362426758, 0.018763776779174804, 0.017467391967773437, 0.017411487579345703, 0.017504863739013672, 0.017491455078125, 0.01807756805419922, 0.01774790382385254, 0.017465631484985353, 0.017426271438598633, 0.017433151245117188, 0.017474624633789064]",tokens/s,55.90672745868311,,, 4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1572.00384,1861.156864,0.0,1465.909248,1358.169088,s,1,8.7545458984375,8.7545458984375,0.0,8.7545458984375,8.7545458984375,8.7545458984375,8.7545458984375,[8.7545458984375],,kWh,5.511534694168555e-05,6.072308957361274e-06,1.9198904248007853e-05,8.038656014705468e-05,,MB,1672.114176,1882.128384,0.0,1472.200704,1356.544512,s,10,0.4443775062561035,0.04443775062561035,0.0001839494016502686,0.04437872123718262,0.044650616455078125,0.044781340026855465,0.04488591888427734,"[0.04491206359863281, 0.04436921691894531, 0.044349407196044924, 0.044292064666748045, 0.04443267059326172, 0.044427104949951175, 0.044268577575683594, 0.04431660842895508, 0.04462156677246094, 0.04438822555541992]",tokens/s,5760.8676495984055,kWh,1.3502476793010703e-06,1.4890786084304384e-07,8.933430085805946e-07,2.3924985487247085e-06,tokens/kWh,107001109.83827247,MB,1680.297984,1882.128384,0.0,1472.200704,1409.94816,s,10,11.17914501953125,1.117914501953125,0.003509245620187121,1.1180933227539063,1.122057958984375,1.1229037963867188,1.1235804663085938,"[1.119744873046875, 1.1185635986328124, 1.1198131103515625, 1.1237496337890625, 1.1218699951171875, 1.111178955078125, 1.117623046875, 1.11444482421875, 1.1170386962890626, 1.1151182861328126]",tokens/s,56.354935811219704,kWh,3.26895830231972e-05,3.604478886908341e-06,1.578705922462118e-05,5.2081121134726705e-05,tokens/kWh,1209651.3789906262,,s,630,11.176699878692624,0.017740793458242265,0.00029612535460284,0.017657711982727052,0.0180024995803833,0.01817117118835449,0.019014234676361097,"[0.017864799499511717, 0.017692703247070313, 0.01762735939025879, 0.01767331123352051, 0.01756972885131836, 0.017605600357055665, 0.017727487564086913, 0.017770496368408203, 0.017764352798461915, 0.018087743759155273, 0.017756351470947264, 0.01768387222290039, 0.017666431427001954, 
0.017735391616821288, 0.017735551834106446, 0.017759199142456054, 0.01789507293701172, 0.017671648025512694, 0.017693023681640625, 0.017569055557250978, 0.01766022491455078, 0.017584672927856447, 0.017732736587524413, 0.01768134307861328, 0.01761075210571289, 0.01756972885131836, 0.0176265926361084, 0.018002336502075195, 0.01767647933959961, 0.017547264099121093, 0.017704063415527344, 0.017658239364624025, 0.017627647399902344, 0.017588031768798827, 0.017600191116333007, 0.019212799072265627, 0.01799782371520996, 0.018074911117553712, 0.017809343338012696, 0.017707807540893555, 0.017703935623168944, 0.0176363525390625, 0.017624063491821287, 0.01773465538024902, 0.01764556884765625, 0.017625087738037108, 0.017657312393188476, 0.017555999755859374, 0.017524736404418945, 0.017620096206665038, 0.017707647323608397, 0.01756755256652832, 0.017543615341186522, 0.017555360794067384, 0.017578079223632814, 0.017584127426147463, 0.01762099266052246, 0.01760870361328125, 0.017638879776000975, 0.017523231506347655, 0.020685983657836915, 0.018528480529785157, 0.017826112747192382, 0.01776076889038086, 0.017960960388183594, 0.017564960479736328, 0.017708799362182618, 0.01763206481933594, 0.017829919815063478, 0.01798124885559082, 0.018213184356689453, 0.017772544860839845, 0.017496063232421876, 0.01770086479187012, 0.019812255859375, 0.017743967056274415, 0.01754521560668945, 0.017585344314575195, 0.017621824264526367, 0.017580032348632812, 0.017765888214111326, 0.017696704864501953, 0.017656288146972655, 0.01758415985107422, 0.017571903228759764, 0.017542943954467774, 0.017635488510131837, 0.0175534725189209, 0.017497695922851563, 0.017662303924560547, 0.017550432205200195, 0.01765456008911133, 0.017665632247924806, 0.017691232681274413, 0.0175861759185791, 0.01759846305847168, 0.017547264099121093, 0.017766016006469727, 0.017604991912841796, 0.017635072708129883, 0.01759187126159668, 0.017598207473754884, 0.017524991989135742, 0.017702720642089845, 0.017609600067138673, 0.01765376091003418, 0.01780940818786621, 0.017625087738037108, 0.017719039916992186, 0.017689952850341795, 0.017685407638549804, 0.017700544357299806, 0.01779715156555176, 0.01775644874572754, 0.017759424209594726, 0.01777542304992676, 0.017739072799682617, 0.017791296005249025, 0.01792243194580078, 0.01816166305541992, 0.01804204750061035, 0.017916736602783204, 0.01788217544555664, 0.01785331153869629, 0.01787091255187988, 0.018142784118652344, 0.018132991790771484, 0.01786444854736328, 0.01789673614501953, 0.017529024124145507, 0.017726240158081056, 0.017610240936279296, 0.017617408752441405, 0.017882335662841798, 0.01777129554748535, 0.017719295501708983, 0.017738912582397463, 0.01775292778015137, 0.017672191619873046, 0.01762054443359375, 0.017767936706542968, 0.017610719680786133, 0.017570751190185547, 0.017550975799560546, 0.0176376953125, 0.017705055236816408, 0.017719295501708983, 0.01752182388305664, 0.017559776306152342, 0.017588863372802733, 0.017612800598144532, 0.017969152450561524, 0.017917951583862304, 0.017778688430786133, 0.017795072555541993, 0.01767011260986328, 0.018067487716674803, 0.01765376091003418, 0.01761075210571289, 0.01845248031616211, 0.017936384201049805, 0.017609952926635742, 0.017720096588134764, 0.01785856056213379, 0.01780735969543457, 0.017780736923217775, 0.017870847702026366, 0.01780940818786621, 0.017827840805053712, 0.017803007125854493, 0.017862655639648437, 0.017858816146850587, 0.018030527114868165, 0.01793440055847168, 0.017950368881225587, 0.017889631271362304, 0.01778483200073242, 
0.017675968170166017, 0.017754432678222656, 0.017905664443969727, 0.017966943740844725, 0.017690784454345704, 0.017731584548950196, 0.017762304306030274, 0.01778998374938965, 0.01765216064453125, 0.01776652717590332, 0.01762544059753418, 0.017592384338378907, 0.01809984016418457, 0.017715103149414064, 0.0177139835357666, 0.01764556884765625, 0.017630815505981445, 0.017637792587280272, 0.017663999557495116, 0.0175914249420166, 0.017693567276000976, 0.017924095153808595, 0.01796505546569824, 0.017903615951538086, 0.01804083251953125, 0.01802239990234375, 0.017931936264038086, 0.018085407257080077, 0.018436576843261717, 0.017955167770385742, 0.018520063400268554, 0.017944576263427735, 0.017878656387329103, 0.018010112762451173, 0.017951072692871092, 0.01804086494445801, 0.018077375411987305, 0.01790598487854004, 0.01788857650756836, 0.017867456436157225, 0.01764556884765625, 0.0175631046295166, 0.017591903686523438, 0.01755232048034668, 0.017656831741333007, 0.017802240371704102, 0.017817535400390626, 0.01774188804626465, 0.017612800598144532, 0.017679424285888673, 0.017585088729858398, 0.017526784896850587, 0.017725343704223632, 0.01757939147949219, 0.017593055725097655, 0.017563648223876953, 0.01762099266052246, 0.01764339256286621, 0.017725568771362304, 0.01762918472290039, 0.01761075210571289, 0.017582080841064454, 0.0176200008392334, 0.01784726333618164, 0.01790924835205078, 0.01785251235961914, 0.017886848449707032, 0.01790208053588867, 0.018223392486572267, 0.018255872726440428, 0.01807360076904297, 0.018043903350830077, 0.018037567138671873, 0.018145151138305664, 0.017888927459716798, 0.018495487213134765, 0.018255872726440428, 0.01823468780517578, 0.01816374397277832, 0.017996448516845703, 0.017889280319213868, 0.018147327423095702, 0.017835424423217772, 0.01776291275024414, 0.017864255905151366, 0.017883167266845704, 0.01790403175354004, 0.017874496459960938, 0.018106559753417968, 0.017873151779174805, 0.01803264045715332, 0.01763942337036133, 0.01761894416809082, 0.01763942337036133, 0.017710527420043944, 0.017685056686401367, 0.017625087738037108, 0.017655807495117186, 0.017682207107543944, 0.017637407302856446, 0.017701055526733397, 0.017643232345581055, 0.017583776473999023, 0.01772127914428711, 0.017600992202758788, 0.017665599822998045, 0.017828479766845703, 0.01799580764770508, 0.01789254379272461, 0.017879871368408202, 0.01864089584350586, 0.017898719787597658, 0.017756959915161134, 0.017712480545043947, 0.017676959991455077, 0.0177675838470459, 0.01750921630859375, 0.017657087326049804, 0.017627904891967774, 0.01761484718322754, 0.017669824600219725, 0.01764588737487793, 0.017582080841064454, 0.01802239990234375, 0.017716672897338866, 0.01776083183288574, 0.01840127944946289, 0.017709056854248048, 0.017563648223876953, 0.01769385528564453, 0.017718048095703126, 0.01763539123535156, 0.017718847274780274, 0.017626752853393556, 0.01765193557739258, 0.017635936737060546, 0.01767011260986328, 0.01763248062133789, 0.01763532829284668, 0.0176978874206543, 0.0175664005279541, 0.017688800811767578, 0.017628992080688476, 0.017567935943603515, 0.017504255294799806, 0.01754857635498047, 0.017586624145507813, 0.017481056213378907, 0.017761215209960938, 0.01753900718688965, 0.017598527908325196, 0.017564767837524413, 0.01758812713623047, 0.01756208038330078, 0.017535167694091795, 0.0175097599029541, 0.017572832107543946, 0.01755683135986328, 0.017558176040649413, 0.017508352279663086, 0.017501344680786134, 0.01747235107421875, 0.017522335052490234, 0.017455295562744142, 
0.0175699520111084, 0.017571647644042968, 0.017598400115966795, 0.017530975341796876, 0.017590112686157226, 0.01754319953918457, 0.01757375907897949, 0.017521055221557617, 0.01760771179199219, 0.017504287719726563, 0.0175482234954834, 0.01763865661621094, 0.017676319122314453, 0.017803167343139647, 0.017767135620117187, 0.017647712707519532, 0.01760256004333496, 0.017747968673706056, 0.01787059211730957, 0.01755366325378418, 0.017573888778686524, 0.0175797119140625, 0.017548736572265626, 0.01749900817871094, 0.017528543472290038, 0.017613088607788086, 0.0175861759185791, 0.017557407379150392, 0.01759651184082031, 0.017528831481933595, 0.01762643241882324, 0.01763603210449219, 0.020391040802001954, 0.017656448364257813, 0.017705215454101562, 0.017596351623535157, 0.017549375534057617, 0.017657632827758788, 0.017557727813720704, 0.01763532829284668, 0.017498111724853514, 0.017604608535766602, 0.01753628730773926, 0.01769913673400879, 0.017588191986083985, 0.017680831909179687, 0.01800396728515625, 0.01826812744140625, 0.017889312744140625, 0.01802595138549805, 0.01790822410583496, 0.01778819274902344, 0.01787980842590332, 0.017821407318115233, 0.018286880493164064, 0.017895360946655274, 0.01787091255187988, 0.017726783752441407, 0.017746623992919923, 0.01776950454711914, 0.017630176544189455, 0.017645248413085936, 0.01762131118774414, 0.0176363525390625, 0.017622016906738282, 0.017663423538208007, 0.017558080673217773, 0.017715200424194336, 0.018067455291748045, 0.017674240112304687, 0.017625087738037108, 0.01764512062072754, 0.017682367324829102, 0.017526880264282226, 0.017524703979492188, 0.017529279708862304, 0.01785158348083496, 0.017531423568725585, 0.0177476806640625, 0.017543743133544922, 0.017510400772094727, 0.017528831481933595, 0.01762918472290039, 0.017669792175292968, 0.017529184341430665, 0.017542240142822265, 0.017561567306518554, 0.017578943252563477, 0.017524736404418945, 0.017475584030151366, 0.017539039611816406, 0.017755552291870116, 0.017561855316162108, 0.01760908889770508, 0.01925324821472168, 0.019099647521972657, 0.017713151931762695, 0.017677759170532225, 0.017588768005371094, 0.01766099166870117, 0.017606143951416017, 0.017539583206176757, 0.01762918472290039, 0.017520639419555666, 0.017491647720336914, 0.01746361541748047, 0.017483776092529296, 0.01753433609008789, 0.017604991912841796, 0.017488128662109376, 0.017508352279663086, 0.017589887619018554, 0.01880512046813965, 0.01765135955810547, 0.017561952590942384, 0.018274303436279296, 0.017886367797851563, 0.017492128372192384, 0.017535680770874022, 0.01756159973144531, 0.017537023544311522, 0.017528160095214844, 0.01775027275085449, 0.017830303192138672, 0.017713151931762695, 0.01784182357788086, 0.017694623947143554, 0.018373056411743163, 0.017571807861328125, 0.01756572723388672, 0.01762214469909668, 0.01749286460876465, 0.017952768325805665, 0.017530143737792967, 0.017799840927124024, 0.017604671478271484, 0.017645408630371093, 0.017627296447753907, 0.01757980728149414, 0.01751203155517578, 0.017582176208496093, 0.017473888397216798, 0.01771334457397461, 0.01758361625671387, 0.01754368019104004, 0.018017440795898437, 0.017774784088134765, 0.01753763198852539, 0.017631296157836915, 0.017649663925170898, 0.0175897274017334, 0.01757814407348633, 0.01757369613647461, 0.017582656860351563, 0.01769385528564453, 0.01769558334350586, 0.017661760330200196, 0.017811647415161135, 0.017767999649047853, 0.017793472290039063, 0.017915903091430666, 0.017991296768188475, 0.01801625633239746, 0.018569536209106445, 
0.01821014404296875, 0.017996448516845703, 0.01799577522277832, 0.01804697608947754, 0.017958688735961913, 0.01786617660522461, 0.017807647705078124, 0.017731712341308593, 0.017514080047607423, 0.017677087783813477, 0.01761267280578613, 0.01766537666320801, 0.017697471618652344, 0.01823958396911621, 0.017909759521484374, 0.017739391326904295, 0.017615232467651367, 0.01764761543273926, 0.01757798385620117, 0.017534975051879884, 0.01755340766906738, 0.018585599899291993, 0.0178272647857666, 0.01760927963256836, 0.017541120529174805, 0.01761689567565918, 0.01760870361328125, 0.017780351638793945, 0.017667583465576172, 0.01763007926940918, 0.01762291145324707, 0.017684608459472655, 0.017833951950073243, 0.01792617607116699, 0.017756160736083985, 0.01776630401611328, 0.017582176208496093, 0.01817724800109863, 0.01766275215148926, 0.017575935363769533, 0.017534975051879884, 0.01753900718688965, 0.017450624465942383, 0.017588672637939454, 0.0175861759185791, 0.017532928466796875, 0.017548736572265626, 0.01767875289916992, 0.017921375274658202, 0.017603328704833984, 0.017672256469726564, 0.017616575241088867, 0.017688480377197266, 0.0175251522064209, 0.01761193656921387, 0.01765052795410156, 0.01761075210571289, 0.01756060791015625, 0.01750262451171875, 0.017611135482788087, 0.01754070472717285, 0.017611360549926756, 0.017570751190185547, 0.01798739242553711, 0.01769286346435547, 0.01749932861328125, 0.01761568069458008, 0.017588224411010742, 0.01757798385620117, 0.017537023544311522, 0.018276544570922853, 0.018802047729492188, 0.018051519393920898, 0.01785958480834961, 0.017728511810302734, 0.01779097557067871, 0.017657791137695313, 0.017590335845947266, 0.01763942337036133, 0.017506303787231444, 0.017582080841064454, 0.017653408050537108, 0.017612735748291017, 0.01762892723083496, 0.01765648078918457, 0.017530879974365234, 0.017557376861572264, 0.017537151336669922, 0.017551103591918946, 0.01772159957885742, 0.017665151596069337, 0.01749081611633301, 0.017596416473388672, 0.017534975051879884, 0.017711103439331053, 0.01776131248474121, 0.019202207565307616, 0.018428735733032227, 0.017860128402709962, 0.017736160278320312, 0.0176680965423584, 0.017633279800415038, 0.01782374382019043, 0.01761484718322754, 0.017620927810668947, 0.017495840072631837, 0.017481599807739258, 0.017487840652465822, 0.017760704040527344, 0.017616191864013673, 0.017918655395507813, 0.017556575775146483, 0.017505184173583984, 0.017496063232421876, 0.017530879974365234, 0.01767398452758789, 0.017524255752563476, 0.01749065589904785, 0.017510400772094727, 0.017588224411010742, 0.01756070327758789, 0.017590816497802735, 0.017665855407714842, 0.017537567138671876, 0.017540191650390623]",tokens/s,56.36726465215715,,, -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): 
+4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -33298,7 +33298,7 @@ ChildProcessError: Traceback (most recent call last): self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 22.12 MiB is free. Process 24668 has 14.72 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 193.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 22.12 MiB is free. Process 24474 has 14.72 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 193.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,6641.700864,9387.835392,0.0,8992.587776,8404.320768,s,1,14.4562421875,14.4562421875,0.0,14.4562421875,14.4562421875,14.4562421875,14.4562421875,[14.4562421875],,kWh,0.00021343599917499127,2.3536222536619963e-05,9.489063146797538e-05,0.0003318628531795866,,MB,1564.639232,9402.515456,0.0,8992.587776,7879.473152,s,10,3.2490415954589844,0.32490415954589846,0.0005050043342131917,0.3249153594970703,0.3256789093017578,0.32569710540771485,0.3257116622924805,"[0.3243965454101562, 0.3239740295410156, 0.324656494140625, 0.32492047119140627, 0.32567486572265625, 0.32468362426757813, 0.32495562744140627, 0.3257153015136719, 0.32515438842773436, 0.32491024780273436]",tokens/s,787.9246617150048,kWh,9.496914052687149e-06,1.0473449039228742e-06,6.288436930387383e-06,1.6832695886997404e-05,tokens/kWh,15208496.70894072,MB,1567.612928,9402.515456,0.0,8992.587776,8125.43744,s,10,22.417164306640625,2.2417164306640625,0.0017633449796451378,2.241837158203125,2.243854833984375,2.244349658203125,2.244745517578125,"[2.24002197265625, 2.2391669921875, 2.240408935546875, 2.244844482421875, 2.239778076171875, 2.243744873046875, 2.241950927734375, 2.241723388671875, 2.243001220703125, 2.2425234375]",tokens/s,28.10346533496993,kWh,6.519396790940043e-05,7.190828362416798e-06,4.344726952481049e-05,0.00011583206579662773,tokens/kWh,543890.8437548918,,s,630,22.412986602783178,0.03557616921076699,0.0003406645445687422,0.03560273551940918,0.036014133071899414,0.03606753921508789,0.036162389793396,"[0.03562540817260742, 0.03507193756103515, 0.03492460632324219, 0.0348364143371582, 0.03492435073852539, 0.035019008636474606, 0.035110080718994144, 0.03505596923828125, 0.035095008850097656, 0.03513753509521484, 0.035233440399169924, 0.035176799774169924, 0.035243648529052735, 0.035092609405517575, 0.03514291381835938, 0.03523673629760742, 0.03522544097900391, 0.03515158462524414, 0.0351965446472168, 0.03526342391967773, 0.03540566253662109, 0.03540598297119141, 0.0354304313659668, 0.03528496170043945, 0.03542985534667969, 0.035356704711914065, 0.03539926528930664, 0.03548787307739258, 0.03551337432861328, 0.035467105865478514, 0.03546511840820313, 0.03556272125244141, 0.035601184844970706, 0.03555327987670898, 0.03572537612915039, 0.0356638069152832, 0.035743743896484374, 0.0357210578918457, 0.03566608047485351, 0.03566294479370117, 0.03573980712890625, 0.03578668975830078, 0.0358326416015625, 0.035778209686279296, 0.035804672241210936, 0.03578524780273438, 0.03587923049926758, 0.03590348815917969, 0.035907230377197265, 0.03583350372314453, 0.03600624084472656, 0.03598332977294922, 0.03585676956176758, 0.03581740951538086, 0.03594655990600586, 0.03599087905883789, 0.03606185531616211, 0.03599283218383789, 0.0360568962097168, 
0.0360621452331543, 0.03611008071899414, 0.036063488006591794, 0.03609571075439453, 0.03567795181274414, 0.035084320068359376, 0.035041534423828125, 0.03491635131835937, 0.034917953491210935, 0.03499411010742187, 0.03516604614257812, 0.03508700942993164, 0.035074047088623043, 0.03501039886474609, 0.03528646469116211, 0.035100833892822265, 0.035007232666015624, 0.03511072158813477, 0.03514934539794922, 0.03516985702514648, 0.035138721466064456, 0.03505740737915039, 0.03516019058227539, 0.035168128967285155, 0.035422206878662106, 0.03539952087402344, 0.03530358505249023, 0.0353361930847168, 0.035401569366455075, 0.03543056106567383, 0.035423328399658206, 0.035376033782958984, 0.035479328155517575, 0.03541638565063476, 0.03542825698852539, 0.03543392181396485, 0.03552707290649414, 0.03564300918579102, 0.03570127868652344, 0.03564739227294922, 0.03573360061645508, 0.0356940803527832, 0.03566233444213867, 0.035640705108642576, 0.03569113540649414, 0.035694751739501956, 0.03575190353393555, 0.03575590515136719, 0.03575209426879883, 0.03580912017822266, 0.035827713012695314, 0.035768318176269534, 0.035835166931152344, 0.03587350463867187, 0.036026336669921874, 0.03584841537475586, 0.03591088104248047, 0.035983230590820314, 0.036059711456298826, 0.03602025604248047, 0.036014110565185546, 0.03606537628173828, 0.03611366271972656, 0.035937023162841794, 0.036073471069335936, 0.036183807373046876, 0.036286720275878905, 0.03581542587280274, 0.03510204696655273, 0.03499689483642578, 0.03492470550537109, 0.03498787307739258, 0.03496160125732422, 0.03504048156738281, 0.03502755355834961, 0.035194881439208986, 0.03518217468261719, 0.03520467376708984, 0.03530217742919922, 0.035162078857421876, 0.03527484893798828, 0.03535257720947266, 0.035282943725585936, 0.03530342483520508, 0.03520675277709961, 0.03521769714355469, 0.035244480133056644, 0.035272384643554686, 0.03525228881835937, 0.03534019088745117, 0.035384574890136716, 0.03533903884887695, 0.03523497772216797, 0.0353897590637207, 0.035573951721191405, 0.035536319732666015, 0.035438720703125, 0.035449886322021486, 0.03541801452636719, 0.035546974182128904, 0.03555123138427734, 0.0355978889465332, 0.03563708877563477, 0.03571785736083984, 0.03579276657104492, 0.035784702301025394, 0.035702880859375, 0.03574262237548828, 0.03569561767578125, 0.035741855621337894, 0.0357210578918457, 0.035861534118652345, 0.03585142517089844, 0.03589510345458984, 0.035831649780273436, 0.035783039093017575, 0.0357509765625, 0.0358397445678711, 0.035799808502197265, 0.036026336669921874, 0.0360450553894043, 0.03613491058349609, 0.035983360290527344, 0.03597312164306641, 0.03606118392944336, 0.036132865905761716, 0.036067329406738284, 0.036050430297851564, 0.03606915283203125, 0.03614998245239258, 0.03597321701049805, 0.0352525749206543, 0.03530758285522461, 0.03508736038208008, 0.035324737548828124, 0.03512543869018555, 0.03517030334472656, 0.0350860481262207, 0.035330337524414064, 0.03527376174926758, 0.03523231887817383, 0.03515228652954101, 0.03523788833618164, 0.035186912536621096, 0.03527459335327148, 0.03516851043701172, 0.03522937774658203, 0.03528297424316406, 0.035315582275390625, 0.035299423217773435, 0.0353175048828125, 0.03534668731689453, 0.03546268844604492, 0.035425888061523435, 0.035496448516845705, 0.03551785659790039, 0.03565667343139649, 0.03578396987915039, 0.03573657608032227, 0.03558572769165039, 0.035593696594238285, 0.035565921783447266, 0.035541217803955076, 0.035560672760009765, 0.03561510467529297, 0.03574758529663086, 0.03587696075439453, 
0.035770942687988284, 0.035833854675292966, 0.03573676681518555, 0.03577859115600586, 0.03576502227783203, 0.03580723190307617, 0.0357531852722168, 0.035865345001220704, 0.035823070526123046, 0.03581542587280274, 0.03575235366821289, 0.035829025268554686, 0.03586547088623047, 0.03600598526000977, 0.03596278381347656, 0.0360299186706543, 0.03621532821655273, 0.03617724609375, 0.03606771087646484, 0.03609219360351563, 0.036007102966308595, 0.03611219024658203, 0.03602092742919922, 0.036025726318359375, 0.03596777725219726, 0.03617308807373047, 0.03575833511352539, 0.03521353530883789, 0.035116832733154295, 0.03498147201538086, 0.03622092819213867, 0.034977344512939455, 0.03497865676879883, 0.035135486602783206, 0.03513459014892578, 0.03510156631469727, 0.03513328170776367, 0.03510444641113281, 0.035200576782226566, 0.035144065856933596, 0.035098400115966794, 0.035130111694335935, 0.035198974609375, 0.035209217071533204, 0.03525577545166016, 0.03530124664306641, 0.035485599517822264, 0.03546803283691406, 0.03548124694824219, 0.03543075180053711, 0.03543225479125976, 0.03539068984985352, 0.03549657440185547, 0.03555977630615234, 0.03554508972167969, 0.03544473648071289, 0.03545395278930664, 0.03538547134399414, 0.03543743896484375, 0.03540172958374024, 0.035471359252929685, 0.035671199798583984, 0.035655998229980466, 0.03560428619384766, 0.03569452667236328, 0.03568310546875, 0.03564246368408203, 0.03563020706176758, 0.035718944549560545, 0.035689952850341794, 0.03564352035522461, 0.03574620819091797, 0.035768192291259764, 0.03577775955200195, 0.03576924896240234, 0.03570483016967774, 0.035899391174316404, 0.03594649505615234, 0.03594153594970703, 0.03589120101928711, 0.03610323333740234, 0.03616745758056641, 0.036001792907714845, 0.03590758514404297, 0.03594768142700195, 0.03587916946411133, 0.036014335632324215, 0.036011966705322265, 0.03604931259155274, 0.03595468902587891, 0.035331966400146485, 0.0350885124206543, 0.03501571273803711, 0.035064609527587894, 0.03504352188110352, 0.03502643203735351, 0.03508070373535156, 0.03522099304199219, 0.03517113494873047, 0.03523142242431641, 0.0351723518371582, 0.03522355270385742, 0.035133472442626955, 0.035279838562011716, 0.035293598175048825, 0.03528755187988281, 0.03521987152099609, 0.03529404830932617, 0.035794849395751956, 0.035310462951660154, 0.03536288070678711, 0.03544678497314453, 0.035491584777832035, 0.03544281768798828, 0.03541516876220703, 0.03555228805541992, 0.035565536499023435, 0.03561881637573242, 0.035552928924560544, 0.03558457565307617, 0.03557936096191406, 0.035598751068115234, 0.03561580657958984, 0.035811328887939455, 0.035695457458496095, 0.035828990936279295, 0.03578291320800781, 0.03578112030029297, 0.0356864013671875, 0.03586457443237305, 0.03592396926879883, 0.0358823356628418, 0.03584579086303711, 0.03592704010009766, 0.03582156753540039, 0.035847518920898436, 0.03580380630493164, 0.035917438507080075, 0.035926273345947266, 0.03602035140991211, 0.03592953491210937, 0.03593606567382813, 0.03596774291992187, 0.03603472137451172, 0.03593603134155274, 0.03591993713378906, 0.03597638320922852, 0.03602924728393555, 0.03604889678955078, 0.03597107315063477, 0.03599155044555664, 0.03613081741333008, 0.036001182556152346, 0.03529584121704102, 0.03508367919921875, 0.0349923210144043, 0.03510857772827149, 0.03506041717529297, 0.035059711456298825, 0.03503104019165039, 0.03506300735473633, 0.035068126678466795, 0.035099006652832034, 0.03511881637573242, 0.03513782501220703, 0.03518624114990234, 0.035197566986083985, 
0.03515801620483398, 0.035209217071533204, 0.03522544097900391, 0.03523535919189453, 0.035426334381103514, 0.03543888092041016, 0.03541241455078125, 0.03537395095825195, 0.03543276977539062, 0.035399681091308595, 0.03539558410644531, 0.03550892639160156, 0.03559417724609375, 0.035622974395751954, 0.035556991577148436, 0.0355425910949707, 0.03555411148071289, 0.035598175048828125, 0.03560630416870117, 0.035581695556640626, 0.035625503540039065, 0.035761920928955075, 0.03574390411376953, 0.035696830749511715, 0.03561270523071289, 0.035880672454833985, 0.03592012786865234, 0.03586191940307617, 0.03584588623046875, 0.03583369445800781, 0.03576115036010742, 0.035805118560791015, 0.03587475204467774, 0.035902751922607425, 0.03589616012573242, 0.035878623962402344, 0.03584233474731445, 0.03590758514404297, 0.035917823791503906, 0.03596492767333984, 0.035953792572021484, 0.0360398063659668, 0.03597040176391601, 0.0361453742980957, 0.036133056640625, 0.036139007568359374, 0.03609193420410156, 0.03614716720581055, 0.03589225769042969, 0.0351798095703125, 0.034998943328857425, 0.03501590347290039, 0.03501536178588867, 0.03497129440307617, 0.03505811309814453, 0.035147232055664064, 0.0352562255859375, 0.03523798370361328, 0.035160511016845704, 0.03510691070556641, 0.035297279357910154, 0.035138782501220704, 0.035189537048339846, 0.035192832946777344, 0.03512934494018555, 0.03506380844116211, 0.035149822235107424, 0.03524367904663086, 0.03523004913330078, 0.03520476913452148, 0.0354015998840332, 0.03536943817138672, 0.03537510299682617, 0.035383296966552735, 0.03547545623779297, 0.035544864654541014, 0.03561049652099609, 0.035572063446044924, 0.035495712280273435, 0.035496158599853514, 0.035520511627197264, 0.035555233001708986, 0.03563859176635742, 0.035608993530273435, 0.03579308700561523, 0.03583347320556641, 0.035762752532958984, 0.035773696899414065, 0.03579366302490234, 0.03586374282836914, 0.03590841674804687, 0.035792064666748044, 0.03584236907958984, 0.03573113632202148, 0.03580601501464844, 0.03592192077636719, 0.03595395278930664, 0.035976959228515626, 0.03595734405517578, 0.03588768005371094, 0.036031806945800784, 0.03600230407714844, 0.036128799438476564, 0.03609196853637695, 0.03613180923461914, 0.036035488128662106, 0.036020030975341795, 0.03603478240966797, 0.03609135818481445, 0.03608425521850586, 0.03611856079101562, 0.03573161697387695, 0.035061153411865234, 0.034914752960205075, 0.03488083267211914, 0.03489247894287109, 0.03491430282592774, 0.03507820892333984, 0.03523366546630859, 0.035498046875, 0.0352911376953125, 0.03536627197265625, 0.03595737457275391, 0.03553279876708984, 0.035272705078125, 0.03527280044555664, 0.0352639045715332, 0.035315841674804685, 0.035314048767089844, 0.03534774398803711, 0.035270977020263675, 0.03541158294677734, 0.03543734359741211, 0.03546316909790039, 0.03539904022216797, 0.03550067138671875, 0.035518657684326174, 0.03560432052612305, 0.03559408187866211, 0.03596915054321289, 0.03554899215698242, 0.035459262847900394, 0.03546656036376953, 0.035599040985107425, 0.035676158905029294, 0.03568374252319336, 0.03562268829345703, 0.03565856170654297, 0.035776512145996094, 0.035781761169433594, 0.035746623992919925, 0.0358031997680664, 0.03574288177490234, 0.03572848129272461, 0.03573859024047851, 0.035727745056152345, 0.03569295883178711, 0.03586777496337891, 0.035865215301513674, 0.035875072479248045, 0.03588083267211914, 0.035932449340820315, 0.03587836837768555, 0.035869247436523435, 0.0359381103515625, 0.03597875213623047, 0.03595929718017578, 
0.03590758514404297, 0.03587276840209961, 0.035985408782958986, 0.0359628791809082, 0.03590956878662109, 0.03603462219238281, 0.03607513427734375, 0.03585244750976563, 0.035157089233398435, 0.03495769500732422, 0.03497315216064453, 0.03508303833007813, 0.035135616302490236, 0.03511641693115235, 0.03512911987304688, 0.0353267822265625, 0.035291168212890626, 0.035196769714355466, 0.03520937728881836, 0.035249473571777344, 0.0352749137878418, 0.035314208984375, 0.035272350311279295, 0.035324256896972654, 0.035209217071533204, 0.03520512008666992, 0.03529654312133789, 0.03527753448486328, 0.035362495422363284, 0.0355140495300293, 0.035377792358398434, 0.03546316909790039, 0.03537299346923828, 0.03551852798461914, 0.03550592041015625, 0.035635456085205075, 0.035620864868164064, 0.035640670776367185, 0.03550255966186523, 0.03556726455688477, 0.035566078186035154, 0.035649566650390624, 0.03568435287475586, 0.03580332946777344, 0.0357743034362793, 0.03579804611206055, 0.03579747009277344, 0.03576607894897461, 0.03595481491088867, 0.0359714241027832, 0.03579923248291016, 0.03578675079345703, 0.03578675079345703, 0.03577971267700195, 0.03575900650024414, 0.03589673614501953, 0.03590361785888672, 0.036001598358154294, 0.036006366729736325, 0.0360654411315918, 0.03601408004760742, 0.03605052947998047, 0.03602793502807617, 0.03596992111206055, 0.03590758514404297, 0.03590553665161133, 0.03586867141723633, 0.03600953674316406, 0.03599200057983398, 0.03596646499633789]",tokens/s,28.10870372455261,,, @@ -33400,7 +33400,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -34114,7 +34114,7 
@@ ValueError: CodeGenForCausalLM does not support an attention implementation thro " 4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1030.61504,965.67296,0.0,570.425344,536.326656,s,1,8.4580712890625,8.4580712890625,0.0,8.4580712890625,8.4580712890625,8.4580712890625,8.4580712890625,[8.4580712890625],,kWh,3.686940990834652e-05,4.05941187368778e-06,1.2865010291990275e-05,5.3793832074024575e-05,,MB,1262.75584,1034.878976,0.0,624.951296,594.377728,s,10,0.20789846420288088,0.020789846420288086,0.000156543809506488,0.020772943496704104,0.020961072158813477,0.02104437599182129,0.02111101905822754,"[0.0211276798248291, 0.020942560195922853, 0.02068841552734375, 0.02059040069580078, 0.020784351348876955, 0.020589759826660156, 0.02076153564453125, 0.02089727973937988, 0.02071865653991699, 0.02079782485961914]",tokens/s,12313.703277296869,kWh,6.076229467082312e-07,6.701004061386872e-08,3.9350112330144745e-07,1.0681341106235472e-06,tokens/kWh,239670278.71673742,MB,1275.990016,1049.55904,0.0,639.63136,607.71072,s,10,11.622058959960938,1.162205895996094,0.011934721498417466,1.1644623413085937,1.176913916015625,1.1784804565429687,1.1797336889648438,"[1.1765657958984375, 1.1506571044921876, 1.1425592041015624, 1.1505360107421876, 1.15258935546875, 1.1618824462890625, 1.168671630859375, 1.1715081787109376, 1.1800469970703125, 1.167042236328125]",tokens/s,54.207262428319105,kWh,3.2591786253707696e-05,3.594396489437822e-06,1.304845407469995e-05,4.9234636817845455e-05,tokens/kWh,1279586.9751833975,,s,630,11.615323753356947,0.018437021830725292,0.0004221994916844008,0.018455056190490725,0.018704652023315428,0.018967825794219968,0.01997489570617676,"[0.01814137649536133, 0.018436960220336914, 0.018668479919433593, 0.021637664794921876, 0.018501983642578126, 0.01927382469177246, 0.01839286422729492, 0.018638431549072267, 0.018555423736572266, 0.018327680587768555, 0.018392959594726564, 0.019791040420532226, 0.01818623924255371, 0.018191295623779295, 0.018612064361572266, 0.01861039924621582, 0.018245695114135742, 0.01832310485839844, 0.020714847564697266, 0.019788543701171876, 0.018700000762939453, 0.018786592483520506, 0.018552831649780274, 0.018560831069946288, 0.018687328338623046, 0.018686464309692383, 0.01859574317932129, 0.018573759078979492, 0.018503231048583986, 0.01861881637573242, 0.018733055114746093, 0.01859107208251953, 0.018687679290771485, 0.018704479217529296, 0.018702335357666015, 0.01866172790527344, 0.018683647155761717, 0.01862326431274414, 0.0186200008392334, 0.01868227195739746, 0.018569311141967772, 0.018708383560180664, 0.018476800918579103, 0.018690496444702148, 0.018503135681152343, 0.018526336669921876, 0.018546239852905273, 0.018535072326660157, 0.01859174346923828, 0.01841971206665039, 0.018475072860717773, 0.018436159133911133, 0.018484512329101564, 0.018520288467407227, 0.01853273582458496, 0.018518016815185546, 
0.018453567504882813, 0.018423807144165038, 0.01841862487792969, 0.018448383331298827, 0.01851087951660156, 0.01847769546508789, 0.018246240615844726, 0.018312543869018556, 0.01865180778503418, 0.018472959518432617, 0.018333696365356447, 0.018339296340942383, 0.018265663146972658, 0.01824787139892578, 0.018158143997192382, 0.01814860725402832, 0.018253952026367187, 0.018168352127075196, 0.01827257537841797, 0.01827187156677246, 0.018235647201538085, 0.01822537612915039, 0.018236928939819336, 0.018172319412231446, 0.01814886474609375, 0.018061088562011718, 0.017932607650756837, 0.017897344589233397, 0.017904159545898437, 0.0177674560546875, 0.017744863510131836, 0.017612800598144532, 0.01760630416870117, 0.01762156867980957, 0.017600288391113283, 0.01761075210571289, 0.01765817642211914, 0.017761184692382814, 0.01796339225769043, 0.01787107276916504, 0.01774015998840332, 0.01849235153198242, 0.018161855697631835, 0.018285247802734376, 0.018273759841918945, 0.018332191467285156, 0.01842995262145996, 0.018417503356933592, 0.018472576141357423, 0.01843452835083008, 0.018575424194335936, 0.018757055282592774, 0.018626367568969727, 0.01847372817993164, 0.018288639068603514, 0.018353759765625, 0.018434463500976564, 0.018537919998168947, 0.018538175582885744, 0.018408319473266602, 0.01855232048034668, 0.018626527786254884, 0.018547231674194337, 0.018550783157348632, 0.01864089584350586, 0.018515968322753908, 0.019095552444458007, 0.018570655822753905, 0.01895689582824707, 0.018382848739624022, 0.018041536331176757, 0.018332927703857423, 0.018342144012451173, 0.018625120162963867, 0.018433664321899416, 0.01838688087463379, 0.018428447723388672, 0.018414527893066406, 0.018557823181152344, 0.018448383331298827, 0.01845248031616211, 0.018509824752807616, 0.01835411262512207, 0.018421823501586915, 0.018565120697021483, 0.01840358352661133, 0.01851545524597168, 0.01838719940185547, 0.01831679916381836, 0.018274816513061523, 0.018544607162475586, 0.018613920211791993, 0.01849996757507324, 0.01862006378173828, 0.018917760848999023, 0.018393056869506836, 0.018556575775146484, 0.018534143447875975, 0.01841971206665039, 0.018281055450439454, 0.018175167083740236, 0.018112319946289063, 0.018340864181518556, 0.018423807144165038, 0.01856716728210449, 0.018294368743896484, 0.017826047897338868, 0.01760643196105957, 0.017516927719116213, 0.017448352813720702, 0.017452959060668946, 0.017502143859863283, 0.01749478340148926, 0.017512128829956054, 0.017539392471313475, 0.017547168731689454, 0.017939807891845704, 0.017789695739746095, 0.01778892707824707, 0.017614112854003907, 0.017615583419799803, 0.017639167785644533, 0.01763555145263672, 0.017545055389404297, 0.017825632095336913, 0.01796131134033203, 0.01790496063232422, 0.01801491165161133, 0.017912895202636718, 0.017820159912109376, 0.01772812843322754, 0.01799260711669922, 0.018246559143066405, 0.01741804885864258, 0.017850656509399414, 0.017981407165527343, 0.017821887969970703, 0.017759647369384766, 0.01788105583190918, 0.017848031997680664, 0.017801984786987305, 0.017777727127075194, 0.01802876853942871, 0.018083967208862305, 0.017859167098999023, 0.017731584548950196, 0.017741695404052734, 0.017727615356445313, 0.017735679626464843, 0.018192384719848635, 0.018007295608520508, 0.018130847930908203, 0.01792086410522461, 0.017840192794799804, 0.017778303146362303, 0.017754432678222656, 0.017734848022460937, 0.01777542304992676, 0.01799977684020996, 0.018331392288208008, 0.018027040481567384, 0.017932096481323243, 0.018182144165039063, 0.018317440032958984, 
0.018465856552124023, 0.01850364875793457, 0.018402143478393553, 0.0183767032623291, 0.018401119232177736, 0.018423967361450196, 0.018915327072143554, 0.018581504821777343, 0.018607711791992186, 0.018714048385620116, 0.01973062324523926, 0.018649887084960938, 0.019044479370117186, 0.01852150344848633, 0.01854863929748535, 0.018510400772094728, 0.018463775634765624, 0.018576351165771485, 0.018486335754394533, 0.01843315124511719, 0.01847420883178711, 0.01857391929626465, 0.018582656860351564, 0.018587648391723634, 0.018570112228393554, 0.018606048583984375, 0.01865897560119629, 0.01846886444091797, 0.018542976379394532, 0.01845248031616211, 0.01845452880859375, 0.01855036735534668, 0.01822537612915039, 0.01846272087097168, 0.018472320556640626, 0.018452735900878908, 0.018336128234863282, 0.018264064788818358, 0.018151424407958985, 0.018257823944091797, 0.01803468894958496, 0.018148576736450196, 0.018166656494140624, 0.018067455291748045, 0.01781760025024414, 0.01780940818786621, 0.017831487655639647, 0.017868896484375, 0.0179531192779541, 0.017663999557495116, 0.01760256004333496, 0.017662176132202147, 0.01799068832397461, 0.018075584411621094, 0.01852694320678711, 0.018358367919921875, 0.01845849609375, 0.018352256774902344, 0.018533504486083985, 0.018500736236572266, 0.018941568374633788, 0.018478336334228514, 0.018442527770996094, 0.018327903747558594, 0.01837491226196289, 0.01843715286254883, 0.018349023818969728, 0.018380800247192384, 0.01845644760131836, 0.018333152770996095, 0.018385568618774415, 0.018382816314697265, 0.01847817611694336, 0.018551168441772462, 0.018683904647827147, 0.018634368896484375, 0.018545440673828125, 0.018419136047363283, 0.01851055908203125, 0.01839836883544922, 0.01840246391296387, 0.018357984542846678, 0.01827862358093262, 0.018330368041992187, 0.018365440368652345, 0.0182476806640625, 0.01835219192504883, 0.018302656173706053, 0.01815888023376465, 0.018254816055297853, 0.018376768112182616, 0.018245248794555663, 0.01813667106628418, 0.018274080276489257, 0.018305856704711913, 0.017779392242431642, 0.018277727127075195, 0.018225248336791993, 0.018135616302490234, 0.01786675262451172, 0.01803878402709961, 0.018112415313720702, 0.018244800567626954, 0.018092960357666017, 0.018167808532714845, 0.01800956726074219, 0.017949216842651366, 0.01789132881164551, 0.017740928649902343, 0.017985471725463865, 0.01824444770812988, 0.017868896484375, 0.018159616470336915, 0.018520063400268554, 0.018454143524169922, 0.019347360610961914, 0.01880873680114746, 0.018600032806396483, 0.018535072326660157, 0.019037471771240235, 0.01850831985473633, 0.018556640625, 0.018651296615600586, 0.018550912857055665, 0.01860812759399414, 0.018548128128051757, 0.018538591384887695, 0.019111776351928712, 0.018538848876953125, 0.018444000244140626, 0.018395360946655274, 0.0185532169342041, 0.01846272087097168, 0.01843404769897461, 0.018452127456665038, 0.018458976745605468, 0.018423807144165038, 0.01844428825378418, 0.018460800170898437, 0.018521760940551756, 0.018577056884765623, 0.018436288833618163, 0.018385343551635742, 0.019048383712768555, 0.0182969913482666, 0.018485088348388672, 0.018357280731201173, 0.01826095962524414, 0.01837238311767578, 0.018289119720458984, 0.018351871490478514, 0.01828598403930664, 0.0183253116607666, 0.0182685432434082, 0.018264415740966797, 0.019470399856567382, 0.020112512588500976, 0.018842496871948243, 0.01804697608947754, 0.018122400283813477, 0.018231903076171875, 0.018262847900390625, 0.018332639694213868, 0.018440160751342773, 0.018314783096313476, 
0.019291711807250977, 0.019452608108520508, 0.018624319076538085, 0.01853081512451172, 0.0199781436920166, 0.0183306884765625, 0.018428863525390624, 0.018472959518432617, 0.018579456329345705, 0.0184520320892334, 0.01842835235595703, 0.018327360153198243, 0.018374847412109374, 0.018308128356933594, 0.018311424255371092, 0.01833238410949707, 0.018296831130981444, 0.018403295516967774, 0.018409503936767577, 0.018562431335449218, 0.01846131134033203, 0.018374656677246092, 0.018391040802001952, 0.01927168083190918, 0.018388992309570314, 0.01843404769897461, 0.018431711196899413, 0.018401567459106444, 0.018472959518432617, 0.01835212707519531, 0.01848121643066406, 0.019966943740844727, 0.01867286491394043, 0.01858665657043457, 0.018655967712402344, 0.018644960403442382, 0.018509855270385744, 0.018554943084716797, 0.018646272659301757, 0.01882307243347168, 0.018313472747802734, 0.018315807342529297, 0.018503679275512695, 0.01854863929748535, 0.018514015197753905, 0.018907360076904297, 0.018380479812622072, 0.018579551696777344, 0.018488576889038086, 0.018434816360473633, 0.018744895935058594, 0.018504127502441407, 0.01844428825378418, 0.018372608184814454, 0.01832262420654297, 0.018455360412597658, 0.01829478454589844, 0.018794048309326173, 0.018706880569458007, 0.01884547233581543, 0.018403104782104492, 0.01839084815979004, 0.018829343795776367, 0.01860259246826172, 0.01841766357421875, 0.01846681594848633, 0.018465824127197265, 0.01841219139099121, 0.019022207260131836, 0.019781312942504882, 0.018579168319702147, 0.01846940803527832, 0.018601984024047852, 0.01845248031616211, 0.01841152000427246, 0.01857151985168457, 0.018562816619873048, 0.018536447525024414, 0.018547807693481445, 0.018551712036132813, 0.01859686470031738, 0.018723424911499024, 0.01868671989440918, 0.018708127975463867, 0.018808832168579103, 0.01862838363647461, 0.01850595283508301, 0.01848320007324219, 0.018554880142211915, 0.018550783157348632, 0.01845577621459961, 0.0182772159576416, 0.018271360397338867, 0.01820719909667969, 0.018255552291870116, 0.018322431564331054, 0.01846646308898926, 0.018495487213134765, 0.018912511825561522, 0.018602880477905273, 0.01863257598876953, 0.01860406494140625, 0.018608095169067383, 0.01857535934448242, 0.01855062484741211, 0.01892572784423828, 0.01857961654663086, 0.018634592056274414, 0.01855238342285156, 0.01864067268371582, 0.018600608825683592, 0.01861625671386719, 0.018556320190429687, 0.018697887420654296, 0.018534400939941405, 0.018496768951416016, 0.01860736083984375, 0.018625024795532227, 0.01851753616333008, 0.018356767654418946, 0.018487295150756835, 0.018544607162475586, 0.018276704788208007, 0.01832124710083008, 0.018431840896606447, 0.018421024322509767, 0.01837948799133301, 0.018308767318725584, 0.019818944931030272, 0.018652639389038085, 0.019240800857543944, 0.018862688064575195, 0.018568672180175782, 0.01851651191711426, 0.018499584197998048, 0.018505727767944336, 0.018976768493652343, 0.019820383071899414, 0.01951145553588867, 0.018425567626953125, 0.018493728637695314, 0.018411359786987304, 0.02023644828796387, 0.01848464012145996, 0.018332160949707032, 0.018507648468017578, 0.018454751968383788, 0.01839891242980957, 0.018481056213378907, 0.01882383918762207, 0.018414655685424806, 0.019628799438476563, 0.019998655319213868, 0.02098966407775879, 0.01866156768798828, 0.01859190368652344, 0.01848512077331543, 0.018663551330566405, 0.018822399139404297, 0.018500288009643553, 0.018470848083496094, 0.018470975875854494, 0.01843814468383789, 0.018739423751831054, 
0.018706207275390626, 0.018481151580810547, 0.018382623672485353, 0.018378976821899415, 0.018280607223510742, 0.01989347267150879, 0.018350719451904297, 0.018524160385131837, 0.01861417579650879, 0.018813024520874022, 0.018706623077392577, 0.01850553512573242, 0.01856719970703125, 0.018668800354003905, 0.018504032135009764, 0.018473344802856444, 0.018520063400268554, 0.01856265640258789, 0.018309120178222657, 0.018538496017456055, 0.01865497589111328, 0.01854841613769531, 0.01862713623046875, 0.018569215774536133, 0.018452064514160156, 0.01877382469177246, 0.018606719970703126, 0.01859152030944824, 0.01856940841674805, 0.01842918395996094, 0.0185097599029541, 0.018459680557250977, 0.018411296844482422, 0.01835775947570801, 0.018416128158569335, 0.018506784439086914, 0.018501760482788086, 0.018303840637207032, 0.018425504684448243, 0.018261663436889647, 0.01843235206604004, 0.018546815872192382, 0.018388511657714844, 0.018496192932128907, 0.01856233596801758, 0.018448896408081054, 0.018493631362915038, 0.01850067138671875, 0.018537343978881834, 0.018540351867675782, 0.018409311294555666, 0.018444639205932617, 0.01835215950012207, 0.018268159866333008, 0.018411136627197264, 0.018358720779418945, 0.01857974433898926, 0.01840867233276367, 0.01846668815612793, 0.018501792907714844, 0.018554912567138673, 0.018583936691284178, 0.018594015121459962, 0.01849123191833496, 0.018507232666015627, 0.01851651191711426, 0.018524160385131837, 0.01846873664855957, 0.018489471435546877, 0.01852604866027832, 0.01856528091430664, 0.01862406349182129, 0.018548608779907227, 0.01854707145690918, 0.01853785514831543, 0.018666303634643555, 0.01861030387878418, 0.018593568801879883, 0.018575071334838867, 0.019274080276489258, 0.018583263397216797]",tokens/s,54.23869479470379,,, -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -34267,7 +34267,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -34520,7 +34520,7 @@ AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,7159.865344,10248.650752,0.0,9862.905856,9797.835264,s,1,13.036337890625,13.036337890625,0.0,13.036337890625,13.036337890625,13.036337890625,13.036337890625,[13.036337890625],,kWh,0.0001601455375041572,1.7655413268169356e-05,5.424448783999947e-05,0.00023204543861232602,,MB,2928.021504,10626.138112,0.0,10211.033088,10097.478144,s,10,7.080755126953125,0.7080755126953125,0.0021852999069200663,0.708653076171875,0.7098954345703126,0.7101543823242188,0.7103615405273438,"[0.702265625, 0.7067350463867188, 0.708219970703125, 0.707822509765625, 0.7079814453125, 0.7092393188476562, 0.709837890625, 0.709086181640625, 0.70915380859375, 
0.710413330078125]",tokens/s,361.5433600090584,kWh,2.0654328066945408e-05,2.2778309918260785e-06,1.3706399853999814e-05,3.66385589127713e-05,tokens/kWh,6987174.375757577,MB,2932.051968,10630.332416,0.0,10213.13024,10097.480704,s,10,37.07304760742188,3.7073047607421876,0.0052103151914074265,3.7071820068359376,3.7146004150390626,3.716242102050781,3.717555451660156,"[3.708646484375, 3.704712890625, 3.714235595703125, 3.7004345703125, 3.708013671875, 3.7178837890625, 3.707966064453125, 3.7007197265625, 3.704036865234375, 3.70639794921875]",tokens/s,16.99347748993467,kWh,9.782617883013852e-05,1.0789192504527572e-05,6.500438533679996e-05,0.000173619756671466,tokens/kWh,362861.9300464317,,s,630,37.06995254516605,0.05884119451613652,0.0006394524138849767,0.058709760665893555,0.05922449226379395,0.05971522789001465,0.061702710342407235,"[0.059774974822998046, 0.059101184844970706, 0.05900502395629883, 0.05902121734619141, 0.05874697494506836, 0.05884236907958985, 0.05944553756713867, 0.05892118453979492, 0.06093344116210937, 0.059485023498535156, 0.058933246612548826, 0.058947582244873044, 0.05889011383056641, 0.0589169921875, 0.05895577621459961, 0.06150870513916016, 0.058915969848632815, 0.05879580688476563, 0.05876128005981445, 0.05875296020507813, 0.0585011215209961, 0.058431488037109375, 0.05849292755126953, 0.058517505645751956, 0.05834272003173828, 0.05849715042114258, 0.058714176177978514, 0.0586060791015625, 0.05867897415161133, 0.058491199493408204, 0.058744831085205076, 0.05909635162353516, 0.05887411117553711, 0.0585302734375, 0.05857228851318359, 0.05884924697875977, 0.05863375854492187, 0.05853696060180664, 0.05842739105224609, 0.05859088134765625, 0.05866940689086914, 0.058515167236328124, 0.058625984191894534, 0.058537982940673826, 0.05854854583740234, 0.05851091384887695, 0.05871196746826172, 0.058729022979736326, 0.05890652847290039, 0.05876540756225586, 0.05853507232666016, 0.058590015411376956, 0.05934425735473633, 0.058884353637695314, 0.05870198440551758, 0.05868364715576172, 0.0587243537902832, 0.05905203247070313, 0.058705921173095706, 0.05870182418823242, 0.05924678421020508, 0.05903523254394531, 0.058839488983154296, 0.059620769500732425, 0.05904646301269531, 0.05915849685668945, 0.05875868988037109, 0.058595550537109374, 0.0586366081237793, 0.05845305633544922, 0.0586577262878418, 0.058904159545898435, 0.058753246307373046, 0.05857033538818359, 0.05854620742797852, 0.05875081634521485, 0.0586899185180664, 0.05848099136352539, 0.058742782592773435, 0.058807552337646486, 0.058641151428222654, 0.0587789421081543, 0.05909779357910156, 0.05884723281860352, 0.058826751708984375, 0.05871615982055664, 0.05897625732421875, 0.05902272033691406, 0.059199840545654296, 0.05853555297851563, 0.05881711959838867, 0.05855599975585937, 0.05853411102294922, 0.05882511901855469, 0.05874016189575195, 0.05851587295532226, 0.0587243537902832, 0.05863238525390625, 0.05859513473510742, 0.05895577621459961, 0.05879379272460938, 0.05848432159423828, 0.058754688262939454, 0.058440383911132814, 0.05871030426025391, 0.062083263397216794, 0.058690559387207034, 0.058657470703125, 0.058622081756591796, 0.059211681365966794, 0.05918102264404297, 0.05884735870361328, 0.05847449493408203, 0.05866495895385742, 0.05836800003051758, 0.05918064117431641, 0.05892284774780274, 0.0589051513671875, 0.058052608489990234, 0.058515457153320315, 0.05864857482910156, 0.058840511322021484, 0.058913345336914065, 0.05856662368774414, 0.05868137741088867, 0.058480640411376954, 0.059736640930175784, 0.059768447875976564, 
0.05863087844848633, 0.059154430389404294, 0.05850726318359375, 0.05845971298217773, 0.05850156784057617, 0.05863993453979492, 0.05856099319458008, 0.05866873550415039, 0.06027907180786133, 0.05872127914428711, 0.05865903854370117, 0.058477344512939455, 0.058355712890625, 0.05841100692749023, 0.058447872161865234, 0.059222015380859375, 0.059138046264648435, 0.059156478881835936, 0.058652000427246095, 0.05866121673583984, 0.0587421760559082, 0.058665889739990235, 0.05874595260620117, 0.059398334503173826, 0.06028307342529297, 0.060660255432128905, 0.06094956970214844, 0.059345855712890624, 0.058531841278076174, 0.05861075210571289, 0.05848294448852539, 0.058778305053710934, 0.058818336486816405, 0.06199456024169922, 0.058990943908691404, 0.05917318344116211, 0.05880009460449219, 0.05869366455078125, 0.05894863891601562, 0.058768352508544924, 0.05898758316040039, 0.05879043197631836, 0.05865718460083008, 0.05974016189575195, 0.05928879928588867, 0.058874015808105466, 0.05864857482910156, 0.05858777618408203, 0.058584991455078124, 0.059047134399414065, 0.058627201080322267, 0.05872150421142578, 0.05844022369384766, 0.05849087905883789, 0.058492992401123045, 0.058560447692871095, 0.058431137084960935, 0.05857519912719727, 0.058789344787597654, 0.05878758239746094, 0.058620193481445315, 0.05974259185791016, 0.05899433517456055, 0.05899472045898437, 0.058945632934570315, 0.05968905639648438, 0.059572673797607424, 0.05868323135375977, 0.0586399040222168, 0.05862899017333984, 0.05859932708740234, 0.05886547088623047, 0.058792224884033205, 0.05880944061279297, 0.05879641723632813, 0.05847001647949219, 0.058753952026367184, 0.058436702728271485, 0.05845801544189453, 0.058211326599121094, 0.058570751190185545, 0.05872150421142578, 0.05845446395874023, 0.05855417633056641, 0.059103328704833986, 0.058447616577148434, 0.05832160186767578, 0.05831884765625, 0.05846835327148438, 0.05835779190063477, 0.05865264129638672, 0.05872409439086914, 0.058423553466796875, 0.05944319915771484, 0.0583741455078125, 0.05845196914672852, 0.05848883056640625, 0.05893632125854492, 0.05862707138061524, 0.05900032043457031, 0.05874124908447265, 0.05868268966674805, 0.05868803024291992, 0.05840297698974609, 0.05849856185913086, 0.05886323165893555, 0.058655200958251955, 0.0587421760559082, 0.05876406478881836, 0.05883484649658203, 0.05843379211425781, 0.0585055046081543, 0.05844899368286133, 0.05847110366821289, 0.058894241333007816, 0.05856265640258789, 0.058464256286621094, 0.05907455825805664, 0.05860940933227539, 0.05877376174926758, 0.05853772735595703, 0.05837030410766601, 0.059066112518310544, 0.06052428817749023, 0.05958633422851563, 0.058863712310791017, 0.058318687438964845, 0.05855913543701172, 0.058581214904785156, 0.058823806762695316, 0.05861260986328125, 0.058589183807373046, 0.05842339324951172, 0.058881950378417966, 0.06003526306152344, 0.05871187210083008, 0.05900697708129883, 0.058759166717529294, 0.0586915512084961, 0.058882080078125, 0.05871516799926758, 0.05862403106689453, 0.05870892715454101, 0.05877958297729492, 0.058848960876464844, 0.058788288116455076, 0.059018177032470705, 0.05902438354492188, 0.058942527770996095, 0.058596286773681644, 0.05911142349243164, 0.058605567932128906, 0.0584089584350586, 0.058582080841064456, 0.058915775299072264, 0.059015167236328124, 0.05901625442504883, 0.05892192077636719, 0.05865267181396484, 0.058535934448242184, 0.058447872161865234, 0.05900672149658203, 0.05848831939697265, 0.05854489517211914, 0.05872780990600586, 0.058775585174560545, 0.05839014434814453, 
0.05838499069213867, 0.058361793518066404, 0.05832953643798828, 0.05825641632080078, 0.058702560424804685, 0.062163200378417965, 0.05917900848388672, 0.05898380661010742, 0.05886220932006836, 0.05851136016845703, 0.05894144058227539, 0.05865859222412109, 0.06253385543823242, 0.05875116729736328, 0.0588941421508789, 0.05857484817504883, 0.05849497604370117, 0.05831248092651367, 0.05843123245239258, 0.05884771347045899, 0.059908096313476565, 0.05893033599853516, 0.05851004791259766, 0.05842956924438476, 0.05889775848388672, 0.05876201629638672, 0.05860073471069336, 0.058990753173828125, 0.058996448516845705, 0.05855920028686523, 0.05858303833007812, 0.058836513519287106, 0.058738689422607425, 0.05863801574707031, 0.05881862258911133, 0.05859340667724609, 0.05870857620239258, 0.058789344787597654, 0.05848118209838867, 0.05862809753417969, 0.05917695999145508, 0.05856367874145508, 0.05872275161743164, 0.05888582229614258, 0.05950678253173828, 0.05929439926147461, 0.05867046356201172, 0.059136638641357424, 0.05890252685546875, 0.058589183807373046, 0.0592026252746582, 0.058837566375732425, 0.058724735260009764, 0.058619232177734376, 0.05884934234619141, 0.05887241744995117, 0.058660255432128904, 0.0585693130493164, 0.05841715240478516, 0.06811965179443359, 0.0596591682434082, 0.059643905639648435, 0.058887680053710936, 0.058699745178222656, 0.05875763320922851, 0.05899817657470703, 0.05888678359985351, 0.058789249420166015, 0.05866953659057617, 0.05864668655395508, 0.05869977569580078, 0.059494400024414064, 0.05881587219238281, 0.058931838989257815, 0.05913705444335938, 0.05878195190429687, 0.05872057723999023, 0.058823070526123046, 0.05978112030029297, 0.059133438110351565, 0.0586833610534668, 0.05878838348388672, 0.05843337631225586, 0.0596701774597168, 0.058657470703125, 0.058584545135498045, 0.05858595275878906, 0.058726398468017575, 0.058660160064697264, 0.06025484848022461, 0.059482177734375, 0.05885081481933594, 0.05886767959594726, 0.05888668823242187, 0.05897129440307617, 0.061781951904296875, 0.05884169769287109, 0.05885318374633789, 0.05848320007324219, 0.05843088150024414, 0.058544734954833984, 0.058421150207519534, 0.05835171127319336, 0.05830620956420898, 0.05842160034179687, 0.058380287170410154, 0.05938790512084961, 0.05866416168212891, 0.058780448913574215, 0.05869120025634766, 0.05882819366455078, 0.05898649597167969, 0.05871305465698242, 0.05986435317993164, 0.05868003082275391, 0.058705726623535154, 0.05900511932373047, 0.0586217269897461, 0.05893756866455078, 0.058671104431152345, 0.05879308700561523, 0.05872870254516602, 0.05878761672973633, 0.058522174835205075, 0.059264991760253904, 0.05883526229858398, 0.05866099166870117, 0.05877337646484375, 0.059254783630371094, 0.058775550842285154, 0.058660511016845704, 0.058685249328613284, 0.05875334548950195, 0.05850543975830078, 0.058828800201416016, 0.05872844696044922, 0.058584545135498045, 0.05849520111083984, 0.05850143814086914, 0.05847590255737305, 0.05901910400390625, 0.0589628791809082, 0.058920799255371095, 0.058617313385009764, 0.058898975372314456, 0.05907046508789063, 0.06258278274536133, 0.05930108642578125, 0.0587743034362793, 0.05889168167114258, 0.059095008850097656, 0.05864716720581055, 0.0584007682800293, 0.05850080108642578, 0.05834783935546875, 0.058460224151611326, 0.05890028762817383, 0.05876339340209961, 0.058358848571777346, 0.05825203323364258, 0.05861600112915039, 0.05875225448608398, 0.05914291381835937, 0.05909299087524414, 0.05861151885986328, 0.05895395278930664, 0.0586640625, 0.058966880798339845, 
0.058824478149414064, 0.05844137573242188, 0.05847900772094727, 0.05870739364624023, 0.058597503662109376, 0.05879571151733398, 0.0587305908203125, 0.0584835205078125, 0.05854163360595703, 0.05829843139648438, 0.05839651107788086, 0.059092960357666015, 0.058698177337646484, 0.05843695831298828, 0.058547008514404295, 0.058763137817382814, 0.05900912094116211, 0.05878579330444336, 0.058548225402832034, 0.058508766174316405, 0.058655200958251955, 0.05844588851928711, 0.05887180709838867, 0.05867472076416016, 0.05830425643920899, 0.05847683334350586, 0.05856512069702149, 0.05858707046508789, 0.059184417724609376, 0.05864214324951172, 0.058606239318847654, 0.058671455383300784, 0.05898649597167969, 0.058773086547851565, 0.05884150314331055, 0.058611358642578125, 0.0584185905456543, 0.05867200088500977, 0.05846172714233398, 0.058507137298583985, 0.05869366455078125, 0.05956307220458985, 0.05890963363647461, 0.05859532928466797, 0.05901251220703125, 0.05887241744995117, 0.058533889770507816, 0.0583842887878418, 0.0584617919921875, 0.0584967041015625, 0.0591748161315918, 0.05858710479736328, 0.05866796875, 0.058508449554443356, 0.05837705612182617, 0.05875711822509765, 0.0587694091796875, 0.0585011215209961, 0.05836800003051758, 0.05854844665527344, 0.05833049774169922, 0.05825763320922851, 0.0583251838684082, 0.05850547027587891, 0.058406654357910155, 0.058343425750732425, 0.05840825653076172, 0.060211902618408204, 0.05907455825805664, 0.0595148811340332, 0.05856166458129883, 0.05837913513183594, 0.058638336181640625, 0.059829566955566404, 0.05954995346069336, 0.05859721755981445, 0.05911795043945312, 0.058611934661865234, 0.05867033767700195, 0.05862067031860352, 0.05867929458618164, 0.0587097282409668, 0.058528030395507816, 0.05816902542114258, 0.05959507369995117, 0.05845171356201172, 0.05870198440551758, 0.058350719451904294, 0.05848121643066406, 0.05862236785888672, 0.059154239654541016, 0.060373184204101565, 0.05870121765136719, 0.05929596710205078, 0.058675582885742185, 0.05859894561767578, 0.059275745391845706, 0.05849660873413086, 0.058667423248291016, 0.05879369735717774, 0.059015457153320315, 0.0597988166809082, 0.05858582305908203, 0.05894960021972656, 0.05973689651489258, 0.058867263793945315, 0.059333057403564454, 0.058638336181640625, 0.05861548614501953, 0.05858745574951172, 0.058517505645751956, 0.058515457153320315, 0.05876736068725586, 0.0592097282409668, 0.05881996917724609, 0.0591855354309082, 0.06104089736938476, 0.059906047821044923, 0.0594659538269043, 0.05892591857910156, 0.05877446365356445, 0.058799232482910156, 0.05863955307006836, 0.059067329406738284, 0.06032806396484375, 0.05911750411987305, 0.05903833770751953, 0.05913401412963867, 0.05865267181396484, 0.058700927734375, 0.05921862411499024, 0.059029697418212894, 0.05852569580078125, 0.05845401763916016, 0.058898368835449216, 0.05891692733764648, 0.058603553771972655, 0.05843270492553711, 0.058808223724365234, 0.058565502166748044, 0.05904159927368164, 0.0585709457397461, 0.05836713409423828, 0.058420063018798825, 0.05870979309082031, 0.05851286315917969, 0.058571521759033206, 0.05898553466796875, 0.0588922233581543, 0.05886387252807617, 0.058540702819824215, 0.058728542327880856, 0.05882060623168945, 0.058451774597167966, 0.05851286315917969, 0.05838716888427734, 0.05816729736328125, 0.058666240692138674, 0.058545024871826175, 0.05835663986206055, 0.05839152145385742, 0.05844172668457031, 0.05826355361938477, 0.058506526947021485, 0.058786304473876956, 0.05868566513061523, 
0.05903481674194336]",tokens/s,16.99489631750697,,, -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -34565,12 +34565,12 @@ ChildProcessError: Traceback (most recent call last): self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 4.12 MiB is free. Process 106984 has 14.73 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 242.96 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 4.12 MiB is free. Process 105801 has 14.73 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 242.96 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,869.863424,556.72832,0.0,178.25792,176.546816,s,1,7.713306640625,7.713306640625,0.0,7.713306640625,7.713306640625,7.713306640625,7.713306640625,[7.713306640625],,kWh,2.018954481249201e-05,2.2169501482602328e-06,6.545560791992866e-06,2.895205575274511e-05,,MB,1167.09376,661.58592,0.0,253.755392,221.106688,s,16,0.22666860866546631,0.014166788041591645,7.366024324280652e-05,0.014159088134765626,0.014224016189575195,0.01426743197441101,0.01435408148765564,"[0.014181344032287598, 0.014150176048278808, 0.014187104225158691, 0.01410108757019043, 0.014125760078430177, 0.014212160110473633, 0.014231328010559083, 0.014052319526672363, 0.014131360054016114, 0.014070048332214355, 0.014132672309875489, 0.014129983901977538, 0.014375743865966796, 0.014202816009521484, 0.014216704368591309, 0.014168000221252442]",tokens/s,18070.43341429412,kWh,4.245650116763654e-07,4.6821627602398355e-08,2.809361097616254e-07,7.523227490403892e-07,tokens/kWh,340279488.19377834,MB,1201.364992,676.265984,0.0,268.435456,221.109248,s,16,10.183209472656252,0.6364505920410156,0.006485617121469593,0.6356436767578124,0.6450568542480469,0.646190658569336,0.6476012664794921,"[0.6382002563476562, 0.6479539184570312, 0.6319490966796875, 0.6264183959960937, 0.6369337768554687, 0.6389736328125, 0.6244791259765625, 0.6310694580078124, 0.6333386840820312, 0.6341722412109375, 0.6456029052734376, 0.639483642578125, 0.6445108032226563, 0.6432012329101563, 0.6343535766601562, 0.6325687255859375]",tokens/s,98.98647402929906,kWh,1.8477347904209184e-05,2.0375971334138364e-06,7.2614310352382455e-06,2.777637607286127e-05,tokens/kWh,2268114.4521784373,,s,1008,10.174912675857541,0.010094159400652326,0.00023526298569966127,0.01006612777709961,0.010324982166290284,0.010412306880950928,0.01069016888618469,"[0.009861120223999023, 0.010414079666137695, 0.009979904174804688, 0.00999443244934082, 0.009987903594970702, 0.010186752319335938, 0.010055071830749511, 0.010114912033081055, 0.010099136352539062, 0.010152095794677735, 0.010123200416564941, 0.010078847885131836, 0.010112607955932617, 0.01022976016998291, 0.01031987190246582, 0.010203071594238281, 0.010119071960449219, 0.010082464218139648, 0.010083711624145508, 0.01004361629486084, 0.01006015968322754, 0.01005951976776123, 0.010055968284606934, 0.010016575813293457, 0.010135744094848633, 0.010137439727783203, 0.010311840057373046, 0.01031372833251953, 0.010389535903930665, 0.010439680099487305, 0.01033340835571289, 0.010251615524291992, 0.010205087661743164, 0.01019545555114746, 0.010227007865905762, 0.010268447875976562, 0.010138848304748534, 0.010229439735412597, 0.010141247749328614, 0.010085984230041504, 0.01012822437286377, 0.010097920417785644, 0.010217696189880372, 0.010148320198059082, 
0.010129216194152833, 0.010099295616149903, 0.010123968124389648, 0.010046431541442871, 0.010053631782531738, 0.010096351623535156, 0.01026863956451416, 0.009992511749267578, 0.010031328201293946, 0.010026592254638672, 0.009978048324584961, 0.009887807846069336, 0.009973695755004883, 0.010045760154724121, 0.010106240272521973, 0.010027327537536622, 0.00999014377593994, 0.009934847831726074, 0.010046527862548829, 0.009910079956054687, 0.01032032012939453, 0.0103088321685791, 0.010349311828613282, 0.010326527595520019, 0.010358431816101073, 0.010336383819580078, 0.010327232360839845, 0.01022432041168213, 0.010190848350524903, 0.010346495628356933, 0.010219648361206054, 0.010196864128112793, 0.010554431915283203, 0.010563872337341308, 0.010399616241455079, 0.010332415580749512, 0.010281791687011719, 0.010198752403259277, 0.01021939182281494, 0.01028825569152832, 0.010473952293395997, 0.010359328269958496, 0.010303263664245605, 0.010283231735229492, 0.010337823867797852, 0.0102423677444458, 0.01025654411315918, 0.010231807708740234, 0.010190848350524903, 0.01010483169555664, 0.010465567588806152, 0.010211039543151855, 0.010157952308654785, 0.010056896209716797, 0.010228832244873047, 0.010055392265319825, 0.010091808319091797, 0.010249343872070313, 0.010220800399780273, 0.010250271797180176, 0.010377152442932129, 0.010487584114074707, 0.010420960426330566, 0.010375167846679688, 0.010466783523559571, 0.010422271728515625, 0.010435232162475586, 0.010432383537292481, 0.010298751831054687, 0.010276512145996093, 0.010095583915710448, 0.010268671989440918, 0.010164223670959472, 0.010092543601989747, 0.010117407798767089, 0.010167072296142578, 0.01025324821472168, 0.010211647987365723, 0.010186431884765625, 0.010287103652954101, 0.010282527923583985, 0.010229472160339356, 0.009875455856323241, 0.010094592094421387, 0.011318943977355956, 0.010265055656433106, 0.010109888076782227, 0.010261407852172852, 0.010049216270446778, 0.01019878387451172, 0.010219552040100098, 0.010252287864685058, 0.010283583641052247, 0.010244128227233886, 0.010175647735595704, 0.010064703941345215, 0.009984000205993653, 0.00993391990661621, 0.009972640037536621, 0.00988588809967041, 0.010098496437072753, 0.009881216049194336, 0.009777183532714845, 0.009850751876831055, 0.009832927703857422, 0.009841823577880859, 0.009816512107849121, 0.009830304145812988, 0.00983296012878418, 0.009891839981079101, 0.009851103782653808, 0.009885312080383301, 0.009861215591430664, 0.009910335540771484, 0.009914591789245605, 0.009835488319396972, 0.009877856254577638, 0.009834400177001953, 0.009929344177246094, 0.009834752082824708, 0.009954943656921386, 0.009838239669799805, 0.009841055870056152, 0.009830016136169434, 0.009788127899169921, 0.009835583686828613, 0.009785311698913574, 0.00984124755859375, 0.009768992424011231, 0.009850624084472657, 0.009799648284912109, 0.009819552421569825, 0.010087295532226563, 0.009842144012451173, 0.009804320335388184, 0.009795167922973632, 0.009779616355895996, 0.009825375556945801, 0.010572064399719238, 0.013265536308288575, 0.010495776176452637, 0.009938400268554687, 0.009855520248413087, 0.009866496086120606, 0.009823007583618164, 0.00959718418121338, 0.009935199737548828, 0.009860639572143554, 0.009914496421813964, 0.009828351974487304, 0.009793536186218262, 0.00991641616821289, 0.009865216255187988, 0.009873408317565918, 0.009887743949890136, 0.009887743949890136, 0.009973759651184083, 0.010074111938476562, 0.009979968070983886, 0.009965503692626953, 0.009919615745544433, 0.009878399848937989, 
0.009861023902893066, 0.00987564754486084, 0.009848735809326171, 0.009885696411132813, 0.009814016342163086, 0.009862208366394043, 0.009872320175170899, 0.009747648239135742, 0.009795968055725097, 0.009869631767272949, 0.009906304359436036, 0.009934847831726074, 0.010026080131530762, 0.009939743995666503, 0.009968768119812012, 0.010019840240478516, 0.0101396484375, 0.010088095664978028, 0.010088735580444336, 0.010030143737792969, 0.009984416007995605, 0.009937151908874512, 0.009976351737976074, 0.00992204761505127, 0.009888256072998047, 0.00987936019897461, 0.009834495544433594, 0.009840895652770996, 0.009884767532348633, 0.00983516788482666, 0.009951231956481933, 0.00991427230834961, 0.010010335922241211, 0.009986207962036133, 0.010010784149169921, 0.010004672050476074, 0.010059552192687988, 0.009930944442749024, 0.009956992149353028, 0.010073920249938965, 0.01005350399017334, 0.010084287643432617, 0.010075072288513183, 0.010046751976013183, 0.01001638412475586, 0.010025792121887207, 0.009690655708312988, 0.010018783569335938, 0.009960255622863769, 0.010053343772888183, 0.009957440376281738, 0.010059743881225587, 0.01009996795654297, 0.01018329620361328, 0.01012339210510254, 0.010123167991638184, 0.010166367530822755, 0.010168319702148437, 0.010145792007446289, 0.010141759872436524, 0.010081536293029785, 0.010014944076538086, 0.00998265552520752, 0.009842656135559081, 0.009916288375854492, 0.010153599739074707, 0.010182975769042969, 0.010069600105285645, 0.010280896186828613, 0.010145440101623535, 0.010197823524475097, 0.01009455966949463, 0.0101048641204834, 0.01005891227722168, 0.010107744216918945, 0.010026335716247559, 0.009987903594970702, 0.00990511989593506, 0.009917632102966308, 0.00987615966796875, 0.00999833583831787, 0.009973504066467286, 0.009924863815307617, 0.009909664154052734, 0.009908576011657715, 0.009847040176391602, 0.009885120391845702, 0.01007260799407959, 0.009881631851196289, 0.009901663780212403, 0.009922975540161133, 0.009930751800537109, 0.009945088386535645, 0.010000384330749512, 0.010071519851684571, 0.0099901123046875, 0.01005615997314453, 0.0100414400100708, 0.011681792259216308, 0.010293439865112304, 0.010350111961364747, 0.010420767784118653, 0.010315520286560059, 0.010378463745117188, 0.010541152000427247, 0.01032431983947754, 0.010314111709594726, 0.010307552337646485, 0.01035654354095459, 0.009958208084106445, 0.01022383975982666, 0.010217184066772462, 0.010540575981140136, 0.010324095726013184, 0.010307807922363282, 0.010330240249633789, 0.01032102394104004, 0.010195551872253418, 0.01031942367553711, 0.01023692798614502, 0.01034937572479248, 0.010135519981384277, 0.01014470386505127, 0.010149888038635254, 0.010059231758117676, 0.010079839706420898, 0.010078944206237794, 0.00998243236541748, 0.010070048332214355, 0.009999808311462402, 0.010107135772705078, 0.00998198413848877, 0.010114751815795898, 0.01101036834716797, 0.010404959678649902, 0.01118835163116455, 0.01003996753692627, 0.010096768379211425, 0.010348544120788575, 0.010207136154174805, 0.010116607666015624, 0.01010371208190918, 0.010158047676086426, 0.01009763240814209, 0.010316703796386719, 0.010350432395935059, 0.010317888259887695, 0.010193023681640625, 0.01027462387084961, 0.009965567588806153, 0.010042688369750977, 0.010007519721984864, 0.009975520133972168, 0.009852288246154785, 0.009939583778381347, 0.009948639869689942, 0.009957056045532226, 0.009851743698120117, 0.010210463523864746, 0.009913311958312989, 0.009926527976989745, 0.009943360328674317, 0.009894720077514648, 
0.00983948802947998, 0.009881119728088378, 0.009872896194458008, 0.00997001552581787, 0.009964159965515137, 0.010047264099121093, 0.010073568344116211, 0.009992608070373535, 0.00995475196838379, 0.009736191749572755, 0.010069408416748048, 0.010011136054992676, 0.010172415733337402, 0.010296480178833008, 0.010412351608276367, 0.010320799827575684, 0.010171680450439454, 0.01008499240875244, 0.010004063606262208, 0.010023136138916016, 0.010212960243225098, 0.009865632057189941, 0.009903776168823241, 0.00991049575805664, 0.009969792366027831, 0.009881600379943848, 0.009796607971191406, 0.009792799949645997, 0.009809632301330567, 0.009801792144775391, 0.009767999649047851, 0.009752639770507813, 0.009798399925231934, 0.010280832290649414, 0.0098920316696167, 0.009856351852416992, 0.009790111541748046, 0.009803775787353516, 0.009875455856323241, 0.009797632217407226, 0.009987968444824219, 0.009846912384033204, 0.00981606388092041, 0.009791487693786622, 0.009852928161621094, 0.00987980842590332, 0.009807616233825684, 0.01001471996307373, 0.009775103569030762, 0.009802911758422852, 0.009771424293518067, 0.009803199768066406, 0.009833215713500976, 0.009825887680053711, 0.009839263916015625, 0.009959615707397462, 0.009814911842346192, 0.009830495834350587, 0.00981283187866211, 0.009780287742614746, 0.009751168251037598, 0.009760576248168945, 0.009798144340515137, 0.009844736099243164, 0.00976089572906494, 0.009920384407043457, 0.009911359786987305, 0.009825216293334962, 0.009846879959106445, 0.00994489574432373, 0.009947232246398926, 0.009981951713562011, 0.009586591720581055, 0.009936991691589356, 0.009971808433532715, 0.009973759651184083, 0.009965567588806153, 0.009904288291931152, 0.00998588752746582, 0.009958975791931153, 0.009912032127380371, 0.009917152404785157, 0.009954784393310547, 0.009982144355773926, 0.010090911865234375, 0.00989583969116211, 0.009985119819641113, 0.01002182388305664, 0.009960800170898438, 0.009951231956481933, 0.00992527961730957, 0.009977984428405761, 0.009943103790283203, 0.010172063827514649, 0.010049759864807129, 0.009993887901306153, 0.01000271987915039, 0.010123231887817382, 0.010366815567016602, 0.009945152282714843, 0.01003651237487793, 0.009945631980895996, 0.010010687828063965, 0.010076416015625, 0.010082559585571289, 0.009977791786193848, 0.010041152000427246, 0.009930751800537109, 0.010017024040222169, 0.009959168434143066, 0.009968864440917968, 0.010025152206420898, 0.010009247779846192, 0.009965184211730958, 0.01005190372467041, 0.00999785614013672, 0.00996953582763672, 0.009964287757873535, 0.009999296188354493, 0.009957951545715332, 0.01020963191986084, 0.010037247657775878, 0.010076160430908204, 0.010074272155761718, 0.010032863616943359, 0.010086624145507813, 0.010049728393554688, 0.01006982421875, 0.010056768417358398, 0.010089311599731445, 0.010051839828491211, 0.010041088104248046, 0.010048704147338867, 0.010153087615966797, 0.010026880264282226, 0.009779680252075195, 0.010229984283447265, 0.01021343994140625, 0.010170304298400878, 0.010073344230651855, 0.010033408164978027, 0.009996800422668458, 0.010057727813720703, 0.010012672424316407, 0.010040448188781739, 0.010168800354003906, 0.009985535621643067, 0.010013888359069825, 0.010174176216125489, 0.010280960083007813, 0.010116288185119629, 0.01008518409729004, 0.00996947193145752, 0.010144288063049317, 0.009989983558654785, 0.009981887817382813, 0.00996947193145752, 0.009957056045532226, 0.009927103996276856, 0.00993785572052002, 0.009903103828430175, 0.009922335624694825, 0.009916576385498047, 
0.009952832221984863, 0.010025312423706054, 0.009955648422241212, 0.010092063903808594, 0.009941568374633789, 0.009959168434143066, 0.010026528358459472, 0.010041824340820312, 0.00998307228088379, 0.009987071990966797, 0.010039551734924317, 0.010110624313354492, 0.010174015998840331, 0.010096799850463868, 0.010041631698608399, 0.009973759651184083, 0.010051103591918945, 0.010122847557067872, 0.010112992286682129, 0.01010700798034668, 0.01018992042541504, 0.010104512214660645, 0.010210368156433105, 0.010130304336547852, 0.01004963207244873, 0.010090016365051269, 0.010090944290161132, 0.010068256378173828, 0.010272480010986329, 0.010137248039245605, 0.00990447998046875, 0.00992972755432129, 0.009947135925292968, 0.009962207794189453, 0.00992636775970459, 0.009659616470336915, 0.00990287971496582, 0.009897983551025391, 0.009928671836853028, 0.009817279815673829, 0.009830368041992188, 0.009819007873535157, 0.009740608215332031, 0.009842368125915527, 0.009857024192810059, 0.009827584266662597, 0.009788448333740235, 0.009805536270141602, 0.009774592399597168, 0.009938528060913086, 0.010105759620666503, 0.010258560180664062, 0.009937984466552734, 0.00994591999053955, 0.01010812759399414, 0.009948287963867188, 0.009870495796203613, 0.010078495979309082, 0.00993667221069336, 0.010001184463500977, 0.010025952339172363, 0.009907999992370605, 0.00989247989654541, 0.009916223526000977, 0.010693087577819824, 0.010110495567321777, 0.010797087669372558, 0.011152095794677734, 0.010643168449401856, 0.01012451171875, 0.010148639678955079, 0.010169471740722656, 0.010208127975463867, 0.010249919891357422, 0.01011900806427002, 0.010138015747070312, 0.010233920097351074, 0.01012707233428955, 0.010223936080932618, 0.010166080474853515, 0.010301600456237793, 0.01016755199432373, 0.010113375663757324, 0.010069984436035156, 0.010186304092407226, 0.010043807983398437, 0.010044032096862793, 0.010116640090942383, 0.010000800132751465, 0.009969599723815918, 0.00998192024230957, 0.009940447807312012, 0.009947680473327636, 0.01005568027496338, 0.009977855682373048, 0.010038304328918456, 0.01003974437713623, 0.010002976417541503, 0.009648384094238282, 0.010227295875549316, 0.010117216110229492, 0.010055808067321778, 0.01007817554473877, 0.01004748821258545, 0.009969568252563477, 0.010036607742309571, 0.010410240173339844, 0.012155263900756837, 0.011288352012634277, 0.010651391983032226, 0.010070143699645996, 0.010055871963500976, 0.010213536262512207, 0.010379263877868652, 0.010612735748291016, 0.01030784034729004, 0.01034768009185791, 0.010402463912963867, 0.010422207832336427, 0.01023795223236084, 0.010166367530822755, 0.010167360305786133, 0.010134495735168458, 0.010264320373535156, 0.010514752388000488, 0.010232640266418456, 0.010228672027587891, 0.010432191848754883, 0.010406496047973633, 0.01044480037689209, 0.010198816299438477, 0.010166080474853515, 0.010348735809326172, 0.010114720344543457, 0.01031817626953125, 0.01028332805633545, 0.010159808158874512, 0.010197152137756347, 0.01020911979675293, 0.010173600196838379, 0.010132575988769531, 0.010145183563232421, 0.010202816009521484, 0.010084320068359375, 0.010043647766113281, 0.01008841609954834, 0.009973407745361328, 0.009968447685241699, 0.01006287956237793, 0.009987199783325195, 0.010469311714172364, 0.010004287719726562, 0.009936991691589356, 0.010004480361938477, 0.010119168281555176, 0.010288288116455078, 0.010297727584838868, 0.010171072006225586, 0.010130687713623047, 0.010068511962890625, 0.01003110408782959, 0.009707615852355958, 0.010123167991638184, 
0.010076160430908204, 0.010195232391357422, 0.010052831649780273, 0.009981535911560058, 0.010019743919372558, 0.010123264312744141, 0.010067968368530274, 0.010024448394775391, 0.01006438446044922, 0.009959424018859863, 0.009953056335449219, 0.010029631614685059, 0.010182016372680664, 0.010113311767578124, 0.009998271942138672, 0.01013152027130127, 0.010057567596435547, 0.010106304168701171, 0.010031071662902832, 0.009974464416503907, 0.009913663864135742, 0.00996224021911621, 0.009977855682373048, 0.010282688140869141, 0.010264896392822265, 0.010178560256958008, 0.010192288398742675, 0.010123680114746094, 0.010102975845336913, 0.010309632301330567, 0.010233856201171876, 0.010194944381713868, 0.01022156810760498, 0.01028003215789795, 0.010249055862426758, 0.010230912208557128, 0.010164640426635741, 0.010094431877136231, 0.010158816337585449, 0.010149855613708497, 0.01017420768737793, 0.010315711975097657, 0.010154623985290528, 0.010159808158874512, 0.010102784156799317, 0.010186880111694337, 0.010162015914916993, 0.010233983993530274, 0.010207263946533204, 0.010339776039123534, 0.010243840217590332, 0.010232159614562988, 0.010223679542541504, 0.010170304298400878, 0.010109248161315918, 0.010104415893554687, 0.010227359771728516, 0.0101046724319458, 0.010439616203308105, 0.01027609634399414, 0.01044159984588623, 0.009976544380187989, 0.010237792015075683, 0.010256511688232422, 0.010419936180114745, 0.010379584312438964, 0.010550559997558594, 0.010304384231567383, 0.010344287872314454, 0.01039568042755127, 0.010283200263977051, 0.01026643180847168, 0.010202752113342286, 0.010147744178771973, 0.010226112365722656, 0.010288352012634278, 0.010314528465270995, 0.010201087951660156, 0.0101910400390625, 0.010270591735839843, 0.010205120086669921, 0.010248224258422851, 0.010226655960083007, 0.010971872329711915, 0.010174752235412598, 0.010110591888427735, 0.010094976425170899, 0.01012268829345703, 0.009974335670471192, 0.010055135726928711, 0.009966015815734863, 0.010018143653869629, 0.010044159889221192, 0.010084351539611817, 0.010036800384521484, 0.010027520179748535, 0.009913311958312989, 0.009974111557006836, 0.01014844799041748, 0.010090784072875976, 0.010149567604064942, 0.010231871604919434, 0.010392736434936524, 0.010314751625061035, 0.01015497589111328, 0.010331007957458496, 0.010320896148681641, 0.010259455680847167, 0.01026153564453125, 0.010345439910888671, 0.010292767524719238, 0.010311327934265136, 0.010279071807861329, 0.01035536003112793, 0.010358624458312989, 0.010423871994018554, 0.01024675178527832, 0.010225664138793946, 0.010098688125610352, 0.010235903739929199, 0.010170368194580079, 0.010110976219177246, 0.010164511680603028, 0.010188223838806153, 0.009944319725036621, 0.010488287925720215, 0.010380831718444825, 0.01035542392730713, 0.010346752166748047, 0.010420000076293946, 0.010294367790222168, 0.010204416275024414, 0.010092191696166992, 0.010035136222839355, 0.01000864028930664, 0.009928095817565917, 0.009942975997924805, 0.009953760147094726, 0.010049856185913086, 0.009955039978027344, 0.009959584236145019, 0.010001888275146484, 0.009970208168029786, 0.009941023826599121, 0.010055904388427734, 0.010137184143066406, 0.010170528411865234, 0.010106271743774414, 0.010056320190429688, 0.010069727897644043, 0.010080672264099122, 0.009998175621032715, 0.009975808143615723, 0.010051360130310058, 0.010092767715454101, 0.010151488304138184, 0.010239935874938965, 0.010401439666748048, 0.010414560317993165, 0.010181280136108398, 0.010233119964599609, 0.010262944221496583, 
0.010207263946533204, 0.010553471565246582, 0.010388895988464356, 0.010472991943359374, 0.010353599548339844, 0.010283007621765136, 0.010309344291687011, 0.010209440231323242, 0.010189184188842773, 0.010272512435913086, 0.010291199684143066, 0.010553407669067382, 0.010279935836791992, 0.010269632339477539, 0.010452128410339356, 0.010348575592041016, 0.010249119758605957, 0.010272671699523925, 0.010489055633544922, 0.010276991844177246, 0.010268896102905274, 0.010243552207946777, 0.010173407554626465, 0.010169695854187012, 0.010139936447143555, 0.009957088470458984, 0.010216383934020997, 0.01022156810760498, 0.010188608169555665, 0.010209471702575683, 0.01020041561126709, 0.01024182415008545, 0.01015283203125, 0.010108927726745605, 0.010202848434448242, 0.010403679847717286, 0.010285504341125488, 0.010283200263977051, 0.010321696281433106, 0.010254015922546387, 0.010267007827758788, 0.010247296333312988, 0.010238816261291504, 0.01023795223236084, 0.010160127639770507, 0.010412223815917969, 0.010519424438476562, 0.009982912063598633, 0.009995391845703125, 0.009971776008605958, 0.0099268798828125, 0.009943296432495117, 0.00992086410522461, 0.009904224395751953, 0.00997977638244629, 0.009976096153259277, 0.009964384078979493, 0.01011631965637207, 0.010073599815368652, 0.010031295776367187, 0.010046527862548829, 0.01002387237548828, 0.010090496063232422, 0.010090496063232422, 0.00998140811920166, 0.01003600025177002, 0.010057696342468262, 0.009977631568908692, 0.01002086353302002, 0.009967776298522949, 0.009953120231628417, 0.00987679958343506, 0.009894880294799804, 0.009997983932495118, 0.009982015609741211, 0.009920512199401856, 0.009989472389221191, 0.009924960136413575, 0.009879776000976562, 0.009941087722778321, 0.009861120223999023, 0.009914048194885254, 0.009863167762756348, 0.009879487991333008, 0.009930399894714356, 0.009847455978393555, 0.009875200271606445, 0.009910592079162598, 0.009927935600280761, 0.010320896148681641, 0.010408639907836914, 0.01033625602722168, 0.0101397123336792, 0.01012342357635498, 0.01040886402130127, 0.009882495880126954, 0.009893695831298828, 0.010173824310302735, 0.009904543876647949, 0.009979392051696777, 0.009921440124511719, 0.009943039894104003, 0.009926655769348144, 0.009946240425109863, 0.009927552223205566, 0.009932703971862793, 0.00983356761932373, 0.010013728141784669, 0.00999830436706543, 0.009975968360900879, 0.009983648300170898, 0.009927071571350098, 0.009922335624694825, 0.010004192352294923, 0.009997759819030762, 0.010265695571899413, 0.010173791885375977, 0.01008681583404541, 0.010175616264343261, 0.010047552108764648, 0.01007481575012207, 0.010160032272338868, 0.010088352203369141, 0.010101056098937989, 0.010162176132202149, 0.010196991920471191, 0.010112544059753419, 0.010031776428222656, 0.010000191688537597, 0.009996288299560547, 0.009928704261779785, 0.009963520050048828, 0.009957056045532226, 0.010084735870361327, 0.009988032341003417, 0.009965151786804198, 0.010004511833190918, 0.009938336372375489, 0.009948127746582032, 0.010036319732666015, 0.009919391632080079, 0.009904128074645996, 0.009862239837646485, 0.010056608200073243, 0.01008841609954834, 0.009953439712524414, 0.009928288459777833, 0.010012351989746093, 0.009976415634155274, 0.010035167694091797, 0.010067551612854005]",tokens/s,99.06718928327761,,, 
4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1139.027968,1075.707904,0.0,710.934528,686.03904,s,1,8.6485009765625,8.6485009765625,0.0,8.6485009765625,8.6485009765625,8.6485009765625,8.6485009765625,[8.6485009765625],,kWh,2.4731579891666418e-05,2.720960264943441e-06,8.533617937983573e-06,3.598615809459343e-05,,MB,1501.863936,1436.418048,0.0,1021.313024,985.00096,s,10,0.34727174377441405,0.034727174377441404,0.00020048635533280468,0.03472707176208496,0.034873468017578124,0.03501990203857422,0.035137049255371096,"[0.034793983459472655, 0.03432419204711914, 0.03464787292480469, 0.03471615982055664, 0.03516633605957031, 0.03484092712402344, 0.03473798370361328, 0.03478505706787109, 0.03461593627929688, 0.034643295288085935]",tokens/s,7371.748625949144,kWh,1.0597837427537235e-06,1.1687371799714404e-07,7.063552349709435e-07,1.8830126957218112e-06,tokens/kWh,135952349.43536484,MB,1534.697472,1486.749696,0.0,1071.644672,985.00352,s,10,12.299974487304686,1.2299974487304686,0.006145748553420057,1.2299616088867187,1.2381208618164063,1.2388652404785157,1.239460743408203,"[1.21961376953125, 1.2272628173828124, 1.2351158447265624, 1.23228369140625, 1.239609619140625, 1.2379554443359375, 1.2316201171875, 1.2283031005859375, 1.224498779296875, 1.2237113037109375]",tokens/s,51.219618435001564,kWh,3.557996262016117e-05,3.924023964454741e-06,1.5841796892428184e-05,5.53457834770441e-05,tokens/kWh,1138298.0968393132,,s,630,12.296335512161262,0.019517992876446438,0.0002630778216794924,0.019482864379882815,0.019722240447998047,0.019802812480926513,0.020368323764801032,"[0.01948297691345215, 0.019397823333740235, 0.019278656005859374, 0.01969152069091797, 0.019234304428100587, 0.01920595169067383, 0.01921686363220215, 0.01926371192932129, 0.019158592224121095, 0.01917977523803711, 0.019249343872070314, 0.019258495330810546, 0.019157247543334963, 0.01908915138244629, 0.01919615936279297, 0.019229440689086913, 0.019299392700195313, 0.01940153694152832, 0.019148319244384766, 0.01922470474243164, 0.019220832824707032, 0.019201471328735353, 0.019294784545898436, 0.019203071594238282, 0.019266464233398437, 0.01912022399902344, 0.019152576446533204, 0.01927609634399414, 0.019187551498413086, 0.01936639976501465, 0.019291807174682617, 0.019705856323242187, 0.019572736740112305, 0.01941094398498535, 0.01942678451538086, 0.019431039810180663, 0.019735456466674805, 0.019580928802490235, 0.01964166450500488, 0.019479232788085936, 0.019424415588378905, 0.01934124755859375, 0.019493791580200197, 0.0193702392578125, 0.019424800872802735, 0.01941100883483887, 0.01935478401184082, 0.01939699172973633, 0.01954060745239258, 0.019400703430175782, 0.01935468864440918, 0.019544416427612305, 0.01935420799255371, 0.019349504470825195, 0.019268800735473633, 0.01958585548400879, 0.019380191802978515, 0.01939859199523926, 0.01934499168395996, 0.019337728500366212, 0.019310592651367187, 0.0194969596862793, 
0.01947238349914551, 0.01983283233642578, 0.019424640655517578, 0.01955075263977051, 0.01950271987915039, 0.01938684844970703, 0.019455135345458983, 0.019394432067871094, 0.019391456604003907, 0.019535871505737306, 0.01943552017211914, 0.019366943359375, 0.019251232147216798, 0.019450239181518555, 0.019494848251342775, 0.019401344299316406, 0.019290111541748048, 0.019423135757446287, 0.0194899845123291, 0.01938115119934082, 0.019371103286743165, 0.019752927780151367, 0.019544832229614256, 0.019650751113891602, 0.019469568252563477, 0.01946291160583496, 0.019722240447998047, 0.019369728088378908, 0.01948847961425781, 0.019407295227050782, 0.019417184829711914, 0.019337215423583985, 0.019331071853637697, 0.01927577590942383, 0.019449855804443358, 0.019517215728759765, 0.019455392837524413, 0.019404928207397462, 0.019360000610351563, 0.019519935607910155, 0.01957459259033203, 0.019415231704711915, 0.019408895492553712, 0.019556352615356445, 0.019722240447998047, 0.019656896591186523, 0.01975276756286621, 0.019459999084472657, 0.01942947196960449, 0.019334815979003907, 0.019446111679077147, 0.019396608352661132, 0.019597312927246095, 0.019548160552978516, 0.019519264221191407, 0.019544288635253905, 0.019723583221435546, 0.019390783309936523, 0.019474815368652344, 0.019404800415039062, 0.01946339225769043, 0.01938102340698242, 0.019416671752929687, 0.01954243278503418, 0.01972652816772461, 0.019496768951416017, 0.01946006393432617, 0.019563199996948243, 0.019428960800170897, 0.01951785659790039, 0.0194969596862793, 0.01946009635925293, 0.019499008178710937, 0.019493152618408203, 0.019492576599121094, 0.019568639755249022, 0.020105215072631837, 0.01947238349914551, 0.019554304122924804, 0.019541343688964843, 0.019784000396728514, 0.019666528701782225, 0.019380992889404296, 0.01953990364074707, 0.019404863357543944, 0.0194703369140625, 0.01956991958618164, 0.01945471954345703, 0.01985536003112793, 0.019824384689331054, 0.020134143829345703, 0.019684608459472657, 0.019478303909301758, 0.019493120193481445, 0.019604095458984373, 0.019759199142456055, 0.01998409652709961, 0.019562784194946288, 0.019517440795898438, 0.01961369514465332, 0.019562496185302734, 0.019698688507080078, 0.019790239334106445, 0.019553184509277344, 0.019504480361938477, 0.019609952926635744, 0.019377824783325195, 0.01970604705810547, 0.019563680648803712, 0.019675519943237303, 0.019588895797729492, 0.01978041648864746, 0.019595264434814453, 0.01955753517150879, 0.019450752258300782, 0.01956800079345703, 0.01956710433959961, 0.019519615173339843, 0.01946396827697754, 0.019364063262939452, 0.0194703369140625, 0.019621631622314454, 0.019683839797973633, 0.019580768585205077, 0.019531679153442384, 0.01997209548950195, 0.01964563179016113, 0.01951708793640137, 0.019691232681274415, 0.019657344818115235, 0.01957209587097168, 0.019702112197875977, 0.019460384368896484, 0.019380224227905272, 0.019394176483154297, 0.01958745574951172, 0.019464319229125976, 0.01970572853088379, 0.019525503158569334, 0.01938003158569336, 0.019637632369995116, 0.019616640090942383, 0.019556415557861327, 0.019492799758911133, 0.01953593635559082, 0.019514463424682618, 0.019495840072631835, 0.019484031677246095, 0.01968160057067871, 0.019518848419189452, 0.019489728927612304, 0.019396608352661132, 0.019688671112060546, 0.01947238349914551, 0.01958787155151367, 0.019369983673095705, 0.019331071853637697, 0.019360992431640626, 0.01938640022277832, 0.019544511795043944, 0.019341695785522462, 0.019466175079345702, 0.019494911193847657, 0.019512928009033204, 
0.019669408798217772, 0.01946041679382324, 0.01957436752319336, 0.019467552185058593, 0.02001798439025879, 0.019650144577026366, 0.01945062446594238, 0.019436960220336915, 0.019574560165405274, 0.019558879852294923, 0.019613119125366212, 0.019362367630004883, 0.01967919921875, 0.01965648078918457, 0.01957913589477539, 0.019972448348999024, 0.01997558403015137, 0.019785888671875, 0.01964041519165039, 0.019458047866821288, 0.019506399154663085, 0.0195850887298584, 0.01933785629272461, 0.019565664291381835, 0.01967001533508301, 0.019652448654174804, 0.019784992218017577, 0.01977827262878418, 0.019559616088867186, 0.019675968170166015, 0.0196177921295166, 0.01957683181762695, 0.019372032165527343, 0.01946739196777344, 0.019587936401367186, 0.01939254379272461, 0.019340768814086914, 0.01921059226989746, 0.019275840759277345, 0.019447328567504883, 0.019460704803466795, 0.019283967971801756, 0.019224159240722655, 0.019452320098876954, 0.02325299263000488, 0.01962508773803711, 0.01948057556152344, 0.019488704681396483, 0.019620031356811524, 0.01939263916015625, 0.019791744232177735, 0.019394655227661133, 0.01985807991027832, 0.021024032592773436, 0.01955913543701172, 0.019877887725830077, 0.019732479095458985, 0.020934751510620117, 0.019762975692749023, 0.019546239852905274, 0.01944329643249512, 0.019617759704589843, 0.019704256057739258, 0.019601119995117187, 0.019420543670654298, 0.019403263092041014, 0.019567007064819335, 0.019666015625, 0.01941391944885254, 0.019414112091064452, 0.019454431533813477, 0.01951705551147461, 0.019650976181030275, 0.019675552368164064, 0.019666336059570313, 0.019984991073608398, 0.019568639755249022, 0.01956038475036621, 0.019679136276245117, 0.019650720596313478, 0.019510976791381834, 0.019600959777832033, 0.01947110366821289, 0.01965827178955078, 0.019588895797729492, 0.019681919097900392, 0.019733728408813475, 0.019644895553588868, 0.019859840393066406, 0.01976518440246582, 0.019708576202392577, 0.01952934455871582, 0.01939484786987305, 0.019460191726684572, 0.019357696533203125, 0.019482624053955077, 0.019519487380981446, 0.01944883155822754, 0.01945497512817383, 0.01963212776184082, 0.019935232162475586, 0.019426368713378907, 0.02085481643676758, 0.019786655426025392, 0.020024896621704102, 0.01956012725830078, 0.019546335220336913, 0.019601375579833984, 0.019550783157348633, 0.019445119857788087, 0.019455808639526367, 0.019688255310058595, 0.019668991088867188, 0.019398656845092774, 0.019598335266113282, 0.019479551315307618, 0.019587072372436523, 0.0196212158203125, 0.019507871627807618, 0.01952467155456543, 0.0195532169342041, 0.0194150390625, 0.019719200134277345, 0.01968035125732422, 0.019724159240722655, 0.02021504020690918, 0.02128060722351074, 0.01983942413330078, 0.01955273628234863, 0.01975872039794922, 0.019433792114257813, 0.019600799560546875, 0.019581600189208983, 0.01949286460876465, 0.019564544677734375, 0.019568639755249022, 0.019552255630493166, 0.020242431640625, 0.019697664260864257, 0.019556352615356445, 0.019529727935791014, 0.01954803276062012, 0.019654783248901367, 0.019656160354614257, 0.01966102409362793, 0.01952367973327637, 0.01940912055969238, 0.019441600799560546, 0.019510751724243165, 0.0194671688079834, 0.019596128463745116, 0.019548927307128906, 0.0198371524810791, 0.019533439636230467, 0.02008073616027832, 0.01957676887512207, 0.01972220802307129, 0.01951091194152832, 0.019532800674438477, 0.0194150390625, 0.019963903427124022, 0.019529727935791014, 0.01949056053161621, 0.01940915107727051, 0.01939558410644531, 
0.019528287887573242, 0.019544479370117187, 0.01959321594238281, 0.019615743637084963, 0.019417087554931642, 0.019606592178344727, 0.019569599151611328, 0.0196177921295166, 0.019781183242797852, 0.019593055725097657, 0.0193603515625, 0.01961369514465332, 0.019367935180664063, 0.019563711166381836, 0.01957356834411621, 0.01948057556152344, 0.01941913604736328, 0.019494207382202148, 0.01949910354614258, 0.019474016189575196, 0.019534847259521485, 0.01943961524963379, 0.019418624877929686, 0.019599872589111327, 0.019473695755004884, 0.019448543548583986, 0.019412992477416992, 0.019482624053955077, 0.019564544677734375, 0.01941913604736328, 0.019406528472900392, 0.019349407196044922, 0.019380640029907227, 0.01935500717163086, 0.01959385681152344, 0.019505151748657225, 0.019615392684936523, 0.019515743255615236, 0.01951686477661133, 0.019554880142211913, 0.0194368953704834, 0.019374624252319336, 0.019462272644042967, 0.01938969612121582, 0.019519519805908204, 0.01952227210998535, 0.020419744491577147, 0.019579584121704102, 0.019592992782592772, 0.019652992248535155, 0.019706783294677736, 0.019726335525512697, 0.019736576080322265, 0.019670463562011718, 0.01943516731262207, 0.019476991653442383, 0.01955580711364746, 0.019454912185668947, 0.019589120864868165, 0.019437215805053712, 0.0194051513671875, 0.019586399078369142, 0.019369728088378908, 0.019551136016845702, 0.019388416290283202, 0.019406848907470704, 0.01944166374206543, 0.019358976364135742, 0.01942959976196289, 0.01924764823913574, 0.019298112869262696, 0.019412639617919922, 0.01939900779724121, 0.01979769515991211, 0.01962828826904297, 0.01942963218688965, 0.019333120346069335, 0.019404607772827147, 0.019402240753173827, 0.019190719604492187, 0.019241888046264647, 0.01940768051147461, 0.01932598304748535, 0.019317663192749024, 0.019243104934692383, 0.019306079864501953, 0.01936774444580078, 0.01963222312927246, 0.019278335571289062, 0.0194268798828125, 0.020761024475097655, 0.0192491512298584, 0.019340448379516602, 0.019296159744262697, 0.019546911239624022, 0.019441823959350586, 0.019766687393188476, 0.01977334403991699, 0.01981839942932129, 0.019798271179199217, 0.019640127182006837, 0.019503839492797853, 0.019337215423583985, 0.019656095504760742, 0.019552255630493166, 0.01938060760498047, 0.019561887741088867, 0.01977836799621582, 0.019373760223388672, 0.01947270393371582, 0.019341312408447265, 0.019296255111694336, 0.01941689682006836, 0.019669952392578124, 0.019564544677734375, 0.01957446479797363, 0.019540288925170898, 0.019367935180664063, 0.019384479522705077, 0.019375072479248048, 0.01954844856262207, 0.019806528091430665, 0.019298591613769532, 0.01925734329223633, 0.01942643165588379, 0.019377023696899413, 0.019478527069091797, 0.019258655548095704, 0.01941289520263672, 0.019147584915161133, 0.019279712677001952, 0.01940870475769043, 0.019667295455932616, 0.019505151748657225, 0.019478303909301758, 0.01962825584411621, 0.01924710464477539, 0.019367935180664063, 0.01927743911743164, 0.019482751846313477, 0.019360000610351563, 0.019191648483276365, 0.019298463821411132, 0.019316064834594728, 0.019380640029907227, 0.019417152404785157, 0.019394752502441406, 0.0194150390625, 0.019205888748168944, 0.019542335510253906, 0.019650432586669923, 0.019202272415161134, 0.019354656219482423, 0.019446592330932617, 0.019514751434326173, 0.01933171272277832, 0.01927577590942383, 0.019523584365844726, 0.019528959274291994, 0.01950796890258789, 0.019381248474121093, 0.019426464080810547, 0.0196496639251709, 0.019505088806152343, 
0.019475231170654295, 0.01972364807128906, 0.01943404769897461, 0.019238143920898437, 0.01960838317871094, 0.019400703430175782, 0.019359039306640624, 0.019460927963256835, 0.01940671920776367, 0.019599359512329103, 0.019357696533203125, 0.01945142364501953, 0.019742687225341796, 0.019685375213623048, 0.019388416290283202, 0.01918297576904297, 0.019455936431884764, 0.0193371524810791, 0.0193625602722168, 0.019596479415893556, 0.019364671707153322, 0.019394208908081054, 0.019403104782104493, 0.01941913604736328, 0.01925529670715332, 0.019663871765136717, 0.019370304107666016, 0.019415456771850585, 0.01925542449951172, 0.01926918411254883, 0.019298912048339844, 0.019514848709106445, 0.019326847076416017, 0.01933737564086914, 0.019478496551513673, 0.01932956886291504, 0.019357696533203125, 0.019453983306884765, 0.01937942314147949, 0.019431488037109375, 0.019296064376831054, 0.019386943817138673, 0.01966854476928711, 0.019519392013549804, 0.019630943298339844, 0.01920204734802246, 0.01934694480895996, 0.01934332847595215, 0.019403295516967775, 0.019992256164550783, 0.019375551223754884, 0.01939708709716797, 0.01930633544921875, 0.019335744857788086, 0.019636224746704102, 0.019367935180664063, 0.019239967346191406, 0.01925766372680664, 0.01950172805786133, 0.019428447723388673, 0.019295488357543945, 0.019393632888793946, 0.019322559356689452, 0.0192992000579834, 0.019369855880737304, 0.019500160217285158, 0.01958710479736328, 0.01965769577026367, 0.019301984786987306, 0.01940220832824707, 0.019485631942749024, 0.01937129592895508, 0.019420896530151367, 0.019324960708618163, 0.019506143569946288]",tokens/s,51.23477635893399,,, -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -34714,7 +34714,7 @@ ChildProcessError: Traceback (most recent call 
last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -34924,7 +34924,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -35021,7 +35021,7 @@ ChildProcessError: Traceback (most recent call last): AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " -4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): +4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch