diff --git "a/perf-df-unquantized-1xA10.csv" "b/perf-df-unquantized-1xA10.csv" --- "a/perf-df-unquantized-1xA10.csv" +++ "b/perf-df-unquantized-1xA10.csv" @@ -1,5 +1,5 @@ config.name,config.backend.name,config.backend.version,config.backend._target_,config.backend.task,config.backend.library,config.backend.model,config.backend.processor,config.backend.device,config.backend.device_ids,config.backend.seed,config.backend.inter_op_num_threads,config.backend.intra_op_num_threads,config.backend.model_kwargs.trust_remote_code,config.backend.processor_kwargs.trust_remote_code,config.backend.hub_kwargs.trust_remote_code,config.backend.no_weights,config.backend.device_map,config.backend.torch_dtype,config.backend.eval_mode,config.backend.to_bettertransformer,config.backend.low_cpu_mem_usage,config.backend.attn_implementation,config.backend.cache_implementation,config.backend.autocast_enabled,config.backend.autocast_dtype,config.backend.torch_compile,config.backend.torch_compile_target,config.backend.quantization_scheme,config.backend.deepspeed_inference,config.backend.peft_type,config.scenario.name,config.scenario._target_,config.scenario.iterations,config.scenario.duration,config.scenario.warmup_runs,config.scenario.input_shapes.batch_size,config.scenario.input_shapes.num_choices,config.scenario.input_shapes.sequence_length,config.scenario.new_tokens,config.scenario.latency,config.scenario.memory,config.scenario.energy,config.scenario.generate_kwargs.max_new_tokens,config.scenario.generate_kwargs.min_new_tokens,config.launcher.name,config.launcher._target_,config.launcher.device_isolation,config.launcher.device_isolation_action,config.launcher.numactl,config.launcher.start_method,config.environment.cpu,config.environment.cpu_count,config.environment.cpu_ram_mb,config.environment.system,config.environment.machine,config.environment.platform,config.environment.processor,config.environment.python_version,config.environment.gpu,config.environment.gpu_count,config.environment.gpu_vram_mb,config.environment.optimum_benchmark_version,config.environment.optimum_benchmark_commit,config.environment.transformers_version,config.environment.transformers_commit,config.environment.accelerate_version,config.environment.accelerate_commit,config.environment.diffusers_version,config.environment.diffusers_commit,config.environment.optimum_version,config.environment.optimum_commit,config.environment.timm_version,config.environment.timm_commit,config.environment.peft_version,config.environment.peft_commit,report.traceback,report.prefill.memory.unit,report.prefill.memory.max_ram,report.prefill.memory.max_global_vram,report.prefill.memory.max_process_vram,report.prefill.memory.max_reserved,report.prefill.memory.max_allocated,report.prefill.latency.unit,report.prefill.latency.count,report.prefill.latency.total,report.prefill.latency.mean,report.prefill.latency.stdev,report.prefill.latency.p50,report.prefill.latency.p90,report.prefill.latency.p95,report.prefill.latency.p99,report.prefill.latency.values,report.prefill.throughput.unit,report.prefill.throughput.value,report.prefill.energy.unit,report.prefill.energy.cpu,report.prefill.energy.ram,report.prefill.energy.gpu,report.prefill.energy.total,report.prefill.efficiency.unit,report.prefill.efficiency.value,report.decode.memory.unit,report.decode.memory.max_ram,report.decode.memory.max_global_vram,report.decode.memory.max_process_vram,report.decode.memory.max_reserved,report.decode.memory.max_allocated,report.decode.latency.unit,report.decode.latency.count,report.decode.laten
cy.total,report.decode.latency.mean,report.decode.latency.stdev,report.decode.latency.p50,report.decode.latency.p90,report.decode.latency.p95,report.decode.latency.p99,report.decode.latency.values,report.decode.throughput.unit,report.decode.throughput.value,report.decode.energy.unit,report.decode.energy.cpu,report.decode.energy.ram,report.decode.energy.gpu,report.decode.energy.total,report.decode.efficiency.unit,report.decode.efficiency.value,report.per_token.memory,report.per_token.latency.unit,report.per_token.latency.count,report.per_token.latency.total,report.per_token.latency.mean,report.per_token.latency.stdev,report.per_token.latency.p50,report.per_token.latency.p90,report.per_token.latency.p95,report.per_token.latency.p99,report.per_token.latency.values,report.per_token.throughput.unit,report.per_token.throughput.value,report.per_token.energy,report.per_token.efficiency,config.backend.hub_kwargs.revision,config.backend.hub_kwargs.force_download,config.backend.hub_kwargs.local_files_only -float16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,.,.,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,.,.,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -67,7 +67,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e96ba-76a6def964b02c866046346b;82629335-4ea9-442f-80ea-8118f6d3d110) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe877-47b9639e2eb2f6c156cbe418;919d2b32-d428-4c7f-b45d-ca14e5716cfa) Repository Not Found for url: https://huggingface.co/8/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -99,7 +99,7 @@ If this is a private repository, make sure to pass a token having permission to ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.0,,,MB,1283.33824,3848.798208,0.0,3202.351104,3105.82784,s,10,0.2737464962005615,0.027374649620056157,0.0006798933589168986,0.027108223915100097,0.02760761890411377,0.028481137561798094,0.029179952487945558,"[0.02935465621948242, 0.02739606475830078, 0.027413503646850586, 0.027041791915893554, 0.027088895797729492, 0.026967872619628908, 0.027058176040649414, 0.027127552032470702, 0.026950016021728515, 0.027347967147827147]",tokens/s,9351.717868653212,kWh,3.225466720883932e-07,1.767401729368767e-07,1.3880180646975452e-06,1.8873049097228154e-06,tokens/kWh,135643159.02595633,MB,1283.784704,3848.798208,0.0,3202.351104,3188.152832,s,10,12.537179443359374,1.2537179443359376,0.028535391056794507,1.2414683227539063,1.3036857299804687,1.3105398254394531,1.3160231018066406,"[1.3173939208984375, 1.30216259765625, 1.2359407958984374, 1.2357900390625, 1.2407454833984375, 1.242191162109375, 1.2385311279296876, 1.2330062255859375, 1.2484185791015625, 1.24299951171875]",tokens/s,50.250537040346416,kWh,1.468646836486543e-05,8.047551752407129e-06,3.957804248590371e-05,6.231206260317627e-05,tokens/kWh,1011040.1962009304,,s,629,12.760228870391852,0.020286532385360646,0.0033330012219880433,0.01966796875,0.02081628189086914,0.020942233657836914,0.04715091812133789,"[0.020752384185791017, 0.02205388832092285, 0.021751808166503905, 0.021104639053344726, 0.02088140869140625, 0.020815872192382814, 0.021105663299560547, 0.02084147262573242, 0.020876287460327148, 0.020999168395996092, 0.020888576507568358, 0.02066022491455078, 0.0215285758972168, 0.02082918357849121, 0.020912128448486327, 0.020779008865356444, 0.02101759910583496, 0.02084556770324707, 0.020922367095947265, 0.020817920684814452, 0.020831232070922853, 0.020884479522705078, 0.020876287460327148, 0.02083635139465332, 0.020773887634277344, 0.02068992042541504, 0.02126335906982422, 0.02141900825500488, 0.021533695220947266, 0.020939775466918945, 0.021082111358642578, 0.02090598487854004, 0.020802560806274413, 0.020731903076171874, 0.020817983627319337, 0.0207523193359375, 0.020578304290771485, 0.02048409652709961, 0.020519935607910156, 0.02046463966369629, 0.020503583908081054, 0.020443103790283204, 0.020504575729370117, 0.020761600494384767, 0.021131263732910157, 0.021990400314331054, 0.02130636787414551, 0.020955135345458984, 0.02090291213989258, 0.020781055450439453, 0.020626432418823244, 0.020625408172607423, 0.02084147262573242, 0.02083020782470703, 0.02079539108276367, 0.020908031463623047, 0.020839424133300782, 0.020782079696655274, 0.020839424133300782, 0.020747264862060546, 0.020585472106933594, 0.020488191604614257, 0.04849971389770508, 0.02047488021850586, 0.020789247512817383, 0.020855808258056642, 0.02059775924682617, 0.020713504791259767, 
0.020808671951293944, 0.020616191864013672, 0.020479999542236327, 0.02043801689147949, 0.02045952033996582, 0.020447231292724608, 0.02040729522705078, 0.020478975296020507, 0.02042470359802246, 0.020477951049804686, 0.020493375778198243, 0.020526016235351562, 0.020404224395751954, 0.020701183319091796, 0.020930559158325195, 0.020867071151733398, 0.020847616195678712, 0.02082713508605957, 0.020824064254760744, 0.02077903938293457, 0.020808671951293944, 0.02087120056152344, 0.02074415969848633, 0.020904960632324218, 0.020616191864013672, 0.020970495223999023, 0.021130239486694336, 0.022185983657836913, 0.021149696350097655, 0.020943872451782225, 0.020711423873901368, 0.020899839401245117, 0.020682752609252928, 0.020558847427368163, 0.02069196891784668, 0.020488191604614257, 0.02049228858947754, 0.02042572784423828, 0.020428800582885744, 0.020504575729370117, 0.020473920822143554, 0.020490175247192384, 0.02045337677001953, 0.020406272888183592, 0.020462591171264647, 0.020576255798339844, 0.021213184356689452, 0.020418560028076172, 0.0204769287109375, 0.020534271240234374, 0.02045747184753418, 0.020449312210083007, 0.02049020767211914, 0.020470783233642577, 0.02051584053039551, 0.020504575729370117, 0.020794368743896483, 0.04849868774414062, 0.020488256454467772, 0.020458463668823243, 0.020754400253295897, 0.019559423446655275, 0.01964236831665039, 0.019742719650268553, 0.019551231384277345, 0.01947750473022461, 0.019491840362548828, 0.01946931266784668, 0.01944063949584961, 0.01965465545654297, 0.019701759338378907, 0.01965465545654297, 0.019706880569458008, 0.019604480743408204, 0.019567615509033204, 0.019451904296875, 0.019538944244384765, 0.01968639945983887, 0.01966796875, 0.019517440795898438, 0.01960860824584961, 0.019422176361083985, 0.019505151748657225, 0.01945497512817383, 0.019438592910766602, 0.019536895751953123, 0.019517440795898438, 0.01945907211303711, 0.019554304122924804, 0.019515392303466796, 0.019530752182006835, 0.019533824920654298, 0.019494911193847657, 0.01944883155822754, 0.01949286460876465, 0.019449855804443358, 0.01965363121032715, 0.019553279876708983, 0.019501056671142578, 0.01943654441833496, 0.01953286361694336, 0.019500991821289063, 0.019475456237792968, 0.019497983932495116, 0.019574783325195313, 0.019405824661254883, 0.019474431991577147, 0.01945395278930664, 0.019558399200439454, 0.01965260887145996, 0.0196495361328125, 0.01962598419189453, 0.01983078384399414, 0.019604480743408204, 0.019545087814331053, 0.019599359512329103, 0.019589120864868165, 0.01946316719055176, 0.01952672004699707, 0.019445695877075196, 0.04718694305419922, 0.01945395278930664, 0.01942527961730957, 0.019422208786010742, 0.01946419143676758, 0.019504127502441407, 0.01946828842163086, 0.019526655197143555, 0.019430400848388672, 0.019450944900512697, 0.019450815200805664, 0.0194467830657959, 0.019529727935791014, 0.019574783325195313, 0.019586048126220702, 0.019567615509033204, 0.019538944244384765, 0.01963417625427246, 0.01960550308227539, 0.019566591262817384, 0.019533824920654298, 0.019602432250976562, 0.019544063568115236, 0.019714048385620117, 0.019720191955566405, 0.01967820739746094, 0.019733503341674806, 0.01963520050048828, 0.019768320083618163, 0.01968435287475586, 0.019483648300170898, 0.019528703689575197, 0.019909631729125975, 0.019811328887939454, 0.019688447952270507, 0.019710975646972655, 0.01967513656616211, 0.01964851188659668, 0.01945702362060547, 0.0196945915222168, 0.019695615768432616, 0.019787776947021486, 0.019969024658203126, 0.019981311798095702, 
0.019934207916259765, 0.019843072891235353, 0.019647487640380858, 0.019550207138061524, 0.019612672805786133, 0.01961676788330078, 0.019655679702758787, 0.019732479095458985, 0.01966592025756836, 0.019636224746704102, 0.0196628475189209, 0.019529727935791014, 0.01947750473022461, 0.019555328369140625, 0.01944063949584961, 0.01943654441833496, 0.019497983932495116, 0.019384319305419923, 0.019497983932495116, 0.04707123184204102, 0.019400703430175782, 0.019500032424926757, 0.019604480743408204, 0.019777536392211914, 0.01968230438232422, 0.02022604751586914, 0.0203284797668457, 0.02008265686035156, 0.019982336044311523, 0.02007756805419922, 0.020009983062744142, 0.020129791259765627, 0.02000588798522949, 0.020110368728637695, 0.02028848075866699, 0.02028646469116211, 0.020344831466674804, 0.02036332893371582, 0.020086719512939454, 0.019926015853881835, 0.019555328369140625, 0.019610624313354492, 0.01980521583557129, 0.01953481674194336, 0.0194334716796875, 0.019401727676391603, 0.01967001533508301, 0.019693567276000978, 0.0196997127532959, 0.019679231643676756, 0.019777536392211914, 0.01957683181762695, 0.01942630386352539, 0.01943449592590332, 0.019518463134765626, 0.019487743377685548, 0.019489791870117186, 0.019532800674438477, 0.019737600326538086, 0.019737600326538086, 0.01967513656616211, 0.01944371223449707, 0.0195020809173584, 0.01945599937438965, 0.019483648300170898, 0.019441728591918946, 0.01955731201171875, 0.01948569679260254, 0.01942937660217285, 0.019521535873413084, 0.019481599807739256, 0.01949388885498047, 0.019466239929199217, 0.019471359252929688, 0.019553279876708983, 0.019528703689575197, 0.01943552017211914, 0.01949286460876465, 0.01942630386352539, 0.019571712493896484, 0.019414016723632813, 0.019443775177001955, 0.04715206527709961, 0.019668991088867188, 0.01965056037902832, 0.019501056671142578, 0.019495935440063478, 0.019539968490600586, 0.019520511627197267, 0.019569664001464843, 0.019499008178710937, 0.019777536392211914, 0.019778560638427735, 0.01960960006713867, 0.019680255889892577, 0.01966182327270508, 0.01965670394897461, 0.019793920516967774, 0.019670047760009766, 0.01966998481750488, 0.019680255889892577, 0.01967001533508301, 0.019736576080322265, 0.01969152069091797, 0.01967411231994629, 0.019770368576049805, 0.019693567276000978, 0.019664896011352538, 0.019728384017944335, 0.019677183151245118, 0.019722240447998047, 0.019548160552978516, 0.01960140800476074, 0.019787776947021486, 0.019668991088867188, 0.01967616081237793, 0.01964646339416504, 0.019688447952270507, 0.020065280914306642, 0.019489791870117186, 0.019467264175415038, 0.019552255630493166, 0.01947648048400879, 0.019486719131469727, 0.019481599807739256, 0.019504127502441407, 0.019474431991577147, 0.019408895492553712, 0.0195020809173584, 0.01959321594238281, 0.01967417526245117, 0.019500991821289063, 0.019548160552978516, 0.019575807571411134, 0.01943449592590332, 0.019531776428222656, 0.019563520431518554, 0.019560447692871095, 0.01966182327270508, 0.020564992904663085, 0.02141900825500488, 0.02045644760131836, 0.020355104446411133, 0.020384735107421875, 0.02023219108581543, 0.04725350570678711, 0.019685375213623048, 0.01947340774536133, 0.019580928802490235, 0.01969152069091797, 0.019482624053955077, 0.019521600723266603, 0.019620800018310548, 0.01948057556152344, 0.01944780731201172, 0.019596288681030274, 0.019889184951782227, 0.01975600051879883, 0.019781631469726564, 0.019741695404052736, 0.019673088073730468, 0.019537984848022463, 0.019412927627563477, 0.01944063949584961, 
0.02012057685852051, 0.020361215591430663, 0.020368383407592772, 0.020388864517211915, 0.02042572784423828, 0.020107263565063475, 0.01997209548950195, 0.019771392822265626, 0.01947648048400879, 0.01948467254638672, 0.0195850887298584, 0.019675071716308595, 0.019555328369140625, 0.01947340774536133, 0.01947648048400879, 0.019509248733520508, 0.01961884880065918, 0.019649503707885742, 0.019587072372436523, 0.01963315200805664, 0.019745792388916016, 0.019568639755249022, 0.019499008178710937, 0.01944371223449707, 0.01941913604736328, 0.019390464782714844, 0.019449855804443358, 0.019570688247680663, 0.019672063827514647, 0.01944576072692871, 0.019438592910766602, 0.019435552597045897, 0.019430368423461915, 0.01943961524963379, 0.01945497512817383, 0.01942630386352539, 0.0195020809173584, 0.0194969596862793, 0.01963212776184082, 0.019677248001098633, 0.019666879653930665, 0.01963417625427246, 0.01959321594238281, 0.020166656494140626, 0.04744192123413086, 0.019589120864868165, 0.019693567276000978, 0.01966796875, 0.019631103515625, 0.019614784240722657, 0.019687360763549804, 0.019723264694213868, 0.0196495361328125, 0.019731456756591798, 0.019483648300170898, 0.01944063949584961, 0.01944780731201172, 0.01945292854309082, 0.0194652156829834, 0.01949286460876465, 0.01941094398498535, 0.019555328369140625, 0.01966592025756836, 0.019629056930541993, 0.019606527328491212, 0.019615743637084963, 0.019681312561035155, 0.019786720275878907, 0.019518463134765626, 0.01947238349914551, 0.019386367797851564, 0.01943756866455078, 0.01957891273498535, 0.01964134407043457, 0.01968636894226074, 0.019731456756591798, 0.01960857582092285, 0.01946316719055176, 0.019636224746704102, 0.01960550308227539, 0.01967616081237793, 0.01945702362060547, 0.01946009635925293, 0.01965465545654297, 0.01965260887145996, 0.019599359512329103, 0.019449855804443358, 0.019458047866821288, 0.019520511627197267, 0.0194150390625, 0.019491840362548828, 0.019504127502441407, 0.019414016723632813, 0.01945599937438965, 0.0194334716796875, 0.0194703369140625, 0.019489791870117186, 0.019528703689575197, 0.01965056037902832, 0.019746816635131836, 0.01962393569946289, 0.01965772819519043, 0.01966694450378418, 0.01945702362060547, 0.01946419143676758, 0.019512319564819337, 0.019456064224243164, 0.04714796829223633, 0.01969049644470215, 0.019733503341674806, 0.019688447952270507, 0.019715072631835938, 0.01964543914794922, 0.01943961524963379, 0.019769344329833984, 0.01945292854309082, 0.01942835235595703, 0.019588096618652344, 0.01963417625427246, 0.019449855804443358, 0.01949286460876465, 0.01943961524963379, 0.019531776428222656, 0.019449855804443358, 0.01943961524963379, 0.019733503341674806, 0.019647487640380858, 0.01960857582092285, 0.019594240188598632, 0.01965056037902832, 0.019706880569458008, 0.019439647674560547, 0.01968943977355957, 0.019710975646972655, 0.01967103958129883, 0.01966182327270508, 0.01960960006713867, 0.019671104431152345, 0.019703744888305664, 0.01960550308227539, 0.01963212776184082, 0.01961881637573242, 0.019914751052856446, 0.021032960891723632, 0.020563968658447264, 0.020339712142944336, 0.02016972732543945, 0.02042367935180664, 0.020288511276245116, 0.0202106876373291, 0.02022707176208496, 0.019929088592529298, 0.019595264434814453, 0.019697664260864257, 0.01965670394897461, 0.01963827133178711, 0.01968639945983887, 0.01943552017211914, 0.019685375213623048, 0.0204769287109375, 0.020320255279541014, 0.02024448013305664, 0.02036844825744629, 0.02019424057006836, 0.019942399978637695, 0.01964543914794922, 
0.01964646339416504, 0.02026188850402832, 0.01982464027404785, 0.02025881576538086, 0.04818329620361328, 0.02022809600830078, 0.02007347106933594, 0.01964646339416504, 0.01970790481567383, 0.01967103958129883, 0.01965875244140625, 0.019698688507080078, 0.019573759078979493, 0.01948569679260254, 0.019404800415039062, 0.01965158462524414, 0.019677183151245118, 0.01963417625427246, 0.01966694450378418, 0.019719167709350584, 0.019574783325195313, 0.01946112060546875, 0.019560447692871095, 0.019681280136108398, 0.019495935440063478, 0.01942732810974121, 0.019368959426879884, 0.019764223098754884, 0.020344831466674804, 0.020753408432006838, 0.01984511947631836, 0.019703807830810546, 0.019574783325195313, 0.01965465545654297, 0.019673088073730468, 0.019714048385620117, 0.019685375213623048, 0.01965772819519043, 0.019679231643676756, 0.019723264694213868, 0.01965363121032715, 0.01966694450378418, 0.01968435287475586, 0.01979903984069824, 0.019654687881469728, 0.01968841552734375, 0.01963417625427246, 0.019604480743408204, 0.019697664260864257, 0.019701759338378907, 0.01964031982421875, 0.019741695404052736, 0.019713056564331054, 0.01970479965209961, 0.01967001533508301, 0.019679231643676756, 0.01962700843811035, 0.01967820739746094, 0.01963212776184082, 0.019720191955566405, 0.019705856323242187, 0.01970790481567383, 0.01962291145324707, 0.01964851188659668, 0.01964646339416504, 0.020050943374633787, 0.020362239837646484]",tokens/s,49.293786685871915,,,,, -float16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,2,2,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,2,2,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -138,7 +138,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d45c8-1648525d2c3b0dd35d9fc7c5;730fabc0-0679-47bb-9c35-eaf04764acb5) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-664fe91f-0769b902350afac4090c8f7f;2cab6199-0030-4e24-8dc0-2d5c4df1fe6b) Repository Not Found for url: https://huggingface.co/2/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -317,7 +317,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664e9027-6df0fc2a224ac4275fad694f;96e8a20d-3013-410b-9f91-6872defdd038) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664fe1c6-154f03703c325abd3a91ae85;917eaffc-0cb3-4088-b6a3-a860df3b2062) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json. @@ -442,7 +442,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e96f0-1a7f16773101f88659db330e;d441ad1b-2299-4c00-9314-e667756e7b67) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe8b1-3d68f5bd19bc28c35eac33ae;7d84975c-3648-4f65-bea4-156d9499408d) Repository Not Found for url: https://huggingface.co/x/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -502,7 +502,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664e8ff2-28efe957069001c67f3f24be;acabbcd4-5a5f-49a4-98e6-37c1ffc0bd19) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664fe18e-7addcf131e6091b81366747b;fa62f64a-f096-46a2-9ed1-80ea6cb0384c) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json. @@ -954,7 +954,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664e8de4-3189e2b75dfc844b3af23fe8;ab7bdd01-b02b-4bb9-b4a9-35acc58cfdd9) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664fdf78-4431d3bf442e23d067b980a5;b62d4d38-1ff9-4a41-9953-78631838f8eb) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json. @@ -1039,7 +1039,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e964f-4020ed41490f8fee6d8afdb6;94c26db3-b196-443f-aa1d-6526832a68ce) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-664fe808-47942b9a4e23cdae05a304d7;40f56a1e-2e94-4b76-b8bc-3bf6631d2fa7) Repository Not Found for url: https://huggingface.co/l/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -1071,7 +1071,7 @@ If this is a private repository, make sure to pass a token having permission to ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,MB,1256.435712,14529.593344,0.0,13883.14624,13705.186304,s,10,0.9733994750976562,0.09733994750976563,0.00037133338389398903,0.0972454376220703,0.09747267303466797,0.09793678359985351,0.09830807205200195,"[0.09840089416503907, 0.0972484130859375, 0.09692841339111329, 0.09732080078125, 0.09719913482666015, 0.09736953735351563, 0.09724246215820312, 0.09730467224121093, 0.09718070220947266, 0.09720444488525391]",tokens/s,2629.9582704656464,kWh,1.1503372104293963e-06,6.30330825863461e-07,6.299953799398075e-06,8.080621835690931e-06,tokens/kWh,31680730.17218616,MB,1256.435712,14529.593344,0.0,13883.14624,13856.22528,s,10,22.825175292968748,2.282517529296875,0.0019184782227131703,2.2821934814453124,2.2837198486328125,2.2856649780273437,2.2872210815429685,"[2.281288818359375, 2.2814697265625, 2.279878662109375, 2.28247021484375, 2.28216357421875, 2.281911376953125, 2.282871826171875, 2.287610107421875, 2.28328759765625, 2.282223388671875]",tokens/s,27.60110237550159,kWh,2.6882328671416297e-05,1.4732435174316816e-05,0.00011731889509560169,0.00015893365894133484,tokens/kWh,396391.80535857665,,s,629,23.663276023864768,0.037620470626176065,0.011627260591752632,0.03621376037597656,0.03636470413208008,0.03641712570190429,0.13402455993652343,"[0.03610009765625, 0.03621376037597656, 0.03596492767333984, 0.03605401611328125, 0.036021247863769534, 0.036089855194091795, 0.03613183975219727, 0.03608063888549805, 0.03601615905761719, 0.03608163070678711, 0.03603968048095703, 0.03607961654663086, 0.03606425476074219, 0.03615334320068359, 0.03612979125976563, 0.03618611145019531, 0.036057086944580076, 0.03612057495117187, 0.03602739334106445, 0.0361267204284668, 0.03606937789916992, 0.03617484664916992, 0.03607551956176758, 0.036154369354248046, 0.03612057495117187, 0.03623321533203125, 0.03616767883300781, 0.03624652862548828, 0.03609804916381836, 0.03628441619873047, 0.03612979125976563, 0.036162559509277346, 0.036171775817871094, 0.036201473236083984, 0.036116481781005856, 0.03630694580078125, 0.03614720153808594, 0.03620761489868164, 0.036155391693115234, 0.036222976684570314, 0.03618406295776367, 0.03628339385986328, 0.036155391693115234, 0.03626803207397461, 0.03616665649414062, 0.03627315139770508, 0.03618201446533203, 0.036291584014892575, 0.03629363250732422, 0.03651686477661133, 0.03641958236694336, 0.036547584533691405, 0.03626399993896484, 0.03632940673828125, 0.03628646469116211, 0.036395008087158204, 0.03629264068603515, 0.036413406372070314, 0.03625574493408203, 
0.03643801498413086, 0.036280319213867186, 0.03637145614624023, 0.1340590057373047, 0.03606220626831055, 0.03607551956176758, 0.03600384140014649, 0.03609190368652344, 0.0360816650390625, 0.03611347198486328, 0.036071361541748045, 0.03611340713500977, 0.036067329406738284, 0.03613695907592773, 0.036037631988525394, 0.0361420783996582, 0.036057086944580076, 0.036157440185546875, 0.036055038452148434, 0.036178943634033206, 0.03611443328857422, 0.036144126892089845, 0.036057086944580076, 0.03619123077392578, 0.03609702301025391, 0.03616159820556641, 0.03610822296142578, 0.036176895141601564, 0.03614617538452149, 0.03623526382446289, 0.03608883285522461, 0.0362239990234375, 0.03610726547241211, 0.03624857711791992, 0.036203521728515625, 0.036296703338623046, 0.03619430541992188, 0.036206592559814454, 0.036173824310302735, 0.036275199890136715, 0.03620249557495117, 0.03624755096435547, 0.03620044708251953, 0.03625164794921875, 0.03616563034057617, 0.03628441619873047, 0.03625881576538086, 0.03632332611083984, 0.036413440704345705, 0.03636636734008789, 0.03630179214477539, 0.03638579177856445, 0.03624652862548828, 0.03631923294067383, 0.036245502471923825, 0.036316158294677735, 0.03623116683959961, 0.0362977294921875, 0.03623321533203125, 0.0363765754699707, 0.03633561706542969, 0.036410369873046876, 0.03628441619873047, 0.036413440704345705, 0.03626291275024414, 0.0364031982421875, 0.13376205444335937, 0.036067329406738284, 0.03623219299316406, 0.0361451530456543, 0.03606835174560547, 0.036050945281982424, 0.03609088134765625, 0.036037631988525394, 0.03605299377441406, 0.03598745727539063, 0.03612364959716797, 0.035983360290527344, 0.036071422576904294, 0.03602431869506836, 0.036103168487548826, 0.03605913543701172, 0.036122623443603515, 0.03608268737792969, 0.036168704986572264, 0.03604582214355469, 0.03616460800170898, 0.03611852645874023, 0.03615334320068359, 0.03614003372192383, 0.036157440185546875, 0.03611033630371094, 0.0362239990234375, 0.03611443328857422, 0.03623833465576172, 0.03625881576538086, 0.036431873321533206, 0.03622809600830078, 0.03619123077392578, 0.03614822387695313, 0.036257793426513675, 0.03617996978759765, 0.03623116683959961, 0.03611545562744141, 0.03616563034057617, 0.03611033630371094, 0.03625881576538086, 0.03624038314819336, 0.03630080032348633, 0.03615129470825195, 0.03625164794921875, 0.03625164794921875, 0.036261886596679685, 0.03621376037597656, 0.03626700973510742, 0.036122623443603515, 0.03628134536743164, 0.03618815994262695, 0.03629260635375976, 0.03619839859008789, 0.03628339385986328, 0.036211711883544925, 0.0363059196472168, 0.036190208435058595, 0.036307968139648435, 0.036209663391113284, 0.03632844924926758, 0.03625369644165039, 0.036446208953857424, 0.1341265869140625, 0.036038654327392575, 0.036087806701660154, 0.036018177032470705, 0.03612160110473633, 0.036029441833496094, 0.0361082878112793, 0.03606118392944336, 0.03617484664916992, 0.03613183975219727, 0.03647078323364258, 0.03630387115478516, 0.03627212905883789, 0.036131904602050784, 0.03622393417358399, 0.03615027236938476, 0.03619839859008789, 0.036152320861816405, 0.03620556640625, 0.036111358642578126, 0.036173824310302735, 0.03606937789916992, 0.03614310455322266, 0.03614003372192383, 0.03625369644165039, 0.03621478271484375, 0.03623731231689453, 0.03613491058349609, 0.03629574584960937, 0.03612768173217774, 0.03625164794921875, 0.036171775817871094, 0.036302879333496095, 0.036175838470458986, 0.036170753479003906, 0.03616563034057617, 0.03629260635375976, 0.03613183975219727, 
0.03625267028808594, 0.03623014450073242, 0.036310016632080076, 0.036168704986572264, 0.0362762222290039, 0.03616665649414062, 0.03648614501953125, 0.0362158088684082, 0.03628953552246094, 0.03620249557495117, 0.03630182266235352, 0.036241409301757815, 0.036318206787109376, 0.0363059196472168, 0.03643699264526367, 0.03621683120727539, 0.03628851318359375, 0.03629264068603515, 0.03633660888671875, 0.03628851318359375, 0.036347904205322266, 0.036206592559814454, 0.036362239837646484, 0.036278270721435545, 0.03643801498413086, 0.13411737060546874, 0.036025375366210935, 0.03606934356689453, 0.03601510238647461, 0.036155391693115234, 0.03602227020263672, 0.03608473587036133, 0.03605811309814453, 0.036162559509277346, 0.036078590393066406, 0.03609907150268555, 0.036050945281982424, 0.0361451530456543, 0.03615027236938476, 0.036203521728515625, 0.03611340713500977, 0.03621478271484375, 0.03617279815673828, 0.03617792129516602, 0.03618099212646484, 0.03620556640625, 0.036071422576904294, 0.03611545562744141, 0.036073566436767575, 0.03621161651611328, 0.036132865905761716, 0.036160511016845705, 0.03608473587036133, 0.03627212905883789, 0.03618201446533203, 0.03627212905883789, 0.03616153717041016, 0.03630284881591797, 0.03618304061889648, 0.0362608642578125, 0.03609907150268555, 0.03623628616333008, 0.03621376037597656, 0.03625164794921875, 0.03615027236938476, 0.036241409301757815, 0.03626291275024414, 0.03623731231689453, 0.03641145706176758, 0.03646150588989258, 0.03624038314819336, 0.03630182266235352, 0.0362762222290039, 0.036354049682617184, 0.03625676727294922, 0.03633356857299805, 0.036219905853271485, 0.03638579177856445, 0.03624959945678711, 0.03640524673461914, 0.036354049682617184, 0.036364288330078126, 0.03641958236694336, 0.03639091110229492, 0.03627315139770508, 0.03632844924926758, 0.03624345779418945, 0.03643801498413086, 0.13464781188964844, 0.03604787063598633, 0.03614617538452149, 0.03604787063598633, 0.03610419082641601, 0.03603968048095703, 0.03611340713500977, 0.03607654571533203, 0.0361451530456543, 0.03605923080444336, 0.03619318389892578, 0.03607756805419922, 0.03615334320068359, 0.03606016159057617, 0.03612876892089844, 0.0360816650390625, 0.0361451530456543, 0.03615846252441406, 0.03624857711791992, 0.036055038452148434, 0.03617996978759765, 0.03612364959716797, 0.03618918228149414, 0.03616563034057617, 0.03630284881591797, 0.03625267028808594, 0.03627724838256836, 0.03623321533203125, 0.03632128143310547, 0.03615948867797852, 0.036241409301757815, 0.036178943634033206, 0.03621683120727539, 0.036160511016845705, 0.03622611236572266, 0.03613894271850586, 0.03625888061523438, 0.03615430450439453, 0.03624755096435547, 0.03618406295776367, 0.03623526382446289, 0.036149246215820316, 0.03628339385986328, 0.036176895141601564, 0.03626598358154297, 0.03625881576538086, 0.036310016632080076, 0.03621376037597656, 0.03632537460327148, 0.036242431640625, 0.036348960876464845, 0.03623011016845703, 0.036350975036621096, 0.03623219299316406, 0.0363059196472168, 0.03623526382446289, 0.03631411361694336, 0.03629056167602539, 0.036404254913330075, 0.036235233306884766, 0.036340736389160154, 0.03622809600830078, 0.03642060852050781, 0.13402828979492187, 0.036004863739013675, 0.03602534484863281, 0.03595775985717774, 0.036044830322265624, 0.03599459075927734, 0.03605196762084961, 0.0359741439819336, 0.03605913543701172, 0.035972095489501955, 0.036078590393066406, 0.03600588989257812, 0.03608883285522461, 0.03602534484863281, 0.036057086944580076, 0.03617996978759765, 0.03640115356445312, 
0.03615846252441406, 0.036187137603759766, 0.03611443328857422, 0.03639807891845703, 0.03616972732543945, 0.03623731231689453, 0.036193279266357424, 0.03622707366943359, 0.03621785736083984, 0.03622195053100586, 0.036073471069335936, 0.036192256927490236, 0.03611033630371094, 0.03620249557495117, 0.036111358642578126, 0.036208641052246096, 0.03620454406738281, 0.03634995269775391, 0.036278270721435545, 0.036310016632080076, 0.036192256927490236, 0.03622198486328125, 0.036195297241210934, 0.03633356857299805, 0.03624448013305664, 0.036595775604248044, 0.036914112091064454, 0.036413440704345705, 0.0362690544128418, 0.03637247848510742, 0.03630182266235352, 0.03640524673461914, 0.03625369644165039, 0.03636326217651367, 0.03625574493408203, 0.036421630859375, 0.03629568099975586, 0.03634380722045898, 0.03623833465576172, 0.03632025527954102, 0.03628339385986328, 0.03640524673461914, 0.03631411361694336, 0.03642675018310547, 0.03628543853759766, 0.036347904205322266, 0.13416038513183592, 0.03612979125976563, 0.03619123077392578, 0.036117504119873044, 0.03619839859008789, 0.03617996978759765, 0.03621478271484375, 0.036063297271728516, 0.036230079650878905, 0.03615337753295898, 0.036203487396240235, 0.03609395217895508, 0.03620761489868164, 0.036122623443603515, 0.03622092819213867, 0.036116481781005856, 0.036291584014892575, 0.03614726257324219, 0.03615430450439453, 0.03609088134765625, 0.03620761489868164, 0.036089855194091795, 0.036222976684570314, 0.03629363250732422, 0.04064767837524414, 0.036400127410888675, 0.03630697631835938, 0.03615740966796875, 0.03634995269775391, 0.03615846252441406, 0.03626496124267578, 0.03614720153808594, 0.036241409301757815, 0.03618819046020508, 0.036238304138183595, 0.036154369354248046, 0.036278270721435545, 0.036187137603759766, 0.036299774169921875, 0.036185089111328124, 0.03623219299316406, 0.036187137603759766, 0.036359169006347655, 0.03624345779418945, 0.03638886260986328, 0.03628646469116211, 0.036348926544189454, 0.03632230377197266, 0.03640524673461914, 0.03624448013305664, 0.036291584014892575, 0.03616358566284179, 0.036299774169921875, 0.0362158088684082, 0.03630899047851562, 0.03622403335571289, 0.03629155349731445, 0.03626598358154297, 0.036340736389160154, 0.03625471878051758, 0.03638476943969727, 0.03624038314819336, 0.03637247848510742, 0.13407232666015625, 0.03602329635620117, 0.03604377746582031, 0.035996673583984375, 0.03604787063598633, 0.0360079345703125, 0.03604889678955078, 0.03602227020263672, 0.03611340713500977, 0.036122623443603515, 0.036511745452880856, 0.036154369354248046, 0.036222976684570314, 0.036162559509277346, 0.036193279266357424, 0.036173824310302735, 0.03620044708251953, 0.03625369644165039, 0.03621785736083984, 0.036144126892089845, 0.03626291275024414, 0.036187137603759766, 0.03623628616333008, 0.036160511016845705, 0.036242431640625, 0.03621478271484375, 0.036257793426513675, 0.036245502471923825, 0.03621785736083984, 0.03614617538452149, 0.03620044708251953, 0.0361267204284668, 0.03619942474365234, 0.03618304061889648, 0.03624959945678711, 0.036149246215820316, 0.036239360809326174, 0.03623833465576172, 0.03631718444824219, 0.03624345779418945, 0.03631718444824219, 0.0362874870300293, 0.036350975036621096, 0.036203521728515625, 0.03624038314819336, 0.03626598358154297, 0.0363059196472168, 0.036222976684570314, 0.036296703338623046, 0.0362608642578125, 0.03638784027099609, 0.03618201446533203, 0.036377601623535157, 0.036278270721435545, 0.03638886260986328, 0.0369172477722168, 0.036364288330078126, 0.03633152008056641, 
0.03633152008056641, 0.036229118347167966, 0.036354049682617184, 0.03620454406738281, 0.036345855712890625, 0.13401496887207032, 0.03605811309814453, 0.036078590393066406, 0.03600896072387695, 0.03609395217895508, 0.036029441833496094, 0.03610214233398437, 0.03604172897338867, 0.03609500885009766, 0.036033504486083986, 0.036109310150146484, 0.03609600067138672, 0.036327457427978514, 0.03635708618164062, 0.03621068954467774, 0.03616460800170898, 0.036241409301757815, 0.03617996978759765, 0.036222015380859375, 0.03611743927001953, 0.03622707366943359, 0.0361451530456543, 0.036241409301757815, 0.03617792129516602, 0.0362342414855957, 0.03619942474365234, 0.03640729522705078, 0.03611545562744141, 0.03621686553955078, 0.036116447448730465, 0.0362342414855957, 0.036168704986572264, 0.03624345779418945, 0.036157440185546875, 0.03620556640625, 0.03614310455322266, 0.03623321533203125, 0.03612057495117187, 0.03621068954467774, 0.0362874870300293, 0.03632025527954102, 0.036171775817871094, 0.036261886596679685, 0.036185089111328124, 0.036275199890136715, 0.03627212905883789, 0.036348926544189454, 0.03630489730834961, 0.03631206512451172, 0.036162559509277346, 0.036340736389160154, 0.03616972732543945, 0.036310016632080076, 0.0361973762512207, 0.03628646469116211, 0.03625267028808594, 0.036334590911865236, 0.036291584014892575, 0.03647180938720703, 0.036350975036621096, 0.036395008087158204, 0.03625062561035156, 0.03650764846801758]",tokens/s,26.581272997265664,,,,, -float16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -1185,7 +1185,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-664e953c-326734055993a17b202d417f;5825ef43-46db-4a8b-ac5f-ef7d2aa33377) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe6fa-0c414eba4a4c6cb92410de0f;b9b6f0e6-9461-49ad-bb8d-3ef768734775) Repository Not Found for url: https://huggingface.co/i/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -1283,7 +1283,7 @@ ImportError: This modeling file requires the following packages that were not fo ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.0,,,MB,1238.130688,3041.394688,0.0,2394.947584,2279.417856,s,10,0.26880492782592774,0.026880492782592774,0.0008987526138610299,0.026595680236816407,0.026978752708435055,0.028274463939666743,0.029311032924652102,"[0.02957017517089844, 0.02656800079345703, 0.026624223709106446, 0.026498048782348634, 0.02669081687927246, 0.02653455924987793, 0.026623359680175783, 0.026660736083984376, 0.026516223907470705, 0.026518783569335937]",tokens/s,9523.63493000322,kWh,3.126741712721287e-07,1.7133035382704246e-07,1.2450208372857195e-06,1.7290253623848905e-06,tokens/kWh,148060291.982584,MB,1238.519808,3041.394688,0.0,2394.947584,2279.420416,s,10,15.5807939453125,1.55807939453125,0.005216512186615283,1.5575559692382812,1.5656029785156251,1.5660965942382812,1.5664914868164062,"[1.5609205322265625, 1.5654932861328126, 1.561662353515625, 1.5665902099609375, 1.557181884765625, 1.5550050048828126, 1.5579300537109375, 1.5519298095703125, 1.55321826171875, 1.550862548828125]",tokens/s,40.43439648911705,kWh,1.8012090795692592e-05,9.867705139837371e-06,3.8868872364914635e-05,6.674866830044458e-05,tokens/kWh,943839.0548322053,,s,629,15.792589826583871,0.025107456004107888,0.003264401894865061,0.024826879501342772,0.025016524124145508,0.025216838455200195,0.05177774688720703,"[0.024412160873413087, 0.02388684844970703, 0.02385817527770996, 0.023840768814086914, 0.023856128692626953, 0.02386944007873535, 0.023968767166137696, 0.023797760009765623, 0.023783424377441405, 0.023767040252685546, 0.023969791412353517, 0.024361984252929687, 0.023824384689331055, 0.024758272171020508, 0.024879104614257814, 0.025027584075927735, 0.02490163230895996, 0.024863744735717775, 0.024832000732421877, 0.024099872589111327, 0.024890335083007812, 0.024851455688476562, 0.02529280090332031, 0.025255935668945313, 0.025250816345214845, 0.024894464492797853, 0.026001407623291017, 0.026261503219604493, 0.025214975357055663, 0.02508083152770996, 0.024953855514526366, 0.024921087265014647, 0.025012224197387696, 0.025011199951171875, 0.025042943954467774, 0.024936447143554686, 0.024970239639282226, 0.02492620849609375, 0.025629695892333985, 0.025084928512573244, 0.024816640853881834, 0.02491596794128418, 0.024893440246582032, 0.02483404731750488, 0.02492313575744629, 0.024061952590942383, 0.024167423248291017, 0.024824832916259764, 
0.024826879501342772, 0.02488319969177246, 0.025025535583496093, 0.02498150444030762, 0.025061376571655275, 0.02512179183959961, 0.02505523109436035, 0.024887296676635744, 0.024947711944580078, 0.02508185577392578, 0.024972288131713868, 0.024036352157592773, 0.024808448791503908, 0.023994367599487306, 0.052125694274902344, 0.02493440055847168, 0.024770559310913084, 0.024303615570068358, 0.024801279067993166, 0.02483404731750488, 0.02483404731750488, 0.024774688720703125, 0.024644575119018554, 0.02490572738647461, 0.024028160095214843, 0.024809471130371095, 0.024835071563720702, 0.024818687438964843, 0.024962047576904296, 0.0249487361907959, 0.02496614456176758, 0.02487603187561035, 0.02473574447631836, 0.024835071563720702, 0.024853504180908204, 0.025457664489746092, 0.024824832916259764, 0.028212223052978515, 0.025609216690063476, 0.02507980728149414, 0.02476851272583008, 0.024641536712646486, 0.024608768463134766, 0.024506368637084962, 0.02408857536315918, 0.024813568115234375, 0.024793088912963866, 0.024794111251831053, 0.024810495376586913, 0.02486579132080078, 0.024019968032836913, 0.024184831619262694, 0.02476851272583008, 0.02503987121582031, 0.024816640853881834, 0.024791072845458985, 0.024844255447387695, 0.023839744567871093, 0.025226240158081056, 0.025016319274902343, 0.02503987121582031, 0.024958976745605467, 0.024790016174316407, 0.024827903747558593, 0.024738815307617186, 0.02472652816772461, 0.02453606414794922, 0.02494259262084961, 0.02487500762939453, 0.024927232742309572, 0.024946687698364257, 0.024151039123535157, 0.02488832092285156, 0.024975360870361327, 0.02492416000366211, 0.024902656555175783, 0.02490982437133789, 0.05210521697998047, 0.024751136779785157, 0.024843231201171875, 0.024861696243286133, 0.024839168548583986, 0.02486783981323242, 0.02497433662414551, 0.02495692825317383, 0.024007680892944337, 0.024584224700927734, 0.024926176071166994, 0.024943616867065428, 0.024954912185668945, 0.024878047943115236, 0.024885248184204102, 0.024845312118530274, 0.024589311599731444, 0.024983552932739257, 0.025022464752197264, 0.02446233558654785, 0.024020992279052734, 0.024847360610961915, 0.02391142463684082, 0.024878080368041993, 0.024819711685180663, 0.02490163230895996, 0.024793088912963866, 0.024999935150146483, 0.024837120056152344, 0.024506368637084962, 0.025018367767333984, 0.024065023422241212, 0.02465279960632324, 0.024007680892944337, 0.02447257614135742, 0.02495078468322754, 0.024859647750854492, 0.02488422393798828, 0.024995840072631836, 0.025009151458740234, 0.02496512031555176, 0.024862720489501954, 0.024863744735717775, 0.02487500762939453, 0.024945663452148437, 0.025001983642578125, 0.02493440055847168, 0.023961599349975587, 0.02487603187561035, 0.025052160263061524, 0.02493337631225586, 0.024839168548583986, 0.025028608322143556, 0.024869888305664063, 0.024869888305664063, 0.025030656814575194, 0.02490777587890625, 0.024962047576904296, 0.02484940719604492, 0.02489241600036621, 0.024919040679931642, 0.024978431701660156, 0.024213504791259766, 0.05179296112060547, 0.02481657600402832, 0.024852479934692383, 0.02490675163269043, 0.024778783798217772, 0.024894432067871095, 0.02482896041870117, 0.025025503158569336, 0.024422399520874022, 0.024992767333984374, 0.024378368377685547, 0.024847360610961915, 0.02487500762939453, 0.024999935150146483, 0.024856576919555663, 0.024987648010253907, 0.025062400817871092, 0.02488217544555664, 0.025001983642578125, 0.025218080520629883, 0.025987039566040038, 0.025017343521118163, 0.024895488739013674, 0.024832000732421877, 
0.02492313575744629, 0.02488422393798828, 0.024773632049560547, 0.024792095184326172, 0.024828895568847657, 0.02488422393798828, 0.02498252868652344, 0.02491494369506836, 0.024781824111938477, 0.023945215225219727, 0.024795135498046874, 0.024893440246582032, 0.024928255081176756, 0.024847360610961915, 0.02492416000366211, 0.024999935150146483, 0.024879104614257814, 0.024944639205932616, 0.024848384857177733, 0.024847360610961915, 0.02493337631225586, 0.024828927993774414, 0.024806400299072266, 0.024819711685180663, 0.024583168029785156, 0.024525823593139647, 0.02484121513366699, 0.024802303314208983, 0.024800256729125978, 0.02473574447631836, 0.02492006492614746, 0.02490777587890625, 0.024904703140258787, 0.02487295913696289, 0.024845312118530274, 0.024771583557128905, 0.02484121513366699, 0.024809471130371095, 0.024800256729125978, 0.05202742385864258, 0.02481865692138672, 0.024795135498046874, 0.024838144302368165, 0.024809471130371095, 0.02492006492614746, 0.02475116729736328, 0.024717248916625977, 0.024571903228759767, 0.02395136070251465, 0.02489241600036621, 0.02494156837463379, 0.02490880012512207, 0.024998912811279295, 0.0249487361907959, 0.024951808929443358, 0.0249487361907959, 0.024871936798095705, 0.02486579132080078, 0.02527846336364746, 0.026267648696899414, 0.02528358459472656, 0.02490777587890625, 0.02505625534057617, 0.02488012886047363, 0.02488217544555664, 0.024838144302368165, 0.024763391494750975, 0.024772607803344726, 0.024623104095458984, 0.024543231964111328, 0.0248985595703125, 0.024559616088867187, 0.024816640853881834, 0.0248156795501709, 0.02498963165283203, 0.024824832916259764, 0.02491187286376953, 0.024800256729125978, 0.024913919448852538, 0.0248668155670166, 0.024851455688476562, 0.024754175186157225, 0.024798208236694336, 0.024777727127075197, 0.024792064666748048, 0.024770559310913084, 0.0247511043548584, 0.024801279067993166, 0.024817663192749022, 0.02470809555053711, 0.0244715518951416, 0.023734272003173826, 0.02390732765197754, 0.02379884719848633, 0.02382636833190918, 0.023888896942138672, 0.02392678451538086, 0.023868415832519533, 0.023840768814086914, 0.023931903839111326, 0.02504806327819824, 0.024816640853881834, 0.0512542724609375, 0.02474496078491211, 0.025034751892089844, 0.024895488739013674, 0.02386124801635742, 0.02387353515625, 0.024439807891845702, 0.024792064666748048, 0.024861696243286133, 0.024916032791137695, 0.024928192138671874, 0.02489139175415039, 0.024839168548583986, 0.024826879501342772, 0.024833023071289064, 0.024902656555175783, 0.02494156837463379, 0.024830976486206056, 0.024808448791503908, 0.024807424545288087, 0.024821760177612305, 0.024854528427124024, 0.024800256729125978, 0.024976383209228514, 0.024868864059448242, 0.024947711944580078, 0.02490777587890625, 0.024754175186157225, 0.024812543869018554, 0.024769535064697267, 0.02479315185546875, 0.024920000076293945, 0.02485862350463867, 0.024844287872314453, 0.024847360610961915, 0.024885248184204102, 0.025118719100952147, 0.02490777587890625, 0.02487603187561035, 0.024928255081176756, 0.024792064666748048, 0.02472550392150879, 0.02489036750793457, 0.024799232482910157, 0.024766464233398438, 0.024715263366699217, 0.024800256729125978, 0.024846336364746095, 0.02476748847961426, 0.02391347122192383, 0.024555519104003908, 0.024863744735717775, 0.024750080108642578, 0.02498252868652344, 0.02477670478820801, 0.02468454360961914, 0.023827455520629884, 0.02388582420349121, 0.023963647842407225, 0.023823360443115234, 0.02390630340576172, 0.023831552505493164, 
0.023932928085327147, 0.051848190307617184, 0.023780351638793946, 0.024373247146606446, 0.0246824951171875, 0.024754175186157225, 0.02472038459777832, 0.024748031616210937, 0.024707071304321288, 0.0247511043548584, 0.024810495376586913, 0.024046592712402344, 0.024801279067993166, 0.024808448791503908, 0.02473472023010254, 0.02487094306945801, 0.024796127319335937, 0.02490982437133789, 0.024740863800048828, 0.02468351936340332, 0.024800256729125978, 0.02488217544555664, 0.024839168548583986, 0.024791040420532227, 0.024854528427124024, 0.024754175186157225, 0.02472243118286133, 0.024854528427124024, 0.024763391494750975, 0.024791040420532227, 0.024845312118530274, 0.024952831268310546, 0.023967744827270508, 0.023805952072143553, 0.02393600082397461, 0.024805376052856445, 0.024887296676635744, 0.024847360610961915, 0.02448896026611328, 0.02412031936645508, 0.024836095809936523, 0.024919040679931642, 0.02490163230895996, 0.02476748847961426, 0.024456192016601562, 0.023806976318359374, 0.023984128952026368, 0.024790016174316407, 0.026219520568847656, 0.025365503311157226, 0.024988672256469727, 0.025042943954467774, 0.024845312118530274, 0.02472857666015625, 0.025008127212524413, 0.0249036808013916, 0.02486783981323242, 0.024947711944580078, 0.024778751373291014, 0.02512281608581543, 0.02507980728149414, 0.024796159744262695, 0.024844287872314453, 0.02428108787536621, 0.05197516632080078, 0.024771583557128905, 0.02471014404296875, 0.024820735931396484, 0.02472857666015625, 0.024707071304321288, 0.024650751113891603, 0.02489753532409668, 0.02468454360961914, 0.024788991928100586, 0.024731647491455077, 0.023900159835815428, 0.02373222351074219, 0.023639039993286134, 0.023879680633544922, 0.024001535415649415, 0.024008703231811524, 0.023796735763549806, 0.023875583648681642, 0.023764991760253908, 0.023948287963867186, 0.023787519454956055, 0.02351411247253418, 0.023626752853393555, 0.024016895294189454, 0.024758272171020508, 0.024790016174316407, 0.02473369598388672, 0.024796159744262695, 0.02475315284729004, 0.024811519622802734, 0.024815616607666017, 0.024822784423828126, 0.024810495376586913, 0.024972288131713868, 0.02484121513366699, 0.0249036808013916, 0.024414239883422853, 0.024735712051391603, 0.025019392013549805, 0.025440256118774415, 0.024836095809936523, 0.02488832092285156, 0.02490060806274414, 0.02587750434875488, 0.02549247932434082, 0.02492620849609375, 0.024797183990478516, 0.024819711685180663, 0.02492416000366211, 0.02484940719604492, 0.02488630485534668, 0.024916959762573243, 0.024824832916259764, 0.02490060806274414, 0.024769535064697267, 0.024740863800048828, 0.024771583557128905, 0.02485660743713379, 0.02479203224182129, 0.024893440246582032, 0.024833023071289064, 0.024773632049560547, 0.05281587219238281, 0.024863744735717775, 0.024772607803344726, 0.024766464233398438, 0.024763391494750975, 0.024827903747558593, 0.024795135498046874, 0.02488319969177246, 0.02472857666015625, 0.024851455688476562, 0.024689664840698244, 0.024748031616210937, 0.024979455947875977, 0.024801279067993166, 0.024864767074584963, 0.024928287506103517, 0.024886240005493165, 0.024927232742309572, 0.024937471389770507, 0.024754175186157225, 0.024666112899780275, 0.02472755241394043, 0.02412646484375, 0.02432204818725586, 0.024697856903076174, 0.02493235206604004, 0.02508902359008789, 0.02387046432495117, 0.023801855087280274, 0.023829504013061522, 0.024846336364746095, 0.024953855514526366, 0.02486070442199707, 0.02477667236328125, 0.024755199432373046, 0.024793088912963866, 0.024863744735717775, 
0.024861696243286133, 0.024802303314208983, 0.024521728515625, 0.023559167861938478, 0.023641088485717773, 0.023735296249389647, 0.023802879333496094, 0.023868415832519533, 0.023742464065551756, 0.023763967514038087, 0.024792064666748048, 0.024763423919677733, 0.024796127319335937, 0.024770559310913084, 0.024862720489501954, 0.02473472023010254, 0.024938495635986328, 0.024855552673339845, 0.02484121513366699, 0.024747007369995116, 0.024805408477783203, 0.024812511444091797, 0.024819711685180663, 0.02486579132080078, 0.024791040420532227, 0.024771583557128905, 0.051738624572753904, 0.02491494369506836, 0.024739839553833007, 0.02484121513366699, 0.024792064666748048, 0.02488934326171875, 0.02471833610534668, 0.02473779106140137, 0.024693792343139648, 0.024723424911499023, 0.024671232223510742, 0.024799232482910157, 0.02469068717956543, 0.024761344909667967, 0.02490060806274414, 0.024809471130371095, 0.024826879501342772, 0.024854528427124024, 0.02385305595397949, 0.023837696075439452, 0.02384588813781738, 0.02391244888305664, 0.02390425682067871, 0.024458240509033204, 0.024790016174316407, 0.024835071563720702, 0.024810495376586913, 0.025235456466674806, 0.02491801643371582, 0.024899616241455078, 0.024842208862304687, 0.024863744735717775, 0.024778751373291014, 0.024197120666503907, 0.023946239471435548, 0.02388275146484375, 0.0238919677734375, 0.024006656646728516, 0.023948320388793944, 0.0246015682220459, 0.025985023498535157, 0.025234432220458985, 0.024904703140258787, 0.02477670478820801, 0.024785951614379884, 0.02492720031738281, 0.02492313575744629, 0.02493337631225586, 0.02483404731750488, 0.024913919448852538, 0.024786943435668944, 0.024894464492797853, 0.024829952239990235, 0.02482585525512695, 0.024787967681884765, 0.024861696243286133, 0.025195520401000978, 0.023906368255615235, 0.024120256423950194, 0.023986175537109376, 0.023991296768188477, 0.023818239212036133, 0.023834623336791993]",tokens/s,39.82880622538531,,,,, -float16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,v,v,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,v,v,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -1322,7 +1322,7 @@ Traceback (most recent call last): 
hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d466b-1cdc68740855257b6dc07fe2;83a94b75-b6c6-45db-9a21-9d91d44b7472) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe9c3-6e84837f764bc287472b5e15;df3cb7d5-63e4-4883-884d-b66bddeb1240) Repository Not Found for url: https://huggingface.co/v/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -1393,7 +1393,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e9507-2128776d54ae563a0585fb99;38c31317-b2a1-4a44-b3b7-bc84479a1f3a) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe6c4-3c7c41966a29885e4fa1f293;2c5f2ef7-2a8d-4d44-ba4a-4771be013d53) Repository Not Found for url: https://huggingface.co/M/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -1463,7 +1463,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e95db-6f2fb2d844d918a87348e71a;7fd1bbe4-5162-4422-8cf4-5bbfe4224645) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe79e-66b3705567b0d2fe1b228499;e8bde47f-5d21-4c6a-8086-3484b89de5bf) Repository Not Found for url: https://huggingface.co/r/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -1533,7 +1533,7 @@ ChildProcessError: Traceback (most recent call last): torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -1562,7 +1562,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664d4867-2a1c60bd1c5a46ea3dc1ce31;88ec891f-86c9-4ac6-8b2a-4555c9e2f064) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664febdd-765f73c741154bdd0bc6bd20;83b5da42-b145-4d1b-8ab1-d49c72e88b47) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json. @@ -1647,7 +1647,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e9614-71ef47993494ffe2022fc482;af2972c9-dfbc-4706-916c-74863a376f11) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe7d3-5937139629f5388f1b3965dd;da8a5693-c0fb-4146-b165-337c6a906388) Repository Not Found for url: https://huggingface.co/a/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -1709,7 +1709,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664e8fbc-682ba0fe6f1938b825241c40;7672c517-44f4-460d-b253-29b6a2c739c3) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664fe156-1899c1771bab728b2447503b;f34e84d4-9789-4e05-bc80-e26ef39dcc1a) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json. @@ -1835,7 +1835,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e95a7-300e61f0007d8a433be7f58a;0e5a6a55-a5ca-4024-8e42-06730f8d321d) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe769-742e53922a827a456f87991e;0d63fa43-8515-43e9-a62d-579be2ddaf54) Repository Not Found for url: https://huggingface.co/t/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -1980,7 +1980,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e905c-744743641d932e1042f1d8c5;6b10e0b6-f0df-46c8-b666-434d0725a5e0) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe1fc-711b8f5702c39066069f05c1;59750334-ca1c-46fa-a058-5a051831680e) Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -2050,7 +2050,7 @@ torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 268.00 MiB. 
G ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-4.5B,,cuda,0,42,,,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,d35829e539df8480b726c647eeabf91e41eae047,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,MB,1399.795712,11335.630848,0.0,10689.183744,9357.549568,s,10,0.6931103744506837,0.06931103744506836,0.0007273844990548417,0.06909251022338866,0.06949516677856446,0.0704782543182373,0.07126472434997559,"[0.07146134185791016, 0.06901350402832031, 0.06909916687011719, 0.06926525115966797, 0.06927670288085938, 0.06911849975585938, 0.06899801635742188, 0.06908585357666015, 0.06888761901855468, 0.0689044189453125]",tokens/s,3693.495429250929,kWh,8.190749831126568e-07,4.487266654466331e-07,3.7329838292961606e-06,5.00078547785545e-06,tokens/kWh,51191957.97012746,MB,1400.123392,11335.630848,0.0,10689.183744,9490.407424,s,10,19.49065673828125,1.9490656738281253,0.00564540932897227,1.94903955078125,1.9541781249999999,1.9582277954101563,1.9614675317382813,"[1.9622774658203126, 1.951441650390625, 1.9532781982421874, 1.9501212158203125, 1.944335693359375, 1.9479578857421875, 1.9436546630859375, 1.950752197265625, 1.94369677734375, 1.9431409912109374]",tokens/s,32.323179688585256,kWh,2.291057484873867e-05,1.2555659928028286e-05,8.698272763210706e-05,0.000122448962408874,tokens/kWh,514500.07219852385,,s,629,20.080927755355795,0.03192516336304584,0.008289819758241598,0.030857215881347655,0.03127746543884277,0.03142776336669922,0.10010099670410157,"[0.03142451286315918, 0.03177471923828125, 0.03153203201293946, 0.03144806480407715, 0.03142451286315918, 0.03144704055786133, 0.03201228713989258, 0.0314768009185791, 0.03138041687011719, 0.031473663330078124, 0.031492095947265625, 0.031318016052246093, 0.0313118724822998, 0.03120639991760254, 0.03201433563232422, 0.03167948722839355, 0.03183923149108887, 0.03119206428527832, 0.031884288787841795, 0.031527936935424807, 0.031236095428466795, 0.031080448150634765, 0.03135487937927246, 0.030707712173461913, 0.03096780776977539, 0.031120384216308594, 0.03114291191101074, 0.03096575927734375, 0.030935039520263673, 0.03062579154968262, 0.03098624038696289, 0.0310435848236084, 0.031341567993164066, 0.031072256088256835, 0.03112652778625488, 0.030695423126220703, 0.030818304061889647, 0.0307957763671875, 0.03099750328063965, 0.031162368774414063, 0.03115519905090332, 0.03075379180908203, 0.030819328308105468, 0.03080089569091797, 0.030860288619995117, 0.030834688186645507, 0.03096883201599121, 0.030793727874755858, 0.030851072311401367, 0.030863359451293947, 0.03103443145751953, 0.030746559143066406, 0.03082444763183594, 0.030847999572753908, 0.031389696121215824, 0.03080601692199707, 0.030882816314697265, 0.030833663940429686, 0.030908416748046875, 0.030926847457885744, 0.030856191635131838, 0.03080601692199707, 0.10063871765136718, 0.030942207336425782, 0.030659584045410155, 0.030818304061889647, 0.03100262451171875, 0.030891008377075195, 0.031024127960205077, 0.03138457679748535, 0.031122432708740235, 0.030822399139404297, 0.030999551773071288, 
0.031110143661499022, 0.03129958343505859, 0.030757888793945313, 0.031055871963500976, 0.031303680419921875, 0.03147878456115723, 0.030891008377075195, 0.030730272293090822, 0.030776287078857423, 0.030827520370483398, 0.030877695083618165, 0.030817279815673827, 0.03098521614074707, 0.031037439346313478, 0.03082854461669922, 0.03073843193054199, 0.030769151687622072, 0.03102207946777344, 0.03102720069885254, 0.030864383697509764, 0.03103436851501465, 0.031080448150634765, 0.0311910400390625, 0.031140863418579103, 0.03077939224243164, 0.030757888793945313, 0.030833663940429686, 0.0307906551361084, 0.030947328567504883, 0.031071231842041015, 0.030728191375732423, 0.031138816833496095, 0.030909503936767578, 0.03090937614440918, 0.030819328308105468, 0.030851072311401367, 0.03101900863647461, 0.03127910423278808, 0.03097292709350586, 0.03080703926086426, 0.030825471878051756, 0.030826496124267577, 0.030914560317993164, 0.030866432189941406, 0.030948352813720704, 0.030819328308105468, 0.03103027153015137, 0.031172607421875, 0.031127552032470703, 0.030859264373779297, 0.03138559913635254, 0.031144960403442383, 0.10035302734375, 0.030691328048706053, 0.032535552978515625, 0.03151769638061523, 0.031337472915649416, 0.031088640213012695, 0.031152128219604492, 0.0313118724822998, 0.030978080749511718, 0.030718944549560548, 0.03077324867248535, 0.030856191635131838, 0.030856191635131838, 0.03080089569091797, 0.030745599746704103, 0.031239200592041015, 0.03086332893371582, 0.03098931121826172, 0.03079475212097168, 0.030793727874755858, 0.031107072830200196, 0.03125760078430176, 0.03127603149414063, 0.03138355255126953, 0.031081472396850586, 0.030714879989624022, 0.030870527267456056, 0.03097702407836914, 0.030809087753295897, 0.03078758430480957, 0.031124479293823244, 0.03127705574035645, 0.030869504928588868, 0.030964736938476563, 0.031160320281982422, 0.03080601692199707, 0.031014911651611327, 0.03091254425048828, 0.030758880615234376, 0.03098624038696289, 0.031235071182250978, 0.031185920715332032, 0.030673919677734376, 0.031074304580688477, 0.0309749755859375, 0.030904319763183592, 0.030930944442749023, 0.031108095169067384, 0.031130624771118165, 0.03082956886291504, 0.030907392501831055, 0.030872575759887694, 0.03082035255432129, 0.03094528007507324, 0.030863359451293947, 0.03136000061035156, 0.030840831756591795, 0.03079475212097168, 0.030852096557617188, 0.030861312866210938, 0.031080448150634765, 0.030814207077026368, 0.03082035255432129, 0.10102067565917969, 0.030846975326538087, 0.03077017593383789, 0.03081318473815918, 0.03134668731689453, 0.031242271423339844, 0.030676959991455078, 0.03096883201599121, 0.030697471618652345, 0.03081216049194336, 0.03098111915588379, 0.03080601692199707, 0.030895103454589845, 0.03079680061340332, 0.031094783782958983, 0.030895103454589845, 0.030680063247680665, 0.03079270362854004, 0.030719999313354493, 0.030810111999511718, 0.030871551513671876, 0.030729215621948244, 0.03057766342163086, 0.03103539276123047, 0.030717952728271485, 0.030846975326538087, 0.0307906551361084, 0.031718399047851564, 0.03120947265625, 0.03092787170410156, 0.03075584030151367, 0.03133235168457031, 0.030833663940429686, 0.03118284797668457, 0.030871551513671876, 0.03080089569091797, 0.03102207946777344, 0.03081216049194336, 0.030719999313354493, 0.031025152206420898, 0.030670848846435547, 0.030855167388916017, 0.03081523132324219, 0.030912511825561522, 0.03082444763183594, 0.03081020736694336, 0.030791584014892577, 0.03127705574035645, 0.031265792846679685, 0.03129343986511231, 
0.031065088272094726, 0.03081625556945801, 0.03119206428527832, 0.03097395133972168, 0.030873600006103515, 0.03156582450866699, 0.03138662338256836, 0.03082342338562012, 0.030707712173461913, 0.030988288879394532, 0.030742528915405274, 0.03152595138549805, 0.030772159576416016, 0.10050867462158203, 0.03074662399291992, 0.030819328308105468, 0.03076710319519043, 0.03059814453125, 0.03075993537902832, 0.03080499267578125, 0.03100262451171875, 0.030745599746704103, 0.03075379180908203, 0.030785535812377928, 0.030862335205078126, 0.030672895431518556, 0.030705696105957032, 0.030995424270629884, 0.03127193641662598, 0.03102822494506836, 0.0309616641998291, 0.030742528915405274, 0.030908416748046875, 0.030769151687622072, 0.030709760665893555, 0.03078963279724121, 0.030920703887939452, 0.03081523132324219, 0.030912511825561522, 0.031109119415283205, 0.03079270362854004, 0.03077939224243164, 0.030742528915405274, 0.030873600006103515, 0.030963712692260743, 0.03080396842956543, 0.030913536071777343, 0.030752767562866212, 0.030818304061889647, 0.030861312866210938, 0.03082041549682617, 0.030872512817382812, 0.03096063995361328, 0.030825471878051756, 0.03077836799621582, 0.03081318473815918, 0.030859264373779297, 0.030883840560913086, 0.030838783264160157, 0.031032320022583007, 0.03137228775024414, 0.030983167648315428, 0.030742528915405274, 0.030843904495239258, 0.030877695083618165, 0.030884864807128907, 0.030818304061889647, 0.030875648498535156, 0.030909439086914063, 0.030695423126220703, 0.030818304061889647, 0.030841856002807616, 0.030882879257202147, 0.030872543334960936, 0.030844959259033203, 0.030943168640136718, 0.10048102569580078, 0.030737407684326173, 0.030765056610107422, 0.030661632537841797, 0.030683135986328124, 0.030774272918701173, 0.03079884719848633, 0.03101900863647461, 0.030900224685668946, 0.030852096557617188, 0.030817279815673827, 0.03078144073486328, 0.03114905548095703, 0.030688255310058594, 0.030807071685791016, 0.03122172737121582, 0.03248953628540039, 0.031426496505737304, 0.030987295150756836, 0.03063804817199707, 0.03078041648864746, 0.030938112258911132, 0.03075481605529785, 0.030888959884643553, 0.03075379180908203, 0.030867456436157226, 0.03083263969421387, 0.030845951080322266, 0.030814207077026368, 0.03079270362854004, 0.03094528007507324, 0.03097804832458496, 0.03075584030151367, 0.03079884719848633, 0.030863359451293947, 0.030874624252319335, 0.03082035255432129, 0.030891008377075195, 0.030923776626586914, 0.031177728652954102, 0.03097395133972168, 0.030879743576049806, 0.030857215881347655, 0.030897151947021483, 0.03095961570739746, 0.03080089569091797, 0.030850048065185546, 0.03102822494506836, 0.03078348731994629, 0.03082444763183594, 0.030979103088378906, 0.03136508750915527, 0.031340543746948245, 0.030835712432861328, 0.030868480682373047, 0.03096985626220703, 0.030817279815673827, 0.030864383697509764, 0.030869504928588868, 0.030877695083618165, 0.030839807510375978, 0.03082342338562012, 0.030835712432861328, 0.1001164779663086, 0.030713855743408205, 0.030827520370483398, 0.03100160026550293, 0.030695423126220703, 0.030825471878051756, 0.030818304061889647, 0.0309586238861084, 0.030872543334960936, 0.03082035255432129, 0.030748672485351562, 0.03078451156616211, 0.030766080856323243, 0.030650367736816408, 0.030769151687622072, 0.030914560317993164, 0.03079270362854004, 0.03079167938232422, 0.0307957763671875, 0.030707712173461913, 0.03073843193054199, 0.030684160232543944, 0.030883840560913086, 0.031037439346313478, 0.03073843193054199, 
0.03075071907043457, 0.030882816314697265, 0.030845951080322266, 0.030863359451293947, 0.03081523132324219, 0.030839807510375978, 0.03105075263977051, 0.0307640323638916, 0.03080703926086426, 0.030833663940429686, 0.030860288619995117, 0.030940160751342774, 0.030818304061889647, 0.030876672744750977, 0.03097907257080078, 0.03079270362854004, 0.03084492874145508, 0.03077120018005371, 0.030915584564208985, 0.03085318374633789, 0.030823360443115233, 0.030855167388916017, 0.03099750328063965, 0.03077836799621582, 0.030839807510375978, 0.03082444763183594, 0.030907392501831055, 0.030903295516967775, 0.03080806350708008, 0.030870527267456056, 0.03142860794067383, 0.030842880249023437, 0.03082444763183594, 0.03095347213745117, 0.030894079208374024, 0.030860288619995117, 0.030958591461181642, 0.030757888793945313, 0.10313728332519531, 0.03141734313964844, 0.031082496643066407, 0.031084543228149415, 0.030875648498535156, 0.031067136764526368, 0.030841856002807616, 0.03094937515258789, 0.030701568603515625, 0.031129600524902344, 0.03082137680053711, 0.03076710319519043, 0.03078963279724121, 0.030850048065185546, 0.0307589111328125, 0.031106048583984375, 0.03075379180908203, 0.030831615447998048, 0.030946304321289062, 0.030830591201782227, 0.030892032623291016, 0.0307906551361084, 0.03077631950378418, 0.03098624038696289, 0.03075071907043457, 0.030915584564208985, 0.030954496383666992, 0.030877695083618165, 0.03083263969421387, 0.030874624252319335, 0.030749696731567383, 0.0309749755859375, 0.030726144790649414, 0.030848031997680665, 0.030838752746582033, 0.030861312866210938, 0.0307957763671875, 0.030934015274047853, 0.030846975326538087, 0.031057920455932617, 0.03080089569091797, 0.030841856002807616, 0.030769151687622072, 0.031152128219604492, 0.03118182373046875, 0.031301631927490234, 0.030906368255615234, 0.030983167648315428, 0.0307957763671875, 0.030892032623291016, 0.03076300811767578, 0.031006719589233397, 0.03131699180603027, 0.030931968688964844, 0.0309749755859375, 0.03131391906738281, 0.030793727874755858, 0.030859264373779297, 0.03080089569091797, 0.030900224685668946, 0.03080089569091797, 0.03098624038696289, 0.03078348731994629, 0.09996697235107421, 0.030726144790649414, 0.030734399795532226, 0.030624704360961916, 0.03073023986816406, 0.030854143142700196, 0.030649343490600587, 0.03078860855102539, 0.030646272659301758, 0.030711807250976563, 0.03075993537902832, 0.03062272071838379, 0.03073945617675781, 0.030740480422973632, 0.03077120018005371, 0.03094118309020996, 0.03108768081665039, 0.030775232315063475, 0.030745599746704103, 0.03077529525756836, 0.03079680061340332, 0.03061043167114258, 0.030718975067138672, 0.030883840560913086, 0.03085312080383301, 0.030845951080322266, 0.0307906551361084, 0.030834688186645507, 0.030852096557617188, 0.03075481605529785, 0.030690303802490236, 0.03085312080383301, 0.030954496383666992, 0.03072204780578613, 0.030740480422973632, 0.030846975326538087, 0.030856191635131838, 0.03080396842956543, 0.03128217506408691, 0.0310435848236084, 0.03080703926086426, 0.030846975326538087, 0.030825471878051756, 0.030946304321289062, 0.030860288619995117, 0.030814207077026368, 0.030877695083618165, 0.03097088050842285, 0.03083263969421387, 0.03075174331665039, 0.030840831756591795, 0.030867456436157226, 0.030962688446044922, 0.03159347152709961, 0.03078246307373047, 0.030929920196533203, 0.03074764823913574, 0.030814207077026368, 0.03129241561889649, 0.031509504318237305, 0.030930944442749023, 0.03080601692199707, 0.030851072311401367, 0.10006118774414062, 
0.030687231063842774, 0.030809087753295897, 0.030819328308105468, 0.030670848846435547, 0.030708736419677734, 0.03074355125427246, 0.030852096557617188, 0.030779487609863283, 0.030888864517211914, 0.030857215881347655, 0.03080089569091797, 0.030831615447998048, 0.030745599746704103, 0.030694400787353516, 0.030874624252319335, 0.03079680061340332, 0.030741504669189453, 0.030835712432861328, 0.030845951080322266, 0.031048704147338867, 0.030672895431518556, 0.03098624038696289, 0.03116543960571289, 0.03096985626220703, 0.030876672744750977, 0.030867456436157226, 0.030810111999511718, 0.030841856002807616, 0.03080703926086426, 0.030793727874755858, 0.03080396842956543, 0.030689279556274415, 0.03074051284790039, 0.030847967147827147, 0.030834688186645507, 0.03082342338562012, 0.03096780776977539, 0.030878719329833985, 0.031080448150634765, 0.03074355125427246, 0.03080294418334961, 0.03077734375, 0.030846975326538087, 0.030857215881347655, 0.030872575759887694, 0.030827520370483398, 0.03097088050842285, 0.030814207077026368, 0.030838783264160157, 0.03079782485961914, 0.030915647506713866, 0.03086329650878906, 0.03085312080383301, 0.030865407943725585, 0.030956575393676758, 0.03081007957458496, 0.03084492874145508, 0.030868480682373047, 0.030846975326538087, 0.030876672744750977, 0.030845951080322266, 0.03083776092529297]",tokens/s,31.323253968295255,,,main,False,False -float16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,B,B,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,B,B,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -2089,7 +2089,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d4602-30398fe2068fca7928d52e8e;320d3798-82b1-4272-aeab-fa96e3e4d911) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-664fe954-399e137632693c6f2112c423;dbee3f0a-585f-4873-bd78-fcefb54ccc6a) Repository Not Found for url: https://huggingface.co/B/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -2120,7 +2120,7 @@ OSError: B is not a local folder and is not a valid model identifier listed on ' If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -2197,7 +2197,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e9359-4412565175b212703481313d;8a67f823-4a18-4877-8078-400150a887b8) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe50c-5ee7dc2f7ce89d374d8642ce;9b692a78-c32f-4cfe-a72f-06429b34eec6) Repository Not Found for url: https://huggingface.co/s/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -2453,7 +2453,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e92ec-664bb4f279f83934187ff04e;85ac936d-f828-40f2-879b-0940ffe8813a) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-664fe4a2-6880bd2f06915445418969a7;11e8e3c9-70ce-47e8-be42-3269f8f56dfe) Repository Not Found for url: https://huggingface.co/m/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -2557,7 +2557,7 @@ Cannot access gated repo for url https://huggingface.co/mistralai/Mixtral-8x22B- Access to model mistralai/Mixtral-8x22B-v0.1 is restricted and you are not in the authorized list. Visit https://huggingface.co/mistralai/Mixtral-8x22B-v0.1 to ask for access. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,1,1,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,1,1,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -2596,7 +2596,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d470b-30b175d327ca7415249a11b5;1e5701f5-9167-42d1-9217-8edfc4df5ac5) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fea6a-2647d85a361502380ee0c5b2;93306797-b808-43fc-93a3-155f6a562ae9) Repository Not Found for url: https://huggingface.co/1/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -2627,7 +2627,7 @@ OSError: 1 is not a local folder and is not a valid model identifier listed on ' If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,0,0,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,0,0,cuda,0,42,,,True,True,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -2666,7 +2666,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d46a2-0838fefa32a6e2c371a9339c;dfe49963-5cb8-45e7-adf8-136c4ede2737) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe9fb-2b1573f01ef0063f1c72e1c7;99a6b5e7-37e8-4a0a-8c32-3529734acec8) Repository Not Found for url: https://huggingface.co/0/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -2698,7 +2698,7 @@ If this is a private repository, make sure to pass a token having permission to ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-6.7b,,cuda,0,42,,,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,217063f5c507ed7cc255df7e1f64c4333a0b4dfe,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,MB,1298.464768,15125.184512,0.0,14478.737408,14241.165312,s,10,0.9019541702270508,0.09019541702270509,0.0014070067081702311,0.08971971130371094,0.09028701171875,0.09235055999755859,0.09400139862060547,"[0.09441410827636719, 0.08973532867431641, 0.08973795318603515, 0.08967823791503907, 0.08969872283935547, 0.08982844543457032, 0.08970409393310547, 0.08967440032958984, 0.08969414520263672, 0.08978873443603516]",tokens/s,2838.2816827107363,kWh,1.0628362437562336e-06,5.823814746512263e-07,5.464292565874963e-06,7.109510284282423e-06,tokens/kWh,36008106.01061513,MB,1298.792448,15125.184512,0.0,14478.737408,14438.898176,s,10,23.840818603515626,2.3840818603515626,0.0038407329163161565,2.3829688720703124,2.3896716552734376,2.3911252563476566,2.3922881372070313,"[2.392578857421875, 2.38539697265625, 2.3893486328125, 2.38275048828125, 2.38209912109375, 2.383369384765625, 2.38180029296875, 2.381286376953125, 2.379001220703125, 2.383187255859375]",tokens/s,26.425267121788288,kWh,2.8074540981934186e-05,1.5385748123880732e-05,0.00011904586259772357,0.0001625061517035385,tokens/kWh,387677.6315208762,,s,629,24.610037715911872,0.03912565614612379,0.01069945614568616,0.03781631851196289,0.03808378982543945,0.03821977615356445,0.12785565490722656,"[0.038160385131835936, 0.038356990814208985, 0.03793612670898437, 0.03827302551269531, 0.03815321731567383, 0.0381736946105957, 0.038370304107666016, 0.038329345703125, 0.03819007873535156, 0.03809280014038086, 0.038166526794433595, 0.03850137710571289, 0.03811123275756836, 0.03811840057373047, 0.038191104888916014, 0.037935104370117184, 0.03791462326049805, 0.03787366485595703, 0.037988353729248046, 0.03803340911865234, 0.03801804733276367, 0.037940223693847655, 0.03791155242919922, 0.03766988754272461, 0.0376995849609375, 0.03779072189331055, 0.0377457275390625, 0.037734336853027343, 0.03803852844238281, 0.038040576934814455, 0.037884929656982425, 0.03802521514892578, 0.037958656311035156, 0.037972991943359374, 0.037991424560546876, 0.0379422721862793, 0.037716991424560545, 0.03786342239379883, 0.03785318374633789, 0.03779993438720703, 0.037776382446289065, 0.0377446403503418, 0.037795841217041014, 0.03778662490844727, 0.03778662490844727, 0.038075393676757815, 0.03825356674194336, 0.03782451248168945, 0.037765121459960936, 0.037935104370117184, 0.03790643310546875, 0.03793612670898437, 0.03788800048828125, 0.03784806442260742, 0.037958656311035156, 0.037874687194824216, 0.03793920135498047, 0.037926910400390625, 0.03788700866699219, 0.03785827255249023, 0.0378419189453125, 0.03788185501098633, 0.12786483001708984, 0.03756748962402344, 0.037736446380615234, 0.03762278366088867, 0.03761151885986328, 0.03758694458007812, 0.03767097473144531, 
0.038031295776367186, 0.03779072189331055, 0.03758899307250976, 0.03765862274169922, 0.0376360969543457, 0.03763814544677734, 0.03763814544677734, 0.03765964889526367, 0.037751808166503906, 0.037684223175048825, 0.037631999969482424, 0.03776409530639648, 0.03765760040283203, 0.0376995849609375, 0.03765657424926758, 0.037684223175048825, 0.037823486328125, 0.03770470428466797, 0.037718017578125, 0.03770880126953125, 0.03769651031494141, 0.03774771118164062, 0.03767295837402344, 0.03777536010742188, 0.03784499359130859, 0.03777228927612305, 0.03781836700439453, 0.03849420928955078, 0.03841846466064453, 0.03812662506103515, 0.03796985626220703, 0.038046718597412106, 0.03781836700439453, 0.037945343017578126, 0.03781017684936523, 0.03786137771606445, 0.03796889495849609, 0.03781631851196289, 0.03776204681396484, 0.03782758331298828, 0.03801804733276367, 0.03782963180541992, 0.0378419189453125, 0.037817344665527344, 0.03812454223632813, 0.03841843032836914, 0.03810508728027344, 0.03787161636352539, 0.038300670623779294, 0.03822387313842773, 0.03794841766357422, 0.03809382247924804, 0.0381399040222168, 0.038174720764160154, 0.038250495910644534, 0.03801190567016602, 0.12795187377929687, 0.03770675277709961, 0.037969921112060545, 0.03774054336547852, 0.037779457092285154, 0.0377262077331543, 0.037884929656982425, 0.03777433776855469, 0.03765353775024414, 0.03775689697265625, 0.03794739151000977, 0.0378419189453125, 0.037959678649902344, 0.03790857696533203, 0.037978015899658206, 0.03792793655395508, 0.03798015975952149, 0.03845529556274414, 0.03829452896118164, 0.03778662490844727, 0.037689342498779296, 0.038007808685302735, 0.03810508728027344, 0.0380313606262207, 0.03783782577514649, 0.037928958892822266, 0.037959678649902344, 0.0378419189453125, 0.03773952102661133, 0.037981182098388674, 0.03804876708984375, 0.03805491256713867, 0.03775590515136719, 0.03786444854736328, 0.03793203353881836, 0.038004737854003906, 0.037928958892822266, 0.037823486328125, 0.037943294525146484, 0.03804159927368164, 0.03782656097412109, 0.03800678253173828, 0.03809894561767578, 0.037994495391845705, 0.037823486328125, 0.03788496017456055, 0.037933025360107425, 0.03845529556274414, 0.03800166320800781, 0.037787647247314454, 0.0378869743347168, 0.03784908676147461, 0.03785728073120117, 0.03784601593017578, 0.038091777801513675, 0.038035457611083984, 0.037874687194824216, 0.03786751937866211, 0.037884929656982425, 0.03782656097412109, 0.03793817520141601, 0.03787059020996094, 0.037923839569091795, 0.12783206176757814, 0.037689342498779296, 0.037719039916992186, 0.03771187210083008, 0.03787161636352539, 0.03788185501098633, 0.03760332870483398, 0.03772518539428711, 0.03775590515136719, 0.037689342498779296, 0.03766067123413086, 0.03903180694580078, 0.03814092636108399, 0.037997566223144534, 0.03782451248168945, 0.037784576416015625, 0.03763711929321289, 0.03768115234375, 0.037703678131103514, 0.03761459350585938, 0.03776921463012695, 0.03770880126953125, 0.0377446403503418, 0.03784499359130859, 0.03771596908569336, 0.03769241714477539, 0.03778559875488281, 0.03771187210083008, 0.03762688064575195, 0.03779891204833984, 0.037751808166503906, 0.037819393157958986, 0.037850112915039064, 0.03774054336547852, 0.03775897598266602, 0.0378081283569336, 0.03788800048828125, 0.03771187210083008, 0.03784601593017578, 0.03792793655395508, 0.03776515197753906, 0.037797855377197265, 0.03776409530639648, 0.03770265579223633, 0.03786547088623047, 0.037748737335205076, 0.03784294509887695, 0.03806105422973633, 0.03782758331298828, 
0.03784499359130859, 0.03787571334838867, 0.03779379272460937, 0.037814273834228515, 0.03784396743774414, 0.03781631851196289, 0.037920768737792966, 0.03781631851196289, 0.03782553482055664, 0.0378603515625, 0.03785318374633789, 0.037891071319580076, 0.03788185501098633, 0.03800166320800781, 0.1278730239868164, 0.03751833724975586, 0.037631999969482424, 0.03757567977905273, 0.03765862274169922, 0.037591041564941405, 0.03764326477050781, 0.037720062255859374, 0.03769036865234375, 0.03764633560180664, 0.03772927856445313, 0.03763820648193359, 0.037772224426269534, 0.03777024078369141, 0.03776921463012695, 0.037766143798828124, 0.037743614196777346, 0.03782553482055664, 0.03775392150878906, 0.037682113647460935, 0.03772518539428711, 0.03762588882446289, 0.0378900146484375, 0.03798015975952149, 0.03775897598266602, 0.037735424041748046, 0.03780710220336914, 0.0376627197265625, 0.037771263122558595, 0.03764326477050781, 0.03781222534179687, 0.03782048034667969, 0.0380568962097168, 0.037967872619628903, 0.03797919845581055, 0.03772716903686523, 0.037823486328125, 0.03773132705688476, 0.03784396743774414, 0.03785420989990235, 0.03782553482055664, 0.037784576416015625, 0.037820415496826174, 0.037722110748291016, 0.03783987045288086, 0.0377077751159668, 0.037852161407470705, 0.03786240005493164, 0.03783475112915039, 0.03798732757568359, 0.03809382247924804, 0.03799552154541016, 0.03807027053833008, 0.03791667175292969, 0.03812454223632813, 0.037959678649902344, 0.03790240097045899, 0.03793196868896485, 0.03789004898071289, 0.03775283050537109, 0.03789516830444336, 0.03788595199584961, 0.037966846466064456, 0.1280061492919922, 0.03753472137451172, 0.037720062255859374, 0.03759308624267578, 0.037781505584716796, 0.037773311614990236, 0.03770675277709961, 0.0377968635559082, 0.037713920593261716, 0.03781836700439453, 0.037904384613037106, 0.03783375930786133, 0.0377558708190918, 0.03770265579223633, 0.037713920593261716, 0.037771263122558595, 0.03776204681396484, 0.03763302230834961, 0.03772927856445313, 0.03766681671142578, 0.037667839050292966, 0.03761875152587891, 0.037690303802490235, 0.03776310348510742, 0.03773129653930664, 0.037689342498779296, 0.03782656097412109, 0.037700607299804685, 0.037700607299804685, 0.037733375549316404, 0.03783679962158203, 0.03781843185424805, 0.03779884719848633, 0.037749759674072264, 0.03779379272460937, 0.03770470428466797, 0.037852161407470705, 0.03827302551269531, 0.03829558563232422, 0.03823817443847656, 0.037989376068115234, 0.03789209747314453, 0.037874687194824216, 0.037978111267089845, 0.03787161636352539, 0.037754878997802735, 0.03783478546142578, 0.037881824493408205, 0.03783168029785156, 0.03778559875488281, 0.03778047943115234, 0.03778662490844727, 0.03785625457763672, 0.03782553482055664, 0.0384634895324707, 0.038247425079345705, 0.037884929656982425, 0.03792998504638672, 0.037959678649902344, 0.03781119918823242, 0.03784499359130859, 0.03784294509887695, 0.03790233612060547, 0.1278699493408203, 0.037564414978027344, 0.037628929138183595, 0.03758489608764649, 0.03766067123413086, 0.03759206390380859, 0.037651454925537106, 0.037776382446289065, 0.037655616760253904, 0.03765651321411133, 0.03769343948364258, 0.037641246795654296, 0.03767804718017578, 0.03780505752563477, 0.03793407821655274, 0.03801500701904297, 0.03773641586303711, 0.03768320083618164, 0.03769241714477539, 0.0378603515625, 0.03839900970458984, 0.03777942276000976, 0.037817344665527344, 0.0380948486328125, 0.03793305587768555, 0.03774771118164062, 0.037735424041748046, 
0.03775283050537109, 0.03770880126953125, 0.03776409530639648, 0.037743614196777346, 0.03788083267211914, 0.03776204681396484, 0.03774054336547852, 0.03768320083618164, 0.03772415924072266, 0.03777228927612305, 0.03775692749023438, 0.03774566268920899, 0.037823486328125, 0.03782656097412109, 0.03774054336547852, 0.03779481506347656, 0.03779993438720703, 0.03775078582763672, 0.03787673568725586, 0.03788294219970703, 0.03790943908691406, 0.037776382446289065, 0.03781222534179687, 0.038184959411621096, 0.037953536987304685, 0.037889022827148434, 0.037838848114013675, 0.037789695739746096, 0.037940223693847655, 0.037814273834228515, 0.03780505752563477, 0.037803009033203126, 0.037835777282714846, 0.038009952545166016, 0.03791247940063477, 0.03784198379516602, 0.12780025482177734, 0.037574657440185545, 0.037689342498779296, 0.03759718322753906, 0.037647361755371096, 0.03764940643310547, 0.03764019012451172, 0.0377077751159668, 0.03766988754272461, 0.03762688064575195, 0.03762483215332031, 0.03768115234375, 0.03775692749023438, 0.03765760040283203, 0.037823486328125, 0.03818086242675781, 0.03785113525390625, 0.03764940643310547, 0.03772723388671875, 0.03767910385131836, 0.03775590515136719, 0.03782758331298828, 0.037664768218994144, 0.03784707260131836, 0.037832672119140626, 0.037689342498779296, 0.03774054336547852, 0.037733375549316404, 0.03787673568725586, 0.03807027053833008, 0.03794841766357422, 0.03805593490600586, 0.03790643310546875, 0.03779379272460937, 0.03786240005493164, 0.03777849578857422, 0.03777231979370117, 0.037769119262695314, 0.037959678649902344, 0.0381317138671875, 0.03788595199584961, 0.03778559875488281, 0.037773311614990236, 0.03769753646850586, 0.03779379272460937, 0.03773446273803711, 0.03772819137573242, 0.037920768737792966, 0.03785113525390625, 0.037810207366943356, 0.037814239501953124, 0.037781505584716796, 0.037781566619873044, 0.03779065704345703, 0.037814273834228515, 0.037904384613037106, 0.03784601593017578, 0.03783683013916016, 0.03785007858276367, 0.03782252883911133, 0.03784799957275391, 0.03778355026245117, 0.03790335845947266, 0.12795187377929687, 0.037571582794189456, 0.03760947036743164, 0.037561344146728515, 0.03763097763061524, 0.03761356735229492, 0.03763302230834961, 0.037719039916992186, 0.037694465637207034, 0.037550079345703126, 0.0377262077331543, 0.03759001541137695, 0.03772825622558594, 0.03763916778564453, 0.03769651031494141, 0.037823486328125, 0.03783065414428711, 0.03775692749023438, 0.03769548797607422, 0.03764223861694336, 0.037743614196777346, 0.03767910385131836, 0.03773235321044922, 0.037749759674072264, 0.03776313781738281, 0.037639137268066405, 0.03782447814941406, 0.03768832015991211, 0.03774771118164062, 0.0376360969543457, 0.03778559875488281, 0.03784806442260742, 0.037779457092285154, 0.037700607299804685, 0.037806079864501956, 0.03794636917114258, 0.03799244689941406, 0.03773132705688476, 0.037817344665527344, 0.037806079864501956, 0.03780710220336914, 0.03768524932861328, 0.0378092155456543, 0.037690303802490235, 0.037820415496826174, 0.03767603302001953, 0.03787571334838867, 0.03784601593017578, 0.03789004898071289, 0.037730304718017575, 0.03780198287963867, 0.037716991424560545, 0.03781836700439453, 0.03784703826904297, 0.03787366485595703, 0.037928958892822266, 0.03785318374633789, 0.037776382446289065, 0.03788390350341797, 0.03778047943115234, 0.0378869743347168, 0.03777228927612305, 0.03787980651855469, 0.12812083435058594, 0.03765657424926758, 0.03765350341796875, 0.03755724716186523, 0.037645313262939455, 
0.03758489608764649, 0.03768217468261719, 0.03793817520141601, 0.03759206390380859, 0.03762688064575195, 0.03763097763061524, 0.03764940643310547, 0.037698558807373043, 0.03762688064575195, 0.0377784309387207, 0.03776409530639648, 0.03770265579223633, 0.03771084976196289, 0.037694465637207034, 0.03762688064575195, 0.03770470428466797, 0.037730304718017575, 0.037726272583007814, 0.03785004806518555, 0.03773747253417969, 0.037663745880126956, 0.037787647247314454, 0.0376627197265625, 0.03767705535888672, 0.037678081512451174, 0.03769548797607422, 0.0378152961730957, 0.037776382446289065, 0.03772415924072266, 0.0378081283569336, 0.03777024078369141, 0.03784294509887695, 0.037835777282714846, 0.0378152961730957, 0.037928958892822266, 0.03818086242675781, 0.03828940963745117, 0.03800166320800781, 0.03778867340087891, 0.03781740951538086, 0.03781830215454102, 0.03806924819946289, 0.038201343536376955, 0.03808256149291992, 0.03789926528930664, 0.0379422721862793, 0.03803859329223633, 0.03790943908691406, 0.037806079864501956, 0.037874687194824216, 0.037907455444335936, 0.03783168029785156, 0.03810406494140625, 0.03812044906616211, 0.037926910400390625, 0.03791155242919922, 0.03808870315551758, 0.03821363067626953]",tokens/s,25.55867679931729,,,main,False,False -bfloat16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,.,.,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +bfloat16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,.,.,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -2766,7 +2766,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e96c1-106108e575ddbbbd1eb573c5;b8da0230-af5b-41ea-a88e-5ffa475a7938) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe87e-50f5a80624065fa86ca811b4;3999ed65-6af4-4b3f-a941-eb9745ceb7c1) Repository Not Found for url: https://huggingface.co/8/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -2798,7 +2798,7 @@ If this is a private repository, make sure to pass a token having permission to ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.0,,,MB,1275.834368,3848.798208,0.0,3202.351104,3105.82784,s,10,0.24147904014587404,0.024147904014587402,0.0005044599971227972,0.023981328010559085,0.024345826911926268,0.024975265407562255,0.025478816204071043,"[0.02560470390319824, 0.024193920135498048, 0.024107711791992188, 0.02385955238342285, 0.023864992141723634, 0.02388572883605957, 0.0237938232421875, 0.024205951690673827, 0.024017663955688478, 0.023944992065429688]",tokens/s,10601.334171502176,kWh,2.849579786998999e-07,1.5612320748476186e-07,1.239496306187915e-06,1.6805774923725767e-06,tokens/kWh,152328590.11969078,MB,1276.280832,3848.798208,0.0,3202.351104,3188.152832,s,10,12.560411621093749,1.256041162109375,0.02114655036495289,1.2566647338867187,1.2818962890625,1.2891886840820312,1.2950226000976561,"[1.2964810791015624, 1.2592294921875, 1.2549832763671875, 1.2301461181640625, 1.2293675537109374, 1.2498057861328125, 1.27036181640625, 1.2802757568359375, 1.25834619140625, 1.23141455078125]",tokens/s,50.15759188512487,kWh,1.4725982288280166e-05,8.068005656063925e-06,3.874639679361247e-05,6.154038473795654e-05,tokens/kWh,1023718.0067732532,,s,629,12.753896476745602,0.020276464986876956,0.0029344008735951803,0.019969024658203126,0.02045972480773926,0.020908031845092774,0.04386340957641602,"[0.02070528030395508, 0.020354047775268554, 0.020370431900024414, 0.02124492835998535, 0.021164031982421876, 0.02126335906982422, 0.021398527145385742, 0.02124799919128418, 0.021144575119018554, 0.02103910446166992, 0.020414464950561522, 0.020752384185791017, 0.02122444725036621, 0.021200895309448242, 0.021166080474853514, 0.02107494354248047, 0.021120000839233398, 0.02123673629760742, 0.020541471481323244, 0.020411392211914063, 0.02013795280456543, 0.02007756805419922, 0.02003763198852539, 0.020918272018432618, 0.021218303680419923, 0.021421056747436523, 0.019960832595825196, 0.019557376861572266, 0.020304895401000975, 0.020333568572998048, 0.020575231552124023, 0.020436992645263673, 0.020371456146240235, 0.0203786563873291, 0.02093257522583008, 0.021024768829345702, 0.020485120773315428, 0.02048307228088379, 0.02053638458251953, 0.02037446403503418, 0.020382720947265624, 0.020145183563232423, 0.020263904571533202, 0.020380672454833985, 0.020332544326782227, 0.020358144760131838, 0.020419584274291993, 0.02040012741088867, 0.020378623962402344, 0.020348928451538087, 0.020362239837646484, 0.020328447341918944, 0.02041651153564453, 0.020331520080566406, 0.019620864868164063, 0.019512319564819337, 0.0194334716796875, 0.020167680740356447, 0.02040115165710449, 0.02031718444824219, 0.02037555122375488, 0.02045030403137207, 0.044227584838867184, 0.019522560119628905, 0.019489791870117186, 0.019510271072387696, 0.019533824920654298, 0.019515392303466796, 
0.019489791870117186, 0.01965465545654297, 0.01949286460876465, 0.020237312316894532, 0.020355072021484375, 0.020084735870361328, 0.02045337677001953, 0.02044108772277832, 0.020374528884887694, 0.020376575469970702, 0.02006118392944336, 0.02030284881591797, 0.020371456146240235, 0.020379648208618165, 0.019475456237792968, 0.019507200241088866, 0.0194467830657959, 0.019582975387573243, 0.01945395278930664, 0.019354623794555666, 0.01927168083190918, 0.019327999114990235, 0.01927884864807129, 0.019384319305419923, 0.0192675838470459, 0.019412992477416992, 0.01926655960083008, 0.01928704071044922, 0.019339263916015623, 0.02007347106933594, 0.020024320602416993, 0.02006118392944336, 0.020023296356201172, 0.020125696182250977, 0.020148223876953125, 0.02008576011657715, 0.020148223876953125, 0.02039193534851074, 0.02042572784423828, 0.020436992645263673, 0.020360191345214843, 0.020487167358398437, 0.020344831466674804, 0.020420608520507814, 0.020410367965698242, 0.020410367965698242, 0.020402175903320312, 0.02044108772277832, 0.020398080825805662, 0.020528127670288086, 0.02045337677001953, 0.020386816024780274, 0.020366336822509764, 0.02044313621520996, 0.020349952697753908, 0.020397056579589845, 0.020379680633544922, 0.04500579071044922, 0.020388864517211915, 0.020336639404296874, 0.02040934371948242, 0.02042163276672363, 0.020540416717529295, 0.02044108772277832, 0.020518911361694335, 0.020380672454833985, 0.019586048126220702, 0.019580928802490235, 0.019500032424926757, 0.01948876762390137, 0.019500032424926757, 0.01947648048400879, 0.01962188720703125, 0.019500032424926757, 0.019522560119628905, 0.019481599807739256, 0.019557376861572266, 0.019562496185302734, 0.019528703689575197, 0.019554304122924804, 0.019587072372436523, 0.019491840362548828, 0.019555328369140625, 0.01948569679260254, 0.019479551315307618, 0.01947648048400879, 0.019490816116333007, 0.019553279876708983, 0.019568639755249022, 0.019526655197143555, 0.019551231384277345, 0.01955740737915039, 0.019505119323730467, 0.019482624053955077, 0.019517440795898438, 0.01946419143676758, 0.019589120864868165, 0.019535871505737306, 0.01946214485168457, 0.019487743377685548, 0.01960140800476074, 0.020320255279541014, 0.020385791778564453, 0.02040934371948242, 0.02046463966369629, 0.020331520080566406, 0.02041548728942871, 0.020386816024780274, 0.020389888763427736, 0.020381696701049806, 0.02040934371948242, 0.020338687896728515, 0.020419584274291993, 0.02043391990661621, 0.020397056579589845, 0.020329471588134765, 0.020376575469970702, 0.020357120513916017, 0.020377599716186523, 0.020331520080566406, 0.04388147354125976, 0.020065343856811524, 0.020249536514282226, 0.02024140739440918, 0.020237312316894532, 0.020303871154785155, 0.02028441619873047, 0.020315135955810547, 0.0194652156829834, 0.019374080657958984, 0.019397632598876953, 0.01945907211303711, 0.019486719131469727, 0.019482624053955077, 0.01923891258239746, 0.019329023361206055, 0.019466239929199217, 0.01946214485168457, 0.019449855804443358, 0.01942732810974121, 0.019514368057250975, 0.0195020809173584, 0.0194969596862793, 0.019505151748657225, 0.0195020809173584, 0.01942527961730957, 0.019579904556274414, 0.01947340774536133, 0.01943654441833496, 0.019503103256225587, 0.01947340774536133, 0.019514368057250975, 0.019575807571411134, 0.0194201602935791, 0.019458047866821288, 0.01947648048400879, 0.01945702362060547, 0.01948467254638672, 0.01944473648071289, 0.01949286460876465, 0.019526655197143555, 0.01940275192260742, 0.019555328369140625, 0.01942835235595703, 
0.019475456237792968, 0.019491840362548828, 0.019481599807739256, 0.019544063568115236, 0.019315711975097655, 0.0192675838470459, 0.019260416030883788, 0.019293184280395507, 0.019306495666503908, 0.019286016464233398, 0.019482624053955077, 0.019508224487304687, 0.019314687728881837, 0.019401727676391603, 0.01925017547607422, 0.01926246452331543, 0.01927065658569336, 0.019300384521484373, 0.019288032531738282, 0.04381695938110351, 0.019474431991577147, 0.01946316719055176, 0.01945702362060547, 0.019504127502441407, 0.01944063949584961, 0.0194652156829834, 0.019526655197143555, 0.019503103256225587, 0.01945702362060547, 0.019404800415039062, 0.01944268798828125, 0.019305471420288087, 0.019280895233154297, 0.01938739204406738, 0.019308544158935546, 0.01926553535461426, 0.019194879531860352, 0.019224576950073242, 0.01924608039855957, 0.01924300765991211, 0.01949286460876465, 0.01946112060546875, 0.019560447692871095, 0.01945907211303711, 0.01942118453979492, 0.019353631973266602, 0.01927881622314453, 0.01922764778137207, 0.019333120346069335, 0.01925119972229004, 0.019369983673095705, 0.019478527069091797, 0.019464256286621094, 0.019474367141723632, 0.01943449592590332, 0.01945395278930664, 0.019408895492553712, 0.019486719131469727, 0.02039910316467285, 0.020373504638671876, 0.020363264083862305, 0.020267040252685546, 0.020330463409423828, 0.020291584014892578, 0.02031718444824219, 0.01923072052001953, 0.019321855545043946, 0.019316736221313476, 0.01923788833618164, 0.019353599548339845, 0.019305471420288087, 0.01926246452331543, 0.019300352096557616, 0.019333120346069335, 0.019400703430175782, 0.019339263916015623, 0.01926553535461426, 0.019374080657958984, 0.019530752182006835, 0.019969024658203126, 0.019983360290527344, 0.019554304122924804, 0.04390911865234375, 0.019574783325195313, 0.019544063568115236, 0.019508224487304687, 0.019512319564819337, 0.019563520431518554, 0.019490816116333007, 0.019573759078979493, 0.01950828742980957, 0.0195665283203125, 0.01960038375854492, 0.019553279876708983, 0.019501056671142578, 0.01945497512817383, 0.0194467830657959, 0.019560447692871095, 0.019487743377685548, 0.019515392303466796, 0.019527679443359376, 0.01948876762390137, 0.0195020809173584, 0.019546112060546874, 0.019466239929199217, 0.020892671585083008, 0.020764671325683593, 0.02047283172607422, 0.020455423355102538, 0.020406272888183592, 0.020398080825805662, 0.020281343460083007, 0.020406272888183592, 0.02046463966369629, 0.020344831466674804, 0.020144128799438478, 0.020092927932739257, 0.020039680480957032, 0.02042470359802246, 0.020374528884887694, 0.020371488571166992, 0.020474847793579103, 0.02042572784423828, 0.020403200149536133, 0.020350976943969725, 0.020436992645263673, 0.02030080032348633, 0.019607551574707033, 0.01967513656616211, 0.019617824554443358, 0.019503072738647462, 0.019362815856933592, 0.019405824661254883, 0.019475456237792968, 0.019525632858276368, 0.01949286460876465, 0.019507200241088866, 0.019664928436279296, 0.0196976318359375, 0.019513343811035155, 0.019506175994873046, 0.019555328369140625, 0.019513343811035155, 0.01948467254638672, 0.0194847354888916, 0.043734977722167966, 0.01948876762390137, 0.01947750473022461, 0.0195020809173584, 0.019481599807739256, 0.019458047866821288, 0.019471359252929688, 0.019557376861572266, 0.01944166374206543, 0.01943449592590332, 0.020115455627441405, 0.019501056671142578, 0.019750911712646483, 0.020393983840942383, 0.020279296875, 0.020289535522460937, 0.020175872802734376, 0.020017152786254884, 0.0202106876373291, 
0.020343807220458983, 0.02026188850402832, 0.020001792907714845, 0.020238336563110353, 0.020478975296020507, 0.02021171188354492, 0.020242431640625, 0.02027827262878418, 0.02025164794921875, 0.02030899238586426, 0.02025881576538086, 0.02027827262878418, 0.02030182456970215, 0.02027827262878418, 0.02030080032348633, 0.020257791519165038, 0.020246528625488282, 0.02025164794921875, 0.020324352264404297, 0.02030182456970215, 0.020340736389160157, 0.020392959594726562, 0.020292640686035156, 0.020290592193603515, 0.020275136947631837, 0.020315135955810547, 0.020487167358398437, 0.020333568572998048, 0.020340736389160157, 0.02024550437927246, 0.02023628807067871, 0.02024345588684082, 0.020980768203735352, 0.020469728469848632, 0.020322303771972656, 0.02038374328613281, 0.02084966468811035, 0.02032640075683594, 0.020349952697753908, 0.020304895401000975, 0.02030182456970215, 0.02030080032348633, 0.020247552871704103, 0.020346879959106445, 0.045080577850341794, 0.02028646469116211, 0.020323328018188477, 0.020152320861816408, 0.020926464080810548, 0.020344831466674804, 0.02003660774230957, 0.020320255279541014, 0.02023526382446289, 0.020033536911010744, 0.019998720169067383, 0.020141056060791016, 0.020063232421875, 0.0200263671875, 0.019958784103393554, 0.02007040023803711, 0.020269088745117188, 0.020373472213745115, 0.020374528884887694, 0.020279296875, 0.020345855712890625, 0.020313087463378905, 0.020321279525756835, 0.020379648208618165, 0.020351999282836913, 0.02030899238586426, 0.020556800842285155, 0.020509695053100584, 0.020248575210571287, 0.020364288330078126, 0.02045849609375, 0.02043391990661621, 0.02032537651062012, 0.02031001663208008, 0.02030284881591797, 0.02101862335205078, 0.021251071929931642, 0.020554752349853517, 0.020332544326782227, 0.020374528884887694, 0.02032640075683594, 0.020330495834350586, 0.020356096267700196, 0.02031001663208008, 0.02037660789489746, 0.02031920051574707, 0.020372480392456056, 0.02043084716796875, 0.020368383407592772, 0.02042265510559082, 0.020287488937377928, 0.02032640075683594, 0.02039091110229492, 0.020371456146240235, 0.020876287460327148, 0.020567039489746093, 0.02027622413635254, 0.020340736389160157, 0.02040934371948242, 0.02028646469116211, 0.01942835235595703, 0.019503103256225587, 0.01947238349914551, 0.04482867050170898, 0.020380672454833985, 0.020378623962402344, 0.020414464950561522, 0.020526079177856444, 0.02045849609375, 0.02033459281921387, 0.020371456146240235, 0.020264959335327147, 0.020323328018188477, 0.02026700782775879, 0.020314111709594726, 0.020319232940673827, 0.02031718444824219, 0.020335615158081053, 0.020533248901367186, 0.02035916709899902, 0.020351999282836913, 0.02025984001159668, 0.02027008056640625, 0.020229120254516602, 0.020526079177856444, 0.02068889617919922, 0.021384191513061524, 0.020746240615844725, 0.020366336822509764, 0.020341760635375978, 0.020356096267700196, 0.02022809600830078, 0.02020147132873535, 0.02025984001159668, 0.020366336822509764, 0.020331520080566406, 0.020254751205444337, 0.01949590492248535, 0.01943756866455078, 0.01944473648071289, 0.01948057556152344, 0.01946316719055176, 0.01946828842163086, 0.019501056671142578, 0.0194201602935791, 0.019486719131469727, 0.019451904296875, 0.01942835235595703, 0.019400703430175782, 0.019507200241088866, 0.019582975387573243, 0.01945088005065918, 0.019495935440063478, 0.0194201602935791, 0.019478527069091797, 0.019551231384277345, 0.01926246452331543, 0.019342336654663086, 0.019517440795898438, 0.019481599807739256, 0.01945292854309082, 
0.019528703689575197, 0.019449855804443358, 0.01949388885498047, 0.019563520431518554, 0.01947750473022461, 0.04393471908569336, 0.019523584365844726, 0.019506175994873046, 0.019482624053955077, 0.01948057556152344, 0.019487808227539063, 0.01951123237609863, 0.019580928802490235, 0.019521535873413084, 0.01946316719055176, 0.01945292854309082, 0.01944268798828125, 0.020059135437011717, 0.020361215591430663, 0.020371456146240235, 0.019590143203735352, 0.0194969596862793, 0.019479551315307618, 0.019545087814331053, 0.019577856063842772, 0.019490816116333007, 0.01947238349914551, 0.01948467254638672, 0.01957683181762695, 0.019494911193847657, 0.019497983932495116, 0.019504127502441407, 0.019557376861572266, 0.0194652156829834, 0.0196177921295166, 0.019575807571411134, 0.019567615509033204, 0.019537919998168944, 0.01946214485168457, 0.01958198356628418, 0.01953481674194336, 0.019504127502441407, 0.01941196823120117, 0.019481599807739256, 0.019607551574707033, 0.01945395278930664, 0.01945599937438965, 0.01944883155822754, 0.01946419143676758, 0.019505151748657225, 0.019615808486938478, 0.019454912185668947, 0.019512319564819337, 0.01946316719055176, 0.01943654441833496, 0.01943142318725586, 0.01946316719055176, 0.01944063949584961, 0.019508224487304687, 0.01946009635925293, 0.019536895751953123, 0.019571712493896484, 0.019436607360839842, 0.019512256622314452, 0.019511295318603517, 0.019533824920654298, 0.01942425537109375, 0.01945702362060547]",tokens/s,49.3182613757973,,,,, -bfloat16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,2,2,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +bfloat16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,2,2,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -2837,7 +2837,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d45cf-20db95bc2e4e23d06d7d9a42;7304c319-ee19-4195-b194-e0cbe519558e) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-664fe926-7651186a660df07d20a1f2a6;3181a5e7-b684-40fd-b009-650e8a0303f6) Repository Not Found for url: https://huggingface.co/2/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -3016,7 +3016,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664e902e-6e3db2b84e07e8f45f6c4594;04ffb607-6382-4457-8e96-e39cb9dfa7e0) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664fe1cd-1ff3dbfc6c8bd35f0786d060;956c614b-7f53-45b3-9141-9f2b2c6988bb) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json. @@ -3141,7 +3141,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e96f6-46f9f1ec00a591cb2029f89c;697c8257-efca-4184-b5f8-631ab6ac692b) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe8b7-4833e9e90f2d6a2f10db8caf;5439af69-f0a2-4720-b709-1a49fe678fc9) Repository Not Found for url: https://huggingface.co/x/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -3201,7 +3201,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664e8ff9-280c26bc1998848575e7dd66;b7ccce57-442e-407d-8e59-cf8693ea88f4) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664fe195-5cd76eaa7752d0005defc449;6c197d52-02ac-4585-ae91-26767d7e69e4) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json. @@ -3653,7 +3653,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664e8deb-561a6c795b03c46e117fd4bb;1933cd94-bc27-41e2-8794-d3755581b117) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664fdf7f-619ccb266fc2a0cf7ebb7b1f;52317d96-f6cd-48d3-8ac4-2ab86d90289b) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json. @@ -3738,7 +3738,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e9656-442f9c99215f1820100fc19f;2010dc31-6463-4019-915f-feca45653ce9) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-664fe80f-0758e60757818974237d8202;79e98a92-aa86-40be-b2a3-12c7eb13ed3f) Repository Not Found for url: https://huggingface.co/l/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -3770,7 +3770,7 @@ If this is a private repository, make sure to pass a token having permission to ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,MB,1252.605952,14529.593344,0.0,13883.14624,13705.186304,s,10,0.9879193649291993,0.09879193649291992,0.0002512661472968626,0.09873307418823242,0.09890953521728515,0.09920227279663087,0.09943646286010742,"[0.09949501037597656, 0.098844482421875, 0.0985115203857422, 0.09878438568115235, 0.09873267364501953, 0.09880620574951172, 0.09872038269042968, 0.09862966156005859, 0.09866156768798828, 0.09873347473144531]",tokens/s,2591.3046052938403,kWh,1.1688001729823925e-06,6.402306186400357e-07,6.192654736254908e-06,8.001685527877335e-06,tokens/kWh,31993259.308693547,MB,1252.605952,14529.593344,0.0,13883.14624,13856.22528,s,10,22.93839282226563,2.293839282226563,0.001022246015489629,2.2933924560546872,2.2952232177734375,2.295579504394531,2.295864533691406,"[2.29338037109375, 2.29463623046875, 2.2931396484375, 2.293404541015625, 2.292788818359375, 2.29299072265625, 2.294100830078125, 2.29514404296875, 2.292871826171875, 2.295935791015625]",tokens/s,27.464871008245936,kWh,2.6996926405915512e-05,1.4795383739971892e-05,0.000113753996776545,0.0001555463069224324,tokens/kWh,405024.0809086952,,s,629,23.789682689666723,0.03782143511870707,0.011804791302641934,0.03638886260986328,0.0365588493347168,0.036620288848876956,0.1356418212890625,"[0.0362158088684082, 0.03625574493408203, 0.03614617538452149, 0.03632025527954102, 0.03617587280273438, 0.03626393508911133, 0.03632230377197266, 0.03629056167602539, 0.03623321533203125, 0.0362608642578125, 0.03621068954467774, 0.03624345779418945, 0.036299774169921875, 0.03666841506958008, 0.036674560546875, 0.03648614501953125, 0.03627008056640625, 0.0363765754699707, 0.03635200119018555, 0.03639603042602539, 0.036313087463378906, 0.03638886260986328, 0.036324352264404294, 0.03636633682250977, 0.03628953552246094, 0.03641958236694336, 0.03631718444824219, 0.036400127410888675, 0.03633356857299805, 0.03639910507202149, 0.036311038970947264, 0.0363469123840332, 0.03631510543823242, 0.036397056579589845, 0.036337665557861325, 0.03641958236694336, 0.03633868789672851, 0.03642675018310547, 0.03638272094726563, 0.03640422439575195, 0.03632844924926758, 0.03643807983398437, 0.03634272003173828, 0.036393985748291016, 0.03638988876342773, 0.036620288848876956, 0.03661004638671875, 0.036512767791748044, 0.03643904113769531, 0.03642985534667969, 0.036356063842773435, 0.03649539184570313, 0.03638678359985351, 0.036670463562011715, 0.03657113647460938, 0.03648921585083008, 0.03639910507202149, 0.03650764846801758, 0.036375553131103515, 
0.03649740982055664, 0.03641139221191406, 0.036580352783203124, 0.13603021240234375, 0.03622195053100586, 0.03626291275024414, 0.03629363250732422, 0.036350975036621096, 0.03625881576538086, 0.03630495834350586, 0.036385726928710935, 0.03644416046142578, 0.03632844924926758, 0.03636019134521484, 0.03621785736083984, 0.03639295959472656, 0.03627724838256836, 0.0363612174987793, 0.03628547286987305, 0.03642569732666016, 0.03630694580078125, 0.03640422439575195, 0.03630080032348633, 0.036431873321533206, 0.03633663940429688, 0.03640422439575195, 0.0362977294921875, 0.03634483337402344, 0.036307968139648435, 0.036501502990722655, 0.0362762222290039, 0.03638169479370117, 0.03633356857299805, 0.03647999954223633, 0.03634688186645508, 0.03637350463867187, 0.03633868789672851, 0.03646262359619141, 0.03634172821044922, 0.03641753768920898, 0.03635302352905274, 0.03654560089111328, 0.03641337585449219, 0.036514816284179685, 0.0364400634765625, 0.03647795104980469, 0.03643904113769531, 0.03655475234985352, 0.03644313430786133, 0.036620288848876956, 0.03646156692504883, 0.036450336456298825, 0.03637961578369141, 0.0365219841003418, 0.03645439910888672, 0.03650048065185547, 0.036410369873046876, 0.03666435241699219, 0.036446174621582034, 0.036575263977050784, 0.03641238403320313, 0.03650969696044922, 0.036408321380615234, 0.03663872146606445, 0.03671449661254883, 0.036563007354736325, 0.13556729125976563, 0.0362762222290039, 0.03630284881591797, 0.036229118347167966, 0.036287551879882814, 0.03623110580444336, 0.036383743286132815, 0.03626598358154297, 0.0362977294921875, 0.03626803207397461, 0.03638886260986328, 0.03626291275024414, 0.036291584014892575, 0.0362874870300293, 0.03635302352905274, 0.036357120513916014, 0.03630182266235352, 0.03627110290527344, 0.03639910507202149, 0.036318206787109376, 0.03638579177856445, 0.03629875183105469, 0.03647180938720703, 0.03640115356445312, 0.036370464324951175, 0.03629974365234375, 0.03641856002807617, 0.036359169006347655, 0.036400127410888675, 0.03635200119018555, 0.03640115356445312, 0.03631513595581055, 0.03639295959472656, 0.03635200119018555, 0.03638272094726563, 0.036318206787109376, 0.03638681411743164, 0.03632128143310547, 0.03639910507202149, 0.03638476943969727, 0.03640217590332031, 0.03635302352905274, 0.03644313430786133, 0.03645951843261719, 0.036450302124023434, 0.03639910507202149, 0.03645849609375, 0.036498432159423826, 0.036511745452880856, 0.03646771240234375, 0.03654553604125976, 0.036441089630126954, 0.03652403259277344, 0.03638169479370117, 0.036485118865966795, 0.036416511535644534, 0.036511745452880856, 0.03641958236694336, 0.03653222274780273, 0.03638886260986328, 0.03654348754882813, 0.036485118865966795, 0.036729854583740236, 0.13590835571289062, 0.036152320861816405, 0.0362158088684082, 0.03620761489868164, 0.036225025177001956, 0.03625574493408203, 0.03640217590332031, 0.03624857711791992, 0.03629056167602539, 0.03624857711791992, 0.036345855712890625, 0.03624038314819336, 0.03643392181396484, 0.03623833465576172, 0.0362874870300293, 0.03629056167602539, 0.03628441619873047, 0.03624345779418945, 0.036383743286132815, 0.03624857711791992, 0.03634175872802734, 0.036359169006347655, 0.03642367935180664, 0.03636633682250977, 0.036348926544189454, 0.0362874870300293, 0.03641856002807617, 0.03627008056640625, 0.03639295959472656, 0.03633049774169922, 0.036393985748291016, 0.036340736389160154, 0.03644316864013672, 0.03637039947509765, 0.0363765754699707, 0.036299774169921875, 0.03642777633666992, 0.036354049682617184, 
0.03643904113769531, 0.03643494415283203, 0.03647283172607422, 0.03668070220947266, 0.037223423004150394, 0.03657321548461914, 0.036579296112060546, 0.03645337677001953, 0.03649126434326172, 0.036413440704345705, 0.03647283172607422, 0.0363612174987793, 0.036506622314453126, 0.036441089630126954, 0.036490238189697266, 0.036393985748291016, 0.03644723129272461, 0.036431873321533206, 0.03647590255737305, 0.03640627288818359, 0.03650867080688477, 0.03642367935180664, 0.036547584533691405, 0.03643699264526367, 0.03651686477661133, 0.13570252990722656, 0.03622092819213867, 0.036206592559814454, 0.03616358566284179, 0.03624652862548828, 0.03619635009765625, 0.03626700973510742, 0.036291584014892575, 0.03628953552246094, 0.036208641052246096, 0.036307968139648435, 0.036237342834472656, 0.036375518798828124, 0.036326400756835936, 0.036391937255859375, 0.03624959945678711, 0.03625164794921875, 0.0362690544128418, 0.03629260635375976, 0.03621887969970703, 0.03631411361694336, 0.03622604751586914, 0.03634995269775391, 0.0362762222290039, 0.03638988876342773, 0.036279296875, 0.03639807891845703, 0.03629568099975586, 0.03636019134521484, 0.03628851318359375, 0.036397056579589845, 0.0363694076538086, 0.03644723129272461, 0.03631923294067383, 0.03636019134521484, 0.036413440704345705, 0.03640729522705078, 0.036375553131103515, 0.03644416046142578, 0.03671039962768555, 0.036550655364990234, 0.036354049682617184, 0.036429824829101565, 0.03633152008056641, 0.03645542526245117, 0.036390975952148436, 0.03646150588989258, 0.036435966491699216, 0.03644723129272461, 0.0363765754699707, 0.03647590255737305, 0.03637145614624023, 0.03645849609375, 0.036395008087158204, 0.03648819351196289, 0.03644518280029297, 0.03657932662963867, 0.03674009704589844, 0.03700838470458984, 0.036587520599365236, 0.03657932662963867, 0.036429824829101565, 0.036582462310791014, 0.13591033935546876, 0.036274177551269535, 0.036296703338623046, 0.03617491149902344, 0.03623827362060547, 0.036219905853271485, 0.03631206512451172, 0.0363059196472168, 0.03630080032348633, 0.03623321533203125, 0.036424705505371094, 0.03636326217651367, 0.03638681411743164, 0.03627212905883789, 0.036318206787109376, 0.0362608642578125, 0.036310016632080076, 0.03623628616333008, 0.03633868789672851, 0.036466686248779294, 0.03648921585083008, 0.03634995269775391, 0.03639807891845703, 0.036340736389160154, 0.03644313430786133, 0.03630080032348633, 0.036431873321533206, 0.03633561706542969, 0.03649228668212891, 0.036441089630126954, 0.03638784027099609, 0.0363397102355957, 0.036393985748291016, 0.03635609436035156, 0.03642879867553711, 0.03638988876342773, 0.03642060852050781, 0.0363059196472168, 0.03639603042602539, 0.036383743286132815, 0.03642265701293945, 0.0362874870300293, 0.036421630859375, 0.03631718444824219, 0.03645337677001953, 0.03642060852050781, 0.03647385787963867, 0.036395008087158204, 0.03647385787963867, 0.036350975036621096, 0.03648716735839844, 0.03634175872802734, 0.036618240356445314, 0.0363765754699707, 0.0365291519165039, 0.036446208953857424, 0.0365588493347168, 0.03640729522705078, 0.036582401275634766, 0.03638579177856445, 0.036600894927978515, 0.03641644668579101, 0.03663568115234375, 0.135990234375, 0.0362608642578125, 0.03622604751586914, 0.036149246215820316, 0.036278270721435545, 0.03627008056640625, 0.03632844924926758, 0.03861913681030273, 0.03653734588623047, 0.0362874870300293, 0.03628236770629883, 0.036209663391113284, 0.03628441619873047, 0.03632844924926758, 0.03628543853759766, 0.036318206787109376, 0.036310016632080076, 
0.03625983810424805, 0.03637247848510742, 0.03624652862548828, 0.036347904205322266, 0.03623526382446289, 0.03630694580078125, 0.036327423095703124, 0.036307968139648435, 0.036275199890136715, 0.036367359161376955, 0.03625267028808594, 0.036410369873046876, 0.036324352264404294, 0.036380672454833986, 0.036332542419433594, 0.03638988876342773, 0.03630899047851562, 0.03637145614624023, 0.036276256561279294, 0.03638166427612305, 0.036313087463378906, 0.03640934371948242, 0.036357120513916014, 0.036431873321533206, 0.03638169479370117, 0.036431873321533206, 0.03640524673461914, 0.03642777633666992, 0.036441089630126954, 0.03645644760131836, 0.036400127410888675, 0.03646361541748047, 0.03638886260986328, 0.03645849609375, 0.036364288330078126, 0.036514816284179685, 0.03639091110229492, 0.03646771240234375, 0.03649945449829101, 0.03649331283569336, 0.03637964630126953, 0.03653843307495117, 0.03643078231811524, 0.03649740982055664, 0.03644211196899414, 0.036620288848876956, 0.13562265014648436, 0.03616767883300781, 0.03624038314819336, 0.03623833465576172, 0.036313087463378906, 0.03627724838256836, 0.036296703338623046, 0.03633561706542969, 0.036326400756835936, 0.03625062561035156, 0.036342784881591796, 0.036316158294677735, 0.036397056579589845, 0.036280319213867186, 0.0363059196472168, 0.03632844924926758, 0.036416511535644534, 0.03628851318359375, 0.036318206787109376, 0.03624448013305664, 0.03633868789672851, 0.0362608642578125, 0.036350975036621096, 0.036334590911865236, 0.036391937255859375, 0.03628236770629883, 0.03638169479370117, 0.03627212905883789, 0.03632844924926758, 0.03629363250732422, 0.03647078323364258, 0.03645951843261719, 0.03647999954223633, 0.03644518280029297, 0.036468734741210936, 0.036466686248779294, 0.03645439910888672, 0.0364031982421875, 0.03638784027099609, 0.03636025619506836, 0.036504512786865236, 0.03647180938720703, 0.036483070373535154, 0.03644934463500977, 0.03663148880004883, 0.036466686248779294, 0.03727667236328125, 0.03662847900390625, 0.03653324890136719, 0.036441089630126954, 0.0365588493347168, 0.03643904113769531, 0.03647488021850586, 0.036452350616455076, 0.03672883224487305, 0.03656294250488281, 0.036555774688720705, 0.036511745452880856, 0.03660800170898437, 0.036452350616455076, 0.03658137512207031, 0.03649228668212891, 0.03656499099731445, 0.13564927673339844, 0.03618201446533203, 0.03623635101318359, 0.03616044616699219, 0.036245502471923825, 0.0362239990234375, 0.036296703338623046, 0.03638476943969727, 0.036348926544189454, 0.03622809600830078, 0.03628851318359375, 0.036206592559814454, 0.036310016632080076, 0.036307968139648435, 0.03627008056640625, 0.03626598358154297, 0.036395008087158204, 0.036326400756835936, 0.036318206787109376, 0.03632025527954102, 0.036506622314453126, 0.03630899047851562, 0.03634380722045898, 0.03630182266235352, 0.03649126434326172, 0.03635200119018555, 0.03635609436035156, 0.03627008056640625, 0.036395008087158204, 0.0362977294921875, 0.036395008087158204, 0.036362239837646484, 0.036342784881591796, 0.036337665557861325, 0.03643801498413086, 0.03631718444824219, 0.036421630859375, 0.036318206787109376, 0.03638681411743164, 0.03637247848510742, 0.03642265701293945, 0.03629056167602539, 0.03644825744628906, 0.036313087463378906, 0.036424705505371094, 0.0363581428527832, 0.03648819351196289, 0.0365588493347168, 0.03657727813720703, 0.03638886260986328, 0.03649740982055664, 0.03644211196899414, 0.03659571075439453, 0.036468734741210936, 0.036566017150878906, 0.03651686477661133, 0.03659571075439453, 
0.036501502990722655, 0.036618240356445314, 0.03649228668212891, 0.0366376953125, 0.036452350616455076, 0.03662847900390625, 0.1357506561279297, 0.036208641052246096, 0.03622809600830078, 0.0362690544128418, 0.03670937728881836, 0.03640422439575195, 0.036393985748291016, 0.0363694076538086, 0.03634175872802734, 0.036327423095703124, 0.036383743286132815, 0.03631718444824219, 0.0363694076538086, 0.03638476943969727, 0.03637350463867187, 0.03638272094726563, 0.03635200119018555, 0.036332542419433594, 0.03638169479370117, 0.036334590911865236, 0.03639295959472656, 0.03629363250732422, 0.03639807891845703, 0.036606975555419925, 0.03640934371948242, 0.036359169006347655, 0.03647078323364258, 0.036334590911865236, 0.03645849609375, 0.03632844924926758, 0.03646464157104492, 0.03679948806762695, 0.036604927062988284, 0.03640217590332031, 0.03642265701293945, 0.03640524673461914, 0.03644416046142578, 0.03640524673461914, 0.036557823181152346, 0.0364769287109375, 0.03648921585083008, 0.036446208953857424, 0.03648716735839844, 0.03645644760131836, 0.0365219841003418, 0.0364400634765625, 0.036512832641601566, 0.03652703857421875, 0.03654553604125976, 0.03638579177856445, 0.03648716735839844, 0.03638988876342773, 0.036587520599365236, 0.03645542526245117, 0.03650969696044922, 0.03640627288818359, 0.03658444976806641, 0.036400127410888675, 0.03651686477661133, 0.036424705505371094, 0.03655168151855469, 0.036393985748291016, 0.03654246520996094]",tokens/s,26.440033194440694,,,,, -bfloat16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +bfloat16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -3884,7 +3884,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-664e9543-15cb221e2fbf1cf9306a6c11;ce18f8a4-ef31-4e7d-a629-e352f8991247) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe700-4d2ddac64adf3a960f04e97b;979c0886-be8c-4069-96a4-62ced47af4a1) Repository Not Found for url: https://huggingface.co/i/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -3982,7 +3982,7 @@ ImportError: This modeling file requires the following packages that were not fo ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.0,,,MB,1262.280704,3045.588992,0.0,2399.141888,2279.02464,s,10,0.26803974342346193,0.026803974342346194,0.0008872097979867859,0.02658950424194336,0.027117384910583495,0.028239908695220944,0.029137927722930907,"[0.029362432479858397, 0.026646303176879882, 0.025867424011230468, 0.026602527618408204, 0.026576480865478515, 0.026551040649414062, 0.02645132827758789, 0.026464672088623048, 0.026867935180664063, 0.026649599075317384]",tokens/s,9550.822453801526,kWh,3.082963249956568e-07,1.6892854569618973e-07,1.2088724948749882e-06,1.6860973655668348e-06,tokens/kWh,151829903.3187431,MB,1262.280704,3045.588992,0.0,2399.141888,2279.0272,s,10,15.70307958984375,1.570307958984375,0.01128738515553608,1.5718734741210938,1.580663903808594,1.580894451904297,1.5810788903808592,"[1.567904052734375, 1.56319921875, 1.5758428955078125, 1.567379150390625, 1.5806126708984376, 1.5433612060546875, 1.580206787109375, 1.563749267578125, 1.581125, 1.5796993408203126]",tokens/s,40.119519002340404,kWh,1.8042147945509187e-05,9.887089594834605e-06,3.866046495612481e-05,6.658970249646861e-05,tokens/kWh,946092.2280489393,,s,629,15.915335668563834,0.025302600426969545,0.0032474409372474896,0.025011199951171875,0.025325159072875976,0.025517874908447265,0.05186424896240235,"[0.02527846336364746, 0.02408243179321289, 0.024011775970458983, 0.023972864151000976, 0.023982080459594726, 0.023946239471435548, 0.024259584426879883, 0.023997440338134765, 0.023938047409057618, 0.023942144393920898, 0.024162303924560546, 0.024039424896240235, 0.024019968032836913, 0.02411520004272461, 0.024186880111694335, 0.024184831619262694, 0.024024063110351563, 0.024056831359863282, 0.023998464584350586, 0.024804351806640625, 0.024863744735717775, 0.024839168548583986, 0.025450496673583983, 0.025654272079467775, 0.025636863708496094, 0.024983552932739257, 0.024997888565063478, 0.024921087265014647, 0.025003007888793945, 0.02493235206604004, 0.025284608840942382, 0.026445823669433592, 0.02552524757385254, 0.025208831787109375, 0.025007104873657225, 0.02510438346862793, 0.025204736709594725, 0.02574131202697754, 0.02515558433532715, 0.025396223068237304, 0.025199615478515625, 0.024928255081176756, 0.025037824630737306, 0.025242624282836915, 0.024870912551879884, 0.02510233688354492, 0.025449472427368162, 0.02551603126525879, 
0.024990751266479493, 0.02501321601867676, 0.025166847229003905, 0.025240575790405274, 0.02514739227294922, 0.025082880020141602, 0.02554982376098633, 0.02502348709106445, 0.02491494369506836, 0.02512179183959961, 0.025347072601318358, 0.024994815826416016, 0.02497331237792969, 0.024953855514526366, 0.0523581428527832, 0.02492518424987793, 0.024698879241943358, 0.024224767684936522, 0.024817663192749022, 0.024937471389770507, 0.024987680435180664, 0.025025503158569336, 0.024814592361450196, 0.025164800643920897, 0.024839168548583986, 0.025011199951171875, 0.02471731185913086, 0.024242176055908202, 0.024130559921264647, 0.024374271392822267, 0.024225791931152343, 0.02422684860229492, 0.02419708824157715, 0.024233983993530273, 0.02434764862060547, 0.024209407806396483, 0.024606719970703125, 0.025027584075927735, 0.02550067138671875, 0.024970239639282226, 0.02493337631225586, 0.025020416259765626, 0.025166847229003905, 0.024970239639282226, 0.02494259262084961, 0.024964096069335938, 0.02512076759338379, 0.025218048095703126, 0.025241600036621094, 0.026783775329589844, 0.02593276786804199, 0.02533171272277832, 0.02512384033203125, 0.024308736801147462, 0.024182783126831055, 0.023976959228515626, 0.024236032485961914, 0.024078336715698243, 0.02408243179321289, 0.024251392364501953, 0.02405171203613281, 0.024066047668457033, 0.024048639297485352, 0.024980480194091798, 0.025074687957763672, 0.025191423416137695, 0.0250849609375, 0.02521494483947754, 0.025069568634033205, 0.025251840591430662, 0.025076736450195314, 0.024937471389770507, 0.025033727645874023, 0.024986623764038086, 0.025056320190429686, 0.02503059196472168, 0.025047040939331053, 0.05121535873413086, 0.02489958381652832, 0.025034751892089844, 0.02515456008911133, 0.024936447143554686, 0.025058303833007813, 0.02492313575744629, 0.02508902359008789, 0.024851455688476562, 0.0248985595703125, 0.024830976486206056, 0.024938495635986328, 0.025091072082519532, 0.025020416259765626, 0.025209856033325196, 0.025207807540893554, 0.02503167915344238, 0.025000959396362304, 0.024904703140258787, 0.02492416000366211, 0.02490060806274414, 0.024854528427124024, 0.02508185577392578, 0.02503987121582031, 0.024840192794799806, 0.024913919448852538, 0.024919040679931642, 0.025133056640625, 0.02489958381652832, 0.024937471389770507, 0.024978431701660156, 0.025057279586791992, 0.024958976745605467, 0.024997888565063478, 0.02508185577392578, 0.025187328338623048, 0.025030656814575194, 0.024986623764038086, 0.025082880020141602, 0.02491494369506836, 0.025196544647216795, 0.024987648010253907, 0.025006080627441408, 0.02490982437133789, 0.02498252868652344, 0.02506547164916992, 0.025011199951171875, 0.025111551284790038, 0.025028608322143556, 0.02490572738647461, 0.025050111770629883, 0.025011199951171875, 0.024922111511230468, 0.024975360870361327, 0.02492620849609375, 0.025093120574951173, 0.024970239639282226, 0.025049087524414062, 0.025192447662353516, 0.025093120574951173, 0.025061376571655275, 0.025015296936035155, 0.025158655166625975, 0.05207040023803711, 0.02484223937988281, 0.025009151458740234, 0.024770559310913084, 0.02409984016418457, 0.02430668830871582, 0.02430975914001465, 0.024188928604125977, 0.02411110305786133, 0.02431692886352539, 0.024376352310180663, 0.024242143630981445, 0.024135679244995118, 0.024191999435424806, 0.02415718460083008, 0.024276992797851563, 0.024401920318603516, 0.025268224716186522, 0.025027584075927735, 0.024971263885498047, 0.025034751892089844, 0.02508188819885254, 0.025491424560546875, 0.026066944122314452, 
0.025316383361816405, 0.02499171257019043, 0.02492313575744629, 0.02498252868652344, 0.024964096069335938, 0.024996864318847657, 0.02498252868652344, 0.025038848876953124, 0.02494054412841797, 0.024984575271606444, 0.025013248443603517, 0.02506547164916992, 0.024983552932739257, 0.025064512252807616, 0.024803264617919922, 0.02508799934387207, 0.02485043144226074, 0.02492313575744629, 0.024843263626098632, 0.024844287872314453, 0.024928255081176756, 0.024887327194213868, 0.025060319900512697, 0.025034751892089844, 0.024988672256469727, 0.024913919448852538, 0.0251013126373291, 0.025159679412841796, 0.02491801643371582, 0.024959999084472655, 0.024922111511230468, 0.02507161521911621, 0.025042943954467774, 0.025409536361694338, 0.025070592880249022, 0.02511769676208496, 0.02508902359008789, 0.024999935150146483, 0.02498150444030762, 0.052120609283447264, 0.024907743453979492, 0.02495078468322754, 0.0248985595703125, 0.024975391387939454, 0.025040864944458008, 0.02492620849609375, 0.02505936050415039, 0.024880096435546874, 0.024878080368041993, 0.02458527946472168, 0.024750015258789063, 0.025052160263061524, 0.025447423934936524, 0.025183231353759765, 0.02508185577392578, 0.025767936706542968, 0.0253439998626709, 0.025091072082519532, 0.02508697509765625, 0.0249487361907959, 0.02527539253234863, 0.025648128509521483, 0.025867263793945314, 0.025367551803588868, 0.025151487350463866, 0.02506342315673828, 0.024945663452148437, 0.024979455947875977, 0.025026559829711914, 0.02509619140625, 0.025057279586791992, 0.024816640853881834, 0.025000959396362304, 0.02513817596435547, 0.025078784942626952, 0.025143295288085937, 0.025221120834350585, 0.025166847229003905, 0.02517913627624512, 0.024930303573608398, 0.02488115119934082, 0.025034751892089844, 0.024977407455444335, 0.024930303573608398, 0.024992767333984374, 0.02506547164916992, 0.024975360870361327, 0.02507980728149414, 0.025185279846191407, 0.02502348709106445, 0.02535424041748047, 0.025074687957763672, 0.02494156837463379, 0.025169919967651368, 0.02510438346862793, 0.025042943954467774, 0.025078784942626952, 0.02514022445678711, 0.02502348709106445, 0.02495078468322754, 0.02503167915344238, 0.025042943954467774, 0.05189836883544922, 0.0249036808013916, 0.024863807678222657, 0.02485958480834961, 0.024105983734130858, 0.024183807373046876, 0.024022016525268555, 0.024174591064453126, 0.024414207458496092, 0.024164352416992187, 0.02405887985229492, 0.024061952590942383, 0.024351743698120116, 0.02408038330078125, 0.024187904357910156, 0.024135679244995118, 0.024163328170776367, 0.024035327911376952, 0.02406707191467285, 0.024069120407104492, 0.02405580711364746, 0.024071168899536134, 0.02409984016418457, 0.024062976837158204, 0.024188928604125977, 0.024030208587646484, 0.02405887985229492, 0.02386534309387207, 0.023981056213378905, 0.024089599609375, 0.024024063110351563, 0.02410393524169922, 0.024161279678344725, 0.023974912643432617, 0.024467456817626954, 0.024985599517822265, 0.025022464752197264, 0.02491801643371582, 0.02551910400390625, 0.025252864837646483, 0.0248668155670166, 0.025037824630737306, 0.024959999084472655, 0.024855552673339845, 0.025095232009887697, 0.024868799209594727, 0.024862720489501954, 0.024871936798095705, 0.02493337631225586, 0.02515558433532715, 0.025158655166625975, 0.024337408065795898, 0.0241530876159668, 0.024114175796508788, 0.024240127563476564, 0.024143871307373048, 0.025028608322143556, 0.024922111511230468, 0.024980480194091798, 0.024993791580200195, 0.02490163230895996, 0.024852479934692383, 
0.024860671997070313, 0.051776512145996094, 0.026083391189575197, 0.02483705520629883, 0.02497331237792969, 0.02491801643371582, 0.025012224197387696, 0.02494259262084961, 0.024971263885498047, 0.02495692825317383, 0.025011199951171875, 0.02494156837463379, 0.02489958381652832, 0.02493337631225586, 0.02491596794128418, 0.02490982437133789, 0.02510233688354492, 0.02515660858154297, 0.02509619140625, 0.02509004783630371, 0.02487603187561035, 0.0251463680267334, 0.02513408088684082, 0.02487295913696289, 0.024979455947875977, 0.024990720748901366, 0.02508697509765625, 0.025340927124023437, 0.02496614456176758, 0.025057279586791992, 0.025027584075927735, 0.025028608322143556, 0.02511359977722168, 0.025034751892089844, 0.025161727905273438, 0.024970239639282226, 0.0243507194519043, 0.0243507194519043, 0.02431590461730957, 0.024653823852539062, 0.025184255599975586, 0.02519862365722656, 0.024940511703491212, 0.02506342315673828, 0.025074687957763672, 0.025407487869262696, 0.024989696502685548, 0.025020416259765626, 0.025586687088012695, 0.025421823501586914, 0.027659263610839844, 0.02534604835510254, 0.025019392013549805, 0.025203712463378908, 0.02502556800842285, 0.025029600143432616, 0.02508799934387207, 0.025186304092407227, 0.025024511337280272, 0.0249354248046875, 0.02533478355407715, 0.025038848876953124, 0.025000959396362304, 0.02493440055847168, 0.05200083160400391, 0.02500499153137207, 0.024988672256469727, 0.024877056121826172, 0.024807424545288087, 0.02491494369506836, 0.024809471130371095, 0.024913919448852538, 0.024787967681884765, 0.024930303573608398, 0.025018367767333984, 0.024962047576904296, 0.024837120056152344, 0.024896511077880858, 0.024830976486206056, 0.025092096328735353, 0.02507366371154785, 0.024912895202636717, 0.02487603187561035, 0.024919040679931642, 0.024896511077880858, 0.02490982437133789, 0.024832000732421877, 0.02508799934387207, 0.02503987121582031, 0.024912895202636717, 0.024870912551879884, 0.024954879760742187, 0.024739839553833007, 0.024065023422241212, 0.024066047668457033, 0.02427903938293457, 0.024172544479370117, 0.02404351997375488, 0.02411929512023926, 0.024010751724243166, 0.024163328170776367, 0.02409267234802246, 0.024155136108398437, 0.024177663803100585, 0.02413670349121094, 0.024152063369750978, 0.024061952590942383, 0.024022016525268555, 0.024181760787963868, 0.025453567504882812, 0.025448448181152345, 0.025057279586791992, 0.025185279846191407, 0.025261056900024413, 0.02515558433532715, 0.025127935409545898, 0.02507366371154785, 0.02529177665710449, 0.02549452781677246, 0.02553548812866211, 0.025379840850830077, 0.025164800643920897, 0.02532352066040039, 0.025196544647216795, 0.025177087783813477, 0.025166847229003905, 0.025124864578247072, 0.05255782318115235, 0.025229312896728515, 0.02508902359008789, 0.025656320571899413, 0.025281536102294923, 0.025242624282836915, 0.026050559997558592, 0.025387008666992186, 0.02508083152770996, 0.025227264404296876, 0.0251146240234375, 0.02509823989868164, 0.025157632827758788, 0.025208831787109375, 0.025290752410888673, 0.025226240158081056, 0.02516275215148926, 0.025351167678833008, 0.02549862480163574, 0.024993791580200195, 0.025001983642578125, 0.025025535583496093, 0.02498252868652344, 0.025069568634033205, 0.025054208755493163, 0.02505625534057617, 0.02508595275878906, 0.02511769676208496, 0.024995840072631836, 0.02508083152770996, 0.02510233688354492, 0.025029632568359376, 0.024968255996704103, 0.02517190361022949, 0.02510745620727539, 0.025036863327026367, 0.025021375656127928, 
0.025236480712890624, 0.025203712463378908, 0.02513715171813965, 0.025225248336791992, 0.025107423782348634, 0.025290752410888673, 0.02506547164916992, 0.02511769676208496, 0.025264127731323242, 0.025033727645874023, 0.023986175537109376, 0.024232959747314452, 0.024037376403808593, 0.023983104705810547, 0.02505625534057617, 0.02510438346862793, 0.025068544387817384, 0.024814592361450196, 0.025033727645874023, 0.025059328079223633, 0.025029632568359376, 0.025571327209472656, 0.02512588882446289, 0.025124864578247072, 0.025083904266357423, 0.025265151977539063, 0.05209395217895508, 0.02475212860107422, 0.024958976745605467, 0.024977407455444335, 0.024992767333984374, 0.024928255081176756, 0.024954879760742187, 0.025021440505981447, 0.025052160263061524, 0.02492416000366211, 0.02503167915344238, 0.025116672515869142, 0.025134111404418947, 0.025076736450195314, 0.025121759414672852, 0.025250816345214845, 0.025054208755493163, 0.025012224197387696, 0.024810495376586913, 0.0251463680267334, 0.02472038459777832, 0.02509004783630371, 0.02550579261779785, 0.02527539253234863, 0.02488934326171875, 0.02507776069641113, 0.025366527557373047, 0.025151487350463866, 0.024987648010253907, 0.02570035171508789, 0.025272319793701172, 0.02491801643371582, 0.025188352584838865, 0.024176639556884767, 0.02426982307434082, 0.024205312728881836, 0.02405068778991699, 0.02403945541381836, 0.02415203285217285, 0.02573311996459961, 0.025563135147094726, 0.025259008407592775, 0.025394176483154295, 0.025249792098999024, 0.025178112030029298, 0.025214975357055663, 0.025247743606567383, 0.025359359741210938, 0.025126911163330077, 0.025214975357055663, 0.025193471908569336, 0.02509823989868164, 0.025426944732666015, 0.02524569511413574, 0.025322496414184572, 0.025362432479858397, 0.025199615478515625, 0.0251463680267334, 0.025264127731323242, 0.02531123161315918, 0.025229312896728515, 0.02523750305175781, 0.02529484748840332]",tokens/s,39.52162952129299,,,,, -bfloat16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,v,v,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +bfloat16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,v,v,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -4021,7 +4021,7 @@ Traceback (most recent call last): 
hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d4672-7b4e8d1d7134e7a50b42936a;28b17d84-d066-4096-bc24-1dd1b2ed2ca3) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe9ca-09b18806715900763d51c643;4046ba54-c58f-4dfe-a681-57df6ae04578) Repository Not Found for url: https://huggingface.co/v/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -4092,7 +4092,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e950e-467dc5cd6fd519875ac9db19;673a82df-c782-4327-97f6-580f68b9ac62) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe6cb-029b4e8a345a93f8686025c5;1103f82d-223b-4de1-8cc0-408fc38900cb) Repository Not Found for url: https://huggingface.co/M/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -4162,7 +4162,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e95e2-7f695539648f89995c32c840;af56c2a0-38d4-4ff3-8554-8fcf76505d05) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe7a4-7ecdb56e23bd90ef05f0334f;ec038a9d-7a93-4ea0-b1a9-f0d49e022025) Repository Not Found for url: https://huggingface.co/r/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -4232,7 +4232,7 @@ ChildProcessError: Traceback (most recent call last): torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -bfloat16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +bfloat16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -4261,7 +4261,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664d486e-6ef13fd46475492367dac5c8;49e6d498-70fb-44eb-98bd-f46c42039efe) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664febe4-0f1314a44d9731e36181c9ff;17edd0ed-eac6-467b-b824-7868364016e1) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json. @@ -4346,7 +4346,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e961b-720f6ab64f765a3b06c332b6;d6c73504-7ea4-4a0d-bb5f-f28e5dc3a68f) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe7d9-4f77ff98498dcc8c749e9ebe;17ea13e3-dc3e-40e3-930f-da50e59c2e42) Repository Not Found for url: https://huggingface.co/a/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -4408,7 +4408,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664e8fc2-51592a67247c79696e736769;0fed2ac7-8661-458b-87a4-b7617c47472e) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664fe15c-3bb5ffd21c98587d21755fe9;9e546aef-a6a2-40b2-99cd-66a5137f584f) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json. @@ -4534,7 +4534,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e95ad-49f0a95132aa170d424571d7;73c4d416-2dbe-44a9-a153-8763d9091fe7) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe76f-7c30e7d27126915134e829c7;a4be9409-3942-4de7-866b-b321ee0ae5d8) Repository Not Found for url: https://huggingface.co/t/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -4679,7 +4679,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e9063-59d5a20e2c97052f476e064f;fd1c420d-9b38-408f-b67d-3b89a432471e) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe206-095b50c57c5b125a31d1f971;197846f8-32d9-4235-94ec-6126ed1ee831) Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -4749,7 +4749,7 @@ torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 268.00 MiB. 
G ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-4.5B,,cuda,0,42,,,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,d35829e539df8480b726c647eeabf91e41eae047,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,MB,1384.71424,11335.630848,0.0,10689.183744,9357.549568,s,10,0.5806327590942382,0.05806327590942382,0.0006150812332001308,0.05790153503417969,0.05850998077392578,0.05911591796875,0.059600667724609376,"[0.05972185516357422, 0.057774398803710936, 0.057828575134277346, 0.05752217483520508, 0.05797449493408203, 0.05802070236206055, 0.058375328063964844, 0.05752883148193359, 0.05764361572265625, 0.058242782592773434]",tokens/s,4408.983061847024,kWh,6.824863444190677e-07,3.739691475481056e-07,3.1574546332414543e-06,4.213910125208628e-06,tokens/kWh,60751177.02879949,MB,1385.04192,11335.630848,0.0,10689.183744,9490.407424,s,10,19.066519165039065,1.9066519165039064,0.004705715644958097,1.9076243896484375,1.912862060546875,1.91339599609375,1.9138231445312501,"[1.900245361328125, 1.901219482421875, 1.90844189453125, 1.9002161865234375, 1.912743408203125, 1.913929931640625, 1.9088603515625, 1.9046826171875, 1.906806884765625, 1.909373046875]",tokens/s,33.042213659805654,kWh,2.2234201332884884e-05,1.2183572898631048e-05,8.273878075036202e-05,0.00011715655498187796,tokens/kWh,537741.998386219,,s,629,19.556330526351925,0.031091145510893366,0.006896125826306854,0.030336000442504882,0.030623960876464844,0.030774271774291994,0.08797329864501953,"[0.030104576110839845, 0.029843456268310548, 0.029835296630859376, 0.029777887344360352, 0.02991923141479492, 0.02980147171020508, 0.03020185661315918, 0.03019878387451172, 0.030243839263916016, 0.029743104934692382, 0.030219263076782226, 0.02981990432739258, 0.029857791900634766, 0.030270463943481447, 0.030512128829956055, 0.029892608642578124, 0.030327808380126952, 0.030716928482055664, 0.03100160026550293, 0.030443519592285157, 0.030327808380126952, 0.029813760757446288, 0.030055423736572266, 0.030131200790405273, 0.030311424255371092, 0.030295040130615233, 0.030110719680786133, 0.029944831848144532, 0.029944831848144532, 0.02992233657836914, 0.030110687255859376, 0.029852672576904295, 0.030049280166625978, 0.03000422477722168, 0.030019584655761718, 0.02992742347717285, 0.02995712089538574, 0.029921279907226563, 0.030166015625, 0.029885440826416015, 0.029855743408203125, 0.029816831588745117, 0.030068735122680663, 0.030043136596679686, 0.03078963279724121, 0.03032678413391113, 0.030628864288330077, 0.030362688064575195, 0.030394304275512696, 0.030437376022338865, 0.02999398422241211, 0.029932607650756837, 0.030230464935302733, 0.031065088272094726, 0.03082956886291504, 0.030084096908569335, 0.030195711135864257, 0.029970432281494142, 0.030518272399902343, 0.029869056701660155, 0.02999091148376465, 0.02993561553955078, 0.08813670349121094, 0.03056947135925293, 0.030520320892333985, 0.030486528396606444, 0.03040563201904297, 0.03038310432434082, 0.030131200790405273, 0.030854143142700196, 0.02994380760192871, 0.030111743927001954, 0.02997657585144043, 
0.029742143630981446, 0.029866943359375, 0.02978099250793457, 0.0299550724029541, 0.030612480163574218, 0.029867008209228517, 0.02989676856994629, 0.029805503845214843, 0.030038015365600586, 0.030102527618408204, 0.030481407165527344, 0.029916160583496092, 0.03002060890197754, 0.0301844482421875, 0.0304005126953125, 0.02991619110107422, 0.029802528381347657, 0.029846464157104492, 0.029869056701660155, 0.029863935470581054, 0.0301711368560791, 0.029875200271606447, 0.02977689552307129, 0.03018342399597168, 0.030505983352661133, 0.029891584396362306, 0.03037900733947754, 0.03061452865600586, 0.030455808639526367, 0.03038310432434082, 0.030494720458984374, 0.029893632888793945, 0.030475263595581056, 0.03037696075439453, 0.030478368759155272, 0.03007177543640137, 0.030255104064941408, 0.030448640823364258, 0.030479360580444335, 0.030500864028930662, 0.029973503112792968, 0.029901824951171874, 0.029929567337036132, 0.03033795166015625, 0.03077120018005371, 0.030537727355957032, 0.030500864028930662, 0.029875200271606447, 0.02998784065246582, 0.02992639923095703, 0.030092287063598632, 0.02998067283630371, 0.08810189056396485, 0.029808639526367187, 0.03017932891845703, 0.030468095779418947, 0.030245887756347657, 0.030645248413085937, 0.03118396759033203, 0.030800800323486328, 0.03034316825866699, 0.030471168518066406, 0.030101503372192383, 0.030484479904174806, 0.03041996765136719, 0.029831167221069335, 0.03035852813720703, 0.030026752471923827, 0.029870080947875976, 0.030243839263916016, 0.030432256698608398, 0.030507007598876954, 0.03123404884338379, 0.03076300811767578, 0.030324735641479493, 0.03057459259033203, 0.03040563201904297, 0.03039948844909668, 0.03034316825866699, 0.030402559280395508, 0.03040460777282715, 0.030482431411743165, 0.029847583770751952, 0.030039072036743164, 0.030203840255737305, 0.030465024948120117, 0.03040358352661133, 0.030417919158935547, 0.030538751602172853, 0.030457855224609375, 0.03038412857055664, 0.030091264724731445, 0.029864992141723633, 0.02986595153808594, 0.029862911224365234, 0.030050304412841795, 0.03042099189758301, 0.030543872833251953, 0.03037491226196289, 0.03075584030151367, 0.02999193572998047, 0.030263328552246095, 0.030429216384887697, 0.03046803283691406, 0.030436351776123048, 0.02994278335571289, 0.029957183837890623, 0.030086080551147462, 0.029920255661010742, 0.029912063598632813, 0.03000217628479004, 0.030202880859375, 0.030437376022338865, 0.029944831848144532, 0.02989779281616211, 0.0878611831665039, 0.03036262321472168, 0.030328832626342773, 0.030444543838500978, 0.03037491226196289, 0.030365695953369142, 0.02981990432739258, 0.030105600357055663, 0.029897727966308595, 0.029875200271606447, 0.030061567306518554, 0.029912063598632813, 0.03035443115234375, 0.030734336853027344, 0.030491647720336915, 0.02999091148376465, 0.029920255661010742, 0.03035136032104492, 0.03036774444580078, 0.029898752212524415, 0.02988857650756836, 0.029796287536621092, 0.02994892883300781, 0.030507007598876954, 0.030398464202880858, 0.029922304153442384, 0.029891584396362306, 0.029921279907226563, 0.029900800704956054, 0.029868032455444334, 0.029894655227661132, 0.029983808517456054, 0.02984441566467285, 0.02997145652770996, 0.03033907127380371, 0.030038015365600586, 0.02993561553955078, 0.030518272399902343, 0.030499839782714845, 0.030574655532836913, 0.030612415313720703, 0.030446592330932616, 0.029875200271606447, 0.030089279174804688, 0.029985727310180663, 0.030504959106445313, 0.03040153694152832, 0.030646272659301758, 0.030414848327636718, 
0.029965311050415038, 0.030283775329589844, 0.03056230354309082, 0.030336000442504882, 0.030480384826660156, 0.030030847549438477, 0.030123008728027343, 0.0299683837890625, 0.02997248077392578, 0.03001241683959961, 0.02998374366760254, 0.030039072036743164, 0.029973472595214844, 0.029884416580200194, 0.08879513549804688, 0.03039641571044922, 0.030567424774169922, 0.03075071907043457, 0.03101798439025879, 0.030859264373779297, 0.030496768951416016, 0.030650367736816408, 0.030514175415039063, 0.030479360580444335, 0.030940160751342774, 0.030685184478759765, 0.029908992767333983, 0.03032268714904785, 0.029880319595336914, 0.030213119506835938, 0.029867008209228517, 0.029817855834960938, 0.03022336006164551, 0.03060736083984375, 0.03053260803222656, 0.03001241683959961, 0.029903871536254883, 0.030054399490356445, 0.030239744186401366, 0.030486528396606444, 0.030443519592285157, 0.030539775848388673, 0.030492671966552733, 0.030492671966552733, 0.029879295349121093, 0.03019059181213379, 0.02991923141479492, 0.02991001510620117, 0.02987014389038086, 0.030530496597290038, 0.030031871795654298, 0.029978624343872072, 0.03033087921142578, 0.03080601692199707, 0.030616575241088868, 0.030488576889038086, 0.03061043167114258, 0.030623743057250977, 0.02997555160522461, 0.02995199966430664, 0.03038003158569336, 0.03077631950378418, 0.03059507179260254, 0.029911039352416992, 0.029860864639282225, 0.03035545539855957, 0.03060223960876465, 0.03061043167114258, 0.030437376022338865, 0.030499839782714845, 0.029853696823120116, 0.03020595169067383, 0.030496768951416016, 0.030619647979736327, 0.030612480163574218, 0.030523391723632814, 0.030495744705200195, 0.08832819366455077, 0.02975948715209961, 0.029772800445556642, 0.030340095520019532, 0.030274560928344726, 0.030442495346069336, 0.03040460777282715, 0.030604288101196288, 0.030323711395263672, 0.03036262321472168, 0.030478336334228515, 0.03021414375305176, 0.03040460777282715, 0.030481407165527344, 0.030546943664550782, 0.030555135726928712, 0.030342144012451173, 0.03041996765136719, 0.030427135467529298, 0.030441471099853516, 0.03038515281677246, 0.03017728042602539, 0.030461952209472655, 0.030474239349365235, 0.030315519332885742, 0.02996428871154785, 0.029845504760742186, 0.029924352645874022, 0.0299550724029541, 0.030278656005859376, 0.030254079818725587, 0.031038463592529295, 0.031352832794189454, 0.03059916877746582, 0.030511104583740234, 0.02999193572998047, 0.03037183952331543, 0.03058073616027832, 0.030694400787353516, 0.030964736938476563, 0.030525440216064452, 0.029920255661010742, 0.030345216751098632, 0.030005247116088866, 0.029878271102905272, 0.02993971252441406, 0.030485504150390624, 0.03101388740539551, 0.030500864028930662, 0.030502912521362304, 0.030361600875854492, 0.030468095779418947, 0.03055820846557617, 0.030451711654663087, 0.030440448760986328, 0.0307640323638916, 0.030437376022338865, 0.03053670310974121, 0.03036262321472168, 0.030538751602172853, 0.030461952209472655, 0.029965311050415038, 0.030406656265258788, 0.08879933166503906, 0.029881248474121092, 0.029874176025390626, 0.029879295349121093, 0.030308351516723633, 0.030487552642822265, 0.03038515281677246, 0.030875648498535156, 0.030427135467529298, 0.029978624343872072, 0.03037900733947754, 0.029825023651123047, 0.02993561553955078, 0.030274560928344726, 0.030457855224609375, 0.030670848846435547, 0.029844480514526366, 0.03016908836364746, 0.030278656005859376, 0.03040153694152832, 0.030632959365844727, 0.030482431411743165, 0.030643199920654295, 0.03076198387145996, 
0.030515199661254884, 0.030398464202880858, 0.030402559280395508, 0.030526464462280273, 0.029900800704956054, 0.030023712158203125, 0.030428192138671876, 0.030513151168823242, 0.030415807723999023, 0.030093311309814453, 0.030382080078125, 0.030013439178466796, 0.030457855224609375, 0.03052851104736328, 0.030686208724975586, 0.030737407684326173, 0.030629888534545898, 0.03060223960876465, 0.030534656524658203, 0.031015935897827147, 0.02995916748046875, 0.02998271942138672, 0.029997055053710937, 0.030673919677734376, 0.030478336334228515, 0.030445568084716795, 0.03040460777282715, 0.030487552642822265, 0.029946943283081055, 0.030144447326660155, 0.030439424514770507, 0.030111743927001954, 0.029953023910522462, 0.029962240219116212, 0.029888511657714844, 0.030187519073486328, 0.0299550724029541, 0.03000217628479004, 0.029870080947875976, 0.08782848358154297, 0.02974515151977539, 0.030303232192993163, 0.029858848571777345, 0.02981679916381836, 0.030200832366943358, 0.030327808380126952, 0.03058278465270996, 0.03041587257385254, 0.0299182071685791, 0.02982809638977051, 0.031097856521606446, 0.0307957763671875, 0.030418943405151368, 0.03040563201904297, 0.03058585548400879, 0.030513151168823242, 0.0299233283996582, 0.029880319595336914, 0.02979532814025879, 0.029844480514526366, 0.029930496215820314, 0.029898752212524415, 0.030069759368896484, 0.029887487411499023, 0.029890623092651367, 0.030273536682128906, 0.031237056732177734, 0.03061452865600586, 0.030439424514770507, 0.029896703720092774, 0.030144512176513674, 0.029970432281494142, 0.029877248764038085, 0.02994380760192871, 0.029922304153442384, 0.02995712089538574, 0.02996326446533203, 0.03014656066894531, 0.030765056610107422, 0.03038105583190918, 0.030066688537597655, 0.03053670310974121, 0.030018560409545897, 0.030085119247436523, 0.03057459259033203, 0.030450687408447266, 0.03073945617675781, 0.03040358352661133, 0.030489599227905274, 0.03042918395996094, 0.03017420768737793, 0.03054080009460449, 0.02992639923095703, 0.029981760025024413, 0.030638015747070313, 0.030480384826660156, 0.03052441596984863, 0.03022643280029297, 0.030038015365600586, 0.02995097541809082, 0.03020595169067383, 0.030440448760986328, 0.08801689910888671, 0.029865983963012696, 0.029885440826416015, 0.029886463165283202, 0.029708288192749024, 0.02975334358215332, 0.030003200531005858, 0.030520320892333985, 0.029724672317504884, 0.02976153564453125, 0.030283775329589844, 0.030493696212768553, 0.029855743408203125, 0.029892608642578124, 0.02994175910949707, 0.030041088104248048, 0.02998886489868164, 0.0304005126953125, 0.029893632888793945, 0.030418943405151368, 0.030624832153320312, 0.030375871658325195, 0.030349311828613282, 0.030700544357299804, 0.030368831634521483, 0.030441408157348634, 0.030481407165527344, 0.030510080337524413, 0.030058496475219725, 0.030442495346069336, 0.030327808380126952, 0.030559232711791992, 0.030507007598876954, 0.030492671966552733, 0.0303687686920166, 0.030515199661254884, 0.030342144012451173, 0.030476287841796876, 0.029911039352416992, 0.03037491226196289, 0.030441471099853516, 0.03039948844909668, 0.030492671966552733, 0.030588960647583006, 0.030455808639526367, 0.030536672592163087, 0.030432256698608398, 0.030678016662597656, 0.0299233283996582, 0.029922304153442384, 0.03018854331970215, 0.030547967910766603, 0.030462976455688476, 0.030521343231201172, 0.03058892822265625, 0.030127103805541993, 0.03038515281677246, 0.030448640823364258, 0.029944831848144532, 0.030070783615112305, 0.030084096908569335, 0.03019878387451172, 
0.030457855224609375, 0.08856575775146484, 0.030151679992675783, 0.03038617515563965, 0.03033907127380371, 0.03018649673461914, 0.02976870346069336, 0.029740095138549805, 0.03007583999633789, 0.030682111740112306, 0.031228927612304686, 0.0307640323638916, 0.030611455917358397, 0.02989673614501953, 0.029823999404907226, 0.02987414360046387, 0.030040063858032227, 0.030227455139160156, 0.03056435203552246, 0.030444543838500978, 0.030546943664550782, 0.03038719940185547, 0.030490623474121094, 0.029868032455444334, 0.03017523193359375, 0.03021107292175293, 0.030465024948120117, 0.030479360580444335, 0.030458879470825196, 0.029938688278198244, 0.02994380760192871, 0.030202880859375, 0.03057561683654785, 0.03038617515563965, 0.03038924789428711, 0.030478336334228515, 0.0305797119140625, 0.030455808639526367, 0.030687231063842774, 0.030456832885742188, 0.03055001640319824, 0.030461952209472655, 0.030423040390014647, 0.030488576889038086, 0.030535680770874023, 0.03056844711303711, 0.03053673553466797, 0.030519296646118164, 0.03048137664794922, 0.030495744705200195, 0.030065664291381834, 0.029949951171875, 0.030034944534301757, 0.03022336006164551, 0.03053260803222656, 0.03038310432434082, 0.030652416229248046, 0.029973503112792968, 0.030414848327636718, 0.02987424087524414, 0.030026687622070312, 0.030011392593383788, 0.03000115203857422, 0.029936639785766602]",tokens/s,32.16349811394473,,,main,False,False -bfloat16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,B,B,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +bfloat16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,B,B,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -4788,7 +4788,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d4609-676e582f5d8dfda3317dcfec;71f94749-391d-4df8-b83e-c9e51325d6e3) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-664fe95d-7f920d3254c2dc7d22981d2c;aa8d1559-0c21-4e13-987b-ba223cb496b3) Repository Not Found for url: https://huggingface.co/B/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -4819,7 +4819,7 @@ OSError: B is not a local folder and is not a valid model identifier listed on ' If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -bfloat16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +bfloat16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -4896,7 +4896,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e9360-3892e60a372a80ab7c9d9f25;2e36b931-4306-416d-9de9-fc9ac1a76814) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe513-40492d812b7680be2a669b85;ddcc0203-ac84-4f59-9432-d1d4d6b102a4) Repository Not Found for url: https://huggingface.co/s/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -5152,7 +5152,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e92f3-72185842513dbaf1794c3d68;45f990a9-f5aa-4b37-a87f-fcb5340f9f1f) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-664fe4a9-1b92f8d0374252267a748480;4bc00013-fa3a-445d-a2ce-ff072dbe36b5) Repository Not Found for url: https://huggingface.co/m/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -5256,7 +5256,7 @@ Cannot access gated repo for url https://huggingface.co/mistralai/Mixtral-8x22B- Access to model mistralai/Mixtral-8x22B-v0.1 is restricted and you are not in the authorized list. Visit https://huggingface.co/mistralai/Mixtral-8x22B-v0.1 to ask for access. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -bfloat16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,1,1,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +bfloat16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,1,1,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -5295,7 +5295,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d4711-7f7cf5063e1c5d176b5ccaea;a1fec9e5-0a36-4760-99d6-842209a161a3) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fea72-423573b4564f272b767adce9;6da8cab4-a028-40ee-838b-99af24b337cf) Repository Not Found for url: https://huggingface.co/1/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
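Note: the 403 Forbidden and gated-repo entries (tiiuae/falcon-180B, google/gemma-2b, mistralai/Mixtral-8x22B-v0.1) are authorization failures rather than bad ids; as the OSError text above says, the fix is to pass a token, either by logging in with `huggingface-cli login` or by passing `token=`. A minimal sketch, assuming the account behind HF_TOKEN has actually been granted access:

# Authenticate once per process, or pass `token=` on each call.
import os
from huggingface_hub import login
from transformers import AutoConfig

login(token=os.environ["HF_TOKEN"])  # programmatic equivalent of `huggingface-cli login`
cfg = AutoConfig.from_pretrained("google/gemma-2b")
# per-call alternative:
cfg = AutoConfig.from_pretrained("google/gemma-2b", token=os.environ["HF_TOKEN"])

For restricted repos such as mistralai/Mixtral-8x22B-v0.1 a token alone is not enough; as the message above states, access must first be requested and granted on the model page.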
@@ -5326,7 +5326,7 @@ OSError: 1 is not a local folder and is not a valid model identifier listed on ' If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -bfloat16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,0,0,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +bfloat16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,0,0,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -5365,7 +5365,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d46a9-5c64bb6f4a5a1f9008dd63ba;fc2e43d9-ae90-4b5d-86b4-48705f600cb2) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fea02-486c0a71134a4c6f794d9b4c;fcfbc7fe-47ab-40b4-9cb3-f0973aeab748) Repository Not Found for url: https://huggingface.co/0/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
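Note: each row's config.* columns map onto optimum-benchmark's Python API, and the tracebacks show the harness entering through `Benchmark.launch(benchmark_config)`. Below is a hedged reconstruction of the recurring bfloat16-eager scenario for one valid model; the field names mirror this file's column headers and the 0.2.x API, but treat it as a sketch rather than the harness's exact script:

# Sketch: reproduce one bfloat16-eager row with optimum-benchmark 0.2.x.
from optimum_benchmark import (
    Benchmark, BenchmarkConfig, InferenceConfig, ProcessConfig, PyTorchConfig,
)

if __name__ == "__main__":  # required with the spawn start method
    benchmark_config = BenchmarkConfig(
        name="bfloat16-eager",
        launcher=ProcessConfig(device_isolation=True, device_isolation_action="kill",
                               start_method="spawn"),
        scenario=InferenceConfig(
            iterations=10, duration=10, warmup_runs=10,
            input_shapes={"batch_size": 1, "sequence_length": 256},
            generate_kwargs={"max_new_tokens": 64, "min_new_tokens": 64},
            latency=True, memory=True, energy=True,
        ),
        backend=PyTorchConfig(
            model="facebook/xglm-4.5B", device="cuda", device_ids="0",
            no_weights=True, torch_dtype="bfloat16", attn_implementation="eager",
        ),
    )
    benchmark_report = Benchmark.launch(benchmark_config)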
@@ -5397,7 +5397,7 @@ If this is a private repository, make sure to pass a token having permission to ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-6.7b,,cuda,0,42,,,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,217063f5c507ed7cc255df7e1f64c4333a0b4dfe,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,MB,1287.274496,15125.184512,0.0,14478.737408,14241.165312,s,10,0.8262053833007812,0.08262053833007812,0.00121076900145203,0.08222753524780274,0.08265813827514648,0.08445479774475097,0.08589212532043457,"[0.08625145721435547, 0.08217072296142579, 0.08225759887695312, 0.08218064117431641, 0.08218204498291015, 0.08225004577636719, 0.08220502471923828, 0.08225421142578125, 0.08225888061523437, 0.08219475555419922]",tokens/s,3098.503170933744,kWh,9.743499693431924e-07,5.338999817169797e-07,4.857622283180546e-06,6.365872234240718e-06,tokens/kWh,40214442.04032695,MB,1287.274496,15125.184512,0.0,14478.737408,14438.898176,s,10,23.88951025390625,2.3889510253906248,0.007925318159070799,2.3869569091796876,2.3999682617187497,2.4018271484375,2.4033142578124997,"[2.383995361328125, 2.386448486328125, 2.39711279296875, 2.380974365234375, 2.37921484375, 2.388448974609375, 2.39955517578125, 2.40368603515625, 2.38746533203125, 2.38260888671875]",tokens/s,26.371407086379545,kWh,2.808398980857679e-05,1.53910336679044e-05,0.00011702191966481678,0.000160496943141298,tokens/kWh,392530.8405689458,,s,629,24.591448070526134,0.03909610186093184,0.009791683602354094,0.03785324859619141,0.038254598999023434,0.03832709121704102,0.12031136047363282,"[0.03765760040283203, 0.03772927856445313, 0.037774368286132814, 0.037732318878173826, 0.03763814544677734, 0.037731361389160153, 0.037762016296386716, 0.037705726623535156, 0.03785932922363281, 0.03764326477050781, 0.03775078582763672, 0.037783615112304686, 0.03797804641723633, 0.03842047882080078, 0.038012928009033206, 0.03769036865234375, 0.03768115234375, 0.03785113525390625, 0.03768012619018555, 0.03778355026245117, 0.03778662490844727, 0.037776382446289065, 0.037994495391845705, 0.03778355026245117, 0.037746688842773435, 0.03783782577514649, 0.03771187210083008, 0.037855232238769534, 0.037855232238769534, 0.03778867340087891, 0.03791872024536133, 0.037868545532226565, 0.03785625457763672, 0.03783270263671875, 0.03786342239379883, 0.03787059020996094, 0.03777027130126953, 0.037800926208496094, 0.03788390350341797, 0.03778665542602539, 0.037865440368652345, 0.03775078582763672, 0.03777740859985351, 0.03783475112915039, 0.03782963180541992, 0.0378869743347168, 0.03796582412719727, 0.03783987045288086, 0.03789619064331055, 0.037901313781738284, 0.03782656097412109, 0.03796889495849609, 0.03785728073120117, 0.037889057159423825, 0.03794326400756836, 0.03787980651855469, 0.03788083267211914, 0.0379156494140625, 0.037868545532226565, 0.03789004898071289, 0.03781635284423828, 0.03791049575805664, 0.12029440307617187, 0.037648384094238284, 0.037703678131103514, 0.03761151885986328, 0.0376258544921875, 0.03767603302001953, 0.03767500686645508, 0.03775494384765625, 
0.037726142883300784, 0.03766067123413086, 0.037766143798828124, 0.037689342498779296, 0.037759998321533206, 0.03773235321044922, 0.03772415924072266, 0.03778252792358398, 0.037688385009765624, 0.037685184478759765, 0.03783987045288086, 0.037787647247314454, 0.03774156951904297, 0.03775692749023438, 0.037677120208740235, 0.0378223991394043, 0.03784806442260742, 0.037722110748291016, 0.03782656097412109, 0.03768627166748047, 0.03778252792358398, 0.037763072967529294, 0.03776204681396484, 0.03781631851196289, 0.03782451248168945, 0.03792793655395508, 0.03823820877075195, 0.03789209747314453, 0.03785932922363281, 0.03777433776855469, 0.03785830307006836, 0.03788185501098633, 0.03780505752563477, 0.03777433776855469, 0.03776716613769531, 0.03774569702148438, 0.03779888153076172, 0.037779457092285154, 0.03781222534179687, 0.03788288116455078, 0.037768192291259765, 0.03769139099121094, 0.03781324768066406, 0.03812659072875976, 0.03837849426269531, 0.03829248046875, 0.03834368133544922, 0.03848191833496094, 0.03822284698486328, 0.03826073455810547, 0.03828121566772461, 0.038212608337402344, 0.03830886459350586, 0.03856793594360351, 0.03826278305053711, 0.120416259765625, 0.037631999969482424, 0.037736446380615234, 0.037596160888671876, 0.03758489608764649, 0.03760332870483398, 0.03761356735229492, 0.0377262077331543, 0.03767295837402344, 0.03763711929321289, 0.03764841461181641, 0.03760329437255859, 0.03767603302001953, 0.03769343948364258, 0.03768115234375, 0.03775795364379883, 0.03783065414428711, 0.03834265518188477, 0.038199295043945314, 0.03809280014038086, 0.03811328125, 0.0380682258605957, 0.038507553100585935, 0.038306785583496095, 0.03810406494140625, 0.038073345184326174, 0.038079486846923825, 0.03810406494140625, 0.038141952514648435, 0.038056961059570314, 0.038114303588867186, 0.03821158218383789, 0.0381030387878418, 0.038145023345947264, 0.03807743835449219, 0.03820851135253906, 0.0379791374206543, 0.03781222534179687, 0.037992481231689454, 0.03818288040161133, 0.038073345184326174, 0.03809894561767578, 0.038400001525878906, 0.03832115173339844, 0.03815321731567383, 0.03824844741821289, 0.03816550445556641, 0.03827814483642578, 0.038623233795166016, 0.038193153381347655, 0.03820236968994141, 0.03818086242675781, 0.038089729309082034, 0.038147071838378906, 0.03823820877075195, 0.03833139038085937, 0.03817779159545898, 0.0382371826171875, 0.0382105598449707, 0.038254623413085935, 0.03821360015869141, 0.03820032119750977, 0.038209537506103515, 0.12031795501708985, 0.03759718322753906, 0.037648384094238284, 0.03760537719726562, 0.037571582794189456, 0.03763711929321289, 0.03763302230834961, 0.03775795364379883, 0.03769139099121094, 0.03775283050537109, 0.03767091369628906, 0.03766579055786133, 0.0377149429321289, 0.03762483215332031, 0.03773747253417969, 0.03781017684936523, 0.037644287109375, 0.03790643310546875, 0.038356990814208985, 0.03805286407470703, 0.03795667266845703, 0.03774764633178711, 0.037748737335205076, 0.03784908676147461, 0.037711902618408205, 0.03766678237915039, 0.03775078582763672, 0.037736446380615234, 0.03770470428466797, 0.03770982360839844, 0.03777433776855469, 0.03778662490844727, 0.03783782577514649, 0.03773235321044922, 0.03782451248168945, 0.03779072189331055, 0.038171646118164065, 0.03774156951904297, 0.037787647247314454, 0.03781222534179687, 0.037695518493652345, 0.037827552795410155, 0.037797889709472655, 0.03779177474975586, 0.03779068756103516, 0.037719039916992186, 0.03781119918823242, 0.03790028762817383, 0.037822463989257815, 0.0377968635559082, 
0.037862464904785155, 0.03777734375, 0.03782553482055664, 0.03779174423217774, 0.037823486328125, 0.037910526275634765, 0.03788083267211914, 0.03782457733154297, 0.03783571243286133, 0.03788185501098633, 0.03787366485595703, 0.03783168029785156, 0.03791155242919922, 0.12024217224121093, 0.03752140808105469, 0.03763926315307617, 0.03759811019897461, 0.037658687591552734, 0.037622718811035155, 0.03765862274169922, 0.03773440170288086, 0.03770470428466797, 0.037664768218994144, 0.03769651031494141, 0.037610496520996094, 0.03769651031494141, 0.037591041564941405, 0.03767295837402344, 0.03769651031494141, 0.03767398452758789, 0.03760950469970703, 0.03772003173828125, 0.03760639953613281, 0.03772927856445313, 0.037684223175048825, 0.03778252792358398, 0.03794636917114258, 0.037797889709472655, 0.03768627166748047, 0.03782758331298828, 0.037720062255859374, 0.03776204681396484, 0.03775897598266602, 0.03778047943115234, 0.03778355026245117, 0.037787647247314454, 0.03770374298095703, 0.03779065704345703, 0.037713920593261716, 0.03784908676147461, 0.03774259185791016, 0.03783782577514649, 0.03783168029785156, 0.037833728790283204, 0.03767705535888672, 0.03780198287963867, 0.03773651123046875, 0.037854145050048825, 0.037754878997802735, 0.037850112915039064, 0.03785017776489258, 0.03784185409545898, 0.03775897598266602, 0.03786751937866211, 0.03782860946655273, 0.03783993530273438, 0.03777017593383789, 0.0379791374206543, 0.03790643310546875, 0.03791360092163086, 0.03786342239379883, 0.03786137771606445, 0.037795841217041014, 0.037920768737792966, 0.03784089660644531, 0.03794432067871094, 0.12033740997314453, 0.037531646728515625, 0.03764223861694336, 0.03759001541137695, 0.03765555191040039, 0.03767295837402344, 0.03766681671142578, 0.037776382446289065, 0.03765248107910156, 0.03767295837402344, 0.03768012619018555, 0.038027263641357424, 0.037698558807373043, 0.037718017578125, 0.037700607299804685, 0.037716991424560545, 0.037722110748291016, 0.03765964889526367, 0.037700607299804685, 0.03765043258666992, 0.03768217468261719, 0.03769548797607422, 0.03770982360839844, 0.037917697906494144, 0.037789695739746096, 0.03769753646850586, 0.037935104370117184, 0.03805081558227539, 0.03806719970703125, 0.03806105422973633, 0.03812351989746094, 0.03814604949951172, 0.03818905639648437, 0.03809075164794922, 0.038160446166992185, 0.03812243270874023, 0.038168575286865236, 0.03829248046875, 0.038269950866699216, 0.037975040435791016, 0.03779481506347656, 0.037986305236816405, 0.03818188858032227, 0.038193153381347655, 0.03830374526977539, 0.03806003189086914, 0.03784908676147461, 0.03794028854370117, 0.03785004806518555, 0.037784576416015625, 0.03778972625732422, 0.037763038635253904, 0.0378787841796875, 0.03785420989990235, 0.038035457611083984, 0.03862428665161133, 0.0383037109375, 0.038209537506103515, 0.03810713577270508, 0.03789516830444336, 0.03792486572265625, 0.03785932922363281, 0.03792697525024414, 0.12031890869140625, 0.03758899307250976, 0.0375838737487793, 0.03757363128662109, 0.0377149429321289, 0.03777542495727539, 0.03768108749389648, 0.03780198287963867, 0.03770982360839844, 0.037684223175048825, 0.03771289443969727, 0.037700607299804685, 0.03769241714477539, 0.037781505584716796, 0.037991424560546876, 0.03811635208129883, 0.03810611343383789, 0.03807849502563477, 0.038070240020751954, 0.03826073455810547, 0.03833651351928711, 0.03822182464599609, 0.0381317138671875, 0.03828121566772461, 0.038042625427246096, 0.03846246337890625, 0.03809075164794922, 0.03808665466308594, 0.038073345184326174, 
0.0381399040222168, 0.03804876708984375, 0.03825459289550781, 0.03811532974243164, 0.038133792877197266, 0.03803753662109375, 0.03813267135620117, 0.03806924819946289, 0.03812460708618164, 0.03808153533935547, 0.03820640182495117, 0.03804774475097656, 0.03806617736816406, 0.038117374420166016, 0.038182910919189454, 0.0381399040222168, 0.03828736114501953, 0.03825766372680664, 0.038300704956054685, 0.038214622497558595, 0.03823110580444336, 0.038174720764160154, 0.03845727920532226, 0.0383559684753418, 0.038279232025146485, 0.03825350570678711, 0.038368255615234374, 0.038214656829833986, 0.03828224182128906, 0.03824435043334961, 0.03822694396972656, 0.03827507019042969, 0.03840716934204102, 0.03835084915161133, 0.12041932678222657, 0.03776102447509765, 0.03801599884033203, 0.03795251083374023, 0.03797401428222656, 0.038029312133789066, 0.038040576934814455, 0.03810508728027344, 0.037921791076660154, 0.037981182098388674, 0.03805286407470703, 0.03806412887573242, 0.03846451187133789, 0.03798425674438476, 0.03807846450805664, 0.03814809417724609, 0.03802624130249024, 0.03804876708984375, 0.03813683319091797, 0.03818393707275391, 0.03808870315551758, 0.03827609634399414, 0.03832524871826172, 0.038307838439941407, 0.03811532974243164, 0.038056961059570314, 0.03817062377929688, 0.03809894561767578, 0.038059009552001956, 0.03813478469848633, 0.0381102066040039, 0.038245376586914064, 0.038204414367675785, 0.03817574310302734, 0.03817062377929688, 0.03806105422973633, 0.03815423965454102, 0.038161407470703124, 0.038553600311279294, 0.038302719116210936, 0.038145023345947264, 0.038191104888916014, 0.03815935897827148, 0.038130687713623046, 0.03828121566772461, 0.03811942291259766, 0.03814604949951172, 0.03822079849243164, 0.03817062377929688, 0.03815423965454102, 0.03828121566772461, 0.03815526580810547, 0.03818700790405274, 0.038196224212646485, 0.038215679168701173, 0.03828736114501953, 0.03822182464599609, 0.03827302551269531, 0.038263809204101565, 0.03830374526977539, 0.03823513412475586, 0.038247425079345705, 0.03824332809448242, 0.12043571472167969, 0.037596160888671876, 0.03760025787353516, 0.03772927856445313, 0.03765350341796875, 0.03773747253417969, 0.03774774551391601, 0.03776713562011719, 0.03772313690185547, 0.037617664337158206, 0.03774156951904297, 0.037582847595214845, 0.037792766571044925, 0.03766886520385742, 0.03776409530639648, 0.037776382446289065, 0.03773747253417969, 0.03772415924072266, 0.037779457092285154, 0.03768115234375, 0.03796582412719727, 0.038109184265136715, 0.03815731048583984, 0.038150142669677735, 0.038163455963134765, 0.03808358383178711, 0.038166526794433595, 0.038130687713623046, 0.0382371826171875, 0.03808051300048828, 0.03820140838623047, 0.03816032028198242, 0.03814604949951172, 0.038095870971679685, 0.0382033920288086, 0.03806719970703125, 0.038234111785888675, 0.03810815811157227, 0.03802828979492188, 0.03782451248168945, 0.03784396743774414, 0.03775897598266602, 0.03786547088623047, 0.037766143798828124, 0.03785318374633789, 0.037746688842773435, 0.03784294509887695, 0.03785324859619141, 0.037895103454589844, 0.037823486328125, 0.03787571334838867, 0.03780505752563477, 0.03786649703979492, 0.03780198287963867, 0.03785420989990235, 0.037901313781738284, 0.037928958892822266, 0.03783679962158203, 0.037917697906494144, 0.03789926528930664, 0.03789823913574219, 0.03783168029785156, 0.03795254516601562, 0.12032611083984375, 0.03749273681640625, 0.03768729782104492, 0.03760332870483398, 0.0377077751159668, 0.037571582794189456, 0.037648384094238284, 
0.03781119918823242, 0.03775795364379883, 0.03766886520385742, 0.03766681671142578, 0.037628929138183595, 0.037713920593261716, 0.03765043258666992, 0.037738494873046875, 0.03777740859985351, 0.037719039916992186, 0.037751808166503906, 0.037738494873046875, 0.03761971282958984, 0.03770675277709961, 0.03771084976196289, 0.037736446380615234, 0.03785625457763672, 0.03772931289672852, 0.03771798324584961, 0.03778559875488281, 0.03774566268920899, 0.0377446403503418, 0.037733375549316404, 0.03778047943115234, 0.03779072189331055, 0.03781324768066406, 0.03775692749023438, 0.03778252792358398, 0.03771596908569336, 0.03785932922363281, 0.03779379272460937, 0.037789695739746096, 0.037907455444335936, 0.03832831954956055, 0.038424575805664066, 0.03815321731567383, 0.03777024078369141, 0.03784089660644531, 0.03783987045288086, 0.037956607818603515, 0.03789311981201172, 0.037820415496826174, 0.037804031372070314, 0.03785625457763672, 0.03780198287963867, 0.03786342239379883, 0.037817344665527344, 0.037852161407470705, 0.03788288116455078, 0.037923839569091795, 0.03825356674194336, 0.038114303588867186, 0.0379607048034668, 0.03799552154541016, 0.03803443145751953, 0.03790950393676758]",tokens/s,25.577997611042786,,,main,False,False -float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,.,.,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,.,.,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -5465,7 +5465,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e96ce-0598e6072b7bd2173be664c6;8b13368c-db27-481a-88c4-6b4369113982) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe88b-29d1da62545d1555725e19e3;18364b01-3800-4fa6-bbd0-e429437c256c) Repository Not Found for url: https://huggingface.co/8/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
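Note: for the successful rows the recorded latencies and throughputs are mutually consistent. Taking the EleutherAI/pythia-6.7b row above, prefill throughput equals sequence_length divided by mean prefill latency, and decode throughput is consistent with 63 tokens (max_new_tokens minus one) per mean decode duration; the facebook/xglm-4.5B row earlier satisfies the same relation. A quick check, with values copied from the CSV:

# Consistency check for the pythia-6.7b row (values copied from the CSV).
seq_len, max_new_tokens = 256, 64
prefill_mean = 0.08262053833007812    # report.prefill.latency.mean, seconds
decode_mean = 2.3889510253906248      # report.decode.latency.mean, seconds
print(seq_len / prefill_mean)              # ~3098.50, matches report.prefill.throughput.value
print((max_new_tokens - 1) / decode_mean)  # ~26.37, matches report.decode.throughput.value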
@@ -5496,7 +5496,7 @@ OSError: 8 is not a local folder and is not a valid model identifier listed on ' If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,2,2,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,2,2,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -5535,7 +5535,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d45de-7c2e8bf24d2571e76607f7ac;0404a3ad-6c88-48ef-bde0-9650359ed1ff) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe933-40ba071b319d5f5818143bd2;791581b3-645d-40ac-8626-0bb0a6c2c506) Repository Not Found for url: https://huggingface.co/2/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
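Note: the per-iteration measurements (the long bracketed lists inside quoted fields, such as report.prefill.latency.values) are serialized Python lists, so they must be parsed before any analysis. A minimal sketch, assuming the column names in this file's header and that failed rows leave the report columns empty:

# Sketch: recover per-iteration latency lists from the CSV for successful rows.
import ast
import pandas as pd

df = pd.read_csv("perf-df-unquantized-1xA10.csv")
ok = df[df["report.traceback"].isna()].copy()  # keep rows without a stored traceback
ok["prefill_values"] = ok["report.prefill.latency.values"].map(ast.literal_eval)
print(ok[["config.backend.model", "prefill_values"]].head())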
@@ -5736,7 +5736,7 @@ ChildProcessError: Traceback (most recent call last): TypeError: DeciCoderAttention.forward() got an unexpected keyword argument 'cache_position' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -5792,7 +5792,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664e903b-65417a1576fc70f64a2b6746;bd0cee10-96a0-48fc-912d-fb3acd892578) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664fe1db-47c05b5b11c9962b27fe9f38;3d44bfd3-cc57-4bb4-a6e0-6643451f7c49) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json. @@ -5896,7 +5896,7 @@ ChildProcessError: Traceback (most recent call last): ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -5989,7 +5989,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e9586-2743e1d666cb5d8c6b62536c;1125136a-b3bb-437d-a8cc-2da192ada0ba) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe8c5-3219c12a4adc7cf4687309d9;e820ef7b-9a11-4f62-b99d-fbd5bba17ed1) Repository Not Found for url: https://huggingface.co/x/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -6049,7 +6049,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664e9006-04eab3262390183351bbc205;09e2a4ba-d0fb-4ef0-bd92-5ca922d77986) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664fe1a2-63acdfb4007c8b660581789d;2da56b5b-000f-407f-9592-44dbda56fd71) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json. 
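Note: the ValueError above shows that OPTForCausalLM has no SDPA support in this transformers release, so the float16-sdpa configuration cannot apply to it; the message's own suggestion is to fall back to eager attention. A hedged sketch follows (the failing row's exact checkpoint is not shown here, so the OPT id below is illustrative only):

# Illustrative fallback: force eager attention where SDPA is unsupported.
import torch
from transformers import AutoModelForCausalLM

model = AutoModelForCausalLM.from_pretrained(
    "facebook/opt-6.7b",          # hypothetical stand-in for the failing row's model
    torch_dtype=torch.float16,
    attn_implementation="eager",  # instead of the unsupported "sdpa"
)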
@@ -6475,7 +6475,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664e8df9-3ec508a66c6fa20540c8b80b;1dea153f-8d54-4a69-8a67-b499843f8b69) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664fdf8c-2a5680c331c47c7220e6fdf1;ae18d382-0eb9-45c5-8d96-77e80411b04b) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json. @@ -6560,7 +6560,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e9665-61bda8c350173f3533b83ff2;858c61e5-7d69-47b6-a32a-b1ceac57a0d7) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe81e-190e3c897acbc8b56aa6503a;c818c325-3dd9-4bdf-8c59-98f983752b4e) Repository Not Found for url: https://huggingface.co/l/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -6592,7 +6592,7 @@ If this is a private repository, make sure to pass a token having permission to ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,MB,1219.93216,14529.593344,0.0,13883.14624,13705.186304,s,10,0.9362810974121093,0.09362810974121094,0.00027920329109976734,0.09354251098632813,0.09373921356201172,0.09408875732421876,0.09436839233398438,"[0.09443830108642579, 0.0934741439819336, 0.09354873657226563, 0.09362611389160157, 0.093427490234375, 0.09344755554199219, 0.09353238677978516, 0.09353628540039062, 0.09366153717041016, 0.09358854675292969]",tokens/s,2734.221599769414,kWh,1.1074903613557943e-06,6.068454596681698e-07,6.0343921587663614e-06,7.748727979790327e-06,tokens/kWh,33037680.593212295,MB,1219.93216,14529.593344,0.0,13883.14624,13856.22528,s,10,22.526626953125,2.2526626953125,0.004122940525000741,2.2514234619140625,2.2538315185546876,2.259308093261719,2.263689353027344,"[2.26478466796875, 2.250800537109375, 2.25008447265625, 2.25150439453125, 2.251889404296875, 2.25090234375, 2.25018798828125, 2.25251611328125, 2.252614501953125, 2.251342529296875]",tokens/s,27.966903403290186,kWh,2.6531057572408135e-05,1.4539835276311148e-05,0.00011534956605943347,0.0001564204589081527,tokens/kWh,402760.61353964236,,s,629,23.331680217742928,0.03709329128416999,0.011191790459388166,0.035729408264160156,0.0358287353515625,0.03590696868896485,0.12985827392578125,"[0.03639910507202149, 0.03638579177856445, 0.03628134536743164, 0.03638476943969727, 
0.036380672454833986, 0.03639807891845703, 0.03640217590332031, 0.03637247848510742, 0.03649638366699219, 0.03633049774169922, 0.036241409301757815, 0.03701760101318359, 0.035983360290527344, 0.03586764907836914, 0.03579596710205078, 0.035767295837402346, 0.035748863220214845, 0.03577241516113281, 0.03568947219848633, 0.03576422500610352, 0.035844097137451174, 0.035724288940429685, 0.03568947219848633, 0.0357212142944336, 0.035784702301025394, 0.03567923355102539, 0.035740673065185545, 0.03583795166015625, 0.03578777694702148, 0.03602841567993164, 0.036229118347167966, 0.035740673065185545, 0.035727359771728515, 0.03571916961669922, 0.0357386245727539, 0.03573452758789063, 0.035740673065185545, 0.03571814346313477, 0.035783679962158206, 0.035833854675292966, 0.03577036666870117, 0.03577241516113281, 0.03581235122680664, 0.035760128021240234, 0.035773441314697264, 0.03577958297729492, 0.035745792388916016, 0.035778560638427735, 0.03579084777832031, 0.035922943115234376, 0.036125694274902344, 0.036101119995117184, 0.035953662872314454, 0.03583180618286133, 0.035846145629882815, 0.03582668685913086, 0.03582156753540039, 0.035800064086914066, 0.03576115036010742, 0.0358021125793457, 0.035784702301025394, 0.035850238800048825, 0.129902587890625, 0.03562393569946289, 0.03563622283935547, 0.03566592025756836, 0.03564031982421875, 0.0356383056640625, 0.0356259536743164, 0.03567103958129883, 0.035714046478271484, 0.035672065734863284, 0.03563827133178711, 0.03567001724243164, 0.03570175933837891, 0.03565363311767578, 0.035683326721191407, 0.03567103958129883, 0.03567103958129883, 0.035661823272705076, 0.035659774780273434, 0.03567001724243164, 0.03573555374145508, 0.03570278549194336, 0.035700767517089844, 0.03569353485107422, 0.03562700653076172, 0.035659774780273434, 0.03570892715454101, 0.035694625854492186, 0.03568022537231445, 0.03567411041259766, 0.03571916961669922, 0.035716094970703126, 0.03577036666870117, 0.0357386245727539, 0.035705856323242184, 0.035729408264160156, 0.03571712112426758, 0.03573452758789063, 0.03570175933837891, 0.03568742370605469, 0.0357212142944336, 0.03570687866210937, 0.035745792388916016, 0.03571200180053711, 0.035683326721191407, 0.035727359771728515, 0.03578777694702148, 0.035768318176269534, 0.03580416107177734, 0.03587583923339844, 0.03578572845458984, 0.035773441314697264, 0.03578572845458984, 0.035745792388916016, 0.035762176513671876, 0.03582259368896484, 0.03579289627075195, 0.035776512145996094, 0.035762176513671876, 0.03575296020507813, 0.03580416107177734, 0.03581644821166992, 0.03585331344604492, 0.12987289428710938, 0.035609600067138675, 0.03558707046508789, 0.03564134216308594, 0.035620864868164064, 0.0356577262878418, 0.035622913360595705, 0.03563724899291992, 0.03561369705200195, 0.035800064086914066, 0.03600588989257812, 0.035783679962158206, 0.03567308807373047, 0.03566387176513672, 0.035619873046875, 0.035608543395996096, 0.03562496185302735, 0.03563417434692383, 0.03565465545654297, 0.03563417434692383, 0.03565465545654297, 0.035676158905029294, 0.03569356918334961, 0.03570175933837891, 0.03565158462524414, 0.035676158905029294, 0.03570175933837891, 0.0357283821105957, 0.03566694259643555, 0.03568851089477539, 0.035676097869873045, 0.03569561767578125, 0.035678207397460936, 0.03566694259643555, 0.03567411041259766, 0.035707904815673826, 0.035683326721191407, 0.03567411041259766, 0.03568947219848633, 0.035714046478271484, 0.035729408264160156, 0.03577446365356445, 0.035716094970703126, 0.03572326278686523, 0.0357130241394043, 0.03578777694702148, 
0.035737598419189456, 0.03577036666870117, 0.035737598419189456, 0.03573964691162109, 0.03578675079345703, 0.03579091262817383, 0.035752895355224606, 0.03571916961669922, 0.03572844696044922, 0.03578156661987305, 0.03572633743286133, 0.035760128021240234, 0.03573657608032227, 0.03574272155761719, 0.03576934432983398, 0.03578777694702148, 0.03585331344604492, 0.1300305938720703, 0.03561983871459961, 0.03560857772827149, 0.03559632110595703, 0.035631103515625, 0.03564028930664063, 0.03567308807373047, 0.0357498893737793, 0.03584000015258789, 0.03570380783081055, 0.0357130241394043, 0.03570483016967774, 0.03568742370605469, 0.03564652633666992, 0.03566483306884766, 0.03566796875, 0.03568230438232422, 0.035676158905029294, 0.03566796875, 0.035691520690917966, 0.03570073699951172, 0.03570892715454101, 0.03571712112426758, 0.03568537521362305, 0.035659774780273434, 0.0356864013671875, 0.0357283821105957, 0.035697662353515625, 0.035678207397460936, 0.035707904815673826, 0.035743743896484374, 0.03578060913085938, 0.035716094970703126, 0.035722240447998044, 0.03573452758789063, 0.035740673065185545, 0.03575091171264649, 0.035730430603027344, 0.035714046478271484, 0.03570995330810547, 0.03575302505493164, 0.03575392150878906, 0.03573964691162109, 0.03570892715454101, 0.035781631469726564, 0.03574070358276367, 0.03580105590820312, 0.03578060913085938, 0.03577753448486328, 0.03578879928588867, 0.035789825439453124, 0.035806209564208984, 0.03579391860961914, 0.03576319885253906, 0.035773441314697264, 0.035784736633300784, 0.03578979110717773, 0.03577036666870117, 0.03578572845458984, 0.03580416107177734, 0.0358205451965332, 0.03585539245605469, 0.03582255935668945, 0.1298206787109375, 0.03562700653076172, 0.035604480743408204, 0.03560246276855469, 0.03565052795410156, 0.035643390655517575, 0.035664894104003905, 0.03566284942626953, 0.03566284942626953, 0.035664894104003905, 0.03566592025756836, 0.03564646530151367, 0.03564851379394531, 0.03564646530151367, 0.03565363311767578, 0.03565875244140625, 0.03564851379394531, 0.035743743896484374, 0.03563827133178711, 0.035691520690917966, 0.03563827133178711, 0.03568947219848633, 0.03566694259643555, 0.035746814727783204, 0.03581235122680664, 0.03571916961669922, 0.03567923355102539, 0.03571814346313477, 0.03576838302612305, 0.035703742980957034, 0.03570691299438476, 0.03571503829956055, 0.03571200180053711, 0.03575500869750976, 0.03575603103637695, 0.03575603103637695, 0.03574169540405273, 0.035737598419189456, 0.03575091171264649, 0.03579391860961914, 0.035746814727783204, 0.03571507263183594, 0.035705856323242184, 0.03575603103637695, 0.03575193786621094, 0.03578371047973633, 0.035756000518798826, 0.03585126495361328, 0.03609292984008789, 0.03587583923339844, 0.03577241516113281, 0.03575296020507813, 0.03578675079345703, 0.03575398254394531, 0.03580825424194336, 0.03581235122680664, 0.03579084777832031, 0.03580416107177734, 0.035794944763183595, 0.03582463836669922, 0.03589324951171875, 0.03588915252685547, 0.03580518341064453, 0.129817626953125, 0.03563520050048828, 0.03562902450561523, 0.03563212966918945, 0.035625984191894534, 0.03565363311767578, 0.03564031982421875, 0.03561369705200195, 0.035622913360595705, 0.03567411041259766, 0.035643390655517575, 0.035697662353515625, 0.03565260696411133, 0.035676158905029294, 0.035659774780273434, 0.03565465545654297, 0.03566284942626953, 0.03568947219848633, 0.03565875244140625, 0.03564646530151367, 0.03564441680908203, 0.03569356918334961, 0.035743743896484374, 0.035675136566162106, 0.03567718505859375, 
0.03572326278686523, 0.03571814346313477, 0.035762176513671876, 0.035724288940429685, 0.03576422500610352, 0.035730430603027344, 0.03575091171264649, 0.03569868850708008, 0.03570175933837891, 0.03568435287475586, 0.035699710845947266, 0.035722240447998044, 0.03571507263183594, 0.035773441314697264, 0.0357314567565918, 0.03573555374145508, 0.03577753448486328, 0.03573964691162109, 0.03574480056762695, 0.03575190353393555, 0.03578060913085938, 0.0358021125793457, 0.03575091171264649, 0.035748863220214845, 0.035789825439453124, 0.03577241516113281, 0.03582361602783203, 0.035783679962158206, 0.035737598419189456, 0.03575296020507813, 0.035775489807128906, 0.03577036666870117, 0.035800064086914066, 0.03578675079345703, 0.03580723190307617, 0.035811328887939455, 0.03583283233642578, 0.03580723190307617, 0.1299752960205078, 0.03566796875, 0.03555942535400391, 0.03559628677368164, 0.03561983871459961, 0.03561881637573242, 0.035609600067138675, 0.03561983871459961, 0.035642368316650394, 0.03577958297729492, 0.03565465545654297, 0.03566592025756836, 0.03563315200805664, 0.035675136566162106, 0.03568537521362305, 0.03563520050048828, 0.0356577262878418, 0.03563315200805664, 0.03569561767578125, 0.03570483016967774, 0.03565465545654297, 0.035688449859619144, 0.035642368316650394, 0.035664894104003905, 0.03561983871459961, 0.03562393569946289, 0.03568025588989258, 0.035714046478271484, 0.03578675079345703, 0.035724288940429685, 0.035727359771728515, 0.0357130241394043, 0.035676193237304685, 0.0357611198425293, 0.03567718505859375, 0.035707904815673826, 0.03570483016967774, 0.03570995330810547, 0.035699710845947266, 0.0356864013671875, 0.0357283821105957, 0.03577964782714844, 0.035733440399169925, 0.035697662353515625, 0.03579391860961914, 0.03578060913085938, 0.035740673065185545, 0.03571814346313477, 0.03571712112426758, 0.035789825439453124, 0.03588608169555664, 0.03582156753540039, 0.03573350524902344, 0.0357386245727539, 0.035722240447998044, 0.035745792388916016, 0.035746814727783204, 0.035740673065185545, 0.035740673065185545, 0.035783679962158206, 0.03581644821166992, 0.03581951904296875, 0.035765247344970705, 0.1299752960205078, 0.03565875244140625, 0.03561369705200195, 0.03566796875, 0.03568947219848633, 0.03567308807373047, 0.03566899108886719, 0.035661823272705076, 0.03564441680908203, 0.03567411041259766, 0.035729408264160156, 0.035699710845947266, 0.0357130241394043, 0.03568537521362305, 0.03566796875, 0.03565363311767578, 0.03571814346313477, 0.03571200180053711, 0.0356802864074707, 0.03573040008544922, 0.035691520690917966, 0.03567923355102539, 0.035681278228759765, 0.0356864013671875, 0.03569359970092773, 0.035773406982421874, 0.035904510498046875, 0.035768318176269534, 0.03570073699951172, 0.035768318176269534, 0.03587071990966797, 0.03590041732788086, 0.03577446365356445, 0.035768318176269534, 0.03579084777832031, 0.03578265762329102, 0.03573555374145508, 0.03573964691162109, 0.035694591522216795, 0.035737598419189456, 0.035740734100341794, 0.03573446273803711, 0.03573964691162109, 0.03577241516113281, 0.03577139282226562, 0.03581644821166992, 0.03576115036010742, 0.035775489807128906, 0.03575603103637695, 0.035760128021240234, 0.03581951904296875, 0.035796993255615236, 0.03580825424194336, 0.03579391860961914, 0.035810302734375, 0.03578675079345703, 0.035783679962158206, 0.0357918701171875, 0.0357918701171875, 0.03580416107177734, 0.0358287353515625, 0.03581849670410156, 0.03578579330444336, 0.13018515014648438, 0.03566592025756836, 0.03563417434692383, 0.03563724899291992, 
0.03563417434692383, 0.035655681610107424, 0.035729408264160156, 0.03564851379394531, 0.035643390655517575, 0.035681278228759765, 0.035625984191894534, 0.03570483016967774, 0.03566899108886719, 0.03570175933837891, 0.035645439147949216, 0.0356864013671875, 0.03568947219848633, 0.0358287353515625, 0.03568230438232422, 0.035664894104003905, 0.03567923355102539, 0.03585228729248047, 0.035817470550537106, 0.035705856323242184, 0.035692543029785154, 0.0357314567565918, 0.035722240447998044, 0.035765247344970705, 0.03571712112426758, 0.03578675079345703, 0.03579913711547852, 0.03577231979370117, 0.03571200180053711, 0.035699710845947266, 0.035699710845947266, 0.03570995330810547, 0.035737598419189456, 0.03571814346313477, 0.03575603103637695, 0.03574476623535156, 0.035778560638427735, 0.0358287353515625, 0.03575500869750976, 0.035776512145996094, 0.03581644821166992, 0.035776512145996094, 0.03572739028930664, 0.035780574798583986, 0.03574784088134766, 0.03585843276977539, 0.03590860748291016, 0.03581235122680664, 0.03577446365356445, 0.035767295837402346, 0.03578060913085938, 0.03580825424194336, 0.03580313491821289, 0.03582566452026367, 0.035794944763183595, 0.0357918701171875, 0.03583900833129883, 0.03592086410522461, 0.03580518341064453, 0.13022720336914062, 0.03563724899291992, 0.03561369705200195, 0.0355676155090332, 0.03562188720703125, 0.03561881637573242, 0.03562188720703125, 0.03565055847167969, 0.03564134216308594, 0.035620864868164064, 0.03564851379394531, 0.03566796875, 0.03566080093383789, 0.03567103958129883, 0.035620864868164064, 0.03565260696411133, 0.0356577262878418, 0.035655681610107424, 0.035676223754882816, 0.03566175842285156, 0.03569356918334961, 0.03571507263183594, 0.03568025588989258, 0.03564646530151367, 0.03566284942626953, 0.035672065734863284, 0.03570073699951172, 0.03567411041259766, 0.03570278549194336, 0.03571712112426758, 0.03573657608032227, 0.035768318176269534, 0.0357386245727539, 0.035697662353515625, 0.03568742370605469, 0.03570892715454101, 0.03570892715454101, 0.03570073699951172, 0.03566899108886719, 0.03571916961669922, 0.03576422500610352, 0.03585126495361328, 0.035762176513671876, 0.0357386245727539, 0.03573555374145508, 0.035806209564208984, 0.03589734268188476, 0.035784702301025394, 0.035827713012695314, 0.035814399719238284, 0.03584307098388672, 0.03577958297729492, 0.0358287353515625, 0.0357498893737793, 0.035740673065185545, 0.035772480010986325, 0.0357907829284668, 0.035796993255615236, 0.03585638427734375, 0.035798015594482424, 0.03581849670410156, 0.035901439666748046, 0.03587583923339844]",tokens/s,26.95905284702418,,,,, -float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): 
+float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -6706,7 +6706,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e9550-42adcc3166559bce6ce46f3d;6c2347fd-f90e-481f-a568-a341a8650b9f) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe70d-754e377d2341008e67214b01;7dfc3872-cf27-4649-8104-d0a075d6f406) Repository Not Found for url: https://huggingface.co/i/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -6775,7 +6775,7 @@ torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 448.00 MiB. G ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-1.8B,,cuda,0,42,,,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,217063f5c507ed7cc255df7e1f64c4333a0b4dfe,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,MB,1298.96448,5222.432768,0.0,4575.985664,4408.404992,s,10,0.23840172576904298,0.023840172576904295,0.0005106541420409152,0.023670864105224607,0.02390000705718994,0.024634579563140867,0.025222237567901613,"[0.025369152069091797, 0.023706623077392578, 0.02363369560241699, 0.023661151885986328, 0.023677087783813475, 0.023684959411621093, 0.0236376953125, 0.02373676872253418, 0.02366464042663574, 0.02362995147705078]",tokens/s,10738.177300277002,kWh,2.8159645578217885e-07,1.5430172882848377e-07,1.3087894068190103e-06,1.7446875914296731e-06,tokens/kWh,146731140.43885785,MB,1298.96448,5222.432768,0.0,4575.985664,4408.407552,s,10,12.470606933593752,1.247060693359375,0.00442238733599079,1.2463931274414062,1.2521589477539063,1.254918768310547,1.2571266247558595,"[1.2576785888671875, 1.241505615234375, 1.2468797607421875, 1.2479893798828126, 1.2451905517578126, 1.24659228515625, 1.2461939697265625, 1.251545654296875, 1.2443409423828125, 
1.242690185546875]",tokens/s,50.518792177057904,kWh,1.4861989345342398e-05,8.14409445473497e-06,3.876174264958031e-05,6.176782644965767e-05,tokens/kWh,1019948.468663481,,s,629,12.662182926177977,0.02013065648041014,0.002876504415868315,0.019755008697509766,0.019963495254516603,0.020318616867065428,0.04381483139038086,"[0.02206105613708496, 0.020191232681274415, 0.020123647689819335, 0.02004787254333496, 0.0196945915222168, 0.01988096046447754, 0.019949567794799804, 0.019783679962158202, 0.01986867141723633, 0.020222976684570314, 0.019762176513671875, 0.019762176513671875, 0.019983360290527344, 0.020343807220458983, 0.02008166313171387, 0.019764223098754884, 0.01967616081237793, 0.019738624572753907, 0.01984819221496582, 0.019794944763183595, 0.01970790481567383, 0.020213760375976563, 0.019758079528808595, 0.019673088073730468, 0.019745855331420897, 0.019688383102416992, 0.019740671157836915, 0.02042367935180664, 0.020728832244873048, 0.020775936126708985, 0.02050764846801758, 0.019894271850585937, 0.0198656005859375, 0.019779584884643556, 0.019750911712646483, 0.01986457633972168, 0.01987379264831543, 0.01983283233642578, 0.01985228729248047, 0.01984614372253418, 0.019842048645019532, 0.019961856842041017, 0.020126752853393555, 0.019762144088745118, 0.019746816635131836, 0.019745792388916016, 0.019760128021240234, 0.019728384017944335, 0.019713024139404296, 0.019730432510375977, 0.019769344329833984, 0.019810304641723633, 0.019851264953613282, 0.01984511947631836, 0.019755008697509766, 0.019777536392211914, 0.019784704208374023, 0.019901439666748046, 0.019812351226806642, 0.019801088333129883, 0.019761152267456054, 0.01983692741394043, 0.0439183349609375, 0.019715072631835938, 0.019772415161132813, 0.019745792388916016, 0.019778560638427735, 0.019738624572753907, 0.01979903984069824, 0.019723264694213868, 0.019748863220214845, 0.019666976928710937, 0.01965667152404785, 0.019647487640380858, 0.019709951400756837, 0.019499008178710937, 0.019628032684326172, 0.01962393569946289, 0.0196945915222168, 0.019697664260864257, 0.019697664260864257, 0.019688447952270507, 0.019489791870117186, 0.019494911193847657, 0.019571712493896484, 0.01983590316772461, 0.01980723190307617, 0.01965158462524414, 0.019754047393798827, 0.01972831916809082, 0.019727392196655272, 0.019847135543823242, 0.019727359771728514, 0.01964031982421875, 0.019507200241088866, 0.019784704208374023, 0.019727359771728514, 0.019783679962158202, 0.01981439971923828, 0.01969152069091797, 0.019563520431518554, 0.019773439407348634, 0.019853311538696287, 0.019520511627197267, 0.019566591262817384, 0.01981439971923828, 0.019817472457885742, 0.019767295837402343, 0.019938304901123048, 0.019550207138061524, 0.01965465545654297, 0.019672063827514647, 0.0198656005859375, 0.019718143463134767, 0.019589120864868165, 0.019519487380981446, 0.019512319564819337, 0.019629056930541993, 0.019594240188598632, 0.01984614372253418, 0.01978982353210449, 0.019959808349609375, 0.019817472457885742, 0.019629056930541993, 0.019535871505737306, 0.04380672073364258, 0.019698688507080078, 0.019570688247680663, 0.019580928802490235, 0.019551231384277345, 0.01962700843811035, 0.019545087814331053, 0.019568639755249022, 0.019555328369140625, 0.019495935440063478, 0.019796991348266603, 0.019715072631835938, 0.019802112579345704, 0.019741695404052736, 0.019734527587890623, 0.019791872024536132, 0.019802112579345704, 0.019768320083618163, 0.01984102439880371, 0.019803136825561524, 0.01985945510864258, 0.01984102439880371, 0.019685375213623048, 0.0198287353515625, 
0.019680255889892577, 0.019727359771728514, 0.01968639945983887, 0.01985638427734375, 0.019801088333129883, 0.019739648818969727, 0.019767295837402343, 0.019763200759887696, 0.019744768142700195, 0.01977446365356445, 0.019808256149291992, 0.0198604793548584, 0.01968435287475586, 0.019528703689575197, 0.019729408264160156, 0.01980620765686035, 0.019693567276000978, 0.020011007308959963, 0.019931135177612306, 0.019732511520385743, 0.019831775665283203, 0.019861503601074217, 0.019902463912963866, 0.019813375473022463, 0.019811328887939454, 0.019766271591186522, 0.019735551834106444, 0.01986662483215332, 0.019809280395507813, 0.02062745666503906, 0.019810304641723633, 0.019742719650268553, 0.019735551834106444, 0.01979084777832031, 0.019862527847290038, 0.019764223098754884, 0.020163583755493163, 0.02040115165710449, 0.019919872283935547, 0.04389990234375, 0.019764223098754884, 0.01987993621826172, 0.019767295837402343, 0.019763200759887696, 0.019727359771728514, 0.019886112213134764, 0.019773408889770507, 0.019787776947021486, 0.01983283233642578, 0.019786752700805665, 0.019769344329833984, 0.0198287353515625, 0.019768320083618163, 0.01982156753540039, 0.019796991348266603, 0.019800064086914062, 0.019775487899780272, 0.019813375473022463, 0.020162559509277343, 0.02006937599182129, 0.019783679962158202, 0.019533824920654298, 0.01959321594238281, 0.019564544677734375, 0.019582975387573243, 0.019513343811035155, 0.019520511627197267, 0.019607551574707033, 0.019602432250976562, 0.019577856063842772, 0.01990553665161133, 0.01985740852355957, 0.019783679962158202, 0.019753984451293945, 0.019771392822265626, 0.019731456756591798, 0.019819583892822266, 0.01976313591003418, 0.019687423706054686, 0.019522560119628905, 0.019486719131469727, 0.019751935958862304, 0.019704832077026366, 0.019505151748657225, 0.019518463134765626, 0.019580928802490235, 0.01967103958129883, 0.01990665626525879, 0.01981532859802246, 0.019654720306396485, 0.019724224090576174, 0.019743743896484374, 0.02006118392944336, 0.019769344329833984, 0.019703807830810546, 0.019603456497192383, 0.019636224746704102, 0.020520959854125977, 0.022184959411621095, 0.020471807479858398, 0.019868703842163087, 0.019851232528686525, 0.04391628646850586, 0.01960960006713867, 0.019595264434814453, 0.019737663269042968, 0.01975494384765625, 0.019705856323242187, 0.01981439971923828, 0.01985228729248047, 0.019747840881347657, 0.019853311538696287, 0.01977039909362793, 0.01973244857788086, 0.01971609687805176, 0.019715072631835938, 0.01983283233642578, 0.019743743896484374, 0.01969254493713379, 0.019744768142700195, 0.019740671157836915, 0.019718143463134767, 0.019729408264160156, 0.019725311279296876, 0.0196997127532959, 0.019761152267456054, 0.019722240447998047, 0.01968639945983887, 0.019688447952270507, 0.019713024139404296, 0.019581951141357423, 0.01946316719055176, 0.019503103256225587, 0.019606527328491212, 0.01946316719055176, 0.019528703689575197, 0.019540992736816407, 0.01987174415588379, 0.019677183151245118, 0.019728384017944335, 0.02002739143371582, 0.02017791938781738, 0.020374528884887694, 0.020351999282836913, 0.020057088851928712, 0.019695615768432616, 0.01959219169616699, 0.019843072891235353, 0.019719167709350584, 0.019935232162475586, 0.019760128021240234, 0.019736576080322265, 0.019505151748657225, 0.019497983932495116, 0.01967001533508301, 0.019550207138061524, 0.019471359252929688, 0.019495935440063478, 0.019979263305664064, 0.020353023529052734, 0.019886079788208007, 0.020256767272949217, 0.02004582405090332, 
0.019729408264160156, 0.019514368057250975, 0.04391219329833984, 0.01970278358459473, 0.019717119216918946, 0.019720191955566405, 0.01985638427734375, 0.0196997127532959, 0.019772415161132813, 0.019772415161132813, 0.019720191955566405, 0.019697664260864257, 0.019753984451293945, 0.019704864501953124, 0.019889184951782227, 0.01972012710571289, 0.019692607879638672, 0.01976108741760254, 0.019752960205078125, 0.019711999893188475, 0.019741695404052736, 0.019731456756591798, 0.019747840881347657, 0.019751935958862304, 0.019565568923950196, 0.019479551315307618, 0.019681280136108398, 0.019494911193847657, 0.019705856323242187, 0.02046566390991211, 0.020164608001708984, 0.020915199279785156, 0.01982361602783203, 0.019758079528808595, 0.019787839889526367, 0.019821504592895507, 0.019761152267456054, 0.019745792388916016, 0.01988096046447754, 0.019703807830810546, 0.019520511627197267, 0.019520511627197267, 0.01974790382385254, 0.01976927947998047, 0.019732479095458985, 0.019677183151245118, 0.01982156753540039, 0.01981439971923828, 0.019752960205078125, 0.01984716796875, 0.019763200759887696, 0.019843072891235353, 0.019745792388916016, 0.019794944763183595, 0.019861503601074217, 0.019775487899780272, 0.019786752700805665, 0.01977446365356445, 0.019786752700805665, 0.019745792388916016, 0.0196997127532959, 0.019764223098754884, 0.019981311798095702, 0.019794944763183595, 0.0196997127532959, 0.04375040054321289, 0.01945702362060547, 0.01962598419189453, 0.01968639945983887, 0.019752960205078125, 0.01984614372253418, 0.019759103775024413, 0.019753984451293945, 0.01978982353210449, 0.01983795166015625, 0.019765247344970704, 0.019766271591186522, 0.019811328887939454, 0.019750911712646483, 0.019800064086914062, 0.020477951049804686, 0.019723264694213868, 0.01978265571594238, 0.019741695404052736, 0.01977446365356445, 0.019734527587890623, 0.019730432510375977, 0.01988403129577637, 0.020558847427368163, 0.02027212715148926, 0.02027008056640625, 0.019736576080322265, 0.019685375213623048, 0.019826688766479493, 0.020303871154785155, 0.019802112579345704, 0.01984102439880371, 0.019784704208374023, 0.019826688766479493, 0.019693567276000978, 0.019728384017944335, 0.019718143463134767, 0.019748863220214845, 0.019795967102050782, 0.019761152267456054, 0.01962700843811035, 0.01985843276977539, 0.019697664260864257, 0.01966182327270508, 0.019721216201782226, 0.019685375213623048, 0.019523584365844726, 0.019569664001464843, 0.019565568923950196, 0.019490816116333007, 0.019519487380981446, 0.01948876762390137, 0.019494976043701172, 0.019692480087280275, 0.019681280136108398, 0.019679231643676756, 0.019794944763183595, 0.019762176513671875, 0.019887168884277343, 0.01977337646484375, 0.019757055282592775, 0.0196997127532959, 0.019862527847290038, 0.044418048858642575, 0.02021990394592285, 0.02042572784423828, 0.02021990394592285, 0.01990553665161133, 0.019703807830810546, 0.01982259178161621, 0.019805183410644533, 0.01974787139892578, 0.01976419258117676, 0.019735551834106444, 0.019725311279296876, 0.01991372871398926, 0.019731456756591798, 0.019723264694213868, 0.019760128021240234, 0.019748863220214845, 0.019842048645019532, 0.01977446365356445, 0.01988198471069336, 0.019740671157836915, 0.019801088333129883, 0.019711999893188475, 0.019755008697509766, 0.019788799285888673, 0.019817472457885742, 0.019810304641723633, 0.019862527847290038, 0.019787776947021486, 0.019767295837402343, 0.019728384017944335, 0.019753984451293945, 0.019752960205078125, 0.019851264953613282, 0.019742719650268553, 0.019745824813842773, 
0.019810272216796876, 0.019779584884643556, 0.019730432510375977, 0.019811328887939454, 0.019796991348266603, 0.019715072631835938, 0.019896320343017578, 0.019756032943725587, 0.019714048385620117, 0.019970048904418947, 0.019750911712646483, 0.019726335525512697, 0.019721216201782226, 0.01986867141723633, 0.019719167709350584, 0.019544063568115236, 0.019589120864868165, 0.01980620765686035, 0.019752960205078125, 0.020616191864013672, 0.01997209548950195, 0.020328447341918944, 0.020497407913208008, 0.020485120773315428, 0.02007961654663086, 0.019791872024536132, 0.019809280395507813, 0.043817985534667966, 0.02004991912841797, 0.01978982353210449, 0.019769344329833984, 0.019760128021240234, 0.019837984085083006, 0.01976419258117676, 0.019744768142700195, 0.01970790481567383, 0.019810304641723633, 0.019776512145996093, 0.01968230438232422, 0.01978265571594238, 0.019786752700805665, 0.019725311279296876, 0.019739648818969727, 0.019696640014648437, 0.019751935958862304, 0.019725311279296876, 0.019778560638427735, 0.019750911712646483, 0.01975606346130371, 0.019735519409179687, 0.0196997127532959, 0.019723264694213868, 0.019729408264160156, 0.019705856323242187, 0.01977446365356445, 0.01985536003112793, 0.01966694450378418, 0.019719167709350584, 0.01982054328918457, 0.01971609687805176, 0.019769344329833984, 0.019793920516967774, 0.019743743896484374, 0.01964851188659668, 0.019719167709350584, 0.019745792388916016, 0.019904512405395508, 0.019500032424926757, 0.019572736740112305, 0.019711999893188475, 0.019742719650268553, 0.019556352615356445, 0.019518463134765626, 0.019697696685791015, 0.01971299171447754, 0.019696640014648437, 0.019722240447998047, 0.01978265571594238, 0.019701759338378907, 0.019765247344970704, 0.019751935958862304, 0.01979084777832031, 0.019688512802124022, 0.019884992599487304, 0.01987276840209961, 0.019760128021240234, 0.01979084777832031, 0.019791872024536132, 0.019788799285888673, 0.019767295837402343, 0.04391321563720703, 0.019793920516967774, 0.019719167709350584, 0.019770368576049805, 0.019741695404052736, 0.01966182327270508, 0.01981439971923828, 0.019695680618286134, 0.01975187110900879, 0.019819520950317384, 0.019687423706054686, 0.0196997127532959, 0.019762176513671875, 0.019732479095458985, 0.019711999893188475, 0.019758079528808595, 0.019532800674438477, 0.019589120864868165, 0.019728384017944335, 0.019759103775024413, 0.019717119216918946, 0.019791872024536132, 0.01975196838378906, 0.019744735717773437, 0.019779584884643556, 0.01982361602783203, 0.019785728454589844, 0.019736576080322265, 0.019768320083618163, 0.019513343811035155, 0.019506175994873046, 0.01945907211303711, 0.019478527069091797, 0.019495935440063478, 0.019704832077026366, 0.01969152069091797, 0.019751935958862304, 0.01966592025756836, 0.01994041633605957, 0.01954092788696289, 0.019628095626831054, 0.019602432250976562, 0.020024255752563478, 0.019668991088867188, 0.01967411231994629, 0.019763200759887696, 0.019711999893188475, 0.019768320083618163, 0.019742719650268553, 0.01983692741394043, 0.019767295837402343, 0.019885120391845704, 0.019687360763549804, 0.01968230438232422, 0.019698688507080078, 0.019709951400756837, 0.019777536392211914, 0.019770368576049805, 0.0196997127532959, 0.019791872024536132, 0.01970790481567383, 0.01970790481567383, 0.019741695404052736]",tokens/s,49.675478838612925,,,main,False,False 
-float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -6830,7 +6830,7 @@ ImportError: This modeling file requires the following packages that were not fo ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.0,,,MB,1270.849536,3041.394688,0.0,2394.947584,2279.417856,s,10,0.24058313369750975,0.024058313369750974,0.000541909354183334,0.024157487869262695,0.024371293067932127,0.024764110088348387,0.025078363704681396,"[0.02515692710876465, 0.02312175941467285, 0.02419491195678711, 0.024222591400146484, 0.024074432373046874, 0.024284000396728515, 0.024172800064086914, 0.02320191955566406, 0.02401161575317383, 0.024142175674438476]",tokens/s,10640.812432091529,kWh,2.8420888229758824e-07,1.5573286271826592e-07,1.1061206765625098e-06,1.546062421578364e-06,tokens/kWh,165581930.21640837,MB,1270.849536,3041.394688,0.0,2394.947584,2279.420416,s,10,14.563057861328126,1.4563057861328124,0.006222302227402314,1.4575923461914062,1.4609573974609376,1.4624493408203125,1.4636428955078125,"[1.459085205078125, 1.45394970703125, 1.4639412841796875, 1.4589393310546874, 1.457260009765625, 1.4557384033203125, 1.4579246826171874, 1.439390380859375, 1.4562030029296875, 
1.4606258544921875]",tokens/s,43.260145362256026,kWh,1.7034398785784336e-05,9.334815048509355e-06,3.722665998963787e-05,6.359587382393158e-05,tokens/kWh,990630.3068406405,,s,629,14.753925128936764,0.023456160777323955,0.002903868648751255,0.02310655975341797,0.023323033905029298,0.023503257369995117,0.04722995300292972,"[0.023405567169189453, 0.02228428840637207, 0.022031360626220704, 0.022098943710327147, 0.022079488754272462, 0.02208460807800293, 0.02220953559875488, 0.023191551208496093, 0.023191551208496093, 0.02304204750061035, 0.023015424728393553, 0.02310041618347168, 0.022992895126342772, 0.023149568557739256, 0.02305433654785156, 0.023166976928710937, 0.023104511260986327, 0.023161855697631836, 0.023040000915527343, 0.02300214385986328, 0.02331644821166992, 0.02328166389465332, 0.023373823165893554, 0.023451711654663084, 0.023891904830932616, 0.023974912643432617, 0.023771135330200196, 0.023152639389038086, 0.02307891273498535, 0.023611391067504883, 0.023446527481079102, 0.023044095993041993, 0.02313523292541504, 0.023104511260986327, 0.023196672439575194, 0.023022592544555662, 0.02327961540222168, 0.023166976928710937, 0.023228416442871092, 0.02304310417175293, 0.023060447692871095, 0.0230645751953125, 0.023128063201904296, 0.02294272041320801, 0.02314854431152344, 0.023028736114501954, 0.02309529685974121, 0.023006208419799806, 0.02309939193725586, 0.023005184173583985, 0.023079935073852538, 0.022964223861694336, 0.02327347183227539, 0.023152639389038086, 0.02309225654602051, 0.024512544631958007, 0.024862655639648436, 0.023362560272216795, 0.023192575454711914, 0.023189504623413085, 0.022988800048828126, 0.022981632232666017, 0.0464005126953125, 0.02307891273498535, 0.02307379150390625, 0.023059455871582032, 0.022936576843261718, 0.022932479858398438, 0.022964223861694336, 0.023112703323364257, 0.022986751556396484, 0.023029760360717775, 0.02313216018676758, 0.022985727310180663, 0.02309734344482422, 0.023134208679199218, 0.023010303497314453, 0.023014400482177736, 0.023133184432983397, 0.0230645751953125, 0.02309427261352539, 0.023197696685791015, 0.023037952423095705, 0.02306355285644531, 0.023129087448120117, 0.022966272354125978, 0.023053312301635744, 0.02304921531677246, 0.02307379150390625, 0.022953983306884765, 0.022965248107910157, 0.023006208419799806, 0.02301644706726074, 0.022978559494018554, 0.02307174491882324, 0.023145471572875977, 0.023088127136230468, 0.023111679077148437, 0.023202816009521485, 0.023205888748168944, 0.02309939193725586, 0.023146495819091797, 0.02309529685974121, 0.02306764793395996, 0.0230645751953125, 0.023093248367309572, 0.023015424728393553, 0.023014400482177736, 0.023211008071899415, 0.023060480117797853, 0.023020544052124024, 0.02306252861022949, 0.023128063201904296, 0.023121919631958008, 0.023009279251098632, 0.023133184432983397, 0.023234560012817384, 0.02311680030822754, 0.02307481575012207, 0.02310553550720215, 0.023038976669311522, 0.023169023513793945, 0.02295091247558594, 0.0231147518157959, 0.023378944396972655, 0.04778598403930664, 0.023315456390380858, 0.023211008071899415, 0.023187456130981447, 0.023003135681152344, 0.02302566337585449, 0.023045120239257814, 0.023137279510498047, 0.023198720932006835, 0.023162879943847657, 0.023112703323364257, 0.02310041618347168, 0.02309836769104004, 0.023113727569580078, 0.023134208679199218, 0.023104511260986327, 0.022922239303588866, 0.023538688659667968, 0.023500799179077148, 0.022957056045532227, 0.02303385543823242, 0.023215103149414062, 0.02331648063659668, 0.023388160705566406, 
0.02307792091369629, 0.023046112060546874, 0.02306255912780762, 0.023044063568115235, 0.023022592544555662, 0.02330419158935547, 0.023153663635253906, 0.02329804801940918, 0.02309427261352539, 0.023376895904541017, 0.023163904190063478, 0.02386636734008789, 0.02477568054199219, 0.02368819236755371, 0.02351513671875, 0.02327347183227539, 0.02367692756652832, 0.023191551208496093, 0.023183359146118163, 0.0230328311920166, 0.02329599952697754, 0.023157760620117186, 0.02332876777648926, 0.02324787139892578, 0.02312294387817383, 0.023319551467895508, 0.023126079559326173, 0.023301055908203125, 0.023113727569580078, 0.02309427261352539, 0.023243776321411135, 0.023192575454711914, 0.02329497528076172, 0.023362560272216795, 0.023161855697631836, 0.02304102325439453, 0.023163904190063478, 0.023006208419799806, 0.02313523292541504, 0.048102401733398435, 0.02312499237060547, 0.023173120498657225, 0.023206911087036132, 0.023015424728393553, 0.023002111434936523, 0.02309017562866211, 0.023076864242553712, 0.023197696685791015, 0.023068672180175782, 0.023030784606933592, 0.023178272247314453, 0.023213024139404296, 0.023069696426391603, 0.023129087448120117, 0.023387136459350585, 0.023047168731689452, 0.023225343704223633, 0.023137279510498047, 0.022795263290405272, 0.02310655975341797, 0.023173120498657225, 0.023152639389038086, 0.023052352905273438, 0.02300511932373047, 0.023137279510498047, 0.022951936721801756, 0.023169023513793945, 0.02304921531677246, 0.023113727569580078, 0.02309734344482422, 0.02309836769104004, 0.023068672180175782, 0.023004159927368165, 0.023137279510498047, 0.02309017562866211, 0.023243776321411135, 0.023401472091674806, 0.023205888748168944, 0.023128063201904296, 0.023590911865234376, 0.024308736801147462, 0.02345779228210449, 0.02309939193725586, 0.023096319198608398, 0.023204864501953124, 0.023027711868286133, 0.02315673637390137, 0.023014400482177736, 0.023053312301635744, 0.02310758399963379, 0.023085056304931642, 0.02308608055114746, 0.02304921531677246, 0.023179264068603517, 0.02332262420654297, 0.023142400741577147, 0.02308095932006836, 0.02311577606201172, 0.02311884880065918, 0.023053312301635744, 0.023060480117797853, 0.023150592803955077, 0.04772147369384765, 0.023504896163940428, 0.023121919631958008, 0.02312294387817383, 0.02308095932006836, 0.023126047134399415, 0.023094240188598632, 0.02311577606201172, 0.022972415924072266, 0.02314035224914551, 0.023085056304931642, 0.023187456130981447, 0.023011327743530274, 0.023170047760009766, 0.023138303756713868, 0.023216127395629883, 0.0231147518157959, 0.023013376235961915, 0.023045120239257814, 0.023060480117797853, 0.023187456130981447, 0.02302566337585449, 0.022943744659423827, 0.023356416702270507, 0.023152639389038086, 0.023045120239257814, 0.023197696685791015, 0.02308198356628418, 0.023157760620117186, 0.02326118469238281, 0.02308095932006836, 0.023351295471191406, 0.023059455871582032, 0.02331340789794922, 0.023119871139526366, 0.023155712127685548, 0.023178239822387696, 0.023164928436279295, 0.023225343704223633, 0.02295910453796387, 0.022973440170288087, 0.023171072006225587, 0.02326016044616699, 0.023120895385742187, 0.02306662368774414, 0.02307788848876953, 0.023054399490356446, 0.023076799392700194, 0.023111679077148437, 0.02312396812438965, 0.02307174491882324, 0.023117824554443358, 0.02304614448547363, 0.02312499237060547, 0.02307174491882324, 0.023137279510498047, 0.02310553550720215, 0.023040000915527343, 0.02301644706726074, 0.02307481575012207, 0.023023616790771483, 0.02310348892211914, 
0.0233175048828125, 0.04755251312255859, 0.022854656219482423, 0.022975488662719725, 0.023353343963623048, 0.023152639389038086, 0.02314854431152344, 0.02305843162536621, 0.02309836769104004, 0.023037952423095705, 0.023147520065307618, 0.02307891273498535, 0.0230830078125, 0.02305536079406738, 0.023223360061645507, 0.023193599700927735, 0.02304300880432129, 0.02308608055114746, 0.023192575454711914, 0.02326425552368164, 0.022939647674560547, 0.02311680030822754, 0.02394419288635254, 0.023348224639892577, 0.02305740737915039, 0.023009279251098632, 0.023167999267578124, 0.023144447326660156, 0.022809600830078124, 0.022800384521484376, 0.023015424728393553, 0.02313523292541504, 0.023006208419799806, 0.0230645751953125, 0.023166976928710937, 0.023175167083740233, 0.02311577606201172, 0.023161855697631836, 0.02304921531677246, 0.023134208679199218, 0.023093248367309572, 0.02309734344482422, 0.023030784606933592, 0.023034912109375, 0.022987743377685547, 0.023060480117797853, 0.02312396812438965, 0.023242752075195314, 0.023349248886108398, 0.023112703323364257, 0.023111679077148437, 0.023141376495361327, 0.023121919631958008, 0.023111679077148437, 0.023208959579467774, 0.02305023956298828, 0.023026687622070312, 0.023161855697631836, 0.022963199615478515, 0.022746112823486327, 0.023113759994506836, 0.023089120864868164, 0.02312396812438965, 0.023011327743530274, 0.04798668670654297, 0.023198720932006835, 0.023079935073852538, 0.023096319198608398, 0.023208959579467774, 0.02288128089904785, 0.02272051239013672, 0.023003135681152344, 0.022995967864990235, 0.02310860824584961, 0.02305023956298828, 0.022961151123046874, 0.023149568557739256, 0.023373823165893554, 0.023361536026000978, 0.02596659278869629, 0.023402496337890624, 0.023341056823730468, 0.02310553550720215, 0.023129087448120117, 0.023195648193359376, 0.022996992111206056, 0.023002111434936523, 0.022964223861694336, 0.023008256912231444, 0.023119871139526366, 0.02313932800292969, 0.023022592544555662, 0.023030784606933592, 0.023009279251098632, 0.022921215057373046, 0.02306559944152832, 0.022990848541259764, 0.023144447326660156, 0.02306662368774414, 0.022993919372558593, 0.023187456130981447, 0.02310758399963379, 0.022982656478881838, 0.02309222412109375, 0.023020544052124024, 0.023142400741577147, 0.023266304016113282, 0.023081024169921874, 0.023039936065673828, 0.023121919631958008, 0.022969343185424804, 0.023117824554443358, 0.023021568298339845, 0.02302774429321289, 0.023051231384277344, 0.023006208419799806, 0.02306355285644531, 0.02308403205871582, 0.02312704086303711, 0.02306662368774414, 0.023104511260986327, 0.023133184432983397, 0.023158784866333007, 0.023085056304931642, 0.023061504364013673, 0.02309119987487793, 0.02312396812438965, 0.04583731079101563, 0.022203456878662108, 0.02216134452819824, 0.022226943969726562, 0.022037504196166992, 0.022280256271362306, 0.022361024856567383, 0.02212761688232422, 0.022123552322387694, 0.021957599639892578, 0.022017023086547852, 0.02204979133605957, 0.022320127487182616, 0.022146047592163084, 0.02210304069519043, 0.02234880065917969, 0.022205440521240235, 0.02222591972351074, 0.021987327575683592, 0.0224532470703125, 0.022833152770996092, 0.022792192459106447, 0.023018495559692383, 0.023052288055419923, 0.02313523292541504, 0.023206911087036132, 0.023060543060302734, 0.023125951766967773, 0.023023616790771483, 0.02324684715270996, 0.022953983306884765, 0.02307583999633789, 0.023133184432983397, 0.023149568557739256, 0.02301644706726074, 0.02313216018676758, 0.023147520065307618, 
0.02290892791748047, 0.02285977554321289, 0.023060480117797853, 0.02306252861022949, 0.02309222412109375, 0.025249792098999024, 0.023840768814086914, 0.023362560272216795, 0.023129087448120117, 0.023014400482177736, 0.022960128784179686, 0.023056415557861327, 0.023097312927246094, 0.023189504623413085, 0.02311680030822754, 0.023061504364013673, 0.023026687622070312, 0.023216127395629883, 0.023191551208496093, 0.023008256912231444, 0.023112703323364257, 0.02313113594055176, 0.02313113594055176, 0.023143423080444335, 0.023120895385742187, 0.023212032318115236, 0.047608833312988284, 0.02329395294189453, 0.023111679077148437, 0.02310655975341797, 0.02308403205871582, 0.02309119987487793, 0.0230830078125, 0.022982656478881838, 0.023038976669311522, 0.023051263809204102, 0.023031808853149413, 0.02308095932006836, 0.023060480117797853, 0.023021568298339845, 0.0231014404296875, 0.023044095993041993, 0.02312396812438965, 0.02310758399963379, 0.023217151641845703, 0.02310758399963379, 0.023192575454711914, 0.023007232666015624, 0.022994943618774414, 0.022928384780883788, 0.023113727569580078, 0.02289254379272461, 0.02278508758544922, 0.022734783172607423, 0.023166976928710937, 0.023109632492065428, 0.023586816787719726, 0.023093248367309572, 0.023017471313476562, 0.023234560012817384, 0.023157760620117186, 0.023044095993041993, 0.02302566337585449, 0.023150592803955077, 0.023169023513793945, 0.02308608055114746, 0.022939680099487304, 0.023059423446655274, 0.02305023956298828, 0.022963199615478515, 0.02302566337585449, 0.022994943618774414, 0.023315456390380858, 0.023192575454711914, 0.0234803524017334, 0.023378911972045897, 0.023433216094970705, 0.02332467269897461, 0.023188480377197264, 0.0230328311920166, 0.023040000915527343, 0.023022592544555662, 0.023121919631958008, 0.02322329521179199, 0.023230464935302734, 0.023113727569580078, 0.022987775802612305, 0.02313113594055176, 0.023158784866333007, 0.047710208892822265, 0.023205888748168944, 0.023052288055419923, 0.023023616790771483, 0.022976512908935546, 0.022992895126342772, 0.022973440170288087, 0.023619583129882812, 0.02326527976989746, 0.023347200393676756, 0.022972415924072266, 0.023111679077148437, 0.023133184432983397, 0.02310758399963379, 0.02323865509033203, 0.02305843162536621, 0.023149568557739256, 0.023197696685791015, 0.023204864501953124, 0.023143423080444335, 0.023742464065551756, 0.024250368118286132, 0.023478271484375, 0.02314854431152344, 0.023423999786376954, 0.023174144744873046, 0.023266304016113282, 0.023119871139526366, 0.023229440689086913, 0.023147520065307618, 0.023191551208496093, 0.023224319458007812, 0.023133184432983397, 0.023096319198608398, 0.023164928436279295, 0.02327654457092285, 0.02330624008178711, 0.023302143096923827, 0.023157760620117186, 0.023134208679199218, 0.02307072067260742, 0.02306662368774414, 0.023096319198608398, 0.023129087448120117, 0.023036928176879884, 0.02313523292541504, 0.023166976928710937, 0.02310553550720215, 0.02305536079406738, 0.02313113594055176, 0.023143423080444335, 0.023166976928710937, 0.0231014404296875, 0.022992895126342772, 0.02305740737915039, 0.023044095993041993, 0.02310758399963379, 0.02325299263000488, 0.023207935333251953, 0.02304204750061035, 0.02314035224914551, 0.02306662368774414, 0.0230328311920166]",tokens/s,42.63272278414554,,,,, 
-float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,v,v,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,v,v,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -6869,7 +6869,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d467f-2f7264440250fde7318f38e2;0b1e3bc1-03f9-4e25-ab9f-4fc11a776422) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe9da-23fac0e73f52f09d0964de26;5ea197ac-aeb6-45eb-bef5-c7b356b29910) Repository Not Found for url: https://huggingface.co/v/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -6966,7 +6966,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e951b-6f165c377ff9ca535f5dfb93;8fb70e76-343e-42e9-a9c6-dd3eb51abf4d) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe6d8-2285f3875c1bb8b156e78ab4;877aaffd-43ac-4887-b1a5-096a3f8eec69) Repository Not Found for url: https://huggingface.co/M/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -7036,7 +7036,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e95f0-7d8f47690865332f7f18d1af;088dc2ca-c817-44e5-a33d-350b149358c9) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-664fe7b2-080901e620e2d61561fd8461;98f8e391-05bb-497b-80da-a409fc6eb92d) Repository Not Found for url: https://huggingface.co/r/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -7068,7 +7068,7 @@ If this is a private repository, make sure to pass a token having permission to ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-7B,,cuda,0,42,,,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,217063f5c507ed7cc255df7e1f64c4333a0b4dfe,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,MB,1297.969152,17102.798848,0.0,16456.351744,16359.853056,s,10,1.0316815719604493,0.10316815719604491,0.0002858635983448075,0.10308147048950195,0.10334511642456055,0.10366564598083496,0.10392206962585449,"[0.10398617553710937, 0.10306684875488281, 0.1030960922241211, 0.10312809753417969, 0.10292483520507813, 0.1030156478881836, 0.10302108764648438, 0.10305548858642578, 0.10311341094970704, 0.10327388763427735]",tokens/s,2481.385797301167,kWh,1.2201533830998827e-06,6.685899876493308e-07,5.9327739329280695e-06,7.821517303677283e-06,tokens/kWh,32730222.290711,MB,1297.969152,17102.798848,0.0,16456.351744,16416.242176,s,10,23.347927978515628,2.334792797851563,0.0034280681689723264,2.3335817871093747,2.337003662109375,2.340813720703125,2.343861767578125,"[2.344623779296875, 2.333737060546875, 2.33296240234375, 2.332305419921875, 2.336156982421875, 2.333426513671875, 2.332965576171875, 2.3332919921875, 2.3338671875, 2.334591064453125]",tokens/s,26.983122467214883,kWh,2.7499233605116095e-05,1.5070586503284129e-05,0.0001175103525992714,0.0001600801727076716,tokens/kWh,393552.79879068263,,s,629,24.23761405563358,0.03853356765601517,0.012321844083848062,0.037031936645507815,0.03714355087280274,0.03729612808227539,0.14069289001464844,"[0.03764432144165039, 0.037763038635253904, 0.03763711929321289, 0.03765657424926758, 0.03772313690185547, 0.037664768218994144, 0.03767091369628906, 0.037716991424560545, 0.037797889709472655, 0.03747945785522461, 0.037622753143310546, 0.037645313262939455, 0.03738521575927734, 0.03702579116821289, 0.03698995208740234, 0.036985855102539066, 0.036997119903564454, 0.03695001602172852, 0.03700428771972656, 0.03702169418334961, 0.03701145553588867, 0.03698483276367188, 0.03700838470458984, 0.037005313873291014, 0.036994049072265625, 0.03697971343994141, 0.03704524612426758, 0.03710464096069336, 0.03711180877685547, 0.03726131057739258, 0.03727052688598633, 0.037032958984375, 0.03700735855102539, 0.037064704895019535, 0.03705753707885742, 0.0370513916015625, 0.03701964950561523, 0.03704217529296875, 0.03706985473632812, 0.037058528900146485, 0.0370145263671875, 0.037154815673828126, 0.03728179168701172, 0.03710464096069336, 0.037187583923339845, 0.037100543975830076, 0.03710566329956055, 0.03720191955566406, 0.0371517448425293, 0.0370780143737793, 0.03706572723388672, 0.037108734130859376, 0.03710976028442383, 0.037082111358642575, 0.037103614807128905, 0.03712819290161133, 0.037190654754638675, 0.037100543975830076, 
0.037100543975830076, 0.03712819290161133, 0.0370882568359375, 0.037119998931884765, 0.14084197998046874, 0.03694182586669922, 0.036893695831298826, 0.0369090576171875, 0.03691417694091797, 0.036942848205566405, 0.036949024200439454, 0.03697865676879883, 0.036947967529296875, 0.03698483276367188, 0.036969470977783206, 0.036982784271240236, 0.036975616455078124, 0.037026817321777344, 0.03704934310913086, 0.03696332931518555, 0.03697049713134765, 0.03696537780761719, 0.036992000579833983, 0.037012481689453126, 0.036961280822753906, 0.03697663879394531, 0.037015552520751956, 0.03709542465209961, 0.037125118255615236, 0.037087230682373046, 0.03700940704345703, 0.03706265640258789, 0.03699609756469727, 0.036994049072265625, 0.03698390579223633, 0.03700624084472656, 0.036961280822753906, 0.03701145553588867, 0.03718860626220703, 0.037341182708740234, 0.03707904052734375, 0.03705651092529297, 0.03700940704345703, 0.03700940704345703, 0.03709648132324219, 0.037037025451660155, 0.03700844955444336, 0.03702880096435547, 0.03706265640258789, 0.03703807830810547, 0.037064704895019535, 0.03703091049194336, 0.0370513916015625, 0.03708006286621094, 0.03705753707885742, 0.03709542465209961, 0.03706265640258789, 0.037070846557617186, 0.03704729461669922, 0.03712102508544922, 0.03710259246826172, 0.03708108901977539, 0.037133312225341795, 0.0370964469909668, 0.03709337615966797, 0.03706982421875, 0.037147647857666014, 0.14093618774414063, 0.03697663879394531, 0.036964351654052735, 0.03702169418334961, 0.036977664947509765, 0.03695718383789062, 0.036937759399414065, 0.03700323104858398, 0.037026817321777344, 0.036947967529296875, 0.03691622543334961, 0.03695820617675781, 0.036977664947509765, 0.036980735778808595, 0.03702169418334961, 0.036992000579833983, 0.036972545623779295, 0.036945919036865234, 0.0370145263671875, 0.03695315170288086, 0.036950977325439456, 0.03697356796264648, 0.036967422485351564, 0.03697049713134765, 0.03697868728637695, 0.03699609756469727, 0.03701251220703125, 0.037018592834472654, 0.03703091049194336, 0.03703398513793945, 0.036999168395996096, 0.03703500747680664, 0.03705855941772461, 0.036992000579833983, 0.03702783966064453, 0.037006401062011716, 0.037001152038574216, 0.03702067184448242, 0.03702889633178711, 0.037015518188476565, 0.037026817321777344, 0.03704627227783203, 0.037031936645507815, 0.0370513916015625, 0.0370513916015625, 0.03706367874145508, 0.037043201446533204, 0.03709337615966797, 0.037015552520751956, 0.037054462432861326, 0.03705548858642578, 0.03706880187988281, 0.03702998352050781, 0.03703081512451172, 0.03711795043945312, 0.03706367874145508, 0.03707289505004883, 0.03706675338745117, 0.03709030532836914, 0.03709952163696289, 0.03707289505004883, 0.03709132766723633, 0.037160961151123044, 0.14081639099121093, 0.036934654235839845, 0.036910079956054685, 0.03691424179077148, 0.036932544708251955, 0.03693875122070313, 0.03694079971313476, 0.03696640014648438, 0.036951038360595705, 0.036994049072265625, 0.03695206451416016, 0.03693670272827149, 0.03699507141113281, 0.03695718383789062, 0.036928512573242187, 0.03695206451416016, 0.03703807830810547, 0.036967422485351564, 0.036945919036865234, 0.036944896697998046, 0.03699814224243164, 0.036951038360595705, 0.03695513534545898, 0.036975616455078124, 0.037031936645507815, 0.037012481689453126, 0.036961280822753906, 0.036956161499023435, 0.037101566314697264, 0.037103614807128905, 0.036992000579833983, 0.037012481689453126, 0.0370513916015625, 0.037005313873291014, 0.03699507141113281, 0.0370063362121582, 
0.03708313751220703, 0.037026817321777344, 0.037015552520751956, 0.03701760101318359, 0.03703807830810547, 0.037028865814208986, 0.03701964950561523, 0.036999168395996096, 0.03704524612426758, 0.03703603363037109, 0.03701760101318359, 0.0370513916015625, 0.03707596969604492, 0.03703398513793945, 0.0370247688293457, 0.03702374267578125, 0.03705548858642578, 0.03708313751220703, 0.037043201446533204, 0.03712102508544922, 0.03710566329956055, 0.03708313751220703, 0.037059585571289064, 0.03709030532836914, 0.03706060791015625, 0.03708006286621094, 0.03710464096069336, 0.1406781463623047, 0.0369356803894043, 0.03690496063232422, 0.03687628936767578, 0.03694079971313476, 0.036945919036865234, 0.036918270111083985, 0.0369172477722168, 0.03697868728637695, 0.03707392120361328, 0.03714355087280274, 0.036956161499023435, 0.037002239227294925, 0.036961280822753906, 0.03694387054443359, 0.036951038360595705, 0.0369879035949707, 0.03704012680053711, 0.036939777374267575, 0.037004318237304684, 0.037002208709716794, 0.036969470977783206, 0.03695513534545898, 0.03702272033691406, 0.03696844863891602, 0.03700735855102539, 0.036967422485351564, 0.03703910446166992, 0.03711795043945312, 0.037043201446533204, 0.0370247688293457, 0.037010433197021485, 0.03704627227783203, 0.03705855941772461, 0.037002239227294925, 0.03704934310913086, 0.03711590576171875, 0.03730636978149414, 0.03718656158447266, 0.037070846557617186, 0.03702067184448242, 0.0370513916015625, 0.03704934310913086, 0.037131263732910154, 0.03704524612426758, 0.03706675338745117, 0.03709750366210938, 0.0372408332824707, 0.03707183837890625, 0.037154815673828126, 0.03719987106323242, 0.03711078262329102, 0.03705036926269531, 0.037222400665283206, 0.03783679962158203, 0.03758182525634766, 0.037302272796630856, 0.037326847076416016, 0.03707904052734375, 0.03711078262329102, 0.037098495483398435, 0.03709132766723633, 0.03709132766723633, 0.14067097473144533, 0.036929534912109374, 0.03692134475708008, 0.03688857650756836, 0.03690598297119141, 0.03700735855102539, 0.036959232330322264, 0.03698175811767578, 0.03690393447875977, 0.03695315170288086, 0.03698271942138672, 0.036926464080810545, 0.036913150787353514, 0.036928512573242187, 0.036979774475097656, 0.03693766403198242, 0.03695001602172852, 0.036913150787353514, 0.03695718383789062, 0.03700428771972656, 0.037103614807128905, 0.0370882568359375, 0.036994049072265625, 0.03695513534545898, 0.03705855941772461, 0.037085182189941404, 0.037029888153076174, 0.03695718383789062, 0.03699507141113281, 0.03702783966064453, 0.03706060791015625, 0.03711283111572266, 0.037101566314697264, 0.03706982421875, 0.03702169418334961, 0.037064704895019535, 0.03701145553588867, 0.037082111358642575, 0.037136383056640625, 0.03704729461669922, 0.037031936645507815, 0.03706367874145508, 0.037028865814208986, 0.03703807830810547, 0.03701862335205078, 0.0370780143737793, 0.03705036926269531, 0.03707289505004883, 0.03707494354248047, 0.03707596969604492, 0.03717836761474609, 0.037256191253662106, 0.03707289505004883, 0.037087230682373046, 0.037131263732910154, 0.03706367874145508, 0.03707904052734375, 0.037048320770263675, 0.03702272033691406, 0.03710259246826172, 0.037116928100585936, 0.037087230682373046, 0.037106689453125, 0.14069862365722657, 0.03696844863891602, 0.03688652801513672, 0.036893695831298826, 0.03691212844848633, 0.036928512573242187, 0.03691526412963867, 0.03693049621582031, 0.0369244155883789, 0.03696230316162109, 0.036942848205566405, 0.036910079956054685, 0.036934654235839845, 0.036951038360595705, 
0.03695718383789062, 0.036931583404541016, 0.03693670272827149, 0.0369879035949707, 0.03695001602172852, 0.036985855102539066, 0.03702783966064453, 0.03713945770263672, 0.03702783966064453, 0.03705344009399414, 0.036972545623779295, 0.03703910446166992, 0.03699097442626953, 0.03700940704345703, 0.03700121688842774, 0.03721932983398438, 0.03701760101318359, 0.0369879035949707, 0.037010433197021485, 0.03705753707885742, 0.03703807830810547, 0.03701862335205078, 0.03708108901977539, 0.03707596969604492, 0.03706060791015625, 0.03703398513793945, 0.03700121688842774, 0.037048320770263675, 0.03703807830810547, 0.03704934310913086, 0.037070846557617186, 0.03707596969604492, 0.037048320770263675, 0.03709036636352539, 0.03710867309570313, 0.03710976028442383, 0.03707187271118164, 0.037087230682373046, 0.03706880187988281, 0.03706982421875, 0.03706060791015625, 0.03709439849853516, 0.03705548858642578, 0.03709542465209961, 0.03706265640258789, 0.03716505432128906, 0.037064704895019535, 0.03708927917480469, 0.03703919982910156, 0.1407169647216797, 0.03688857650756836, 0.03689471817016601, 0.03688550567626953, 0.036910079956054685, 0.03694899368286133, 0.036918270111083985, 0.03693059158325195, 0.03693052673339844, 0.036985855102539066, 0.03700326538085937, 0.036980735778808595, 0.03696230316162109, 0.037007423400878904, 0.036967361450195316, 0.036947967529296875, 0.03702169418334961, 0.03698688125610351, 0.036956161499023435, 0.036977664947509765, 0.036961280822753906, 0.037000190734863284, 0.03704012680053711, 0.037002239227294925, 0.03697356796264648, 0.0370513916015625, 0.03700428771972656, 0.037000190734863284, 0.03700428771972656, 0.03714355087280274, 0.03699609756469727, 0.03700940704345703, 0.03699507141113281, 0.03702272033691406, 0.037002239227294925, 0.037012481689453126, 0.03709337615966797, 0.03709542465209961, 0.0370882568359375, 0.03703091049194336, 0.037032958984375, 0.03714355087280274, 0.03703705596923828, 0.03713433456420898, 0.03708313751220703, 0.03708313751220703, 0.037059585571289064, 0.03708006286621094, 0.03706265640258789, 0.03706880187988281, 0.037144577026367184, 0.037059585571289064, 0.03706777572631836, 0.03707904052734375, 0.037068832397460935, 0.037074913024902345, 0.03707494354248047, 0.03709542465209961, 0.03706880187988281, 0.037119998931884765, 0.03708620834350586, 0.037085182189941404, 0.03719168090820312, 0.14086860656738281, 0.03693670272827149, 0.03689471817016601, 0.036898815155029296, 0.03698995208740234, 0.036945919036865234, 0.03689779281616211, 0.036951038360595705, 0.03698483276367188, 0.03694899368286133, 0.03696025466918945, 0.036953086853027346, 0.0370063362121582, 0.036982784271240236, 0.036951072692871095, 0.03691312026977539, 0.036961280822753906, 0.03705548858642578, 0.03698175811767578, 0.036988929748535154, 0.03696230316162109, 0.036985855102539066, 0.03697868728637695, 0.03695820617675781, 0.036992000579833983, 0.03703603363037109, 0.03704940795898438, 0.0369950065612793, 0.03700326538085937, 0.03703705596923828, 0.036992000579833983, 0.03703094482421875, 0.03701961517333984, 0.03702169418334961, 0.036992000579833983, 0.03700121688842774, 0.03713228988647461, 0.037054462432861326, 0.037013504028320314, 0.03700735855102539, 0.03702272033691406, 0.03704524612426758, 0.03699097442626953, 0.037130241394042966, 0.03733606338500976, 0.03712102508544922, 0.03712409591674805, 0.03708006286621094, 0.03706163024902344, 0.03706163024902344, 0.03701760101318359, 0.037132350921630856, 0.037069759368896484, 0.03706265640258789, 0.037048320770263675, 
0.03711590576171875, 0.037179393768310545, 0.03737702560424805, 0.03711897659301758, 0.03707494354248047, 0.037103614807128905, 0.03722649765014648, 0.03718348693847656, 0.1409617919921875, 0.03692031860351563, 0.03689779281616211, 0.036915199279785156, 0.03690496063232422, 0.036934654235839845, 0.03690393447875977, 0.03700838470458984, 0.03692236709594727, 0.03696025466918945, 0.03695513534545898, 0.036994049072265625, 0.03700940704345703, 0.036982784271240236, 0.03694079971313476, 0.03701766586303711, 0.03702982330322266, 0.036939777374267575, 0.03697049713134765, 0.0370247688293457, 0.036988929748535154, 0.03696844863891602, 0.03695513534545898, 0.03698995208740234, 0.03698995208740234, 0.036964351654052735, 0.03716201782226562, 0.03732579040527344, 0.037166080474853515, 0.03703910446166992, 0.03698995208740234, 0.03705344009399414, 0.03705241775512695, 0.037000190734863284, 0.03699302291870117, 0.037064704895019535, 0.03705344009399414, 0.03704217529296875, 0.0373831672668457, 0.03728691101074219, 0.03709439849853516, 0.03712204742431641, 0.037125118255615236, 0.037101566314697264, 0.03703398513793945, 0.037087230682373046, 0.03706163024902344, 0.03706367874145508, 0.03706572723388672, 0.037070846557617186, 0.03707187271118164, 0.037163009643554686, 0.03707289505004883, 0.03718659210205078, 0.037117919921875, 0.03708313751220703, 0.037114879608154294, 0.03711180877685547, 0.037108734130859376, 0.037108734130859376, 0.03711078262329102, 0.037101566314697264, 0.03708313751220703]",tokens/s,25.951399281968584,,,main,False,False -float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -7132,7 +7132,7 @@ ChildProcessError: Traceback (most recent call last): torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -7161,7 +7161,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664d487c-23fea44f0cdc71804a1808d9;b7b91380-cd72-486c-a9eb-c4ad89aa74c4) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664febf1-04bd92d562d51cb21cd22d72;2bd1fe03-e7ab-444c-9c51-d805d0de2e9f) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json. @@ -7246,7 +7246,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e962b-3e3cdb42165d143973a44242;ad3b950f-e15c-468d-bac8-c0dd870c4d26) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe7e7-7c826a3e1ebf435f711df2eb;cb0407f6-c660-4470-9f23-6a20015d3a04) Repository Not Found for url: https://huggingface.co/a/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -7278,7 +7278,7 @@ If this is a private repository, make sure to pass a token having permission to ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.0,,,MB,1249.275904,6715.604992,0.0,6069.157888,5763.863552,s,10,0.3415832290649414,0.034158322906494135,0.0002809213528596382,0.03404644775390625,0.03435670700073242,0.034658257675170896,0.034899498214721686,"[0.03495980834960938, 0.0340579833984375, 0.034001216888427735, 0.034066753387451174, 0.033984542846679684, 0.03401929473876953, 0.03401123046875, 0.03428969573974609, 0.034034912109375, 0.03415779113769531]",tokens/s,7494.513144008298,kWh,4.0388607734706223e-07,2.213100763245153e-07,2.4461614184368217e-06,3.0713575721083992e-06,tokens/kWh,83350763.95037368,MB,1249.275904,6736.576512,0.0,6090.129408,5863.102976,s,10,14.847766601562498,1.4847766601562498,0.009434452525821986,1.4838017578125,1.4956170288085937,1.5011369445800782,1.5055528771972655,"[1.5066568603515624, 1.477275390625, 1.48590478515625, 1.475824462890625, 1.4729095458984376, 1.4877525634765625, 1.4845987548828126, 1.494390380859375, 1.4794490966796876, 1.4830047607421875]",tokens/s,42.43062387131693,kWh,1.7854550211751467e-05,9.782296886967015e-06,5.96573327087643e-05,8.729417980748278e-05,tokens/kWh,721697.5993008837,,s,629,15.12954572105409,0.024053331829974687,0.004114993424087828,0.02345779228210449,0.024201830291748046,0.02438041572570801,0.05799691192626953,"[0.0243189754486084, 0.024070144653320313, 0.024370176315307617, 0.024418304443359375, 0.024333311080932618, 0.024264736175537108, 0.0241376953125, 0.024184831619262694, 0.024371200561523438, 0.024382463455200197, 0.02431391906738281, 0.024259519577026368, 0.02434048080444336, 0.024217599868774413, 0.024327167510986326, 0.024425472259521484, 0.024183807373046876, 0.024028160095214843, 0.023979007720947267, 0.024398847579956053, 0.02503167915344238, 0.024534015655517577, 0.024319999694824217, 0.02429542350769043, 0.024276992797851563, 0.024151039123535157, 0.024175615310668946, 0.02412646484375, 0.023448575973510744, 0.023397375106811523, 0.02346598434448242, 0.023434240341186522, 0.023419904708862304, 0.023451648712158202, 0.023436351776123045, 0.02344441604614258, 0.023434240341186522, 0.02351103973388672, 0.023455743789672853, 0.023382015228271484, 0.02346188735961914, 0.023341056823730468, 0.02342911911010742, 0.023448575973510744, 0.023391231536865235, 0.02345471954345703, 0.02342092704772949, 0.023367712020874024, 0.023459808349609375, 0.023391231536865235, 0.023417856216430662, 0.02351820755004883, 0.02349465560913086, 0.023439359664916993, 0.023432191848754884, 0.023401472091674806, 0.023475263595581056, 0.023490495681762695, 0.024637439727783202, 0.024976383209228514, 0.024239103317260743, 0.024284160614013672, 0.058036224365234375, 0.023432191848754884, 0.023398399353027344, 0.02345881652832031, 0.023421951293945312, 0.02344550323486328, 
0.023401472091674806, 0.02347315216064453, 0.023396352767944335, 0.023349248886108398, 0.023488512039184572, 0.02344550323486328, 0.0233503360748291, 0.023406528472900392, 0.023444480895996093, 0.023436288833618164, 0.023385087966918947, 0.023367679595947266, 0.023378944396972655, 0.023363584518432616, 0.023425024032592775, 0.02343017578125, 0.023492576599121094, 0.023427072525024413, 0.023327743530273438, 0.023358463287353515, 0.02346905517578125, 0.023407615661621094, 0.02335436820983887, 0.023379968643188476, 0.023378944396972655, 0.023447551727294923, 0.023557119369506836, 0.023408639907836915, 0.023478271484375, 0.023478271484375, 0.023413759231567383, 0.023411712646484374, 0.023513088226318358, 0.023421951293945312, 0.02346598434448242, 0.023443456649780273, 0.023406591415405274, 0.023407615661621094, 0.02349158477783203, 0.023398399353027344, 0.023504896163940428, 0.02350492858886719, 0.023454687118530274, 0.023400480270385743, 0.02356425666809082, 0.023557119369506836, 0.02353152084350586, 0.023476224899291992, 0.023463935852050782, 0.023407615661621094, 0.023554048538208007, 0.02351411247253418, 0.02346598434448242, 0.02349567985534668, 0.02350796890258789, 0.023426048278808592, 0.02348134422302246, 0.057998336791992185, 0.023553024291992186, 0.02350284767150879, 0.023454784393310547, 0.023477184295654298, 0.023446527481079102, 0.023380992889404296, 0.023224319458007812, 0.023182336807250976, 0.023102495193481447, 0.023236576080322265, 0.023434240341186522, 0.023447551727294923, 0.02347929573059082, 0.02348953628540039, 0.02345267105102539, 0.023468032836914062, 0.023532543182373047, 0.023406591415405274, 0.023533567428588868, 0.023533567428588868, 0.023416864395141603, 0.023478239059448243, 0.023430143356323242, 0.02467532730102539, 0.025385984420776365, 0.024627199172973634, 0.02410713577270508, 0.023924640655517578, 0.023842912673950195, 0.0240392951965332, 0.024017919540405275, 0.02349465560913086, 0.023573503494262696, 0.023488512039184572, 0.02348646354675293, 0.02355200004577637, 0.024377344131469726, 0.023631967544555665, 0.023377824783325195, 0.023392288208007813, 0.023446495056152344, 0.023433216094970705, 0.02344960021972656, 0.023433216094970705, 0.023441408157348635, 0.023432191848754884, 0.0235284481048584, 0.023421951293945312, 0.023459840774536132, 0.023425024032592775, 0.02346598434448242, 0.02349567985534668, 0.023480319976806642, 0.023436288833618164, 0.02345369529724121, 0.023657472610473632, 0.02342092704772949, 0.023434240341186522, 0.02351411247253418, 0.02346188735961914, 0.023545856475830077, 0.023443456649780273, 0.05831987380981445, 0.02352332878112793, 0.023571456909179687, 0.02346291160583496, 0.02350592041015625, 0.02347110366821289, 0.02345779228210449, 0.023504896163940428, 0.02355615997314453, 0.023502784729003905, 0.02347110366821289, 0.02345881652832031, 0.023371776580810546, 0.02355200004577637, 0.02346086311340332, 0.023430143356323242, 0.02351411247253418, 0.023558143615722657, 0.023398399353027344, 0.02351103973388672, 0.023814144134521483, 0.02351820755004883, 0.023433216094970705, 0.02355200004577637, 0.02341072082519531, 0.023532512664794922, 0.02351103973388672, 0.02349875259399414, 0.023417856216430662, 0.02355200004577637, 0.023370752334594725, 0.023549951553344727, 0.02352230453491211, 0.02348441505432129, 0.02344960021972656, 0.023508991241455078, 0.023362560272216795, 0.023473215103149415, 0.0235611515045166, 0.023428096771240234, 0.023414783477783203, 0.02347929573059082, 0.023085056304931642, 0.023146495819091797, 
0.02325196838378906, 0.02349056053161621, 0.023432191848754884, 0.023431167602539063, 0.023365631103515624, 0.023464960098266603, 0.023370752334594725, 0.02347724723815918, 0.02344960021972656, 0.02348236846923828, 0.02331340789794922, 0.02307174491882324, 0.02308403205871582, 0.023051424026489256, 0.02309209632873535, 0.02310243225097656, 0.02307379150390625, 0.02310553550720215, 0.023136255264282226, 0.057630718231201174, 0.023143423080444335, 0.023375871658325196, 0.023198720932006835, 0.02310041618347168, 0.02312716865539551, 0.023042943954467772, 0.023043071746826172, 0.02309734344482422, 0.023185407638549805, 0.023227392196655275, 0.023128063201904296, 0.02305638313293457, 0.02309222412109375, 0.023240703582763672, 0.023085056304931642, 0.023349248886108398, 0.023472127914428712, 0.023419904708862304, 0.023394304275512694, 0.0235100154876709, 0.023411712646484374, 0.02345779228210449, 0.02348236846923828, 0.023430143356323242, 0.023447551727294923, 0.02370560073852539, 0.023410688400268553, 0.02344550323486328, 0.023390207290649414, 0.023396352767944335, 0.023369728088378908, 0.023415807723999024, 0.023402496337890624, 0.023443456649780273, 0.023372800827026367, 0.023384063720703126, 0.023401472091674806, 0.023415807723999024, 0.02344550323486328, 0.02348953628540039, 0.023421951293945312, 0.023414880752563476, 0.023358367919921876, 0.023433216094970705, 0.02346188735961914, 0.023426048278808592, 0.023547903060913086, 0.023410688400268553, 0.023407615661621094, 0.023569408416748046, 0.02348646354675293, 0.023606271743774415, 0.02345779228210449, 0.023526464462280273, 0.023427007675170898, 0.02345471954345703, 0.02347007942199707, 0.02351923179626465, 0.023554048538208007, 0.023448575973510744, 0.023406591415405274, 0.023472127914428712, 0.05804339218139649, 0.02350387191772461, 0.02408755111694336, 0.023702527999877928, 0.02348441505432129, 0.023425024032592775, 0.023435264587402343, 0.02332057571411133, 0.024137727737426756, 0.02434764862060547, 0.02429542350769043, 0.024327167510986326, 0.024239103317260743, 0.023469120025634765, 0.023427007675170898, 0.023392255783081056, 0.024261632919311524, 0.024201215744018553, 0.023198720932006835, 0.023152639389038086, 0.023141376495361327, 0.02299920082092285, 0.023109472274780274, 0.02393497657775879, 0.023546880722045898, 0.025165824890136718, 0.025044031143188476, 0.024078271865844728, 0.024030208587646484, 0.023972864151000976, 0.023961599349975587, 0.023937023162841797, 0.024005632400512695, 0.023842815399169923, 0.02342911911010742, 0.023193599700927735, 0.023129215240478517, 0.02310335922241211, 0.02308710479736328, 0.023026687622070312, 0.02333695983886719, 0.023129087448120117, 0.023152736663818358, 0.023136159896850587, 0.023407615661621094, 0.0233175048828125, 0.023348224639892577, 0.023567359924316408, 0.023390207290649414, 0.02351206398010254, 0.023517183303833008, 0.023425056457519532, 0.02352943992614746, 0.02353152084350586, 0.02348748779296875, 0.02352639961242676, 0.023421951293945312, 0.02349363136291504, 0.023443487167358397, 0.023484384536743164, 0.023524351119995117, 0.023362560272216795, 0.023558143615722657, 0.057993247985839845, 0.023377887725830077, 0.023391231536865235, 0.023476224899291992, 0.023373823165893554, 0.023549951553344727, 0.023427072525024413, 0.023417856216430662, 0.023607295989990236, 0.023508991241455078, 0.023641088485717773, 0.02352742385864258, 0.02346598434448242, 0.02350182342529297, 0.023366655349731445, 0.02346598434448242, 0.023436288833618164, 0.023386112213134767, 0.02347007942199707, 
0.023447551727294923, 0.023601152420043944, 0.02347315216064453, 0.023392255783081056, 0.023533567428588868, 0.02332876777648926, 0.02353049659729004, 0.023421951293945312, 0.023367679595947266, 0.02367283248901367, 0.024135679244995118, 0.025536512374877928, 0.02455449676513672, 0.024352767944335937, 0.024139776229858398, 0.02342911911010742, 0.02350796890258789, 0.023508991241455078, 0.02348646354675293, 0.02349056053161621, 0.02344960021972656, 0.023498783111572264, 0.02340553665161133, 0.023409664154052736, 0.02355200004577637, 0.023378944396972655, 0.02348441505432129, 0.02351206398010254, 0.023472127914428712, 0.023480319976806642, 0.023382015228271484, 0.02349056053161621, 0.02346086311340332, 0.023463935852050782, 0.023527456283569337, 0.02338096046447754, 0.023472127914428712, 0.023382112503051757, 0.023459775924682617, 0.023515104293823242, 0.02346700859069824, 0.023778303146362305, 0.023564287185668945, 0.023370752334594725, 0.058297344207763675, 0.023408639907836915, 0.02350284767150879, 0.023422975540161133, 0.023415807723999024, 0.02331648063659668, 0.023433216094970705, 0.02326323127746582, 0.02426572799682617, 0.024196096420288086, 0.024018943786621092, 0.024142847061157227, 0.023967744827270508, 0.0241582088470459, 0.0241582088470459, 0.024054784774780274, 0.024240127563476564, 0.02411827278137207, 0.02346700859069824, 0.023397375106811523, 0.023372800827026367, 0.02345267105102539, 0.023414783477783203, 0.023548959732055664, 0.023414751052856446, 0.023402496337890624, 0.02346700859069824, 0.023444480895996093, 0.023579647064208984, 0.02350592041015625, 0.023342079162597656, 0.023468032836914062, 0.0233175048828125, 0.02351820755004883, 0.023673856735229492, 0.02555801582336426, 0.024400896072387695, 0.024129535675048826, 0.024209407806396483, 0.0241213436126709, 0.024258560180664062, 0.02428620719909668, 0.02412441635131836, 0.02430975914001465, 0.024558591842651366, 0.023789567947387694, 0.023568416595458986, 0.023423967361450197, 0.023504896163940428, 0.023516159057617187, 0.02349056053161621, 0.02344960021972656, 0.02350387191772461, 0.02350387191772461, 0.023388160705566406, 0.023540735244750977, 0.02349158477783203, 0.02345062446594238, 0.02349875259399414, 0.023426048278808592, 0.023361536026000978, 0.023392255783081056, 0.02329190444946289, 0.05805055999755859, 0.023436288833618164, 0.023431167602539063, 0.02343731117248535, 0.023513088226318358, 0.02348953628540039, 0.023392255783081056, 0.023423040390014648, 0.023398336410522462, 0.023305248260498047, 0.023516128540039063, 0.023426048278808592, 0.023421951293945312, 0.023410688400268553, 0.023421951293945312, 0.023380992889404296, 0.023533567428588868, 0.023380992889404296, 0.023412736892700195, 0.02344960021972656, 0.023372800827026367, 0.023404544830322265, 0.023431167602539063, 0.02349977684020996, 0.023407615661621094, 0.023350271224975586, 0.02345779228210449, 0.023380992889404296, 0.023430143356323242, 0.023441408157348635, 0.02351206398010254, 0.023569440841674803, 0.023420896530151367, 0.023351295471191406, 0.02365644836425781, 0.023561216354370116, 0.02345779228210449, 0.02342911911010742, 0.023348224639892577, 0.023472127914428712, 0.023468032836914062, 0.02343731117248535, 0.023356416702270507, 0.02345471954345703, 0.023435264587402343, 0.02344960021972656, 0.023558143615722657, 0.02345779228210449, 0.02347007942199707, 0.023525375366210938, 0.023257087707519532, 0.023436288833618164, 0.023582719802856447, 0.023546880722045898, 0.02353459167480469, 0.0234833927154541, 0.023538688659667968, 
0.02346086311340332, 0.023504896163940428, 0.02353971290588379, 0.023565311431884766, 0.02348953628540039, 0.024611839294433592, 0.05835366439819336, 0.023222272872924804, 0.024204288482666016, 0.024321023941040038, 0.024071168899536134, 0.024460351943969727, 0.02442848014831543, 0.024451072692871095, 0.02448588752746582, 0.024358911514282225, 0.023616512298583983, 0.023593984603881835, 0.023472127914428712, 0.023440383911132814, 0.023450687408447267, 0.02343622398376465, 0.02344550323486328, 0.02350182342529297, 0.023405567169189453, 0.023488512039184572, 0.023557119369506836, 0.023412736892700195, 0.023435295104980467, 0.023483360290527344, 0.023394304275512694, 0.023403520584106444, 0.023451648712158202, 0.02332159996032715, 0.02330624008178711, 0.023224319458007812, 0.023030784606933592, 0.02307379150390625, 0.02333798408508301, 0.023425024032592775, 0.023407615661621094, 0.023379968643188476, 0.02333286476135254, 0.02345471954345703, 0.023391231536865235, 0.023387136459350585, 0.023348224639892577, 0.02345062446594238, 0.023352319717407227, 0.023387136459350585, 0.023423999786376954, 0.023390207290649414, 0.023405567169189453, 0.023379968643188476, 0.023407615661621094, 0.023389184951782226, 0.023409664154052736, 0.02342092704772949, 0.023347200393676756, 0.023456768035888673, 0.023369728088378908, 0.023396352767944335, 0.023380992889404296, 0.02342911911010742, 0.023341056823730468, 0.023928831100463867, 0.02366361618041992, 0.023439359664916993, 0.023423999786376954]",tokens/s,41.574281977593806,,,,, -float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -7334,7 +7334,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664e8fd0-66d93c145a82e42e7cab2970;3658122a-f16f-4f85-94ef-b8e2e325cb19) 
+huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664fe16c-70b1c8b914082971735f0941;c0f819da-3fa0-4f77-be76-949d552e2e5b) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json. @@ -7420,7 +7420,7 @@ Traceback (most recent call last): OSError: Incorrect path_or_model_id: '-'. Please provide either the path to a local folder or the repo_id of a model on the Hub. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -7486,7 +7486,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e95ba-7d9a6d440d71a73e5e3c49fb;edae81e5-88b4-442f-b447-4ceed4aa0954) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe77c-6c890cec52f0e38a4e1f2747;3e8bf50b-a33c-43af-88b2-9d9b77c9e4ed) Repository Not Found for url: https://huggingface.co/t/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -7620,7 +7620,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e9070-66b16a927256657e29425ea7;b960958e-970c-4c0c-89f8-5f14f0a14b70) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-664fe213-39efa46e05db86cf7b913246;d8eeb13b-c80a-4a90-89b1-8d6095eb2b54) Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -7742,7 +7742,7 @@ ChildProcessError: Traceback (most recent call last): ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,B,B,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,B,B,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -7781,7 +7781,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d4616-1561fbef034183bd7b83bf28;9b96eab6-eb74-4e55-ae4b-52db3ab78c63) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe96d-69f36dc2027b365a029f3fd2;d8e09fba-ba33-4cb1-b446-e93547eb3227) Repository Not Found for url: https://huggingface.co/B/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -7812,7 +7812,7 @@ OSError: B is not a local folder and is not a valid model identifier listed on ' If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -7879,7 +7879,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e936d-1d8aead72b50363c4dfb170b;d6401b6d-d7c0-44c3-b302-47ea42755251) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe520-755f5b45712bc03168f6cd9c;fa234855-bfe0-4cad-9a43-1d3b4d47401a) Repository Not Found for url: https://huggingface.co/s/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -7910,7 +7910,7 @@ OSError: s is not a local folder and is not a valid model identifier listed on ' If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -8083,7 +8083,7 @@ torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 136.00 MiB. 
G ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,MB,1220.804608,14563.147776,0.0,13916.700672,13738.740736,s,10,0.9359868392944336,0.09359868392944336,0.0002275031437491741,0.09355839920043946,0.09371135940551757,0.09397617454528809,0.09418802665710449,"[0.09424098968505859, 0.09356479644775391, 0.093552001953125, 0.09348719787597656, 0.09365251159667969, 0.09336857604980468, 0.09358521270751953, 0.09344294738769532, 0.09350450897216797, 0.09358809661865235]",tokens/s,2735.08119187849,kWh,1.1069706744617887e-06,6.065673582729388e-07,6.1301970121495445e-06,7.843735044884272e-06,tokens/kWh,32637512.42680802,MB,1220.804608,14563.147776,0.0,13916.700672,13889.779712,s,10,22.558502685546873,2.2558502685546875,0.0009753264580620002,2.25562646484375,2.2571563720703125,2.2574865112304687,2.2577506225585937,"[2.255815673828125, 2.25638037109375, 2.254172119140625, 2.255611083984375, 2.255641845703125, 2.257816650390625, 2.25538134765625, 2.2570830078125, 2.255073974609375, 2.255526611328125]",tokens/s,27.927385464445653,kWh,2.6576527158419297e-05,1.4564238501452137e-05,0.00011811940238744975,0.00015926016804732117,tokens/kWh,395579.13803833694,,s,629,23.363786796569805,0.03714433512968176,0.011185130091063316,0.035789825439453124,0.035883416748046876,0.035952844238281254,0.12990066833496094,"[0.035757057189941405, 0.03570687866210937, 0.03571200180053711, 0.03569356918334961, 0.03569356918334961, 0.03574272155761719, 0.0357130241394043, 0.03570995330810547, 0.0357386245727539, 0.035724288940429685, 0.035729408264160156, 0.03568646240234375, 0.03574163055419922, 0.035901439666748046, 0.03585945510864258, 0.035781631469726564, 0.03575913619995117, 0.03576009750366211, 0.03576934432983398, 0.03573766326904297, 0.03574470520019531, 0.035730430603027344, 0.03575193786621094, 0.0357386245727539, 0.03576115036010742, 0.0357498893737793, 0.03577241516113281, 0.03578572845458984, 0.03581340789794922, 0.03576316833496094, 0.03582361602783203, 0.03576422500610352, 0.03580108642578125, 0.035789825439453124, 0.03581235122680664, 0.035784736633300784, 0.03580617523193359, 0.03578675079345703, 0.035789825439453124, 0.03577139282226562, 0.035814399719238284, 0.03578060913085938, 0.035827713012695314, 0.0358021125793457, 0.035814399719238284, 0.03583180618286133, 0.03583692932128906, 0.03582156753540039, 0.03584204864501953, 0.035799072265625, 0.035862495422363284, 0.03582566452026367, 0.03585433578491211, 0.03582668685913086, 0.03588915252685547, 0.0359659538269043, 0.0359024658203125, 0.03585433578491211, 0.03587071990966797, 0.03585433578491211, 0.03586764907836914, 0.03583488082885742, 0.12998042297363283, 0.03574169540405273, 0.03569561767578125, 0.03573555374145508, 0.035746814727783204, 0.03574275207519531, 0.035724254608154295, 0.03573555374145508, 0.035716094970703126, 0.035730430603027344, 0.03573350524902344, 0.0357283821105957, 
0.035740673065185545, 0.0357386245727539, 0.03573657608032227, 0.03573350524902344, 0.03574169540405273, 0.035716094970703126, 0.03574272155761719, 0.035776512145996094, 0.03575193786621094, 0.035760128021240234, 0.03576627349853516, 0.035745792388916016, 0.03574784088134766, 0.03580723190307617, 0.035767295837402346, 0.03578265762329102, 0.03577139282226562, 0.035783679962158206, 0.03577139282226562, 0.03579391860961914, 0.035778560638427735, 0.035806209564208984, 0.035768318176269534, 0.035814399719238284, 0.03594854354858398, 0.03617587280273438, 0.035980289459228515, 0.03594342422485351, 0.035871742248535156, 0.03581542587280274, 0.03582156753540039, 0.035829761505126956, 0.03583078384399414, 0.03585126495361328, 0.035885055541992186, 0.03587788772583008, 0.035855358123779296, 0.03585638427734375, 0.03582668685913086, 0.03582259368896484, 0.035850238800048825, 0.03586150360107422, 0.03583692932128906, 0.03586150360107422, 0.03584105682373047, 0.03586659240722656, 0.035860481262207033, 0.03587686538696289, 0.035844097137451174, 0.03586457443237305, 0.03586150360107422, 0.12993536376953124, 0.03565366363525391, 0.03567919921875, 0.035675136566162106, 0.03566284942626953, 0.035688449859619144, 0.0356864013671875, 0.035722240447998044, 0.035681278228759765, 0.03570687866210937, 0.035716094970703126, 0.03574784088134766, 0.035699710845947266, 0.0357283821105957, 0.035694591522216795, 0.03570687866210937, 0.0357130241394043, 0.03575603103637695, 0.035862529754638675, 0.03582566452026367, 0.035724288940429685, 0.0357386245727539, 0.035727359771728515, 0.03575296020507813, 0.03573555374145508, 0.03575398254394531, 0.03575398254394531, 0.03576319885253906, 0.03576627349853516, 0.03578265762329102, 0.03574476623535156, 0.03577139282226562, 0.035762176513671876, 0.03577139282226562, 0.035783679962158206, 0.035783679962158206, 0.03577241516113281, 0.035794944763183595, 0.035773441314697264, 0.03580108642578125, 0.03575500869750976, 0.03580825424194336, 0.03579904174804688, 0.03579391860961914, 0.03582463836669922, 0.03581849670410156, 0.03581542587280274, 0.03586969757080078, 0.03581542587280274, 0.0358287353515625, 0.03581542587280274, 0.03582463836669922, 0.03580416107177734, 0.03581951904296875, 0.03583488082885742, 0.03585945510864258, 0.03581951904296875, 0.035833854675292966, 0.03583692932128906, 0.03585228729248047, 0.03582566452026367, 0.03585843276977539, 0.03584921646118164, 0.1298851776123047, 0.03567103958129883, 0.035676158905029294, 0.03569868850708008, 0.03569664001464844, 0.035683326721191407, 0.03572019195556641, 0.035697662353515625, 0.03572019195556641, 0.035729408264160156, 0.035705856323242184, 0.03573350524902344, 0.035732479095458985, 0.035722240447998044, 0.0357130241394043, 0.03573350524902344, 0.035714046478271484, 0.03578879928588867, 0.03571916961669922, 0.03574272155761719, 0.03572019195556641, 0.035757057189941405, 0.035730430603027344, 0.035768318176269534, 0.035757057189941405, 0.035783679962158206, 0.03578777694702148, 0.035811328887939455, 0.03618406295776367, 0.03581951904296875, 0.035794944763183595, 0.03578060913085938, 0.03578265762329102, 0.0357498893737793, 0.035773441314697264, 0.03589427185058594, 0.036021247863769534, 0.035860481262207033, 0.035806209564208984, 0.03577446365356445, 0.03579289627075195, 0.035794944763183595, 0.035784702301025394, 0.03579596710205078, 0.03580313491821289, 0.035840030670166015, 0.035810272216796876, 0.03583590316772461, 0.03583180618286133, 0.035846145629882815, 0.035844097137451174, 0.03584102249145508, 0.03583795166015625, 
0.03587071990966797, 0.03587891387939453, 0.035866622924804685, 0.03585433578491211, 0.035860481262207033, 0.03584921646118164, 0.03583078384399414, 0.03582566452026367, 0.03586969757080078, 0.035871742248535156, 0.13025587463378907, 0.035714046478271484, 0.0359741439819336, 0.03581849670410156, 0.035783679962158206, 0.035729408264160156, 0.03572019195556641, 0.0357130241394043, 0.035768318176269534, 0.03570380783081055, 0.035697662353515625, 0.0357130241394043, 0.0356864013671875, 0.035759105682373046, 0.03570073699951172, 0.035759105682373046, 0.035737598419189456, 0.03579084777832031, 0.0357498893737793, 0.03582156753540039, 0.03585331344604492, 0.035748863220214845, 0.035691520690917966, 0.03575398254394531, 0.03582668685913086, 0.03580928039550781, 0.035722240447998044, 0.035743743896484374, 0.03575603103637695, 0.035796993255615236, 0.03580313491821289, 0.0357918701171875, 0.03574272155761719, 0.03579084777832031, 0.03575193786621094, 0.035796993255615236, 0.035778560638427735, 0.03578265762329102, 0.03576115036010742, 0.03579084777832031, 0.035768318176269534, 0.03578060913085938, 0.035765247344970705, 0.03580416107177734, 0.03589017486572266, 0.035922943115234376, 0.035860481262207033, 0.03584000015258789, 0.03579289627075195, 0.03583488082885742, 0.03584000015258789, 0.03584511947631836, 0.03580825424194336, 0.035844097137451174, 0.03590348815917969, 0.035850238800048825, 0.03581542587280274, 0.03587481689453125, 0.035833854675292966, 0.035871742248535156, 0.03582463836669922, 0.03586150360107422, 0.03585638427734375, 0.12979814147949217, 0.03571916961669922, 0.035672065734863284, 0.03577040100097656, 0.0358614730834961, 0.03574784088134766, 0.03577139282226562, 0.035745792388916016, 0.035765247344970705, 0.03584000015258789, 0.035757057189941405, 0.03570483016967774, 0.03572127914428711, 0.03572934341430664, 0.03574169540405273, 0.03570175933837891, 0.03571814346313477, 0.03575603103637695, 0.03571507263183594, 0.03575807952880859, 0.035746814727783204, 0.035776512145996094, 0.035776512145996094, 0.035813377380371096, 0.035813377380371096, 0.03579391860961914, 0.035776512145996094, 0.035811328887939455, 0.035999744415283204, 0.03596492767333984, 0.03579391860961914, 0.03579084777832031, 0.035767295837402346, 0.03579391860961914, 0.03580928039550781, 0.03579904174804688, 0.035781631469726564, 0.03584716796875, 0.03585638427734375, 0.03586457443237305, 0.035879936218261715, 0.03583795166015625, 0.03589324951171875, 0.03589427185058594, 0.035885055541992186, 0.03584102249145508, 0.03584102249145508, 0.03587481689453125, 0.03589734268188476, 0.03596799850463867, 0.03591167831420899, 0.03584307098388672, 0.03590348815917969, 0.035862529754638675, 0.035945472717285154, 0.03609600067138672, 0.035896320343017575, 0.03596905517578125, 0.0358614730834961, 0.03597721481323242, 0.035844097137451174, 0.035931137084960936, 0.03596083068847656, 0.13005824279785155, 0.03567923355102539, 0.035683326721191407, 0.035675136566162106, 0.03569868850708008, 0.03569356918334961, 0.03568537521362305, 0.03570892715454101, 0.035705856323242184, 0.03575500869750976, 0.035814399719238284, 0.03571814346313477, 0.03571200180053711, 0.03573561477661133, 0.035684288024902346, 0.03572531127929687, 0.035737598419189456, 0.03572531127929687, 0.035714046478271484, 0.0357386245727539, 0.035705856323242184, 0.0357283821105957, 0.035729408264160156, 0.035768318176269534, 0.03574272155761719, 0.035765247344970705, 0.03575091171264649, 0.0357386245727539, 0.03576934432983398, 0.035746814727783204, 0.035746814727783204, 
0.03577241516113281, 0.03575193786621094, 0.035776512145996094, 0.03587686538696289, 0.036395008087158204, 0.03588608169555664, 0.03579391860961914, 0.03578777694702148, 0.03580928039550781, 0.03578060913085938, 0.035794944763183595, 0.035783679962158206, 0.0357918701171875, 0.035796993255615236, 0.03579084777832031, 0.035810302734375, 0.03580313491821289, 0.03582259368896484, 0.0358021125793457, 0.03581542587280274, 0.035813377380371096, 0.03595775985717774, 0.035855358123779296, 0.03582672119140625, 0.03584201431274414, 0.035846145629882815, 0.03587891387939453, 0.03589734268188476, 0.03599359893798828, 0.03586457443237305, 0.03587071990966797, 0.035883007049560545, 0.12990669250488282, 0.03572326278686523, 0.03573555374145508, 0.035676158905029294, 0.03573350524902344, 0.03585433578491211, 0.03578675079345703, 0.03577446365356445, 0.03573964691162109, 0.0357498893737793, 0.03579289627075195, 0.03585945510864258, 0.03571916961669922, 0.035745792388916016, 0.03578675079345703, 0.03581849670410156, 0.03585331344604492, 0.03575193786621094, 0.035743743896484374, 0.0357498893737793, 0.03575398254394531, 0.035746814727783204, 0.035748863220214845, 0.035765247344970705, 0.03576934432983398, 0.03580928039550781, 0.03577753448486328, 0.03578265762329102, 0.035778560638427735, 0.035759105682373046, 0.035776512145996094, 0.03576422500610352, 0.03578777694702148, 0.035796993255615236, 0.0357918701171875, 0.03580108642578125, 0.035781631469726564, 0.03581951904296875, 0.03582668685913086, 0.03579596710205078, 0.03580518341064453, 0.03583180618286133, 0.03582156753540039, 0.035866622924804685, 0.03598441696166992, 0.03613180923461914, 0.035862529754638675, 0.035866622924804685, 0.03585433578491211, 0.03588608169555664, 0.035888126373291016, 0.03588915252685547, 0.03599155044555664, 0.036087806701660154, 0.0358809585571289, 0.03584511947631836, 0.03583283233642578, 0.0358737907409668, 0.03584511947631836, 0.03584819030761719, 0.03585126495361328, 0.03585331344604492, 0.03586150360107422, 0.1299251251220703, 0.03570483016967774, 0.03566592025756836, 0.03569359970092773, 0.035685344696044924, 0.03573452758789063, 0.03574169540405273, 0.035722240447998044, 0.03569356918334961, 0.03570073699951172, 0.03572019195556641, 0.03575603103637695, 0.03566592025756836, 0.035773441314697264, 0.035888126373291016, 0.03590758514404297, 0.035783679962158206, 0.035972095489501955, 0.03571916961669922, 0.03571814346313477, 0.03571200180053711, 0.0357314567565918, 0.03570892715454101, 0.035732479095458985, 0.03571814346313477, 0.035748863220214845, 0.035732479095458985, 0.03573657608032227, 0.03577446365356445, 0.035746814727783204, 0.035781631469726564, 0.03580825424194336, 0.0358287353515625, 0.03582361602783203, 0.035776512145996094, 0.0358021125793457, 0.03577241516113281, 0.035783679962158206, 0.035757057189941405, 0.035762176513671876, 0.035781631469726564, 0.03579596710205078, 0.03576422500610352, 0.035775489807128906, 0.03583283233642578, 0.03581951904296875, 0.03577958297729492, 0.03583488082885742, 0.03582262420654297, 0.03582870483398438, 0.0358205451965332, 0.035813377380371096, 0.03585948944091797, 0.03584815979003906, 0.035866622924804685, 0.035862529754638675, 0.035833854675292966, 0.03584511947631836, 0.035833854675292966, 0.0358389778137207, 0.03585433578491211, 0.035945472717285154, 0.035932159423828124, 0.13005619812011718, 0.03570175933837891, 0.03573555374145508, 0.03573350524902344, 0.03570380783081055, 0.03572326278686523, 0.0357283821105957, 0.035702816009521486, 0.03569865417480469, 
0.03573555374145508, 0.03570278549194336, 0.03571200180053711, 0.035714046478271484, 0.035710975646972655, 0.03572531127929687, 0.03572531127929687, 0.03571200180053711, 0.035757057189941405, 0.0357212142944336, 0.03575500869750976, 0.035730464935302735, 0.03573859024047851, 0.03571507263183594, 0.03587891387939453, 0.035768318176269534, 0.035810302734375, 0.03584102249145508, 0.03576319885253906, 0.03575296020507813, 0.03577958297729492, 0.035760128021240234, 0.03578265762329102, 0.03575296020507813, 0.035789825439453124, 0.03578265762329102, 0.03581235122680664, 0.03580825424194336, 0.03582668685913086, 0.035794944763183595, 0.03581951904296875, 0.03577753448486328, 0.03578476715087891, 0.03584915161132812, 0.035955711364746096, 0.03580928039550781, 0.03581951904296875, 0.03580723190307617, 0.03586969757080078, 0.035811328887939455, 0.03584819030761719, 0.035922943115234376, 0.03597619247436523, 0.035846145629882815, 0.0358389778137207, 0.03583180618286133, 0.03587071990966797, 0.03580416107177734, 0.035846145629882815, 0.03584921646118164, 0.03587071990966797, 0.0358809585571289, 0.03589120101928711, 0.035934207916259765]",tokens/s,26.922005643894472,,,,, -float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -8246,7 +8246,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e9301-0788885a0b26406b46435668;07b33049-f7c1-4222-a984-069553d3f05a) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe4b6-0e851a3702a1272314e27836;6b243ea4-fa6e-49e2-886e-d20ee1911ee1) Repository Not Found for url: https://huggingface.co/m/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -8350,7 +8350,7 @@ Cannot access gated repo for url https://huggingface.co/mistralai/Mixtral-8x22B- Access to model mistralai/Mixtral-8x22B-v0.1 is restricted and you are not in the authorized list. Visit https://huggingface.co/mistralai/Mixtral-8x22B-v0.1 to ask for access. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,1,1,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,1,1,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -8389,7 +8389,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d471f-55d8d1ec6af35632720c6477;78e7dd53-64fa-4a29-8b76-2ffea8bb4833) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fea81-22b83ab72b7b52284958c34c;0048d253-ce9c-4fdf-8daa-9f66ed0d3a3c) Repository Not Found for url: https://huggingface.co/1/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -8420,7 +8420,7 @@ OSError: 1 is not a local folder and is not a valid model identifier listed on ' If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,0,0,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,0,0,cuda,0,42,,,True,True,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -8459,7 +8459,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d46b6-63e861ed786a20e60e2ff1a1;6ae809e1-1dff-4c9f-9a72-aab951d4b5ce) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fea10-0ecc85ed422cdd886cdcbf29;1cab21c1-2997-4fba-8a2f-6df1095e385a) Repository Not Found for url: https://huggingface.co/0/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -8517,7 +8517,7 @@ ChildProcessError: Traceback (most recent call last): ValueError: GPTNeoXForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,.,.,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,.,.,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -8585,7 +8585,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e96db-413749777a72d1f43c0a686e;d116c00a-0539-4b7f-a4b0-98488c2fb854) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe89d-13f7c0ac7eb15b2e45094543;43073341-9347-400d-9fed-ec8b238c1ec3) Repository Not Found for url: https://huggingface.co/8/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -8617,7 +8617,7 @@ If this is a private repository, make sure to pass a token having permission to ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.0,,,MB,1301.925888,3852.992512,0.0,3206.545408,3105.82784,s,10,0.22998556900024414,0.022998556900024412,0.0013529496380716877,0.022550992012023926,0.023032716751098633,0.02504474258422851,0.026654363250732425,"[0.0270567684173584, 0.02258086395263672, 0.022585599899291993, 0.022520320892333984, 0.02251375961303711, 0.022565088272094726, 0.022544511795043944, 0.022516672134399413, 0.022553632736206055, 0.022548351287841797]",tokens/s,11131.13318861012,kWh,2.679527560509432e-07,1.4682554384078958e-07,1.2156201460907105e-06,1.6303984459824433e-06,tokens/kWh,157016832.68333825,MB,1301.925888,3852.992512,0.0,3206.545408,3189.19168,s,10,10.880635986328127,1.0880635986328124,0.011995065648385062,1.0871802978515626,1.10194228515625,1.102078076171875,1.1021867089843749,"[1.101912109375, 1.097273681640625, 1.0840592041015624, 1.0657208251953125, 1.077572998046875, 1.0848209228515624, 1.0760526123046874, 1.0895396728515625, 1.1014700927734375, 1.1022138671875]",tokens/s,57.901027181831616,kWh,1.2661301302283622e-05,6.9378927495859155e-06,3.6167298649108625e-05,5.5766492700978175e-05,tokens/kWh,1129710.6371348856,,s,629,11.064073196411131,0.01758994148872994,0.0027467619663067533,0.017339391708374022,0.017653350830078126,0.01776842918395996,0.04001446121215821,"[0.01734758377075195, 0.016983039855957033, 0.017040384292602538, 0.016946176528930663, 0.01700864028930664, 0.01697689628601074, 0.017137664794921875, 0.017107967376708985, 0.0170199031829834, 0.016940031051635742, 0.01703321647644043, 0.017565696716308594, 0.01767731285095215, 0.017745920181274414, 0.017785856246948242, 0.017787904739379884, 0.01775923156738281, 0.01780633544921875, 0.017753087997436523, 0.017839103698730468, 0.017753087997436523, 0.01781862449645996, 0.017719295501708983, 0.017802240371704102, 0.01779814338684082, 0.01770086479187012, 0.01779199981689453, 0.017804288864135744, 0.01740083122253418, 0.01726464080810547, 0.017140735626220704, 0.01722368049621582, 0.017089536666870117, 0.01722777557373047, 0.017722368240356445, 0.01764659118652344, 0.017292287826538084, 0.017296384811401368, 0.017542144775390626, 0.01742131233215332, 0.01765888023376465, 0.017657855987548828, 0.017573888778686524, 0.01759231948852539, 0.017556480407714844, 0.017829887390136717, 0.01756876754760742, 0.01764556884765625, 0.017567743301391603, 0.017617919921875, 0.017584127426147463, 0.017305599212646485, 0.017211423873901368, 0.017415136337280274, 0.017277984619140624, 0.017285087585449218, 0.01738956832885742, 0.01721343994140625, 0.017141759872436522, 0.017300479888916014, 0.01741312026977539, 0.017560575485229494, 0.040629249572753906, 0.016951295852661134, 0.016931840896606445, 0.0169564151763916, 0.01700966453552246, 
0.016904191970825197, 0.016966655731201173, 0.01719808006286621, 0.017755136489868165, 0.017351743698120117, 0.017246143341064453, 0.017370111465454103, 0.017494016647338868, 0.017551359176635743, 0.017479679107666016, 0.01742540740966797, 0.017736703872680663, 0.017497087478637697, 0.017476608276367187, 0.017526784896850587, 0.017527807235717775, 0.017348608016967772, 0.017228799819946287, 0.017177631378173828, 0.017170400619506837, 0.017712127685546874, 0.0175861759185791, 0.017527807235717775, 0.01759129524230957, 0.01745510482788086, 0.017510400772094727, 0.01760358428955078, 0.017511423110961915, 0.01723391914367676, 0.017693695068359376, 0.0176312313079834, 0.017687551498413084, 0.017481727600097655, 0.01754729652404785, 0.016971744537353516, 0.016990207672119142, 0.016898048400878905, 0.01716531181335449, 0.01754521560668945, 0.017575935363769533, 0.017108991622924806, 0.016907264709472656, 0.017003551483154297, 0.017286144256591796, 0.01768342399597168, 0.017510400772094727, 0.017702911376953127, 0.017503231048583985, 0.017596416473388672, 0.017704959869384765, 0.0176312313079834, 0.017501184463500977, 0.017496063232421876, 0.01757798385620117, 0.017552383422851564, 0.017844224929809572, 0.017604608535766602, 0.01782476806640625, 0.04033331298828125, 0.016849920272827147, 0.016887807846069337, 0.016888832092285155, 0.01756159973144531, 0.017501184463500977, 0.017509376525878906, 0.017475584030151366, 0.017505279541015627, 0.017500160217285156, 0.017597440719604493, 0.017531936645507812, 0.017553375244140624, 0.017735679626464843, 0.017588224411010742, 0.017558528900146485, 0.018251775741577148, 0.018109439849853515, 0.0174653434753418, 0.017566719055175782, 0.017534975051879884, 0.017497087478637697, 0.017559551239013673, 0.017546239852905272, 0.017534975051879884, 0.017485824584960938, 0.017508352279663086, 0.017580032348632812, 0.01763737678527832, 0.017507328033447265, 0.017245183944702147, 0.017282047271728516, 0.017468416213989257, 0.017512447357177736, 0.017318912506103516, 0.01722777557373047, 0.01724928092956543, 0.017262592315673828, 0.01721036720275879, 0.016780288696289062, 0.016821247100830078, 0.016726015090942382, 0.016722944259643553, 0.016733184814453125, 0.016720895767211915, 0.016743423461914063, 0.01679462432861328, 0.016703487396240235, 0.016779296875, 0.016794591903686523, 0.016756736755371093, 0.016735231399536133, 0.016862207412719727, 0.01678643226623535, 0.01677414321899414, 0.016854015350341797, 0.016749568939208984, 0.0168222713470459, 0.016781312942504883, 0.01686425590515137, 0.016748544692993163, 0.016825344085693358, 0.016719871520996094, 0.0400230712890625, 0.016929759979248046, 0.016959487915039064, 0.016919551849365236, 0.016895999908447267, 0.016882688522338866, 0.016937984466552734, 0.01695337677001953, 0.01689187240600586, 0.016870399475097657, 0.016906240463256835, 0.016910335540771485, 0.01697177505493164, 0.016897024154663084, 0.017565696716308594, 0.017464319229125978, 0.016917503356933594, 0.016893951416015626, 0.016949247360229493, 0.016942079544067384, 0.01696051216125488, 0.01685196876525879, 0.017519615173339845, 0.017084415435791016, 0.016949247360229493, 0.01702604866027832, 0.01701375961303711, 0.016910335540771485, 0.0169902400970459, 0.016977888107299804, 0.016949247360229493, 0.016959487915039064, 0.016738304138183592, 0.016716800689697265, 0.01683456039428711, 0.01674959945678711, 0.016807903289794923, 0.016686080932617187, 0.01680998420715332, 0.016776191711425782, 0.01679974365234375, 0.016736255645751954, 0.016743423461914063, 
0.01677824020385742, 0.01681407928466797, 0.01680486488342285, 0.016846847534179688, 0.016852991104125976, 0.016776191711425782, 0.016785408020019533, 0.01678950309753418, 0.016743423461914063, 0.016825344085693358, 0.016755712509155272, 0.01677107238769531, 0.016865280151367186, 0.016942079544067384, 0.016921600341796874, 0.016932863235473633, 0.016906272888183593, 0.01694102478027344, 0.016930816650390625, 0.01701375961303711, 0.039992321014404295, 0.01698406410217285, 0.016941055297851563, 0.016906240463256835, 0.016713727951049806, 0.01679769515991211, 0.016895999908447267, 0.01677107238769531, 0.016721920013427736, 0.016963647842407226, 0.01746732711791992, 0.017526784896850587, 0.01757798385620117, 0.017496063232421876, 0.017506303787231444, 0.017488895416259767, 0.017339391708374022, 0.017117183685302736, 0.016744447708129884, 0.016772096633911132, 0.016942079544067384, 0.0169564151763916, 0.01696767997741699, 0.016910367965698243, 0.016962528228759766, 0.016986112594604492, 0.016977920532226562, 0.016973823547363282, 0.017067007064819336, 0.016944128036499022, 0.01698099136352539, 0.016995328903198242, 0.016982015609741212, 0.016987136840820313, 0.016948223114013672, 0.016932863235473633, 0.016923648834228516, 0.016953344345092772, 0.016882688522338866, 0.016934911727905275, 0.016950271606445313, 0.016946176528930663, 0.01696051216125488, 0.016977920532226562, 0.016910335540771485, 0.01696767997741699, 0.016970752716064453, 0.016926719665527345, 0.016940031051635742, 0.01698099136352539, 0.016943103790283204, 0.01698307228088379, 0.017061855316162108, 0.017507328033447265, 0.017943552017211914, 0.017699840545654297, 0.017369087219238282, 0.017625087738037108, 0.01764454460144043, 0.01743974494934082, 0.01745305633544922, 0.017491968154907226, 0.017476608276367187, 0.04009574508666992, 0.016906240463256835, 0.016885759353637696, 0.016867328643798828, 0.016936960220336913, 0.016877567291259766, 0.016959487915039064, 0.016878591537475587, 0.016737279891967775, 0.016671743392944336, 0.016769023895263673, 0.01681203269958496, 0.016894975662231446, 0.016876544952392578, 0.016898048400878905, 0.016854015350341797, 0.016932863235473633, 0.016877567291259766, 0.016897024154663084, 0.016871423721313478, 0.016899072647094726, 0.016874528884887694, 0.016895967483520506, 0.016890880584716796, 0.016871423721313478, 0.016939008712768554, 0.0169932804107666, 0.016903167724609376, 0.016907264709472656, 0.016899072647094726, 0.016941055297851563, 0.016886783599853517, 0.016924671173095703, 0.016920576095581053, 0.018545663833618165, 0.017936384201049805, 0.01773980712890625, 0.017669088363647462, 0.017748992919921876, 0.0175994873046875, 0.017624063491821287, 0.017459199905395507, 0.017542144775390626, 0.01759846305847168, 0.017498111724853514, 0.017541120529174805, 0.017535999298095704, 0.017558528900146485, 0.017549312591552735, 0.017503231048583985, 0.017539072036743163, 0.01754521560668945, 0.01759231948852539, 0.0175861759185791, 0.01753091239929199, 0.017523679733276367, 0.017309696197509765, 0.01727801513671875, 0.017613759994506838, 0.01759334373474121, 0.017460224151611328, 0.017449983596801756, 0.017510400772094727, 0.039964672088623046, 0.016955392837524414, 0.016893951416015626, 0.016919551849365236, 0.016905248641967772, 0.01705571174621582, 0.016939008712768554, 0.016942079544067384, 0.016917503356933594, 0.016932863235473633, 0.016925695419311524, 0.01694419288635254, 0.01691334342956543, 0.016987136840820313, 0.016929792404174804, 0.01695232009887695, 0.016982015609741212, 
0.016966655731201173, 0.016915456771850586, 0.01679871940612793, 0.016709632873535156, 0.01678233528137207, 0.016882688522338866, 0.016756736755371093, 0.016726015090942382, 0.016914432525634765, 0.016891904830932617, 0.016902143478393555, 0.016953344345092772, 0.016924671173095703, 0.01697279930114746, 0.016963584899902344, 0.016948223114013672, 0.01700454330444336, 0.01702092742919922, 0.01724415969848633, 0.017531904220581054, 0.01749504089355469, 0.017443840026855468, 0.017456127166748048, 0.017590272903442384, 0.017494016647338868, 0.017531936645507812, 0.017558496475219728, 0.017511423110961915, 0.01757695960998535, 0.017551359176635743, 0.01746329689025879, 0.017498111724853514, 0.017526784896850587, 0.017526784896850587, 0.01748684883117676, 0.016913408279418944, 0.016886783599853517, 0.016893983840942383, 0.017012704849243165, 0.016934911727905275, 0.016918527603149415, 0.016966655731201173, 0.016871423721313478, 0.016865280151367186, 0.016917503356933594, 0.016866304397583007, 0.04018380737304687, 0.016926719665527345, 0.016930816650390625, 0.016957439422607423, 0.01702809524536133, 0.016906240463256835, 0.016919551849365236, 0.016888832092285155, 0.016951295852661134, 0.016922624588012695, 0.016943103790283204, 0.016910335540771485, 0.016917503356933594, 0.016970752716064453, 0.016930816650390625, 0.016866304397583007, 0.01701478385925293, 0.016944128036499022, 0.016970752716064453, 0.016935935974121095, 0.016919551849365236, 0.016923648834228516, 0.01764352035522461, 0.017551359176635743, 0.017484800338745117, 0.017563648223876953, 0.017559551239013673, 0.017624063491821287, 0.017508352279663086, 0.017567743301391603, 0.017460224151611328, 0.017490943908691405, 0.017492992401123047, 0.017529855728149413, 0.01760665512084961, 0.017583103179931642, 0.017500160217285156, 0.017497087478637697, 0.017550336837768556, 0.017558528900146485, 0.017504255294799806, 0.017507360458374022, 0.017541088104248048, 0.01756979179382324, 0.017523712158203125, 0.017509376525878906, 0.01758515167236328, 0.017555456161499023, 0.017552383422851564, 0.017510400772094727, 0.01743052864074707, 0.016948223114013672, 0.01683967971801758, 0.016759807586669923, 0.0167587833404541, 0.016793600082397463, 0.017063936233520507, 0.018144287109375, 0.017774560928344726, 0.017711103439331053, 0.0176363525390625, 0.01759129524230957, 0.01762816047668457, 0.04059033584594727, 0.017511423110961915, 0.017474559783935546, 0.017522687911987304, 0.017473567962646486, 0.017499103546142578, 0.017508352279663086, 0.017514495849609374, 0.017525760650634766, 0.017285120010375975, 0.01720729637145996, 0.017238016128540038, 0.01719193649291992, 0.017524736404418945, 0.017475584030151366, 0.017532928466796875, 0.017543167114257813, 0.017520639419555666, 0.017537023544311522, 0.017254432678222655, 0.017564640045166016, 0.01759846305847168, 0.017549312591552735, 0.017510400772094727, 0.017506368637084962, 0.01755743980407715, 0.017467391967773437, 0.0175861759185791, 0.017492992401123047, 0.01760051155090332, 0.017563648223876953, 0.017550336837768556, 0.017550336837768556, 0.017480703353881837, 0.017572864532470703, 0.017540096282958984, 0.017520639419555666, 0.017512447357177736, 0.017473535537719728, 0.017535999298095704, 0.017571840286254883, 0.01718681526184082, 0.016926719665527345, 0.016931840896606445, 0.016977920532226562, 0.016929792404174804, 0.01696460723876953, 0.016917503356933594, 0.016926719665527345, 0.01696870422363281, 0.017667072296142578, 0.01760051155090332, 0.018037792205810546, 0.018824159622192384, 
0.017888256072998047, 0.017950719833374023, 0.017572864532470703, 0.01759334373474121, 0.017588224411010742, 0.01760358428955078, 0.017571840286254883, 0.0176312313079834, 0.017572864532470703, 0.04003123092651367, 0.01747865676879883, 0.01759334373474121, 0.017540096282958984, 0.017482751846313475, 0.01751865577697754, 0.01752876853942871, 0.017649663925170898, 0.01760358428955078, 0.017509376525878906, 0.01756159973144531, 0.017655807495117186, 0.01759846305847168, 0.01762303924560547, 0.017612800598144532, 0.017452032089233398, 0.01759334373474121, 0.017528831481933595, 0.017663999557495116, 0.017508352279663086, 0.017589248657226563, 0.017479679107666016, 0.017467391967773437, 0.017511423110961915, 0.01755340766906738, 0.017554431915283202, 0.017563648223876953, 0.017464319229125978, 0.017468416213989257, 0.017488895416259767, 0.017529855728149413, 0.01759129524230957, 0.017584127426147463, 0.017515520095825195, 0.017525760650634766, 0.01754422378540039, 0.017546207427978515, 0.017548288345336914, 0.01726361656188965, 0.017287168502807617, 0.017236991882324217, 0.017304576873779298, 0.017570816040039062, 0.017665023803710937, 0.017581056594848633, 0.017475584030151366, 0.01759129524230957, 0.017665023803710937, 0.017689599990844726, 0.01742540740966797, 0.01706598472595215, 0.016927743911743166, 0.017002527236938476, 0.017564640045166016, 0.01765273666381836, 0.017512447357177736, 0.01722777557373047, 0.017208351135253906, 0.017300447463989257, 0.017699840545654297, 0.01756159973144531, 0.017531904220581054, 0.017522687911987304]",tokens/s,56.85067233684151,,,,, -float16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,2,2,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,2,2,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -8656,7 +8656,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-664d45ee-2d26164047f9690a7c3cd546;bd3d911e-6e49-4c54-aa11-9bb81ac3bf4c) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe940-076925f968f7aeb10515c4ea;4aae3ac7-968a-45c3-a30b-35c192d4e1a3) Repository Not Found for url: https://huggingface.co/2/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -8711,7 +8711,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1560, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp2jdxr_2i/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpx6ccteao/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-70m,,cuda,0,42,,,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,217063f5c507ed7cc255df7e1f64c4333a0b4dfe,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,MB,1276.506112,885.522432,0.0,239.075328,215.486464,s,33,0.17781625556945801,0.005388371380892667,0.0002118844441012189,0.005337183952331543,0.005426726531982422,0.005557625675201416,0.006254851970672607,"[0.0064943041801452635, 0.005746016025543213, 0.0053919677734375, 0.0052193918228149415, 0.005302624225616455, 0.005311456203460694, 0.005330815792083741, 0.005290080070495606, 0.005318880081176758, 0.005315072059631347, 0.0053012800216674805, 0.005337183952331543, 0.005354879856109619, 0.0053144960403442385, 0.005359424114227295, 0.005425600051879883, 0.005303552150726318, 0.005389023780822754, 0.005432032108306885, 0.005311071872711182, 0.005326687812805176, 0.005427008152008057, 0.005338592052459717, 0.00535814380645752, 0.00538972806930542, 0.005336095809936523, 0.005366975784301758, 0.005368607997894287, 0.005338304042816162, 0.005303135871887207, 0.005324960231781006, 0.005371967792510987, 0.005316895961761475]",tokens/s,47509.71711188728,kWh,6.558223184455779e-08,3.593580860049073e-08,1.197129096390998e-07,2.212309500841483e-07,tokens/kWh,1157161780.0431032,MB,1276.506112,885.522432,0.0,239.075328,220.896256,s,33,9.83883044433594,0.29814637710108904,0.006565352680415171,0.29647915649414064,0.29813798828125,0.30521984252929685,0.3255126794433594,"[0.3313179626464844, 0.3131764526367187, 0.29581048583984376, 0.29629986572265626, 0.29654156494140627, 0.29702188110351563, 0.2962219848632813, 0.2960429382324219, 0.29619210815429686, 0.29583963012695313, 0.29660260009765627, 
0.29660693359375, 0.2959345397949219, 0.2961164245605469, 0.2976431884765625, 0.2959195251464844, 0.2982616882324219, 0.29685659790039065, 0.2957413024902344, 0.2961203918457031, 0.29991543579101565, 0.2964104309082031, 0.2968109436035156, 0.29647915649414064, 0.296353759765625, 0.29699728393554686, 0.29651690673828124, 0.29602044677734374, 0.29627545166015623, 0.29661376953125, 0.29703427124023435, 0.2958043212890625, 0.2973302001953125]",tokens/s,211.3056030147209,kWh,3.457817449008332e-06,1.8947154396265524e-06,5.381778419654403e-06,1.0734311308289286e-05,tokens/kWh,5869030.456695431,,s,2078,10.005302271842952,0.004814871160655898,0.0007238291203950634,0.0046929922103881834,0.004805609750747681,0.005073356771469115,0.010431877031326293,"[0.005583871841430664, 0.005627967834472656, 0.005525440216064453, 0.0054271998405456545, 0.005462016105651855, 0.005389311790466309, 0.005337088108062744, 0.005442560195922852, 0.005994495868682862, 0.006638591766357422, 0.005647359848022461, 0.005537792205810547, 0.005467135906219483, 0.005280767917633057, 0.005312511920928955, 0.005256192207336426, 0.005272575855255127, 0.005250048160552978, 0.0053821439743042, 0.005497856140136719, 0.0054241280555725096, 0.0052336640357971195, 0.005186560153961181, 0.005108736038208008, 0.0050063362121582035, 0.005053440093994141, 0.005228544235229492, 0.005208064079284668, 0.005173247814178467, 0.0051435518264770505, 0.005239808082580567, 0.0051333122253417966, 0.005075967788696289, 0.005111807823181152, 0.005185535907745361, 0.005169151782989502, 0.005142528057098389, 0.004968448162078858, 0.0049192957878112795, 0.004902912139892578, 0.004957183837890625, 0.005086207866668701, 0.004986879825592041, 0.0049725441932678225, 0.00506879997253418, 0.005091328144073487, 0.00506982421875, 0.005254144191741943, 0.005204991817474365, 0.005285888195037842, 0.0052715520858764645, 0.005254144191741943, 0.005164031982421875, 0.005105663776397705, 0.005022719860076904, 0.00520908784866333, 0.005216256141662597, 0.005078015804290771, 0.005079040050506592, 0.0051773438453674315, 0.00501043176651001, 0.004929535865783692, 0.01134499168395996, 0.005028768062591553, 0.005169151782989502, 0.005304384231567383, 0.004997056007385254, 0.004992000102996826, 0.0050462718009948735, 0.00505244779586792, 0.005082079887390137, 0.005191679954528809, 0.005058559894561767, 0.00502784013748169, 0.0050206718444824215, 0.005041152000427246, 0.0052111358642578124, 0.005254144191741943, 0.005246975898742676, 0.005255167961120606, 0.005253119945526123, 0.005144576072692871, 0.005121024131774903, 0.005161983966827393, 0.00502784013748169, 0.0049203200340270994, 0.005280767917633057, 0.004980735778808594, 0.004931583881378174, 0.005055488109588623, 0.004794367790222168, 0.004795392036437988, 0.005058559894561767, 0.004980735778808594, 0.00483843183517456, 0.004698080062866211, 0.0047216639518737795, 0.004690944194793701, 0.00470630407333374, 0.004716544151306152, 0.004958208084106445, 0.0048855037689208985, 0.0049500160217285155, 0.004902944087982177, 0.0050011839866638185, 0.005017600059509277, 0.005132287979125977, 0.005185535907745361, 0.004999167919158935, 0.004848639965057373, 0.004810751914978028, 0.004836351871490479, 0.004877312183380127, 0.004874239921569825, 0.004886623859405518, 0.004864927768707276, 0.0048568320274353025, 0.004850687980651855, 0.0048220157623291016, 0.0047626237869262695, 0.004685823917388916, 0.004707327842712402, 0.00470739221572876, 0.004694975852966308, 0.00469708776473999, 0.01054412841796875, 0.004687871932983399, 
0.004682752132415771, 0.004678656101226806, 0.004679679870605469, 0.004678656101226806, 0.004687871932983399, 0.004698112010955811, 0.0047216639518737795, 0.004738048076629638, 0.004694015979766846, 0.004704256057739258, 0.004739071846008301, 0.004715519905090332, 0.004700160026550293, 0.004712448120117187, 0.0046929922103881834, 0.004712448120117187, 0.004689919948577881, 0.004700160026550293, 0.0046991357803344725, 0.004704288005828857, 0.004693984031677246, 0.004701183795928955, 0.004689919948577881, 0.004686848163604736, 0.0046919679641723635, 0.004695040225982666, 0.004676608085632325, 0.00471449613571167, 0.004695040225982666, 0.004685823917388916, 0.004683775901794434, 0.004681727886199951, 0.004686848163604736, 0.004720640182495117, 0.004687871932983399, 0.004682752132415771, 0.004690944194793701, 0.0046888961791992185, 0.004707327842712402, 0.004684800148010254, 0.004690976142883301, 0.004688864231109619, 0.004694015979766846, 0.00469708776473999, 0.004701183795928955, 0.0046888961791992185, 0.0046888961791992185, 0.004685823917388916, 0.00469708776473999, 0.004680704116821289, 0.0046919679641723635, 0.004695040225982666, 0.004677631855010986, 0.00464793586730957, 0.004636672019958496, 0.004655136108398438, 0.004660192012786865, 0.004644864082336426, 0.004642816066741944, 0.004631552219390869, 0.004644864082336426, 0.010299391746520996, 0.004680704116821289, 0.004685823917388916, 0.004695040225982666, 0.004709375858306885, 0.004702208042144776, 0.00470527982711792, 0.004707327842712402, 0.004690944194793701, 0.004695040225982666, 0.0047226881980895995, 0.004694015979766846, 0.004707327842712402, 0.004683775901794434, 0.0046919679641723635, 0.004682752132415771, 0.004681727886199951, 0.004709375858306885, 0.00470531177520752, 0.004685791969299316, 0.00470633602142334, 0.004689888000488281, 0.004712448120117187, 0.004677631855010986, 0.004677631855010986, 0.004678656101226806, 0.004689919948577881, 0.004676640033721924, 0.004706272125244141, 0.004686848163604736, 0.004695040225982666, 0.004690944194793701, 0.004682752132415771, 0.004695040225982666, 0.0046991357803344725, 0.004680704116821289, 0.004675583839416504, 0.004689919948577881, 0.004682752132415771, 0.004695040225982666, 0.004686848163604736, 0.0046929922103881834, 0.0046991357803344725, 0.004673567771911621, 0.00477180814743042, 0.00470630407333374, 0.00467251205444336, 0.004680704116821289, 0.004784128189086914, 0.004687871932983399, 0.004729856014251709, 0.004707327842712402, 0.004702208042144776, 0.004694143772125244, 0.004705152034759521, 0.004686848163604736, 0.0047185921669006346, 0.00471347188949585, 0.004712448120117187, 0.0046929922103881834, 0.0046991357803344725, 0.004682752132415771, 0.004711423873901367, 0.010620927810668946, 0.004698112010955811, 0.004701183795928955, 0.00470630407333374, 0.004683775901794434, 0.004685823917388916, 0.004684800148010254, 0.004696063995361328, 0.004724736213684082, 0.004690944194793701, 0.004698112010955811, 0.004684800148010254, 0.004686848163604736, 0.0047226881980895995, 0.004679679870605469, 0.004680704116821289, 0.004683775901794434, 0.004766719818115234, 0.004758528232574463, 0.004769792079925537, 0.004777984142303467, 0.00480460786819458, 0.004682752132415771, 0.004674560070037842, 0.004735007762908936, 0.0046806721687316895, 0.004733952045440673, 0.004700160026550293, 0.004708352088928222, 0.004687935829162598, 0.004704192161560059, 0.004700160026550293, 0.004702208042144776, 0.004683775901794434, 0.004652031898498535, 0.004664351940155029, 0.004706272125244141, 
0.004684800148010254, 0.004672544002532959, 0.004675551891326905, 0.004674560070037842, 0.004673535823822021, 0.004716544151306152, 0.00469708776473999, 0.004681727886199951, 0.004683775901794434, 0.004676608085632325, 0.004709375858306885, 0.004673535823822021, 0.00469708776473999, 0.004667424201965332, 0.004667359828948975, 0.0046888961791992185, 0.0047554559707641605, 0.004682752132415771, 0.004690944194793701, 0.004686848163604736, 0.004685823917388916, 0.004677631855010986, 0.004698112010955811, 0.004687871932983399, 0.00468175983428955, 0.004668384075164795, 0.010424320220947265, 0.004699168205261231, 0.00467964792251587, 0.0046888961791992185, 0.004686848163604736, 0.004712448120117187, 0.004694015979766846, 0.0046919679641723635, 0.004678656101226806, 0.004683775901794434, 0.004680704116821289, 0.004696063995361328, 0.004676608085632325, 0.004731904029846192, 0.004689919948577881, 0.00469708776473999, 0.0046919679641723635, 0.004704256057739258, 0.004710400104522705, 0.0046910080909729, 0.004700096130371094, 0.004680704116821289, 0.004738048076629638, 0.0046929922103881834, 0.004696063995361328, 0.004686848163604736, 0.004694015979766846, 0.004689919948577881, 0.004702208042144776, 0.0046888961791992185, 0.0046929922103881834, 0.004701183795928955, 0.004675583839416504, 0.004681727886199951, 0.004683775901794434, 0.00469708776473999, 0.0046888961791992185, 0.004679679870605469, 0.005018623828887939, 0.004877312183380127, 0.004807680130004883, 0.004819968223571777, 0.004791296005249023, 0.004791296005249023, 0.004825088024139404, 0.004696063995361328, 0.004702208042144776, 0.004685823917388916, 0.004671487808227539, 0.004674560070037842, 0.004690944194793701, 0.0046919679641723635, 0.00470630407333374, 0.004670464038848877, 0.004704256057739258, 0.004679679870605469, 0.004677631855010986, 0.004682752132415771, 0.004684800148010254, 0.004684800148010254, 0.00467251205444336, 0.004693056106567383, 0.004682688236236572, 0.01042739200592041, 0.00467251205444336, 0.0046991357803344725, 0.0046919679641723635, 0.0046694397926330565, 0.004682752132415771, 0.004675583839416504, 0.004684800148010254, 0.004701183795928955, 0.004687935829162598, 0.004684735774993896, 0.004671520233154297, 0.00467964792251587, 0.0046929922103881834, 0.004684800148010254, 0.004676608085632325, 0.004686848163604736, 0.004673535823822021, 0.004684800148010254, 0.004731904029846192, 0.004683775901794434, 0.004701183795928955, 0.0046929922103881834, 0.004747263908386231, 0.004700160026550293, 0.004696063995361328, 0.004690944194793701, 0.00469708776473999, 0.004689919948577881, 0.004689919948577881, 0.004711423873901367, 0.0047329277992248535, 0.004712448120117187, 0.004694015979766846, 0.004709375858306885, 0.0046919679641723635, 0.0047185921669006346, 0.004686848163604736, 0.004715519905090332, 0.004707327842712402, 0.004711423873901367, 0.004700160026550293, 0.004704256057739258, 0.004781055927276612, 0.004710400104522705, 0.0046919679641723635, 0.004694015979766846, 0.004678656101226806, 0.00470527982711792, 0.004685823917388916, 0.004681727886199951, 0.004671487808227539, 0.004673535823822021, 0.004700160026550293, 0.004698112010955811, 0.004719615936279297, 0.004687871932983399, 0.004696063995361328, 0.004687871932983399, 0.00470630407333374, 0.004683775901794434, 0.004683775901794434, 0.0047001919746398925, 0.01039459228515625, 0.004685823917388916, 0.004671487808227539, 0.004682752132415771, 0.004668416023254395, 0.0046919679641723635, 0.004700160026550293, 0.004684800148010254, 0.004680704116821289, 
0.004689919948577881, 0.004686848163604736, 0.004708352088928222, 0.004679679870605469, 0.004683775901794434, 0.004684800148010254, 0.004707327842712402, 0.004678656101226806, 0.0046919679641723635, 0.004673535823822021, 0.004685823917388916, 0.004892672061920166, 0.004717567920684815, 0.00470527982711792, 0.004687871932983399, 0.004712448120117187, 0.00469708776473999, 0.004692031860351563, 0.004689856052398682, 0.0047216639518737795, 0.004683775901794434, 0.00470527982711792, 0.004684800148010254, 0.004681727886199951, 0.004686848163604736, 0.004696063995361328, 0.0046991357803344725, 0.004696063995361328, 0.004686848163604736, 0.004681727886199951, 0.004671487808227539, 0.0046929922103881834, 0.00469708776473999, 0.004687871932983399, 0.004676608085632325, 0.004679679870605469, 0.004669472217559815, 0.004708320140838623, 0.004685823917388916, 0.004667391777038574, 0.004704288005828857, 0.004675551891326905, 0.004715519905090332, 0.004680704116821289, 0.004696063995361328, 0.004687871932983399, 0.004685823917388916, 0.004698112010955811, 0.004712448120117187, 0.004686848163604736, 0.004686848163604736, 0.004674560070037842, 0.004671487808227539, 0.004677631855010986, 0.01043660831451416, 0.004679679870605469, 0.004694015979766846, 0.0047084159851074215, 0.0046837120056152344, 0.004699168205261231, 0.004675551891326905, 0.004684800148010254, 0.004673535823822021, 0.004689919948577881, 0.004667391777038574, 0.0046888961791992185, 0.004681727886199951, 0.00469708776473999, 0.004686848163604736, 0.004698112010955811, 0.0046991357803344725, 0.004701183795928955, 0.00469708776473999, 0.004676608085632325, 0.004680704116821289, 0.004716544151306152, 0.004700160026550293, 0.0046929922103881834, 0.004689919948577881, 0.004687871932983399, 0.0047125439643859865, 0.004693920135498047, 0.004704256057739258, 0.004678656101226806, 0.004701183795928955, 0.004682752132415771, 0.004723743915557861, 0.004688864231109619, 0.0046919679641723635, 0.0046888961791992185, 0.004708352088928222, 0.004690976142883301, 0.004704224109649658, 0.0046991357803344725, 0.004709375858306885, 0.004694015979766846, 0.004709375858306885, 0.004686848163604736, 0.0047216639518737795, 0.004684800148010254, 0.004696063995361328, 0.004696063995361328, 0.0046919679641723635, 0.004733952045440673, 0.004686848163604736, 0.0046919679641723635, 0.0046929922103881834, 0.00470527982711792, 0.004681727886199951, 0.004720672130584717, 0.004699168205261231, 0.004696000099182129, 0.0046919679641723635, 0.00470527982711792, 0.004677631855010986, 0.004741119861602783, 0.004684800148010254, 0.010416128158569337, 0.004698112010955811, 0.004675583839416504, 0.004678656101226806, 0.004687871932983399, 0.004674560070037842, 0.00467251205444336, 0.004679679870605469, 0.004668416023254395, 0.004687871932983399, 0.004683775901794434, 0.004670464038848877, 0.004661248207092285, 0.0046888961791992185, 0.004670464038848877, 0.004694015979766846, 0.004673535823822021, 0.004679679870605469, 0.004677631855010986, 0.004700160026550293, 0.004675583839416504, 0.004680704116821289, 0.004673535823822021, 0.0046888961791992185, 0.004675583839416504, 0.004701183795928955, 0.004720640182495117, 0.004702208042144776, 0.0046694397926330565, 0.004681727886199951, 0.004715519905090332, 0.004684800148010254, 0.004671487808227539, 0.004677631855010986, 0.004680704116821289, 0.00468998384475708, 0.0047103362083435055, 0.004685823917388916, 0.004685823917388916, 0.004683775901794434, 0.004690944194793701, 0.004671487808227539, 0.004738048076629638, 0.004726784229278564, 
0.004698112010955811, 0.004690944194793701, 0.004710400104522705, 0.004674623966217041, 0.004736959934234619, 0.004685823917388916, 0.004677631855010986, 0.004679679870605469, 0.004690944194793701, 0.004687871932983399, 0.004695040225982666, 0.0046888961791992185, 0.004680704116821289, 0.004691999912261963, 0.004701151847839356, 0.004733952045440673, 0.0047288317680358885, 0.004709375858306885, 0.004695040225982666, 0.01041305637359619, 0.004694015979766846, 0.004687871932983399, 0.004682752132415771, 0.004673535823822021, 0.00470630407333374, 0.004674560070037842, 0.00469708776473999, 0.004679679870605469, 0.004682752132415771, 0.004687871932983399, 0.0046919679641723635, 0.0046919679641723635, 0.004687871932983399, 0.004680704116821289, 0.00467251205444336, 0.004670464038848877, 0.004707327842712402, 0.004683775901794434, 0.004695104122161865, 0.004684735774993896, 0.004683839797973633, 0.00467142391204834, 0.004717567920684815, 0.004715519905090332, 0.004765696048736572, 0.004694015979766846, 0.004681727886199951, 0.004708352088928222, 0.00469708776473999, 0.004681727886199951, 0.00471347188949585, 0.004685823917388916, 0.004677631855010986, 0.0046929922103881834, 0.004731904029846192, 0.004719615936279297, 0.004703231811523437, 0.004702208042144776, 0.004717631816864014, 0.004703167915344238, 0.004996096134185791, 0.0047820801734924315, 0.004811776161193848, 0.004738048076629638, 0.004708352088928222, 0.004689919948577881, 0.004680704116821289, 0.004680704116821289, 0.0046694397926330565, 0.004698112010955811, 0.004677631855010986, 0.0046888961791992185, 0.004682752132415771, 0.004686848163604736, 0.004694015979766846, 0.0047226881980895995, 0.0046929922103881834, 0.004694047927856445, 0.004706272125244141, 0.004677631855010986, 0.004667391777038574, 0.004675583839416504, 0.010481663703918457, 0.004743167877197266, 0.004704256057739258, 0.004701183795928955, 0.004679679870605469, 0.004655104160308838, 0.004683775901794434, 0.004679679870605469, 0.004685823917388916, 0.004678656101226806, 0.0047185921669006346, 0.004696063995361328, 0.004686848163604736, 0.00466431999206543, 0.004700160026550293, 0.0046888961791992185, 0.004696063995361328, 0.0046694397926330565, 0.00469708776473999, 0.004682752132415771, 0.004720640182495117, 0.004703231811523437, 0.004694015979766846, 0.004689919948577881, 0.004678656101226806, 0.004700160026550293, 0.004685823917388916, 0.00470527982711792, 0.004737023830413818, 0.004719615936279297, 0.004709375858306885, 0.0047185921669006346, 0.004711423873901367, 0.004723711967468262, 0.0047185921669006346, 0.004704256057739258, 0.004683775901794434, 0.004734975814819336, 0.004698112010955811, 0.004689919948577881, 0.004689919948577881, 0.004711423873901367, 0.004715519905090332, 0.004701183795928955, 0.0046991357803344725, 0.004698112010955811, 0.004694015979766846, 0.00469708776473999, 0.004700160026550293, 0.004698112010955811, 0.004711423873901367, 0.004711423873901367, 0.004680704116821289, 0.0047226881980895995, 0.0047216639518737795, 0.004686848163604736, 0.004703231811523437, 0.004723711967468262, 0.004701183795928955, 0.0047226881980895995, 0.004742144107818603, 0.0047216639518737795, 0.00470527982711792, 0.01046121597290039, 0.004692959785461426, 0.004690944194793701, 0.004678656101226806, 0.004676608085632325, 0.004738048076629638, 0.004668416023254395, 0.004663360118865967, 0.004677567958831787, 0.00467251205444336, 0.004675583839416504, 0.00470527982711792, 0.004685823917388916, 0.0046888961791992185, 0.004702208042144776, 0.004682752132415771, 
0.004680704116821289, 0.00469708776473999, 0.004687871932983399, 0.00469814395904541, 0.004688864231109619, 0.004676608085632325, 0.004674560070037842, 0.004703231811523437, 0.0046888961791992185, 0.004681727886199951, 0.0046888961791992185, 0.004689919948577881, 0.0047288317680358885, 0.004724736213684082, 0.004701183795928955, 0.004700160026550293, 0.004704256057739258, 0.00470630407333374, 0.004703231811523437, 0.004712448120117187, 0.004716544151306152, 0.004686912059783935, 0.004684735774993896, 0.004686848163604736, 0.00471449613571167, 0.004702271938323975, 0.004687808036804199, 0.004655168056488037, 0.004678592205047607, 0.004670464038848877, 0.0046919679641723635, 0.004684800148010254, 0.004680704116821289, 0.004700160026550293, 0.00469708776473999, 0.0046929922103881834, 0.004680704116821289, 0.004689919948577881, 0.0046919679641723635, 0.004680768013000488, 0.004683743953704834, 0.004704224109649658, 0.004677631855010986, 0.004682752132415771, 0.004689919948577881, 0.004674560070037842, 0.004717567920684815, 0.010430463790893555, 0.004687871932983399, 0.004675583839416504, 0.00471449613571167, 0.004682752132415771, 0.004678656101226806, 0.004683775901794434, 0.004678656101226806, 0.004675583839416504, 0.0046991357803344725, 0.004673535823822021, 0.004674560070037842, 0.004695040225982666, 0.004670464038848877, 0.004681727886199951, 0.004716544151306152, 0.0046704959869384765, 0.004743135929107666, 0.004686880111694336, 0.004685791969299316, 0.0046581759452819825, 0.0047185921669006346, 0.004676608085632325, 0.004687871932983399, 0.004676608085632325, 0.0046888961791992185, 0.004681727886199951, 0.004713503837585449, 0.004692959785461426, 0.004686880111694336, 0.004734943866729737, 0.0046991357803344725, 0.004707327842712402, 0.004679679870605469, 0.004695040225982666, 0.00467251205444336, 0.004675583839416504, 0.004679679870605469, 0.004696063995361328, 0.004676608085632325, 0.004673535823822021, 0.00469708776473999, 0.004726784229278564, 0.004678656101226806, 0.004677631855010986, 0.004682752132415771, 0.00470527982711792, 0.004683839797973633, 0.004682688236236572, 0.004670464038848877, 0.004708352088928222, 0.004703231811523437, 0.004696063995361328, 0.004677631855010986, 0.004696063995361328, 0.004674560070037842, 0.004711423873901367, 0.004671487808227539, 0.004677631855010986, 0.004682752132415771, 0.004667391777038574, 0.0049920320510864254, 0.004723680019378662, 0.010469375610351562, 0.004708352088928222, 0.004672544002532959, 0.004674560070037842, 0.004669407844543457, 0.004683807849884033, 0.00466431999206543, 0.004695007801055908, 0.004666368007659912, 0.004684800148010254, 0.00466534423828125, 0.004686848163604736, 0.004680704116821289, 0.0046929922103881834, 0.004667391777038574, 0.004686848163604736, 0.004675583839416504, 0.004679679870605469, 0.004684800148010254, 0.004700160026550293, 0.004707327842712402, 0.004704256057739258, 0.00469708776473999, 0.004687871932983399, 0.004673535823822021, 0.004921343803405762, 0.004710400104522705, 0.004712448120117187, 0.004746240139007568, 0.004986879825592041, 0.005346303939819336, 0.004842495918273926, 0.004809728145599365, 0.004803584098815918, 0.00486297607421875, 0.00479744005203247, 0.004696063995361328, 0.0046919679641723635, 0.004715583801269531, 0.004734911918640137, 0.004702208042144776, 0.004709375858306885, 0.004681727886199951, 0.004687871932983399, 0.0047257599830627444, 0.00470527982711792, 0.004684800148010254, 0.004667391777038574, 0.004672544002532959, 0.00466428804397583, 0.004671487808227539, 
0.004683807849884033, 0.004665311813354492, 0.004677631855010986, 0.004666368007659912, 0.004682752132415771, 0.004684800148010254, 0.004716544151306152, 0.0046929922103881834, 0.0046919679641723635, 0.004683775901794434, 0.00467251205444336, 0.004690944194793701, 0.010534912109375, 0.004677631855010986, 0.00466431999206543, 0.004704256057739258, 0.004679679870605469, 0.004676608085632325, 0.004666368007659912, 0.0046694397926330565, 0.004680704116821289, 0.004717599868774414, 0.004698080062866211, 0.0046888961791992185, 0.004671487808227539, 0.0046919679641723635, 0.004710400104522705, 0.004671487808227539, 0.004677631855010986, 0.004675583839416504, 0.004684800148010254, 0.004666368007659912, 0.004709375858306885, 0.004932608127593994, 0.00470630407333374, 0.004670464038848877, 0.004673535823822021, 0.00467251205444336, 0.004690944194793701, 0.004682752132415771, 0.004727807998657227, 0.004684800148010254, 0.004678656101226806, 0.004718624114990235, 0.00467964792251587, 0.004668416023254395, 0.004671487808227539, 0.004740096092224121, 0.004678656101226806, 0.004711423873901367, 0.004674560070037842, 0.004703231811523437, 0.0046929922103881834, 0.00469708776473999, 0.004680704116821289, 0.004698112010955811, 0.00466534423828125, 0.0046694397926330565, 0.004678656101226806, 0.004666368007659912, 0.004667391777038574, 0.004707327842712402, 0.004690944194793701, 0.004684800148010254, 0.004666368007659912, 0.004681727886199951, 0.004696063995361328, 0.004715519905090332, 0.004709375858306885, 0.0046929922103881834, 0.004689919948577881, 0.004674560070037842, 0.004704256057739258, 0.004685823917388916, 0.004681727886199951, 0.010402815818786621, 0.00470527982711792, 0.004695040225982666, 0.004668447971343994, 0.004684768199920654, 0.004684800148010254, 0.004687871932983399, 0.004680704116821289, 0.004675583839416504, 0.00467251205444336, 0.004677631855010986, 0.004678656101226806, 0.0046991357803344725, 0.004681727886199951, 0.004701183795928955, 0.004690944194793701, 0.0046694397926330565, 0.004678656101226806, 0.004711423873901367, 0.0046674561500549315, 0.004687808036804199, 0.004674560070037842, 0.00466534423828125, 0.004668416023254395, 0.004695040225982666, 0.004682752132415771, 0.004694015979766846, 0.004700160026550293, 0.004690944194793701, 0.004678656101226806, 0.0046991357803344725, 0.0046888961791992185, 0.004677631855010986, 0.005040128231048584, 0.004827136039733886, 0.004811776161193848, 0.004884479999542236, 0.004869120121002197, 0.004808703899383545, 0.0051476478576660155, 0.005288959980010987, 0.0049725441932678225, 0.004731904029846192, 0.0046929922103881834, 0.004754432201385498, 0.004720640182495117, 0.0046888961791992185, 0.00466431999206543, 0.00471449613571167, 0.0047606401443481446, 0.0046796159744262695, 0.004685823917388916, 0.00471449613571167, 0.004683775901794434, 0.004687903881072998, 0.004676576137542725, 0.004682752132415771, 0.004676608085632325, 0.004681727886199951, 0.004677631855010986, 0.004687871932983399, 0.004678688049316406, 0.004670432090759277, 0.011083776473999024, 0.0048496642112731934, 0.004704256057739258, 0.004698112010955811, 0.00470630407333374, 0.004703231811523437, 0.004721695899963379, 0.004697055816650391, 0.004674560070037842, 0.004687871932983399, 0.0046919679641723635, 0.0047216639518737795, 0.004680704116821289, 0.00468998384475708, 0.004698048114776612, 0.004716544151306152, 0.004674623966217041, 0.004868031978607178, 0.004695040225982666, 0.004708352088928222, 0.004621312141418457, 0.004655104160308838, 0.004626431941986084, 
0.0046592001914978025, 0.004641791820526123, 0.00464896011352539, 0.004668416023254395, 0.00465715217590332, 0.00467251205444336, 0.0047216639518737795, 0.004720640182495117, 0.004695040225982666, 0.004678656101226806, 0.004702208042144776, 0.004686848163604736, 0.004710400104522705, 0.004690944194793701, 0.004680704116821289, 0.004689919948577881, 0.0046888961791992185, 0.00470527982711792, 0.00471347188949585, 0.0046929922103881834, 0.004687871932983399, 0.004702208042144776, 0.004687871932983399, 0.004708352088928222, 0.0046888961791992185, 0.004704256057739258, 0.004695040225982666, 0.0046991357803344725, 0.004702208042144776, 0.004724736213684082, 0.004757503986358643, 0.004708352088928222, 0.004680704116821289, 0.004668416023254395, 0.004686848163604736, 0.0047257599830627444, 0.004703231811523437, 0.004675583839416504, 0.00470630407333374, 0.004678656101226806, 0.01054207992553711, 0.004671487808227539, 0.0047185921669006346, 0.004683839797973633, 0.0046908798217773435, 0.004686848163604736, 0.004673535823822021, 0.00468281602859497, 0.0046837120056152344, 0.004701183795928955, 0.0046694397926330565, 0.00470630407333374, 0.004687871932983399, 0.004666368007659912, 0.00466431999206543, 0.0046919679641723635, 0.004674560070037842, 0.004681727886199951, 0.004680704116821289, 0.004679743766784668, 0.004676544189453125, 0.004698112010955811, 0.004677631855010986, 0.004703231811523437, 0.004678656101226806, 0.0047001919746398925, 0.004693984031677246, 0.00471449613571167, 0.004687871932983399, 0.004727807998657227, 0.004686848163604736, 0.004684800148010254, 0.004679679870605469, 0.004695040225982666, 0.004737023830413818, 0.004687871932983399, 0.004670464038848877, 0.004678656101226806, 0.004700160026550293, 0.004734975814819336, 0.004684800148010254, 0.004684800148010254, 0.004667391777038574, 0.004685823917388916, 0.00471449613571167, 0.00467251205444336, 0.004670464038848877, 0.004673535823822021, 0.004660223960876465, 0.004660223960876465, 0.004700160026550293, 0.004666368007659912, 0.004675583839416504, 0.004700160026550293, 0.004685823917388916, 0.004695104122161865, 0.004699071884155273, 0.0046919679641723635, 0.00470527982711792, 0.004687935829162598, 0.004696063995361328, 0.004675519943237305, 0.004700160026550293, 0.010424320220947265, 0.004687871932983399, 0.004679679870605469, 0.004717567920684815, 0.004702208042144776, 0.004673535823822021, 0.004673567771911621, 0.004660192012786865, 0.004704256057739258, 0.004678656101226806, 0.00469708776473999, 0.004687871932983399, 0.004694047927856445, 0.004684768199920654, 0.004712448120117187, 0.004703231811523437, 0.004680704116821289, 0.004686848163604736, 0.004670464038848877, 0.004711423873901367, 0.004675615787506104, 0.004709343910217285, 0.004690944194793701, 0.004739071846008301, 0.004694015979766846, 0.004715583801269531, 0.004691904067993164, 0.00470527982711792, 0.004685823917388916, 0.004689919948577881, 0.004689919948577881, 0.004698112010955811, 0.004679679870605469, 0.004673600196838379, 0.0046908798217773435, 0.00469708776473999, 0.004711423873901367, 0.004700160026550293, 0.004700160026550293, 0.004675583839416504, 0.0046581759452819825, 0.0046581759452819825, 0.004642816066741944, 0.00466431999206543, 0.004655104160308838, 0.004681727886199951, 0.004684800148010254, 0.004687871932983399, 0.004727807998657227, 0.004837376117706299, 0.004679679870605469, 0.004690944194793701, 0.004678656101226806, 0.004666368007659912, 0.004686848163604736, 0.004674560070037842, 0.004690944194793701, 0.004694015979766846, 
0.004684832096099854, 0.004665311813354492, 0.004851712226867676, 0.004775936126708984, 0.004679679870605469, 0.010490880012512208, 0.00469708776473999, 0.004686848163604736, 0.004673535823822021, 0.0047185921669006346, 0.004681727886199951, 0.004695040225982666, 0.004677631855010986, 0.00466431999206543, 0.004676608085632325, 0.004667391777038574, 0.004702208042144776, 0.004695040225982666, 0.004707327842712402, 0.004709375858306885, 0.0046929922103881834, 0.004790272235870361, 0.004701183795928955, 0.004694015979766846, 0.004684800148010254, 0.004679679870605469, 0.0046694397926330565, 0.004668447971343994, 0.004705247879028321, 0.004683775901794434, 0.004850687980651855, 0.004783103942871094, 0.004711423873901367, 0.004833280086517334, 0.0050728960037231445, 0.005371903896331787, 0.004867072105407715, 0.004851712226867676, 0.00481279993057251, 0.0048056321144104, 0.004820991992950439, 0.004770815849304199, 0.004766719818115234, 0.004716544151306152, 0.004701183795928955, 0.004687871932983399, 0.0046991357803344725, 0.004876287937164306, 0.004841472148895264, 0.004847616195678711, 0.004884479999542236, 0.004740096092224121, 0.004838399887084961, 0.0047554559707641605, 0.004966400146484375, 0.00481382417678833, 0.004712448120117187, 0.004730879783630371, 0.004701183795928955, 0.004690944194793701, 0.004712448120117187, 0.004743199825286865, 0.004686848163604736, 0.004720608234405518, 0.00469708776473999, 0.0046919679641723635, 0.004686848163604736, 0.0046888961791992185, 0.010582015991210938, 0.004707327842712402, 0.004676608085632325, 0.0046991357803344725, 0.004709375858306885, 0.004791296005249023, 0.004694015979766846, 0.004690944194793701, 0.004679679870605469, 0.004708352088928222, 0.004702208042144776, 0.00470630407333374, 0.004680704116821289, 0.004678656101226806, 0.004673535823822021, 0.0046929922103881834, 0.004666368007659912, 0.004679679870605469, 0.004677631855010986, 0.004670464038848877, 0.004671487808227539, 0.004708352088928222, 0.004703231811523437, 0.0046929922103881834, 0.004679679870605469, 0.004700160026550293, 0.004701183795928955, 0.0047513599395751956, 0.0047012481689453125, 0.0046888961791992185, 0.0047021441459655765, 0.004687871932983399, 0.004709375858306885, 0.004683775901794434, 0.0046919679641723635, 0.004694015979766846, 0.004694015979766846, 0.004694015979766846, 0.0046929922103881834, 0.004707327842712402, 0.00470527982711792, 0.004690944194793701, 0.0047288317680358885, 0.004689919948577881, 0.004730879783630371, 0.004702208042144776, 0.004701183795928955, 0.004677631855010986, 0.004690944194793701, 0.004694079875946045, 0.004694975852966308, 0.00470527982711792, 0.004700160026550293, 0.0046929922103881834, 0.004707327842712402, 0.004731904029846192, 0.0046991357803344725, 0.004687871932983399, 0.004707327842712402, 0.004696063995361328, 0.0046919679641723635, 0.004719615936279297, 0.00471347188949585, 0.010447872161865235, 0.004708352088928222, 0.004771840095520019, 0.004695040225982666, 0.004682752132415771, 0.0046888961791992185, 0.00468998384475708, 0.004712384223937988, 0.004706367969512939, 0.004699071884155273, 0.00466534423828125, 0.004680704116821289, 0.004719615936279297, 0.004676608085632325, 0.004696063995361328, 0.004687871932983399, 0.004673535823822021, 0.0047329277992248535, 0.004710400104522705, 0.004709375858306885, 0.004712448120117187, 0.004702208042144776, 0.004677631855010986, 0.004684800148010254, 0.00469708776473999, 0.004684800148010254, 0.0046929922103881834, 0.00470531177520752, 0.004707295894622803, 0.004707327842712402, 
0.004712448120117187, 0.004716544151306152, 0.0046919679641723635, 0.0046991357803344725, 0.004696063995361328, 0.0047226881980895995, 0.004695072174072266, 0.004700128078460693, 0.0046991357803344725, 0.004720640182495117, 0.004704256057739258, 0.004703231811523437, 0.004701183795928955, 0.004708352088928222, 0.004700160026550293, 0.004680704116821289, 0.004687871932983399, 0.004694015979766846, 0.004680704116821289, 0.004678656101226806, 0.004675583839416504, 0.004687871932983399, 0.004663296222686767, 0.004698112010955811, 0.0046888961791992185, 0.004696063995361328, 0.004678656101226806, 0.004701183795928955, 0.004833280086517334, 0.0047861762046813965, 0.0047820801734924315, 0.004774911880493164, 0.0047851519584655765, 0.010476544380187988, 0.004695040225982666, 0.004681727886199951, 0.004694015979766846, 0.0046991357803344725, 0.00469708776473999, 0.004695040225982666, 0.0046888961791992185, 0.004694015979766846, 0.004709375858306885, 0.004696063995361328, 0.004719647884368896, 0.004704224109649658, 0.00466431999206543, 0.004673535823822021, 0.00470527982711792, 0.004678656101226806, 0.00467251205444336, 0.0046888961791992185, 0.004683775901794434, 0.004698112010955811, 0.0046929922103881834, 0.004690944194793701, 0.0046929922103881834, 0.004695040225982666, 0.0046888961791992185, 0.004695040225982666, 0.004720640182495117, 0.004717631816864014, 0.0047103681564331056, 0.004681695938110351, 0.004689919948577881, 0.004700160026550293, 0.004690944194793701, 0.004696063995361328, 0.0046356477737426755, 0.004641791820526123, 0.004651008129119873, 0.004653056144714355, 0.00464896011352539, 0.004766719818115234, 0.00471449613571167, 0.004709375858306885, 0.0046888961791992185, 0.004740096092224121, 0.004684800148010254, 0.004704256057739258, 0.004687871932983399, 0.004766719818115234, 0.004807680130004883, 0.004771840095520019, 0.004667391777038574, 0.004627456188201904, 0.00464793586730957, 0.00466534423828125, 0.004631552219390869, 0.0047226881980895995, 0.0049192957878112795, 0.004799488067626953, 0.004689919948577881, 0.004702208042144776, 0.004724736213684082, 0.004711423873901367, 0.010523648262023925, 0.004712448120117187, 0.0046991357803344725, 0.004678656101226806, 0.004682752132415771, 0.00469708776473999, 0.004703231811523437, 0.004717567920684815, 0.004686848163604736, 0.004676608085632325, 0.004678656101226806, 0.004680704116821289, 0.004723711967468262, 0.004700160026550293, 0.004702208042144776, 0.004684800148010254, 0.004678656101226806, 0.004677631855010986, 0.004726784229278564, 0.004819968223571777, 0.0047636480331420894, 0.004673567771911621, 0.00467964792251587, 0.004670464038848877, 0.0047185921669006346, 0.004697152137756348, 0.0047134079933166505, 0.004700160026550293, 0.004727807998657227, 0.004711455821990966, 0.004696032047271728, 0.0046929922103881834, 0.004695040225982666, 0.0046991357803344725, 0.004681727886199951, 0.00470630407333374, 0.004689919948577881, 0.004689919948577881, 0.004683775901794434, 0.004677631855010986, 0.004682752132415771, 0.004681727886199951, 0.004682752132415771, 0.004708352088928222, 0.0046888961791992185, 0.004682752132415771, 0.004666368007659912, 0.004723711967468262, 0.004684800148010254, 0.004676608085632325, 0.00466431999206543, 0.004663296222686767, 0.004695072174072266, 0.004697055816650391, 0.004683775901794434, 0.004674560070037842, 0.00485584020614624, 0.004669407844543457, 0.004727807998657227, 0.004678656101226806, 0.004694015979766846, 0.004685823917388916, 0.004698112010955811, 0.010440704345703124, 0.004686848163604736, 
0.004678656101226806, 0.004677631855010986, 0.004700160026550293, 0.004689919948577881, 0.004675583839416504, 0.0050954241752624516, 0.004723711967468262, 0.004709407806396484, 0.004693984031677246, 0.004684800148010254, 0.004681727886199951, 0.004671487808227539, 0.00467251205444336, 0.004707327842712402, 0.004673535823822021, 0.004668416023254395, 0.004678656101226806, 0.004661248207092285, 0.004668416023254395, 0.004700160026550293, 0.004673535823822021, 0.004681727886199951, 0.004674560070037842, 0.004675583839416504, 0.004689919948577881, 0.004827136039733886, 0.0046929922103881834, 0.00471347188949585, 0.004687871932983399, 0.004709375858306885, 0.004681727886199951, 0.004710400104522705, 0.00471449613571167, 0.004707327842712402, 0.004710400104522705, 0.00470630407333374, 0.004709375858306885, 0.004718624114990235, 0.004692959785461426, 0.004702208042144776, 0.004671487808227539, 0.004753407955169678, 0.005144576072692871, 0.004781055927276612, 0.004680704116821289, 0.004670464038848877, 0.004774911880493164, 0.004696095943450928, 0.00467248010635376, 0.004684800148010254, 0.00468175983428955, 0.004671520233154297, 0.004665279865264892, 0.004729856014251709, 0.004680704116821289, 0.00466534423828125, 0.00468281602859497, 0.004673471927642822, 0.00466534423828125, 0.004711455821990966, 0.0046806721687316895, 0.010470399856567383, 0.0047216639518737795, 0.004689919948577881, 0.004668416023254395, 0.004679679870605469, 0.004743167877197266, 0.004773888111114502, 0.004682752132415771, 0.004799488067626953, 0.00469708776473999, 0.004684800148010254, 0.004685823917388916, 0.004802559852600098, 0.00467251205444336, 0.004689919948577881, 0.004661248207092285, 0.004671487808227539, 0.004679679870605469, 0.004696063995361328, 0.004698112010955811, 0.004673535823822021, 0.004678656101226806, 0.0046694397926330565, 0.004709375858306885, 0.0047226881980895995, 0.0046888961791992185, 0.0046919679641723635, 0.004719615936279297, 0.0046929922103881834, 0.004715519905090332, 0.00470630407333374, 0.004715519905090332, 0.004678656101226806, 0.004685823917388916, 0.004726784229278564, 0.004698112010955811, 0.004684800148010254, 0.004701183795928955, 0.00468281602859497, 0.004706240177154541, 0.004675583839416504, 0.004712448120117187, 0.004695040225982666, 0.00470630407333374, 0.004694047927856445, 0.004696032047271728, 0.004707327842712402, 0.004698112010955811, 0.004738048076629638, 0.004695040225982666, 0.004684800148010254, 0.004673535823822021, 0.004702208042144776, 0.004719615936279297, 0.004681727886199951, 0.0047216639518737795, 0.004682752132415771, 0.004703231811523437, 0.00470630407333374, 0.004748288154602051, 0.004687871932983399, 0.004680704116821289, 0.004689919948577881, 0.010488832473754883, 0.004701183795928955, 0.004684800148010254, 0.004707327842712402, 0.00469708776473999, 0.00470527982711792, 0.004704256057739258, 0.004676608085632325, 0.004677631855010986, 0.004682752132415771, 0.004683775901794434, 0.004631552219390869, 0.004677631855010986, 0.0050094079971313476, 0.00481279993057251, 0.004830207824707031, 0.0046919679641723635, 0.004666368007659912, 0.004670464038848877, 0.004661248207092285, 0.00469708776473999, 0.004679679870605469, 0.004683775901794434, 0.004667391777038574, 0.004676608085632325, 0.004679679870605469, 0.004686848163604736, 0.0047288317680358885, 0.004679679870605469, 0.004741119861602783, 0.004684800148010254, 0.004667391777038574, 0.0046888961791992185, 0.004668416023254395, 0.0048056321144104, 0.004679679870605469, 0.004676608085632325, 
0.004701183795928955, 0.0046919679641723635, 0.004676608085632325, 0.004668416023254395, 0.0046694397926330565, 0.00467251205444336, 0.0047226881980895995, 0.004707327842712402, 0.004667391777038574, 0.004668416023254395, 0.004707327842712402, 0.0047216639518737795, 0.00470527982711792, 0.004677631855010986, 0.00469708776473999, 0.004670464038848877, 0.004680704116821289, 0.004634624004364014, 0.004743167877197266, 0.004644864082336426, 0.004630591869354248, 0.004636608123779297, 0.004620287895202637, 0.004633600234985351, 0.004653056144714355, 0.00466534423828125, 0.010447872161865235, 0.004687871932983399, 0.004710400104522705, 0.004690944194793701, 0.004686848163604736, 0.004684800148010254, 0.004719615936279297, 0.004687871932983399, 0.004704256057739258, 0.004694015979766846, 0.004676608085632325, 0.0046919679641723635, 0.004690944194793701, 0.004709375858306885, 0.0046919679641723635, 0.0046888961791992185, 0.0046919679641723635, 0.004673567771911621, 0.0046806721687316895, 0.004687871932983399, 0.004668416023254395, 0.004695040225982666, 0.004677631855010986, 0.004677631855010986, 0.004687871932983399, 0.004707327842712402, 0.004682784080505371, 0.004805600166320801, 0.004690944194793701, 0.004689919948577881, 0.004724736213684082, 0.004695040225982666, 0.004694015979766846, 0.004680704116821289, 0.004701183795928955, 0.004701183795928955, 0.004726784229278564, 0.004699168205261231, 0.004692959785461426, 0.00470630407333374, 0.004696063995361328, 0.004696063995361328, 0.004715519905090332, 0.0046929922103881834, 0.004723711967468262, 0.004709375858306885, 0.004686848163604736, 0.004685823917388916, 0.004759552001953125, 0.00470527982711792, 0.004698112010955811, 0.004686848163604736, 0.004676608085632325, 0.0046991357803344725, 0.004681727886199951, 0.004704256057739258, 0.004686848163604736, 0.004678656101226806, 0.0046929922103881834, 0.0047257599830627444, 0.004694015979766846, 0.004685823917388916, 0.0046592001914978025, 0.010380288124084473, 0.004684800148010254, 0.004671487808227539, 0.004687871932983399, 0.004676608085632325, 0.0046929922103881834, 0.0047216639518737795, 0.004666368007659912, 0.0046919679641723635, 0.004694015979766846, 0.004686848163604736, 0.004710400104522705, 0.004709375858306885, 0.004686848163604736, 0.004679679870605469, 0.004695040225982666, 0.004674560070037842, 0.0047185921669006346, 0.0046929922103881834, 0.004685823917388916, 0.004682784080505371, 0.0046919360160827634, 0.004707327842712402, 0.005293119907379151, 0.00481171178817749, 0.0048230400085449215, 0.004736000061035156, 0.004712448120117187, 0.004675583839416504, 0.00469708776473999, 0.0046919679641723635, 0.004701183795928955, 0.004671487808227539, 0.004701183795928955, 0.004685823917388916, 0.0046991357803344725, 0.004684800148010254, 0.004685823917388916, 0.004709375858306885, 0.004704256057739258, 0.004698112010955811, 0.004670464038848877, 0.004668416023254395, 0.004678688049316406, 0.004696032047271728, 0.004694015979766846, 0.00469708776473999, 0.004687871932983399, 0.004676608085632325, 0.0046991357803344725, 0.004696063995361328, 0.004687871932983399, 0.004670464038848877, 0.00466431999206543, 0.004668416023254395, 0.004667391777038574, 0.004685855865478515, 0.004664351940155029, 0.004673471927642822, 0.004702208042144776, 0.0046581759452819825, 0.004671487808227539, 0.0046929922103881834, 0.010424320220947265, 0.00469708776473999, 0.004712448120117187, 0.004683775901794434, 0.004677631855010986, 0.004677631855010986, 0.0046622719764709475, 0.004676608085632325, 
0.004700160026550293, 0.0046694397926330565, 0.004685823917388916, 0.004682752132415771, 0.00468179178237915, 0.004681663990020752, 0.004701183795928955, 0.004695040225982666, 0.0048455681800842285, 0.0046888961791992185, 0.0046991357803344725, 0.004695040225982666, 0.004744192123413086, 0.00469708776473999, 0.004682752132415771, 0.00467251205444336, 0.004666431903839112, 0.0046908798217773435, 0.004679679870605469, 0.004685823917388916, 0.004710464000701904, 0.004664256095886231, 0.004675583839416504, 0.0046991357803344725, 0.004684800148010254, 0.0046694397926330565, 0.004921343803405762, 0.00470630407333374, 0.004670464038848877, 0.004712512016296386, 0.004696000099182129, 0.004700160026550293, 0.0046929922103881834, 0.004680799961090088, 0.004699039936065674, 0.004682752132415771, 0.004705376148223877, 0.00467142391204834, 0.004708320140838623, 0.004682784080505371, 0.0047144641876220705, 0.00471449613571167, 0.004674560070037842, 0.004690944194793701, 0.004676608085632325, 0.004675583839416504, 0.004682752132415771, 0.004695040225982666, 0.004683775901794434, 0.004689919948577881, 0.005478400230407715, 0.004779007911682129, 0.004680704116821289, 0.004671487808227539, 0.004679743766784668, 0.010496959686279297, 0.004704256057739258, 0.004688960075378418, 0.004685760021209717, 0.004684800148010254, 0.004696063995361328, 0.004687871932983399, 0.004676640033721924, 0.0046878399848937985, 0.004675583839416504, 0.004700160026550293, 0.004683775901794434, 0.004683775901794434, 0.00467251205444336, 0.004717567920684815, 0.004696063995361328, 0.00471449613571167, 0.004678656101226806, 0.004684800148010254, 0.004674560070037842, 0.004710464000701904, 0.004676544189453125, 0.0046929922103881834, 0.004666368007659912, 0.0046888961791992185, 0.004667391777038574, 0.004678656101226806, 0.004686848163604736, 0.004719615936279297, 0.004681727886199951, 0.004679679870605469, 0.004685823917388916, 0.004670464038848877, 0.004696063995361328, 0.004696063995361328, 0.004687871932983399, 0.004674560070037842, 0.004682752132415771, 0.0046704959869384765, 0.004683743953704834, 0.004695040225982666, 0.004708352088928222, 0.004690944194793701, 0.004667391777038574, 0.0047257599830627444, 0.004696063995361328, 0.004681727886199951, 0.004761600017547608, 0.004670464038848877, 0.004673535823822021, 0.004698112010955811, 0.004681727886199951, 0.004668416023254395, 0.004685823917388916, 0.004696063995361328, 0.004674560070037842, 0.004680704116821289, 0.004694015979766846, 0.0046929922103881834, 0.004681727886199951, 0.004682752132415771, 0.0046930241584777835, 0.004693984031677246, 0.010465279579162597, 0.004733952045440673, 0.004707327842712402, 0.004774911880493164, 0.004694015979766846, 0.004690944194793701, 0.004682752132415771, 0.004695040225982666, 0.00467251205444336, 0.0046991357803344725, 0.004707327842712402, 0.004690944194793701, 0.004707327842712402, 0.004690944194793701, 0.004717567920684815, 0.004801536083221435, 0.004723711967468262, 0.00470527982711792, 0.004757503986358643, 0.00470527982711792, 0.004747263908386231, 0.004707327842712402, 0.0046929922103881834, 0.004905983924865722, 0.004746240139007568, 0.004709375858306885, 0.004753407955169678, 0.004723711967468262, 0.004717567920684815, 0.004710400104522705, 0.004694015979766846, 0.004717567920684815, 0.004743167877197266, 0.004724736213684082, 0.004677631855010986, 0.0047513599395751956, 0.004712448120117187, 0.004685823917388916, 0.0046888961791992185, 0.004687871932983399, 0.004675583839416504, 0.0046991357803344725, 0.004764671802520752, 
0.004690944194793701, 0.004684800148010254, 0.004677631855010986, 0.004673535823822021, 0.004680704116821289, 0.004857855796813965, 0.004773888111114502, 0.004676608085632325, 0.004673535823822021, 0.004682752132415771, 0.004703231811523437, 0.004671487808227539, 0.0047185921669006346, 0.004668416023254395, 0.00467251205444336, 0.004685823917388916, 0.0046991357803344725, 0.004684800148010254, 0.0047350401878356934, 0.004718527793884277]",tokens/s,207.68987718121548,,,main,False,False @@ -8861,7 +8861,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664e9048-57c400ea192289d045323afb;803612dd-263b-4d41-adda-a6e851f7af4e) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664fe1e8-2f8fdf6354b628f246063f87;772022b1-4ee2-4c1a-8f0c-b3ef28dced5c) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json. @@ -8986,7 +8986,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e9593-0683d31134cfdb2d0e998170;22ecd24a-e008-42b8-bffa-c4aa79188456) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe8d3-5f1b48a37e2cbeff0319580f;1a786904-6a00-472a-9c47-27aa9ec6267d) Repository Not Found for url: https://huggingface.co/x/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -9046,7 +9046,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664e9013-20effb0052252b6926a02194;c797e97f-97d5-4c99-acfa-c3ff640fcc62) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664fe1af-03ca57107ce9448b55fe094a;263a3d16-706d-4db0-b18e-3be2d93c023e) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json. @@ -9301,7 +9301,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1560, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpw50wy7le/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpw0ib2tn3/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): @@ -9490,7 +9490,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664e8e07-491b3e82354c922f1f31c989;27950d66-82f2-431d-9230-1a424cad9390) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664fdf99-197196b03a66d4a15d678deb;87a9aced-4316-47f6-bbd5-10ccdc11e2a0) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json. @@ -9575,7 +9575,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e9672-3ba259115ccf740d5828947b;63f78a8c-36ea-4381-b6e8-3808cd651cd4) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe82b-52c4cb931fdea4f04532fac4;4f00ee29-f279-47e5-8482-eb17f4a8d306) Repository Not Found for url: https://huggingface.co/l/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -9607,7 +9607,7 @@ If this is a private repository, make sure to pass a token having permission to ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,MB,1231.99488,14529.593344,0.0,13883.14624,13705.186304,s,10,0.9352619552612306,0.09352619552612304,0.00022871080860555472,0.09344286346435547,0.09363615264892577,0.09390740509033203,0.09412440704345702,"[0.09417865753173828, 0.09348995208740235, 0.09342777252197265, 0.09343241882324219, 0.09332745361328125, 0.09357587432861328, 0.09344054412841797, 0.09356108856201172, 0.0933830108642578, 0.09344518280029297]",tokens/s,2737.2010436209393,kWh,1.1073021734614982e-06,6.066724194408091e-07,6.123927536728964e-06,7.837902129631271e-06,tokens/kWh,32661801.049057417,MB,1231.99488,14529.593344,0.0,13883.14624,13856.22528,s,10,22.602925048828123,2.260292504882812,0.0016421842502448833,2.2602330322265622,2.262653857421875,2.2627298828124998,2.262790703125,"[2.262636962890625, 2.260993408203125, 2.259475341796875, 2.262805908203125, 2.25965234375, 2.260813720703125, 2.25700244140625, 2.26097216796875, 2.259251220703125, 2.259321533203125]",tokens/s,27.872498742487455,kWh,2.662618296230569e-05,1.4590572437131258e-05,0.00011656403172467102,0.000157780787124108,tokens/kWh,399288.15889633726,,s,629,23.407387664794918,0.037213652885206554,0.01117367162514878,0.03586969757080078,0.03598151626586914,0.03602227020263672,0.12987019348144532,"[0.03587481689453125, 0.03576627349853516, 0.03585228729248047, 0.03581849670410156, 0.0357498893737793, 0.03581849670410156, 0.035762176513671876, 0.035743743896484374, 0.035783679962158206, 0.0357386245727539, 0.035781631469726564, 0.03577036666870117, 0.035784702301025394, 0.03578572845458984, 0.035899425506591795, 0.03585123062133789, 0.03577446365356445, 0.03582259368896484, 0.035789825439453124, 0.03584204864501953, 0.03582156753540039, 0.035833854675292966, 0.03583795166015625, 0.035879936218261715, 0.035883007049560545, 0.03590348815917969, 0.0358205451965332, 0.03582361602783203, 0.035883007049560545, 0.0359106559753418, 0.035899391174316404, 0.03589324951171875, 0.03590348815917969, 0.03587891387939453, 0.03590963363647461, 0.03588710403442383, 0.03585228729248047, 0.035980289459228515, 0.036040702819824216, 0.035931137084960936, 0.035884033203125, 0.035966976165771485, 0.035934207916259765, 0.03590041732788086, 0.035952640533447267, 0.035939327239990236, 0.0359444465637207, 0.035920894622802735, 0.03590553665161133, 0.035939327239990236, 0.03618815994262695, 0.03666124725341797, 0.03601919937133789, 0.03595980834960937, 0.035912704467773435, 0.03601513671875, 0.035977184295654295, 0.03596806335449219, 0.03598432159423828, 0.03598643112182617, 0.03602022552490235, 0.03594137573242188, 0.13005926513671875, 0.03581849670410156, 0.035757057189941405, 0.035760128021240234, 0.03573657608032227, 0.035811328887939455, 0.03576732635498047, 
0.035827678680419923, 0.035817470550537106, 0.0357130241394043, 0.035784702301025394, 0.03576422500610352, 0.03578060913085938, 0.03583488082885742, 0.03577241516113281, 0.0358021125793457, 0.03583692932128906, 0.03580416107177734, 0.035776512145996094, 0.035781631469726564, 0.03579904174804688, 0.03579084777832031, 0.03590963363647461, 0.03587276840209961, 0.03577036666870117, 0.03585228729248047, 0.035850238800048825, 0.03590655899047852, 0.03586969757080078, 0.03581951904296875, 0.03585433578491211, 0.03581849670410156, 0.035901439666748046, 0.03594649505615234, 0.03597619247436523, 0.03590860748291016, 0.03594649505615234, 0.035939327239990236, 0.035883007049560545, 0.03589017486572266, 0.035883007049560545, 0.03590041732788086, 0.03593625640869141, 0.03600998306274414, 0.03591372680664062, 0.03590963363647461, 0.03597619247436523, 0.035920894622802735, 0.035917823791503906, 0.03590860748291016, 0.036001792907714845, 0.03596492767333984, 0.03599871826171875, 0.03599462509155273, 0.03595673751831055, 0.03598233413696289, 0.035972095489501955, 0.03600588989257812, 0.03593625640869141, 0.0359813117980957, 0.035996673583984375, 0.03594035339355469, 0.03602227020263672, 0.12988108825683595, 0.03569356918334961, 0.0357130241394043, 0.03576627349853516, 0.035732479095458985, 0.035762176513671876, 0.03582668685913086, 0.035710975646972655, 0.03577446365356445, 0.03574272155761719, 0.03573350524902344, 0.035748863220214845, 0.035937278747558594, 0.03594137573242188, 0.03578675079345703, 0.03581951904296875, 0.03575091171264649, 0.03585740661621094, 0.03577446365356445, 0.03579596710205078, 0.03577139282226562, 0.03580825424194336, 0.035798015594482424, 0.03580825424194336, 0.0358021125793457, 0.03578572845458984, 0.03580928039550781, 0.03580416107177734, 0.03581139373779297, 0.035843006134033205, 0.03587686538696289, 0.03585638427734375, 0.03585638427734375, 0.035871742248535156, 0.03582566452026367, 0.03583488082885742, 0.03584819030761719, 0.03589734268188476, 0.03605196762084961, 0.035912704467773435, 0.03583795166015625, 0.035884033203125, 0.035929088592529294, 0.03585638427734375, 0.0359106559753418, 0.03588710403442383, 0.035996673583984375, 0.03597107315063477, 0.03593625640869141, 0.03591167831420899, 0.035934207916259765, 0.035937278747558594, 0.03591167831420899, 0.035914752960205076, 0.03590655899047852, 0.03597312164306641, 0.0359813117980957, 0.03593011093139648, 0.0359444465637207, 0.035879936218261715, 0.0359106559753418, 0.035937278747558594, 0.03603148651123047, 0.1299251251220703, 0.03576627349853516, 0.03577446365356445, 0.03571200180053711, 0.03578060913085938, 0.03576934432983398, 0.035765247344970705, 0.03574476623535156, 0.035798015594482424, 0.03576115036010742, 0.03580416107177734, 0.035814399719238284, 0.03584307098388672, 0.03583078384399414, 0.03584716796875, 0.03587891387939453, 0.03583180618286133, 0.03575603103637695, 0.035810302734375, 0.03585331344604492, 0.03585126495361328, 0.03585433578491211, 0.03589017486572266, 0.035833854675292966, 0.03591167831420899, 0.03587071990966797, 0.03587686538696289, 0.03586969757080078, 0.035862529754638675, 0.0359813117980957, 0.036311038970947264, 0.0359813117980957, 0.03597926330566406, 0.035862529754638675, 0.03593011093139648, 0.03589734268188476, 0.03594956970214844, 0.03589836883544922, 0.03593222427368164, 0.03586553573608398, 0.03596799850463867, 0.03596799850463867, 0.03598950576782227, 0.035914752960205076, 0.03593116760253906, 0.03594646453857422, 0.03594342422485351, 0.035929088592529294, 0.03592601776123047, 
0.035939327239990236, 0.03597721481323242, 0.036122623443603515, 0.03602739334106445, 0.03592499160766602, 0.03603251266479492, 0.035985408782958986, 0.03609395217895508, 0.0361338882446289, 0.03610009765625, 0.03594956970214844, 0.03597619247436523, 0.035961856842041014, 0.036034561157226565, 0.12981248474121093, 0.03573555374145508, 0.03570278549194336, 0.035707904815673826, 0.035730430603027344, 0.03568435287475586, 0.0357386245727539, 0.03571916961669922, 0.035776512145996094, 0.035745792388916016, 0.03579084777832031, 0.035800064086914066, 0.03574476623535156, 0.035757057189941405, 0.035768318176269534, 0.03578265762329102, 0.03573964691162109, 0.03580825424194336, 0.03576422500610352, 0.035798015594482424, 0.03578777694702148, 0.03577241516113281, 0.035784702301025394, 0.035746814727783204, 0.0357918701171875, 0.03579296112060547, 0.03579692840576172, 0.035827713012695314, 0.0358205451965332, 0.035814399719238284, 0.03584102249145508, 0.03578675079345703, 0.035827713012695314, 0.035810302734375, 0.03584204864501953, 0.03584921646118164, 0.035883007049560545, 0.035844097137451174, 0.03590867233276367, 0.035937217712402346, 0.03592704010009766, 0.03597107315063477, 0.03595980834960937, 0.03587788772583008, 0.03597824096679687, 0.03594649505615234, 0.036036609649658206, 0.035912704467773435, 0.03599257659912109, 0.0359444465637207, 0.035952640533447267, 0.035955711364746096, 0.035945472717285154, 0.03595775985717774, 0.03601408004760742, 0.036038654327392575, 0.03600588989257812, 0.03594854354858398, 0.03600588989257812, 0.035953662872314454, 0.036016128540039063, 0.03598745727539063, 0.03612160110473633, 0.1300807647705078, 0.035796993255615236, 0.03581542587280274, 0.03583283233642578, 0.03588710403442383, 0.03578777694702148, 0.03577753448486328, 0.03576319885253906, 0.035733535766601564, 0.03579395294189453, 0.03573958587646484, 0.03583283233642578, 0.03576319885253906, 0.035813377380371096, 0.03584000015258789, 0.03578879928588867, 0.035796993255615236, 0.035833854675292966, 0.0358021125793457, 0.03577139282226562, 0.03581542587280274, 0.03578879928588867, 0.03586355209350586, 0.0358656005859375, 0.03588198471069336, 0.035883007049560545, 0.03584511947631836, 0.03588710403442383, 0.035862529754638675, 0.03594137573242188, 0.03584000015258789, 0.03591372680664062, 0.03590044784545898, 0.035929054260253904, 0.03590655899047852, 0.035885055541992186, 0.03593318557739258, 0.03588198471069336, 0.03590041732788086, 0.03588915252685547, 0.035947521209716796, 0.03589836883544922, 0.03595775985717774, 0.03586355209350586, 0.03586457443237305, 0.0359444465637207, 0.035983360290527344, 0.03593523025512695, 0.03593523025512695, 0.035896320343017575, 0.035937278747558594, 0.0359628791809082, 0.03595471954345703, 0.03591984176635742, 0.035932159423828124, 0.035931137084960936, 0.03598643112182617, 0.035922943115234376, 0.035963905334472655, 0.0359741439819336, 0.03592601776123047, 0.0359628791809082, 0.035972095489501955, 0.12984217834472656, 0.035730430603027344, 0.03571916961669922, 0.035643390655517575, 0.035681278228759765, 0.0357437744140625, 0.0357130241394043, 0.035682273864746095, 0.035699710845947266, 0.0356864013671875, 0.03569356918334961, 0.03571712112426758, 0.03577036666870117, 0.035707904815673826, 0.035757057189941405, 0.03575398254394531, 0.035748863220214845, 0.03570073699951172, 0.035714046478271484, 0.03571814346313477, 0.03573452758789063, 0.03575296020507813, 0.03577139282226562, 0.03576224136352539, 0.035769279479980466, 0.0357918701171875, 0.03576115036010742, 
0.03575296020507813, 0.03582361602783203, 0.036034561157226565, 0.03601206588745117, 0.03588502502441406, 0.03585945510864258, 0.035829761505126956, 0.03585331344604492, 0.03585331344604492, 0.03584511947631836, 0.035827713012695314, 0.035817470550537106, 0.035810302734375, 0.03584102249145508, 0.035846145629882815, 0.03589120101928711, 0.0358287353515625, 0.03586457443237305, 0.03587276840209961, 0.03589836883544922, 0.035885055541992186, 0.03585945510864258, 0.03586355209350586, 0.03589734268188476, 0.0359024658203125, 0.035914752960205076, 0.03587481689453125, 0.03590348815917969, 0.03589529418945313, 0.03589529418945313, 0.035884033203125, 0.0359024658203125, 0.03589734268188476, 0.03592601776123047, 0.035939327239990236, 0.0359813117980957, 0.1301822052001953, 0.03573241424560547, 0.03572431945800781, 0.03571708679199219, 0.03575398254394531, 0.03574272155761719, 0.03577862548828125, 0.03575494384765625, 0.0358021125793457, 0.035773441314697264, 0.03584204864501953, 0.035773441314697264, 0.03577756881713867, 0.035756000518798826, 0.03583488082885742, 0.03583590316772461, 0.035885055541992186, 0.03577446365356445, 0.03577446365356445, 0.03580416107177734, 0.035789825439453124, 0.03580723190307617, 0.03582668685913086, 0.03578675079345703, 0.035811328887939455, 0.03584204864501953, 0.03584307098388672, 0.03586355209350586, 0.03581644821166992, 0.03586355209350586, 0.03590041732788086, 0.03584000015258789, 0.03590860748291016, 0.03591372680664062, 0.03592601776123047, 0.035922943115234376, 0.03592704010009766, 0.03587686538696289, 0.03592704010009766, 0.03591680145263672, 0.03596799850463867, 0.035912704467773435, 0.03590348815917969, 0.03592499160766602, 0.03592806243896484, 0.035929088592529294, 0.036168704986572264, 0.036071422576904294, 0.03598438262939453, 0.0359813117980957, 0.03596799850463867, 0.03594956970214844, 0.03595980834960937, 0.035942401885986325, 0.03592192077636719, 0.03594854354858398, 0.03602227020263672, 0.03594649505615234, 0.03595775985717774, 0.03598643112182617, 0.03599462509155273, 0.035945472717285154, 0.035958782196044925, 0.1298903045654297, 0.03576627349853516, 0.03575296020507813, 0.03571718215942383, 0.03572934341430664, 0.03575807952880859, 0.03574476623535156, 0.03575603103637695, 0.03591167831420899, 0.03595673751831055, 0.03572531127929687, 0.03573555374145508, 0.03573555374145508, 0.035768318176269534, 0.035811328887939455, 0.03581644821166992, 0.035760128021240234, 0.03583180618286133, 0.035778560638427735, 0.03574476623535156, 0.03577753448486328, 0.0357918701171875, 0.03578572845458984, 0.03580825424194336, 0.035800064086914066, 0.03583795166015625, 0.03583488082885742, 0.03581951904296875, 0.03585331344604492, 0.035860481262207033, 0.03594649505615234, 0.035888126373291016, 0.03586969757080078, 0.03580518341064453, 0.03584511947631836, 0.035844097137451174, 0.03584921646118164, 0.03583692932128906, 0.03585331344604492, 0.03584102249145508, 0.035860481262207033, 0.03586764907836914, 0.03590758514404297, 0.0358809585571289, 0.03589427185058594, 0.035896320343017575, 0.03593011093139648, 0.03587276840209961, 0.03593011093139648, 0.035917823791503906, 0.035896320343017575, 0.03593830490112305, 0.035952640533447267, 0.03591884613037109, 0.03595468902587891, 0.03595673751831055, 0.03598233413696289, 0.035922943115234376, 0.035955711364746096, 0.03594649505615234, 0.03592499160766602, 0.035945472717285154, 0.03596083068847656, 0.1298841552734375, 0.0357283821105957, 0.03572633743286133, 0.035659774780273434, 0.035705856323242184, 0.03574172973632812, 
0.03574985504150391, 0.03571814346313477, 0.035746814727783204, 0.03572326278686523, 0.03574784088134766, 0.03573657608032227, 0.03578265762329102, 0.03574169540405273, 0.03585126495361328, 0.03577241516113281, 0.035768318176269534, 0.03576220703125, 0.035764190673828126, 0.0357386245727539, 0.035888126373291016, 0.036119552612304685, 0.035901439666748046, 0.03579596710205078, 0.0358389778137207, 0.035844097137451174, 0.03584307098388672, 0.03581235122680664, 0.03581542587280274, 0.035810302734375, 0.03581542587280274, 0.0358287353515625, 0.03588608169555664, 0.03581235122680664, 0.03589529418945313, 0.03585843276977539, 0.0358922233581543, 0.03584511947631836, 0.03585638427734375, 0.035862529754638675, 0.03587686538696289, 0.035901439666748046, 0.03592396926879883, 0.03584921646118164, 0.03587276840209961, 0.035901439666748046, 0.03594956970214844, 0.03593318557739258, 0.03591167831420899, 0.035892288208007814, 0.03592595291137695, 0.03603558349609375, 0.03605401611328125, 0.035972095489501955, 0.036004863739013675, 0.03595468902587891, 0.03591987228393555, 0.03588710403442383, 0.03591884613037109, 0.03591987228393555, 0.03595161437988281, 0.03599769592285156, 0.0359659538269043]",tokens/s,26.871858107687338,,,,, -float16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -9721,7 +9721,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e955d-4ec8ef044c36a87f0140f128;478fd0f4-7886-47ae-ae8b-8e9e86db8fc1) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-664fe71b-3b1d622f3aca6105072a5fba;4e5f7925-943e-4459-a461-331039f04424) Repository Not Found for url: https://huggingface.co/i/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -9790,7 +9790,7 @@ torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 448.00 MiB. G ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-1.8B,,cuda,0,42,,,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,217063f5c507ed7cc255df7e1f64c4333a0b4dfe,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,MB,1282.637824,5222.432768,0.0,4575.985664,4408.404992,s,10,0.3124974708557129,0.031249747085571287,0.00090185139925193,0.03089788818359375,0.031724757766723635,0.03277908306121826,0.03362254329681397,"[0.03383340835571289, 0.030997472763061522, 0.030746112823486327, 0.03069455909729004, 0.03073587226867676, 0.030798303604125978, 0.030700031280517577, 0.031203039169311525, 0.03129820823669434, 0.03149046325683594]",tokens/s,8192.066300536588,kWh,3.658403691492583e-07,2.0044497673116097e-07,1.426686727863879e-06,1.992972073744298e-06,tokens/kWh,128451373.38981362,MB,1282.637824,5222.432768,0.0,4575.985664,4408.407552,s,10,13.857858032226563,1.3857858032226564,0.02388665195845957,1.3779736328125,1.4190756225585939,1.4192181701660156,1.419332208251953,"[1.3689766845703124, 1.357552978515625, 1.363571044921875, 1.3694896240234375, 1.3864576416015626, 1.3621201171875, 1.3937149658203125, 1.4193607177734375, 1.4190439453125, 1.4175703125]",tokens/s,45.461571228030316,kWh,1.6344615843212395e-05,8.955108905606538e-06,4.0334874459137036e-05,6.563459920795597e-05,tokens/kWh,959859.5978378943,,s,629,14.112389108657833,0.02243623069738925,0.0037539084422681035,0.02188800048828125,0.02259947509765625,0.022733823776245117,0.052650106048583986,"[0.022372352600097657, 0.0216760311126709, 0.021629951477050782, 0.021507072448730468, 0.021522432327270507, 0.021604352951049805, 0.021731327056884766, 0.021742591857910155, 0.021642240524291992, 0.02170675277709961, 0.021566463470458985, 0.02167705535888672, 0.021573631286621094, 0.021543935775756837, 0.02165555191040039, 0.021982208251953125, 0.02168320083618164, 0.021559295654296876, 0.021627935409545898, 0.021552095413208006, 0.021612543106079102, 0.021593088150024413, 0.021786624908447266, 0.021634048461914062, 0.02232729530334473, 0.022024192810058595, 0.021949440002441405, 0.021518335342407227, 0.021592063903808592, 0.021707775115966797, 0.02170675277709961, 0.021607423782348634, 0.021778432846069336, 0.021921791076660157, 0.021601280212402343, 0.021580799102783203, 0.021659648895263672, 0.021572608947753907, 0.02164121627807617, 0.02162892723083496, 0.021610496520996093, 0.02162892723083496, 0.02162073516845703, 0.021614591598510743, 0.02163609504699707, 0.021518335342407227, 0.021348352432250976, 0.021364736557006835, 0.021840927124023437, 0.021446624755859376, 0.021568544387817384, 0.021591007232666017, 0.021532703399658203, 0.021665760040283203, 0.021561344146728514, 
0.021549055099487305, 0.02154806327819824, 0.02170159912109375, 0.022352895736694335, 0.022076416015625, 0.0221265926361084, 0.02166476821899414, 0.05295513534545898, 0.02126131248474121, 0.02129817581176758, 0.021243904113769533, 0.021342208862304687, 0.02148454475402832, 0.021578752517700195, 0.021586944580078125, 0.021521408081054686, 0.021507072448730468, 0.021332992553710937, 0.021321727752685548, 0.021716991424560548, 0.021602304458618164, 0.021493759155273438, 0.02148659133911133, 0.021545984268188476, 0.02149990463256836, 0.021510143280029297, 0.021535743713378908, 0.02148454475402832, 0.021541887283325196, 0.021506048202514647, 0.021255168914794922, 0.021311487197875977, 0.02151628875732422, 0.021556224822998047, 0.021465087890625, 0.021351423263549805, 0.02151219177246094, 0.021522432327270507, 0.0214968318939209, 0.021481472015380858, 0.02152038383483887, 0.02146611213684082, 0.021334016799926758, 0.021778495788574218, 0.021539840698242187, 0.02157049560546875, 0.021584896087646483, 0.021557247161865235, 0.021527551651000978, 0.02168422317504883, 0.021573631286621094, 0.02149990463256836, 0.02142310333251953, 0.021574655532836915, 0.021584896087646483, 0.021592096328735353, 0.021636064529418946, 0.021582847595214845, 0.021587968826293946, 0.021550079345703126, 0.021562368392944335, 0.021560319900512694, 0.02168524742126465, 0.021560319900512694, 0.021613567352294923, 0.021599231719970705, 0.022232063293457033, 0.021622783660888673, 0.02227302360534668, 0.02187161636352539, 0.05274214553833008, 0.021585920333862304, 0.021545984268188476, 0.021562368392944335, 0.021581823348999024, 0.02165862464904785, 0.0216760311126709, 0.021578752517700195, 0.02166579246520996, 0.021597183227539063, 0.02167705535888672, 0.02165350341796875, 0.02345062446594238, 0.022935552597045897, 0.02208768081665039, 0.021605375289916993, 0.021394432067871092, 0.021523456573486328, 0.021617664337158202, 0.021595136642456055, 0.02169139289855957, 0.021554176330566405, 0.021329919815063478, 0.021559295654296876, 0.021551103591918946, 0.021563392639160156, 0.021553152084350585, 0.021413888931274414, 0.021517311096191406, 0.021614591598510743, 0.021595136642456055, 0.021639167785644533, 0.021570560455322265, 0.021539840698242187, 0.02153267288208008, 0.021610496520996093, 0.021578752517700195, 0.021319679260253906, 0.021301248550415038, 0.021562368392944335, 0.021409791946411134, 0.021564416885375977, 0.021617664337158202, 0.021520416259765626, 0.021544927597045897, 0.021525503158569336, 0.021448768615722657, 0.02136569595336914, 0.0212992000579834, 0.021315584182739256, 0.02127462387084961, 0.021303295135498047, 0.021382144927978516, 0.02287615966796875, 0.02290995216369629, 0.02188800048828125, 0.02164531135559082, 0.021621759414672852, 0.021595136642456055, 0.021391359329223633, 0.021307392120361326, 0.021584896087646483, 0.021634048461914062, 0.05267763137817383, 0.021609472274780273, 0.02164735984802246, 0.021588991165161133, 0.021571584701538086, 0.021325824737548828, 0.021432319641113282, 0.021404672622680664, 0.021336063385009766, 0.021283840179443358, 0.021357568740844726, 0.021364736557006835, 0.021574655532836915, 0.021527551651000978, 0.021453855514526367, 0.02133705520629883, 0.021335039138793945, 0.021346303939819337, 0.021518335342407227, 0.022123519897460937, 0.021798912048339843, 0.02167193603515625, 0.02157267189025879, 0.021685184478759764, 0.02145587158203125, 0.021910528182983398, 0.021559295654296876, 0.02162073516845703, 0.021716032028198242, 0.021711807250976562, 0.021378047943115236, 
0.021375999450683594, 0.021617664337158202, 0.021646335601806642, 0.021579839706420897, 0.021561279296875, 0.022375423431396483, 0.022809600830078124, 0.02261203193664551, 0.021896160125732422, 0.02276348876953125, 0.022368255615234374, 0.02208460807800293, 0.0219289608001709, 0.02229555130004883, 0.02228326416015625, 0.021921791076660157, 0.02190028762817383, 0.02185830307006836, 0.021998592376708984, 0.022133760452270508, 0.022190080642700196, 0.02182143974304199, 0.02184806442260742, 0.022074367523193358, 0.021595136642456055, 0.02147327995300293, 0.02165657615661621, 0.02148761558532715, 0.02152448081970215, 0.02143846321105957, 0.021534719467163087, 0.02166067123413086, 0.05257932662963867, 0.021349376678466796, 0.021563392639160156, 0.02165760040283203, 0.02194534492492676, 0.02221670341491699, 0.022200319290161134, 0.02184499168395996, 0.02282598304748535, 0.022593536376953126, 0.022253568649291993, 0.022000640869140626, 0.022063104629516602, 0.021719039916992186, 0.02247065544128418, 0.022402048110961914, 0.022245376586914063, 0.022148096084594726, 0.022108160018920898, 0.02225663948059082, 0.02204979133605957, 0.022314016342163085, 0.021767135620117187, 0.02144972801208496, 0.021420032501220702, 0.021962751388549806, 0.02246963119506836, 0.022093856811523437, 0.021812192916870116, 0.02168934440612793, 0.02150399971008301, 0.021642240524291992, 0.02164019203186035, 0.021881856918334962, 0.021926912307739257, 0.02173030471801758, 0.021744640350341796, 0.021955583572387697, 0.021824512481689453, 0.021747711181640626, 0.021955583572387697, 0.02232729530334473, 0.022207487106323243, 0.021943296432495117, 0.02211327934265137, 0.022131711959838866, 0.021986303329467775, 0.022254592895507814, 0.022163455963134765, 0.02248806381225586, 0.022064128875732423, 0.022153215408325197, 0.022185983657836913, 0.022196224212646484, 0.021977088928222657, 0.022047744750976563, 0.022108160018920898, 0.02207744026184082, 0.022007808685302735, 0.022030336380004883, 0.022774784088134766, 0.02146918487548828, 0.02149478340148926, 0.05274009704589844, 0.02150297546386719, 0.02149478340148926, 0.021556224822998047, 0.02148761558532715, 0.02131052780151367, 0.02138105583190918, 0.021562368392944335, 0.021601280212402343, 0.021513216018676756, 0.021497919082641603, 0.0217322883605957, 0.021792768478393554, 0.021832704544067383, 0.021501951217651367, 0.02152038383483887, 0.021972991943359374, 0.022002687454223634, 0.021607423782348634, 0.021977088928222657, 0.021557247161865235, 0.02147737693786621, 0.021711872100830077, 0.021969919204711915, 0.021360639572143555, 0.02147737693786621, 0.021312511444091797, 0.021296127319335938, 0.02126540756225586, 0.021377023696899415, 0.021608448028564452, 0.021786624908447266, 0.02208665657043457, 0.02167807960510254, 0.022737920761108397, 0.021587968826293946, 0.021522432327270507, 0.02127359962463379, 0.02129100799560547, 0.021399551391601563, 0.021543935775756837, 0.02171494483947754, 0.02267344093322754, 0.021657567977905273, 0.021626880645751953, 0.021543935775756837, 0.021766143798828123, 0.02182655906677246, 0.021319679260253906, 0.021392383575439454, 0.021324800491333007, 0.02149068832397461, 0.0216494083404541, 0.021960704803466798, 0.021374975204467773, 0.021301248550415038, 0.021445632934570313, 0.02164531135559082, 0.021733375549316408, 0.021797887802124022, 0.021596160888671875, 0.021603328704833984, 0.021598304748535156, 0.05255465698242188, 0.021364736557006835, 0.021296127319335938, 0.02146713638305664, 0.021332992553710937, 0.021378047943115236, 
0.021559295654296876, 0.02186342430114746, 0.021747711181640626, 0.021415935516357423, 0.021235712051391603, 0.021343231201171875, 0.021389312744140625, 0.021335039138793945, 0.02150815963745117, 0.02179987144470215, 0.022276159286499023, 0.021540800094604493, 0.021358591079711914, 0.021932031631469725, 0.022183935165405275, 0.0223242244720459, 0.02260479927062988, 0.022443008422851563, 0.022587392807006838, 0.022371328353881836, 0.022503423690795898, 0.022544384002685547, 0.022553600311279298, 0.022544384002685547, 0.022560768127441407, 0.022429695129394533, 0.022245376586914063, 0.02225766372680664, 0.022632448196411133, 0.022525951385498046, 0.02231500816345215, 0.022190080642700196, 0.022188032150268554, 0.022208511352539064, 0.02244607925415039, 0.022509567260742186, 0.02247987174987793, 0.022399999618530272, 0.022503423690795898, 0.022564863204956053, 0.022585344314575196, 0.02253004837036133, 0.02247475242614746, 0.022371328353881836, 0.022107168197631834, 0.022197216033935548, 0.022140928268432617, 0.022163455963134765, 0.022143007278442383, 0.022192096710205077, 0.022195199966430663, 0.022156288146972656, 0.022260736465454102, 0.022486015319824217, 0.022487039566040038, 0.02252390480041504, 0.022575103759765625, 0.0541317138671875, 0.02244915199279785, 0.022375423431396483, 0.022486015319824217, 0.022543359756469726, 0.022551551818847656, 0.02248089599609375, 0.022444032669067384, 0.02249932861328125, 0.02242361640930176, 0.022444992065429686, 0.022492160797119142, 0.022545408248901368, 0.022405120849609376, 0.02241535949707031, 0.02249728012084961, 0.02252390480041504, 0.02254643249511719, 0.022487039566040038, 0.022487039566040038, 0.0224881591796875, 0.022746015548706054, 0.02229043197631836, 0.022188095092773436, 0.022513599395751954, 0.022322175979614257, 0.022155263900756835, 0.022579200744628908, 0.02246963119506836, 0.02249113655090332, 0.022413312911987306, 0.02245631980895996, 0.022500352859497072, 0.022510591506958007, 0.022557695388793944, 0.022432767868041992, 0.022519807815551757, 0.02248806381225586, 0.022537216186523438, 0.022619136810302733, 0.022733823776245117, 0.022590463638305663, 0.022623231887817383, 0.022579200744628908, 0.02253004837036133, 0.022564863204956053, 0.022626304626464845, 0.022535167694091796, 0.02264575958251953, 0.022776832580566408, 0.022605823516845702, 0.022603776931762694, 0.02264473533630371, 0.022519807815551757, 0.022561792373657227, 0.022920192718505858, 0.022542335510253905, 0.022419456481933595, 0.022602752685546876, 0.022605823516845702, 0.022566911697387695, 0.02260479927062988, 0.02269603157043457, 0.05462313461303711, 0.02305023956298828, 0.0226693115234375, 0.022591487884521484, 0.022729791641235352, 0.02257913589477539, 0.02266009521484375, 0.022684671401977538, 0.022577152252197266, 0.022566911697387695, 0.02249318313598633, 0.022183935165405275, 0.022468608856201173, 0.022467584609985353, 0.022510591506958007, 0.022271999359130858, 0.022334463119506837, 0.02264271926879883, 0.02297340774536133, 0.022625280380249024, 0.022567935943603516, 0.022558719635009765, 0.022486015319824217, 0.02253209686279297, 0.02246348762512207, 0.022701055526733398, 0.022558719635009765, 0.022589439392089843, 0.02253107261657715, 0.022579200744628908, 0.022594560623168947, 0.022559743881225586, 0.02266828727722168, 0.022598655700683593, 0.022466560363769532, 0.0225218563079834, 0.02168627166748047, 0.02164531135559082, 0.02163199996948242, 0.022923263549804687, 0.022977535247802734, 0.022550527572631835, 0.022459392547607423, 0.022211584091186523, 
0.022404096603393556, 0.022323200225830078, 0.02247478485107422, 0.022393823623657227, 0.02253824043273926, 0.0224901123046875, 0.02250752067565918, 0.022733823776245117, 0.02262118339538574, 0.022607872009277344, 0.022718463897705078, 0.02253004837036133, 0.022487039566040038, 0.022581247329711913, 0.02254643249511719, 0.022260736465454102, 0.022360063552856444, 0.02254643249511719, 0.02248806381225586, 0.05481584167480469, 0.02262723159790039, 0.022412288665771486, 0.022508544921875, 0.022519807815551757, 0.022571008682250978, 0.022608959197998046, 0.022502336502075195, 0.022608896255493165, 0.022588415145874022, 0.022556671142578123, 0.022502399444580077, 0.022550527572631835, 0.022597631454467772, 0.022487039566040038, 0.02287308883666992, 0.024625152587890626, 0.02284441566467285, 0.022567935943603516, 0.022665279388427734, 0.022529983520507814, 0.022503423690795898, 0.022578176498413087, 0.02249830436706543, 0.022548479080200197, 0.022581247329711913, 0.02254643249511719, 0.02255462455749512, 0.022588415145874022, 0.022500352859497072, 0.0225218563079834, 0.02246348762512207, 0.02254745674133301, 0.02259660720825195, 0.022492160797119142, 0.022509567260742186, 0.022581247329711913, 0.0224849910736084, 0.022487039566040038, 0.022466560363769532, 0.022582271575927734, 0.022441984176635742, 0.0224768009185791, 0.022553600311279298, 0.022560768127441407, 0.022592512130737305, 0.02247475242614746, 0.022410240173339844, 0.022575103759765625, 0.022524927139282228, 0.02250649642944336, 0.022559743881225586, 0.022525951385498046, 0.022785024642944338, 0.02285875129699707, 0.02302566337585449, 0.02161664009094238, 0.021577728271484374, 0.021529600143432616, 0.02162483215332031, 0.021565439224243164, 0.021558271408081055, 0.021553152084350585]",tokens/s,44.57076651990226,,,main,False,False -float16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -9814,7 +9814,7 @@ ChildProcessError: Traceback (most recent call last): 
cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1560, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp171veuta/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp9sclg3tt/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): @@ -9845,7 +9845,7 @@ ImportError: This modeling file requires the following packages that were not fo ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.0,,,MB,1249.23904,3041.394688,0.0,2394.947584,2279.417856,s,10,0.23634982109069824,0.023634982109069825,0.0011165227742452248,0.02325393581390381,0.024405410957336424,0.025520129299163814,0.026411903972625734,"[0.02663484764099121, 0.022848447799682616, 0.02291916847229004, 0.023588703155517577, 0.02277120018005371, 0.02387081527709961, 0.02384486389160156, 0.022809791564941406, 0.022904287338256835, 0.024157695770263672]",tokens/s,10831.402317912527,kWh,2.7556789257896285e-07,1.5098250052967787e-07,1.0927326794172656e-06,1.5192830725259062e-06,tokens/kWh,168500528.0644531,MB,1249.23904,3041.394688,0.0,2394.947584,2279.420416,s,10,14.361950073242188,1.4361950073242187,0.016421711028924653,1.44034375,1.456474658203125,1.4572763305664063,1.4579176684570314,"[1.4580780029296876, 1.4076146240234375, 1.4368963623046875, 1.4487359619140625, 1.4437911376953125, 1.4562965087890625, 1.42836181640625, 1.4149393310546876, 1.42232568359375, 
1.44491064453125]",tokens/s,43.86590934985603,kWh,1.6920164502275267e-05,9.272085132958566e-06,3.6294880563983074e-05,6.248713019921691e-05,tokens/kWh,1008207.6068967801,,s,629,14.545633255004864,0.02312501312401412,0.002848130619724419,0.022890495300292968,0.02322396125793457,0.02381926383972168,0.04538929153442383,"[0.024168447494506837, 0.023833599090576172, 0.024000511169433594, 0.024071199417114258, 0.024100831985473633, 0.023974912643432617, 0.023829504013061522, 0.023569408416748046, 0.024196096420288086, 0.02433843231201172, 0.02389094352722168, 0.024004608154296874, 0.023888896942138672, 0.02342092704772949, 0.024335359573364256, 0.024337408065795898, 0.024024063110351563, 0.023837696075439452, 0.023723007202148438, 0.023476224899291992, 0.023793664932250977, 0.02409164810180664, 0.02411827278137207, 0.023204864501953124, 0.023071840286254884, 0.022964128494262694, 0.02274508857727051, 0.02290790367126465, 0.022785024642944338, 0.022833152770996092, 0.022840320587158205, 0.022329343795776366, 0.02259868812561035, 0.02262118339538574, 0.0225515193939209, 0.022330368041992187, 0.022416383743286132, 0.022361087799072265, 0.022545408248901368, 0.022195199966430663, 0.02283417510986328, 0.0224399356842041, 0.022724607467651366, 0.022737920761108397, 0.022588415145874022, 0.022801408767700194, 0.022829055786132812, 0.02285875129699707, 0.02249932861328125, 0.02245427131652832, 0.022384639739990234, 0.0233123836517334, 0.02289254379272461, 0.022740991592407226, 0.02285158348083496, 0.02206515121459961, 0.02205388832092285, 0.02208358383178711, 0.02208665657043457, 0.022124544143676757, 0.02206719970703125, 0.021986303329467775, 0.04530688095092773, 0.022137855529785155, 0.023468032836914062, 0.022943744659423827, 0.022657024383544923, 0.022776832580566408, 0.02229145622253418, 0.021932031631469725, 0.02179596710205078, 0.02183359909057617, 0.021983327865600585, 0.021810079574584963, 0.021769216537475586, 0.02206924819946289, 0.02227097511291504, 0.02283622360229492, 0.022611967086791994, 0.021876735687255858, 0.022788095474243163, 0.021967872619628907, 0.021715967178344727, 0.021936128616333008, 0.02261299133300781, 0.022573055267333983, 0.022879232406616212, 0.022656000137329102, 0.02188806343078613, 0.022639551162719727, 0.022740991592407226, 0.022524927139282228, 0.022437887191772463, 0.022565887451171874, 0.0224532470703125, 0.02265395164489746, 0.02245734405517578, 0.02249318313598633, 0.022495231628417968, 0.02215936088562012, 0.022544416427612304, 0.022628320693969726, 0.0224849910736084, 0.022598655700683593, 0.022882303237915038, 0.022681600570678712, 0.02271334457397461, 0.022803455352783202, 0.023249919891357423, 0.02208665657043457, 0.02208563232421875, 0.02206003189086914, 0.022147071838378905, 0.02208768081665039, 0.02205900764465332, 0.021980159759521483, 0.02206208038330078, 0.022031423568725585, 0.022041536331176757, 0.02189516830444336, 0.02210201644897461, 0.02206208038330078, 0.022120447158813478, 0.021985279083251954, 0.02210201644897461, 0.04544409561157227, 0.02203647994995117, 0.02207027244567871, 0.021993471145629884, 0.022034431457519533, 0.022012928009033202, 0.022021120071411132, 0.021978111267089845, 0.021995519638061522, 0.022785024642944338, 0.024576000213623047, 0.023803903579711915, 0.02328371238708496, 0.02311884880065918, 0.02310348892211914, 0.023012351989746094, 0.022912031173706056, 0.02313315200805664, 0.0228351993560791, 0.022789119720458984, 0.02302566337585449, 0.023000160217285157, 0.022814624786376952, 0.022785024642944338, 
0.023013376235961915, 0.022848575592041016, 0.02274604797363281, 0.023004159927368165, 0.02286899185180664, 0.022907968521118163, 0.022862783432006838, 0.022986751556396484, 0.022915071487426757, 0.022924287796020508, 0.023036928176879884, 0.022841344833374022, 0.022763519287109374, 0.023130111694335938, 0.02269696044921875, 0.022973440170288087, 0.022906879425048828, 0.023001087188720702, 0.022689792633056642, 0.022736928939819337, 0.02296623992919922, 0.02269900894165039, 0.02262937545776367, 0.02269593620300293, 0.02282700729370117, 0.022908992767333984, 0.022900672912597658, 0.02288128089904785, 0.022965248107910157, 0.02289151954650879, 0.02271334457397461, 0.022838432312011717, 0.02296201515197754, 0.022776832580566408, 0.022622207641601562, 0.02282598304748535, 0.023010303497314453, 0.022676479339599608, 0.022632448196411133, 0.04679270553588867, 0.023007232666015624, 0.022987775802612305, 0.022890495300292968, 0.02267136001586914, 0.023059455871582032, 0.023000064849853515, 0.022953983306884765, 0.023043071746826172, 0.02288844871520996, 0.022968320846557616, 0.02306764793395996, 0.02313216018676758, 0.023203840255737306, 0.022889471054077147, 0.022983680725097655, 0.02307174491882324, 0.023069696426391603, 0.02289151954650879, 0.02305536079406738, 0.022995967864990235, 0.02282598304748535, 0.022982656478881838, 0.02287001609802246, 0.022978559494018554, 0.02295910453796387, 0.022866943359375, 0.022900800704956054, 0.02282080078125, 0.02301644706726074, 0.02287615966796875, 0.022994943618774414, 0.023143423080444335, 0.023207935333251953, 0.022983680725097655, 0.023053312301635744, 0.022890495300292968, 0.02309119987487793, 0.02301644706726074, 0.023012351989746094, 0.023003135681152344, 0.02294272041320801, 0.023088127136230468, 0.022969343185424804, 0.022924287796020508, 0.022958080291748048, 0.02285977554321289, 0.023053312301635744, 0.02289151954650879, 0.022972415924072266, 0.022862848281860353, 0.023405567169189453, 0.023079935073852538, 0.023006208419799806, 0.022952959060668944, 0.02321820831298828, 0.02294576072692871, 0.023007232666015624, 0.02286591911315918, 0.023034879684448242, 0.02292736053466797, 0.023045183181762696, 0.02317305564880371, 0.045399040222167966, 0.02202726364135742, 0.02188595199584961, 0.022016000747680665, 0.021954559326171876, 0.021967872619628907, 0.021979167938232423, 0.02193404769897461, 0.021968896865844727, 0.021843967437744142, 0.021977088928222657, 0.0224716796875, 0.02294272041320801, 0.023184383392333984, 0.023021568298339845, 0.02288844871520996, 0.023000064849853515, 0.022980607986450196, 0.02307276725769043, 0.022993919372558593, 0.023187456130981447, 0.023020544052124024, 0.022916095733642578, 0.02289459228515625, 0.02304614448547363, 0.023000064849853515, 0.022955007553100586, 0.022985727310180663, 0.023059455871582032, 0.023012351989746094, 0.023007232666015624, 0.023012351989746094, 0.02308710479736328, 0.023085056304931642, 0.02349056053161621, 0.023397375106811523, 0.02307788848876953, 0.02309222412109375, 0.023023712158203126, 0.023012256622314452, 0.02304102325439453, 0.02304819107055664, 0.02310041618347168, 0.02307072067260742, 0.02311884880065918, 0.02304204750061035, 0.023038976669311522, 0.023027711868286133, 0.023771135330200196, 0.024211456298828125, 0.023362560272216795, 0.02332159996032715, 0.023136255264282226, 0.02310758399963379, 0.02303385543823242, 0.023235584259033205, 0.023372800827026367, 0.023008256912231444, 0.023219200134277345, 0.023198720932006835, 0.023018495559692383, 0.023166015625, 0.023118783950805664, 
0.04736102294921875, 0.02308915138244629, 0.022986751556396484, 0.023022592544555662, 0.023036928176879884, 0.02307583999633789, 0.022930431365966796, 0.022977535247802734, 0.023137344360351562, 0.022892511367797852, 0.022995935440063477, 0.023034879684448242, 0.023630847930908205, 0.02322131156921387, 0.02318124771118164, 0.022940671920776368, 0.023382015228271484, 0.02300726318359375, 0.02354991912841797, 0.02329702377319336, 0.023030784606933592, 0.023171072006225587, 0.023145471572875977, 0.02328883171081543, 0.022921215057373046, 0.023037952423095705, 0.023007232666015624, 0.023138303756713868, 0.023030784606933592, 0.022920192718505858, 0.02304921531677246, 0.023076864242553712, 0.022953983306884765, 0.02305740737915039, 0.02303385543823242, 0.023335935592651368, 0.023045120239257814, 0.023117824554443358, 0.023126016616821288, 0.022991872787475585, 0.02303696060180664, 0.023012319564819337, 0.023026687622070312, 0.02305740737915039, 0.023234560012817384, 0.02303385543823242, 0.023044095993041993, 0.02308403205871582, 0.023047168731689452, 0.023010303497314453, 0.023031808853149413, 0.02330521583557129, 0.023013376235961915, 0.022871040344238282, 0.023047168731689452, 0.02301644706726074, 0.023536640167236327, 0.023426111221313477, 0.02383148765563965, 0.02305740737915039, 0.02305740737915039, 0.02307583999633789, 0.023111679077148437, 0.0474152946472168, 0.023185407638549805, 0.022985727310180663, 0.022974464416503908, 0.02311065673828125, 0.023009279251098632, 0.022993919372558593, 0.02307583999633789, 0.022982656478881838, 0.02287820816040039, 0.022958080291748048, 0.023014400482177736, 0.022993919372558593, 0.023219232559204103, 0.02313007926940918, 0.024027135848999022, 0.023109632492065428, 0.022940671920776368, 0.022960128784179686, 0.022971391677856445, 0.022973440170288087, 0.023010303497314453, 0.022939647674560547, 0.02305740737915039, 0.022106111526489256, 0.021953535079956055, 0.022006784439086914, 0.022008832931518556, 0.022041599273681642, 0.023181312561035155, 0.023447551727294923, 0.022946815490722656, 0.021950464248657226, 0.022077503204345702, 0.022082496643066406, 0.0220897274017334, 0.02211942481994629, 0.02209280014038086, 0.022639616012573242, 0.0228853759765625, 0.022597631454467772, 0.021970943450927736, 0.02211123275756836, 0.021993471145629884, 0.02209382438659668, 0.022916095733642578, 0.02286899185180664, 0.022793216705322264, 0.022760543823242187, 0.02236406326293945, 0.02205695915222168, 0.021949472427368163, 0.02201193618774414, 0.022840255737304686, 0.022786048889160155, 0.022808576583862306, 0.02262937545776367, 0.022751232147216797, 0.02225971221923828, 0.02187571144104004, 0.022591487884521484, 0.022837247848510742, 0.022833152770996092, 0.04536422348022461, 0.022022144317626953, 0.021947391510009767, 0.022206464767456056, 0.022924287796020508, 0.022586368560791017, 0.022774784088134766, 0.022719520568847656, 0.02273276710510254, 0.022074432373046876, 0.02202822494506836, 0.022815744400024415, 0.02285875129699707, 0.02211840057373047, 0.022150144577026368, 0.02207334327697754, 0.022185983657836913, 0.022718463897705078, 0.022920192718505858, 0.022847488403320314, 0.022322175979614257, 0.021969919204711915, 0.022023168563842774, 0.021993471145629884, 0.02205388832092285, 0.02225766372680664, 0.023045120239257814, 0.022797311782836914, 0.02272153663635254, 0.022168575286865236, 0.021926912307739257, 0.022141952514648438, 0.02210406494140625, 0.022666240692138673, 0.022813695907592774, 0.022072320938110353, 0.02206617546081543, 0.021908479690551756, 
0.022237184524536133, 0.022983680725097655, 0.022715391159057616, 0.022424575805664062, 0.022754304885864256, 0.022716415405273437, 0.022787071228027343, 0.02270515251159668, 0.02267136001586914, 0.022408191680908202, 0.022803455352783202, 0.02273587226867676, 0.02205286407470703, 0.021961727142333985, 0.022080575942993164, 0.02199443244934082, 0.021876735687255858, 0.022608896255493165, 0.022923263549804687, 0.024328224182128905, 0.02312495994567871, 0.02269388771057129, 0.02209587287902832, 0.022236160278320313, 0.022725631713867187, 0.04547792053222656, 0.02200060844421387, 0.021989376068115234, 0.022024192810058595, 0.02204364776611328, 0.021934080123901366, 0.0226693115234375, 0.022880319595336915, 0.022864831924438476, 0.022709247589111328, 0.021977088928222657, 0.02228428840637207, 0.022017023086547852, 0.022760448455810548, 0.022492223739624024, 0.02292627143859863, 0.022723583221435546, 0.022821887969970703, 0.022403072357177735, 0.02246963119506836, 0.02287001609802246, 0.022354944229125977, 0.02249932861328125, 0.02209689521789551, 0.022041599273681642, 0.022785152435302734, 0.022750080108642576, 0.022714368820190428, 0.022681600570678712, 0.022766592025756836, 0.02247382354736328, 0.022752159118652342, 0.022737920761108397, 0.02269388771057129, 0.02281881523132324, 0.022822912216186524, 0.02270310401916504, 0.02281062316894531, 0.022529024124145508, 0.021980159759521483, 0.022739967346191405, 0.022734848022460938, 0.022769664764404295, 0.022610944747924806, 0.022855680465698244, 0.022797311782836914, 0.022676479339599608, 0.022820863723754883, 0.02269900894165039, 0.022768640518188478, 0.022964223861694336, 0.022920192718505858, 0.022584320068359375, 0.022871040344238282, 0.022611967086791994, 0.022205440521240235, 0.022966272354125978, 0.02265292739868164, 0.022626399993896484, 0.022563743591308593, 0.022518783569335937, 0.02223308753967285, 0.02273587226867676, 0.048899070739746094, 0.023196672439575194, 0.022738943099975584, 0.02209791946411133, 0.022170623779296874, 0.022762496948242186, 0.02271232032775879, 0.022033407211303712, 0.021991424560546875, 0.02203545570373535, 0.02206003189086914, 0.021820415496826173, 0.02205695915222168, 0.022757375717163086, 0.022955007553100586, 0.022831104278564454, 0.022687744140625, 0.023044095993041993, 0.022787071228027343, 0.023138303756713868, 0.022961215972900392, 0.02306656074523926, 0.023003135681152344, 0.02309836769104004, 0.02311884880065918, 0.023021568298339845, 0.0231147518157959, 0.02314854431152344, 0.022990848541259764, 0.02309836769104004, 0.023121919631958008, 0.02308198356628418, 0.023641088485717773, 0.023085056304931642, 0.023145471572875977, 0.022998016357421876, 0.02310553550720215, 0.02306355285644531, 0.022983680725097655, 0.02301644706726074, 0.022969343185424804, 0.023060480117797853, 0.0231014404296875, 0.022990848541259764, 0.02330624008178711, 0.023191551208496093, 0.023002111434936523, 0.02308915138244629, 0.02293452835083008, 0.023055423736572267, 0.02302764892578125, 0.02306764793395996, 0.023051263809204102, 0.022965248107910157, 0.02307276725769043, 0.022982656478881838, 0.02309529685974121, 0.02310246467590332, 0.02307583999633789, 0.02310246467590332, 0.023128063201904296, 0.0230328311920166, 0.023051263809204102]",tokens/s,43.24321870163836,,,,, 
-float16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,v,v,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,v,v,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -9884,7 +9884,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d468c-045c48e645ba9f312861afe7;ba7b3417-1e65-492f-9fee-70b3513cbc41) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe9e7-69b18fc92192d30e72c3bb87;45899803-a280-4b6c-8e65-5c4359ae5b72) Repository Not Found for url: https://huggingface.co/v/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -9939,7 +9939,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1560, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmptqu61u8q/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpknh0ntoh/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,M,M,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): @@ -9981,7 +9981,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e9528-3bd568c266a7e2fe0176b52f;0fd7fd33-845f-44c2-9a7b-ef65d65a7770) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe6e5-08fcb79923bee26552e162e4;f89441f3-822a-4309-bc8c-090b075fa364) Repository Not Found for url: https://huggingface.co/M/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -10051,7 +10051,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e95ff-569982cc3239218456a427ae;7c34ece4-d5dc-4068-b64b-127d73810e9c) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe7bf-5a08291809b952cd6252714d;fd3cc490-31ae-4a3b-a7b1-906a10c12dcc) Repository Not Found for url: https://huggingface.co/r/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -10083,7 +10083,7 @@ If this is a private repository, make sure to pass a token having permission to ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-7B,,cuda,0,42,,,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,217063f5c507ed7cc255df7e1f64c4333a0b4dfe,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,MB,1296.539648,17102.798848,0.0,16456.351744,16359.853056,s,10,1.0926036529541017,0.10926036529541014,0.001893364755570167,0.1085610237121582,0.10995124130249023,0.1124175724029541,0.1143906372833252,"[0.11488390350341797, 0.10940316772460937, 0.1085745620727539, 0.10866429138183593, 0.10854630279541015, 0.1085474853515625, 0.10843228912353516, 0.10861603546142579, 0.10850553894042969, 0.1084300765991211]",tokens/s,2343.0271288938675,kWh,1.2849429361342232e-06,7.04090113883823e-07,6.003153957108733e-06,7.99218700712678e-06,tokens/kWh,32031282.5227588,MB,1296.539648,17102.798848,0.0,16456.351744,16416.242176,s,10,29.505367675781255,2.950536767578125,0.007298161312162202,2.95167822265625,2.96021103515625,2.9605828124999998,2.960880234375,"[2.952864990234375, 2.94440087890625, 2.950491455078125, 2.953120361328125, 2.9489072265625, 2.940178466796875, 2.96012841796875, 2.96095458984375, 2.9558095703125, 2.93851171875]",tokens/s,21.352047089286735,kWh,3.488551348987698e-05,1.911889900293555e-05,0.00012785161979329105,0.00018185603228610352,tokens/kWh,346427.88148421585,,s,629,30.434665451049792,0.048385795629649925,0.012948080369770033,0.046676990509033206,0.047583238983154294,0.04774830017089844,0.15544816040039064,"[0.047582206726074217, 0.04730470275878906, 0.04824883270263672, 0.04772249603271484, 0.04878745651245117, 0.04808703994750976, 0.04736000061035156, 0.047241214752197266, 0.047952896118164064, 0.04727807998657227, 0.04649062347412109, 0.04636979293823242, 0.04666470336914062, 0.04662988662719727, 0.04664115142822266, 0.04670873641967774, 0.04672512054443359, 0.046691326141357424, 0.04698214340209961, 0.046519294738769534, 0.04654079818725586, 0.04665856170654297, 0.046635009765625, 0.04673126220703125, 0.04660224151611328, 0.046773246765136715, 0.04655206298828125, 0.046635009765625, 0.04667193603515625, 0.04662572860717774, 0.04678860855102539, 0.046599166870117184, 0.046676990509033206, 0.04663808059692383, 0.046683135986328124, 0.04657766342163086, 0.046693374633789066, 0.04663603210449219, 0.04674560165405273, 0.04665856170654297, 0.04662579345703125, 0.04668415832519531, 0.04674662399291992, 0.04703539276123047, 0.046635009765625, 0.04672819137573242, 0.04648038482666016, 0.04710092926025391, 0.046478336334228515, 0.04648038482666016, 0.04640460968017578, 0.04667494583129883, 0.046835712432861325, 0.0464721908569336, 0.0466165771484375, 0.04666265487670898, 0.046601215362548826, 0.04664524841308594, 0.04665651321411133, 0.046713886260986326, 0.04696368026733398, 0.04662886428833008, 0.15727513122558595, 0.04655718231201172, 0.046685184478759766, 0.046648353576660154, 0.04671894454956055, 0.046706687927246096, 0.04657049560546875, 
0.04666470336914062, 0.04660838317871094, 0.046729217529296874, 0.04665651321411133, 0.0466360969543457, 0.046658496856689456, 0.046669822692871094, 0.046614528656005856, 0.046723072052001956, 0.04704358291625976, 0.04674662399291992, 0.047241214752197266, 0.04723814392089844, 0.04694527816772461, 0.047088638305664066, 0.04729446411132812, 0.04644966506958008, 0.04667494583129883, 0.04665958404541016, 0.0466165771484375, 0.04714707183837891, 0.04651411056518555, 0.04662579345703125, 0.04655718231201172, 0.046653438568115234, 0.046568447113037106, 0.04709580612182617, 0.046755840301513675, 0.046535678863525394, 0.04667903900146484, 0.04644147109985351, 0.04665446472167969, 0.04659814453125, 0.04658278274536133, 0.04658380889892578, 0.04668108749389648, 0.04667084884643555, 0.046717952728271485, 0.046685184478759766, 0.04658278274536133, 0.04667391967773438, 0.046680065155029295, 0.04674764633178711, 0.04662988662719727, 0.0466319351196289, 0.04670873641967774, 0.04674662399291992, 0.04654694366455078, 0.04663296127319336, 0.04662783813476563, 0.046737407684326174, 0.04670873641967774, 0.04662374496459961, 0.04715520095825195, 0.04658585739135742, 0.047094783782958984, 0.15548313903808594, 0.046652416229248046, 0.04667289733886719, 0.04660332870483398, 0.04651923370361328, 0.046653438568115234, 0.04668928146362305, 0.046706687927246096, 0.04652544021606445, 0.046633983612060545, 0.04670361709594727, 0.04668620681762695, 0.04676403045654297, 0.04657571029663086, 0.04660316848754883, 0.046712833404541014, 0.04677632141113281, 0.046635009765625, 0.04666777420043945, 0.0466954231262207, 0.04656947326660156, 0.04663808059692383, 0.04665958404541016, 0.04664831924438476, 0.04665139389038086, 0.046630912780761716, 0.046696449279785154, 0.04670054244995117, 0.04667903900146484, 0.0466247673034668, 0.04666470336914062, 0.04714905548095703, 0.04762214279174805, 0.04757503890991211, 0.04760678482055664, 0.04757196807861328, 0.04655104064941406, 0.04684288024902344, 0.046755840301513675, 0.04650188827514649, 0.0472729606628418, 0.04758323287963867, 0.047116287231445314, 0.04761804962158203, 0.047492095947265625, 0.046676990509033206, 0.047413246154785156, 0.04763545608520508, 0.04737843322753906, 0.04647628784179687, 0.046655487060546875, 0.04764879989624023, 0.04658480072021484, 0.0464005126953125, 0.046548992156982424, 0.04667391967773438, 0.046611457824707034, 0.04661862564086914, 0.04665446472167969, 0.046707710266113284, 0.0466431999206543, 0.04665961456298828, 0.046728160858154295, 0.15585382080078125, 0.046706687927246096, 0.0466954231262207, 0.04670361709594727, 0.046717952728271485, 0.04658892822265625, 0.046682113647460936, 0.046685184478759766, 0.04662783813476563, 0.046721023559570314, 0.046693374633789066, 0.04668108749389648, 0.04665961456298828, 0.04667184066772461, 0.046633983612060545, 0.04674662399291992, 0.04663504028320312, 0.04661859130859375, 0.04666470336914062, 0.04666265487670898, 0.046637054443359374, 0.0467322883605957, 0.04677836990356445, 0.04781158447265625, 0.04698624038696289, 0.046614528656005856, 0.04748185729980469, 0.04743987274169922, 0.0471275520324707, 0.04736716842651367, 0.04670566558837891, 0.04651520156860352, 0.04688076782226563, 0.04653670501708984, 0.046868480682373044, 0.04737945556640625, 0.04776243209838867, 0.04832460784912109, 0.047543296813964846, 0.047780864715576174, 0.04768569564819336, 0.0464967041015625, 0.046663681030273435, 0.04671692657470703, 0.0465428466796875, 0.04669235229492188, 0.046626846313476564, 0.04666672134399414, 
0.046693374633789066, 0.04731391906738281, 0.046516223907470705, 0.04649062347412109, 0.04653977584838867, 0.04655104064941406, 0.046611457824707034, 0.0466247673034668, 0.04675686264038086, 0.04660326385498047, 0.04664934539794922, 0.04660838317871094, 0.04664934539794922, 0.047282176971435545, 0.047876094818115236, 0.15560089111328124, 0.04665753555297852, 0.04658892822265625, 0.04646297454833984, 0.04610969543457031, 0.04650700759887695, 0.04630323028564453, 0.046453758239746096, 0.046227455139160156, 0.046273536682128906, 0.04635955047607422, 0.046693374633789066, 0.04666572952270508, 0.04664217758178711, 0.04667903900146484, 0.04657356643676758, 0.048691200256347655, 0.04808499145507812, 0.047642623901367184, 0.046473217010498044, 0.046827518463134765, 0.04668928146362305, 0.046736385345458986, 0.046682113647460936, 0.04636979293823242, 0.04649267196655273, 0.04650188827514649, 0.046789630889892575, 0.04672204971313477, 0.046734336853027345, 0.046660606384277346, 0.04664217758178711, 0.046944255828857424, 0.046514175415039063, 0.04661043167114258, 0.046698497772216796, 0.046614528656005856, 0.047119392395019534, 0.04721865463256836, 0.046309375762939455, 0.046453758239746096, 0.046481407165527344, 0.046489601135253904, 0.04689920043945312, 0.04652339172363281, 0.047683582305908204, 0.047164417266845705, 0.046545921325683595, 0.04698624038696289, 0.046429183959960936, 0.04752896118164063, 0.04715212631225586, 0.047666175842285156, 0.04669440078735351, 0.04696473693847656, 0.04652134323120117, 0.046527488708496094, 0.04763238525390625, 0.04729241561889649, 0.04808499145507812, 0.046633983612060545, 0.04718188858032227, 0.04641068649291992, 0.15555072021484376, 0.04655206298828125, 0.04662374496459961, 0.04666265487670898, 0.04666572952270508, 0.04664831924438476, 0.04671897506713867, 0.046584831237792966, 0.046693374633789066, 0.046707710266113284, 0.046653438568115234, 0.04670054244995117, 0.04662783813476563, 0.046665790557861325, 0.046636993408203126, 0.04665856170654297, 0.0465428466796875, 0.04651724624633789, 0.04632371139526367, 0.046494720458984375, 0.04645171356201172, 0.0465428466796875, 0.046584831237792966, 0.046666751861572264, 0.046601215362548826, 0.0466954231262207, 0.04665958404541016, 0.04662374496459961, 0.04667801666259765, 0.04671180725097656, 0.04662169647216797, 0.04667289733886719, 0.046663681030273435, 0.046614528656005856, 0.046653438568115234, 0.046767105102539064, 0.04664115142822266, 0.046707710266113284, 0.046706687927246096, 0.04659814453125, 0.046668800354003906, 0.04672614288330078, 0.04657766342163086, 0.04667801666259765, 0.04664012908935547, 0.04655513763427734, 0.04671590423583984, 0.04671590423583984, 0.04667903900146484, 0.046696449279785154, 0.047449119567871095, 0.04729135894775391, 0.046617599487304685, 0.0465715217590332, 0.04658687973022461, 0.04680704116821289, 0.046698497772216796, 0.0466319351196289, 0.046714881896972656, 0.04661248016357422, 0.046712833404541014, 0.046736385345458986, 0.046595073699951174, 0.1552506866455078, 0.04637081527709961, 0.046486526489257815, 0.046617599487304685, 0.046581760406494144, 0.046545921325683595, 0.04674457550048828, 0.04666777420043945, 0.046714881896972656, 0.04662374496459961, 0.046668800354003906, 0.04666470336914062, 0.0467077751159668, 0.046626750946044924, 0.04668108749389648, 0.046693374633789066, 0.046936065673828124, 0.04641791915893555, 0.04650598526000976, 0.04672204971313477, 0.04665446472167969, 0.04678246307373047, 0.04658182525634766, 0.04666566467285156, 0.046683135986328124, 
0.046655487060546875, 0.047250431060791014, 0.04772249603271484, 0.047505409240722656, 0.047683582305908204, 0.04763955307006836, 0.04772556686401367, 0.047505409240722656, 0.04746854400634765, 0.04727705764770508, 0.04767846298217773, 0.04741120147705078, 0.047596542358398435, 0.047375358581542966, 0.04744704055786133, 0.04765388870239258, 0.04801740646362305, 0.04772351837158203, 0.04766003036499023, 0.04641996765136719, 0.04669235229492188, 0.04747468948364258, 0.0478023681640625, 0.047583263397216795, 0.04709379196166992, 0.046405601501464847, 0.047219711303710936, 0.046577632904052736, 0.046354496002197265, 0.04781254577636719, 0.04671180725097656, 0.04717772674560547, 0.046499839782714845, 0.04641996765136719, 0.046475265502929686, 0.04646912002563477, 0.046511104583740234, 0.047297599792480466, 0.155569091796875, 0.046633983612060545, 0.04651827239990235, 0.04679782485961914, 0.04654079818725586, 0.04683161544799805, 0.04654489517211914, 0.04661043167114258, 0.046635009765625, 0.04699852752685547, 0.046755840301513675, 0.04692172622680664, 0.047088638305664066, 0.04734873580932617, 0.048059391021728515, 0.0467138557434082, 0.04768153762817383, 0.04649574279785156, 0.04668415832519531, 0.04657356643676758, 0.04668723297119141, 0.04740095901489258, 0.04648550415039063, 0.047459327697753906, 0.04694937515258789, 0.04746960067749023, 0.04670563125610352, 0.04711116790771484, 0.04659302520751953, 0.046561279296875, 0.04655104064941406, 0.04700364685058594, 0.047713279724121094, 0.04758937454223633, 0.048573440551757815, 0.04681318283081055, 0.04672819137573242, 0.046690303802490236, 0.046497791290283204, 0.047217662811279294, 0.04754739379882812, 0.047631359100341795, 0.04667084884643555, 0.0470129280090332, 0.04737843322753906, 0.047624126434326175, 0.047470592498779295, 0.04712243270874023, 0.04655411148071289, 0.046637054443359374, 0.047661056518554686, 0.04688281631469727, 0.04650598526000976, 0.04779315185546875, 0.04676607894897461, 0.04664627075195313, 0.04662681579589844, 0.04690534210205078, 0.046639102935791016, 0.046516223907470705, 0.046889984130859375, 0.047751167297363284, 0.04753100967407226, 0.15626547241210936, 0.04726067352294922, 0.046535713195800785, 0.047881183624267576, 0.047592449188232425, 0.047077438354492185, 0.046674880981445316, 0.0465428466796875, 0.0468746223449707, 0.04667903900146484, 0.04692377471923828, 0.04800204849243164, 0.04675174331665039, 0.04653055953979492, 0.04704153442382813, 0.04653977584838867, 0.046496768951416016, 0.04745318222045898, 0.047661056518554686, 0.04668415832519531, 0.04676812744140625, 0.04782796859741211, 0.04716339111328125, 0.04757401657104492, 0.04696268844604492, 0.04690124893188476, 0.04645478439331055, 0.046699520111083984, 0.04681625747680664, 0.04649369430541992, 0.04699548721313476, 0.047728607177734375, 0.046565376281738284, 0.046615550994873044, 0.04696473693847656, 0.04654489517211914, 0.04654796981811524, 0.046992385864257816, 0.046773246765136715, 0.046534656524658206, 0.046548992156982424, 0.04774399948120117, 0.04717055892944336, 0.04657254409790039, 0.0474450569152832, 0.046711742401123045, 0.04667596817016602, 0.047234046936035154, 0.046458881378173826, 0.046669822692871094, 0.04778905487060547, 0.0470118408203125, 0.04667596817016602, 0.04657254409790039, 0.04644659042358398, 0.046723072052001956, 0.04658995056152344, 0.04665651321411133, 0.046676990509033206, 0.046698497772216796, 0.04665753555297852, 0.04664217758178711, 0.046630912780761716, 0.15535821533203126, 0.046691326141357424, 0.04637900924682617, 
0.046486526489257815, 0.046545921325683595, 0.04657459259033203, 0.04662886428833008, 0.04666265487670898, 0.04666572952270508, 0.046698497772216796, 0.046653438568115234, 0.04659814453125, 0.04668723297119141, 0.04670259094238281, 0.047017982482910156, 0.04664524841308594, 0.046524417877197265, 0.04652851104736328, 0.0466063346862793, 0.046696449279785154, 0.046693374633789066, 0.04659097671508789, 0.046717952728271485, 0.0466165771484375, 0.04657049560546875, 0.04676300811767578, 0.04668723297119141, 0.04664831924438476, 0.046699520111083984, 0.046668800354003906, 0.04665651321411133, 0.046693374633789066, 0.046635009765625, 0.0465530891418457, 0.04670054244995117, 0.04674764633178711, 0.04654796981811524, 0.046706687927246096, 0.04667084884643555, 0.04666777420043945, 0.04692070388793945, 0.046544960021972656, 0.04660627365112305, 0.04667398452758789, 0.04666572952270508, 0.0467496337890625, 0.04657561492919922, 0.046721023559570314, 0.04662783813476563, 0.04666268920898437, 0.046646240234375, 0.046666751861572264, 0.04667084884643555, 0.04650086212158203, 0.04658995056152344, 0.046202880859375, 0.046306304931640625, 0.04681318283081055, 0.04711423873901367, 0.04668108749389648, 0.04639846420288086, 0.04647731018066406, 0.04656742477416992]",tokens/s,20.667222414902657,,,main,False,False -float16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -10107,7 +10107,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1560, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp5rxmr90x/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpp_tb2y8c/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): @@ -10147,7 +10147,7 @@ ChildProcessError: Traceback (most recent call last): torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -10176,7 +10176,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: 
Root=1-664d4889-08d9daad033255b37bce23b8;03fedc49-356b-4a62-87f4-b127912ec941) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664fec01-53e3983724ad388b309a5ca1;a4a9dacf-26a7-442d-bb33-dd7b16581bff) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json. @@ -10261,7 +10261,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e963a-40d5c5c71e1ea4221571a47e;63d870a9-21f8-4e48-8bee-78ad69f54719) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe7f4-547cf65e2d994b5040432f84;8009cdae-5924-4579-8059-6c8b76b2f72d) Repository Not Found for url: https://huggingface.co/a/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -10323,7 +10323,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664e8fde-22a27c3a08f74cf90cb8bc51;21475bb6-b99b-4aa6-950b-2935e7441651) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664fe179-59820e933253df8a1c3c82a1;baf4b67d-6cff-4b41-8451-0b30810eeba0) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json. @@ -10449,7 +10449,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e95c8-5d88c0665eb883481ed238ee;d4209b02-48d3-4aa7-99ad-26fcd48c48eb) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe78a-7b8a5d756f2af88e24be5d6d;e5372d50-6241-4a1e-9b97-114cfdd233e1) Repository Not Found for url: https://huggingface.co/t/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -10594,7 +10594,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e907f-7fdd33d14427aa79159d9d36;61190eaf-4e29-43f5-bb77-a5b0eea4eea5) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe221-76b0296631ff888d096241c3;f4828f5c-825e-459e-88a4-f9009c46cefb) Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -10687,10 +10687,10 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1560, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmphz7hqg61/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp9i__1wou/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,B,B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,B,B,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -10729,7 +10729,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d4623-5ee8a99c0223c23c3f5a97a4;a21dff66-cea1-4e01-8ef1-1ce6a3e258e1) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe97a-631706c971d7e90d7ee4f59d;8f188193-8022-4bed-bca4-ac28fe0c17df) Repository Not Found for url: https://huggingface.co/B/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -10760,7 +10760,7 @@ OSError: B is not a local folder and is not a valid model identifier listed on ' If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -10784,7 +10784,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1560, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmplwf8ih3c/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpdldzf1_c/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,TencentARC/Mistral_Pro_8B_v0.1,,cuda,0,42,,,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,d35829e539df8480b726c647eeabf91e41eae047,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,MB,1273.401344,19665.518592,0.0,19019.071488,18747.268096,s,10,1.1008828201293945,0.11008828201293945,0.0024261382345637157,0.10917001342773437,0.11099395599365235,0.11411876983642578,0.11661862091064454,"[0.11724358367919922, 0.10893663787841797, 0.10915385437011718, 0.10922672271728516, 0.10884496307373047, 0.11029955291748048, 0.1088978271484375, 0.10918617248535156, 0.109942626953125, 0.10915087890625]",tokens/s,2325.4064403503953,kWh,1.2924292993142412e-06,7.081907045294824e-07,7.784082314217451e-06,9.784702318061173e-06,tokens/kWh,26163289.559403386,MB,1273.401344,19665.518592,0.0,19019.071488,18774.938112,s,10,38.20687036132812,3.8206870361328122,0.009300846714813165,3.8190885009765623,3.832152001953125,3.835840771484375,3.838791787109375,"[3.831332275390625, 3.8135830078125, 3.82000732421875, 3.80987060546875, 3.818169677734375, 3.8121875, 3.811255859375, 3.839529541015625, 3.822679443359375, 3.828255126953125]",tokens/s,16.489180978237556,kWh,4.4929080872576014e-05,2.4622353401834478e-05,0.0001603518021943793,0.0002299032364687898,tokens/kWh,274028.33021253475,,s,629,39.12849711990357,0.06220746759921075,0.01302544399920906,0.06040678405761719,0.0615464973449707,0.061790821838378904,0.16975769592285156,"[0.06134272003173828, 0.0619950065612793, 0.061774848937988285, 0.06159564971923828, 0.06201446533203125, 0.062325759887695314, 0.06233804702758789, 0.061328384399414064, 0.060622848510742185, 0.06049689483642578, 0.06022655868530274, 0.0613488655090332, 0.06141747283935547, 0.06154035186767578, 0.06064332962036133, 0.060407806396484375, 0.06013030242919922, 0.06017340850830078, 0.060087230682373045, 0.06018966293334961, 0.06021324920654297, 0.060456993103027344, 0.06024905776977539, 0.06022041702270508, 0.060268543243408204, 0.060680191040039064, 0.06026956939697266, 0.06055116653442383, 0.06043852615356445, 0.0637757453918457, 0.061446144104003904, 0.061497344970703124, 0.06033407974243164, 0.06032691192626953, 0.06030950546264648, 0.06028595352172852, 0.06125363159179688, 0.06104985427856445, 0.06138163375854492, 0.06055731201171875, 0.06028287887573242, 0.060662784576416016, 0.06029312133789062, 0.060247039794921874, 0.060371967315673826, 0.060488704681396485, 0.06159360122680664, 0.060967937469482425, 0.06091571044921875, 0.06028799819946289, 0.06044467163085938, 0.06030847930908203, 0.06021529769897461, 0.06033817672729492, 0.060470272064208984, 0.06047129440307617, 0.06045183944702148, 0.0603770866394043, 0.06053171157836914, 0.06042316818237305, 0.060903423309326174, 0.06041292953491211, 
0.1694924774169922, 0.06027478408813477, 0.06027356719970703, 0.0603054084777832, 0.06034124755859375, 0.060386302947998044, 0.06021222305297851, 0.06032793426513672, 0.06028902435302735, 0.06040063858032227, 0.06031465530395508, 0.06060335922241211, 0.060214271545410154, 0.060189697265625, 0.06023168182373047, 0.06027264022827149, 0.06023680114746094, 0.060911617279052734, 0.06027471923828125, 0.060278751373291015, 0.06024806213378906, 0.060246017456054686, 0.060235774993896485, 0.06028499221801758, 0.060525505065917966, 0.06073651123046875, 0.06159158325195312, 0.06137750244140625, 0.060680191040039064, 0.06032486343383789, 0.06043545532226562, 0.06034431838989258, 0.06042726516723633, 0.0603955192565918, 0.06041907119750976, 0.06037913513183594, 0.06034431838989258, 0.06019174575805664, 0.060393470764160156, 0.060229633331298826, 0.06048255920410156, 0.06029619216918945, 0.060655616760253904, 0.06044467163085938, 0.06025113677978516, 0.060265472412109375, 0.06167654418945313, 0.06060748672485351, 0.06104678344726563, 0.060919807434082034, 0.061067264556884764, 0.060439552307128906, 0.06025625610351563, 0.060486656188964844, 0.060286975860595705, 0.060870655059814455, 0.06133964920043945, 0.06048153686523437, 0.06125056076049805, 0.06043648147583008, 0.06093107223510742, 0.06026444625854492, 0.06207385635375977, 0.17065676879882813, 0.06058291244506836, 0.06093209457397461, 0.06032691192626953, 0.06032486343383789, 0.06081433486938476, 0.06026137542724609, 0.060290046691894535, 0.060418048858642576, 0.06025830459594726, 0.06055321502685547, 0.060846080780029295, 0.06034431838989258, 0.06034124755859375, 0.06014361572265625, 0.060300289154052736, 0.060319744110107425, 0.06026963043212891, 0.06040057754516601, 0.06020608139038086, 0.06024192047119141, 0.060316673278808595, 0.060401664733886716, 0.060319744110107425, 0.060044288635253906, 0.06027980804443359, 0.06083379364013672, 0.06154342269897461, 0.0626319351196289, 0.06146662521362305, 0.06138982391357422, 0.0615464973449707, 0.06143078231811523, 0.06135504150390625, 0.06046819305419922, 0.061312000274658204, 0.06117171096801758, 0.06123212814331055, 0.06126489639282227, 0.06147174453735352, 0.06051430511474609, 0.06050406265258789, 0.06032486343383789, 0.061238273620605466, 0.061795326232910154, 0.060472320556640625, 0.06027980804443359, 0.06030131149291992, 0.06030745697021484, 0.06014976119995117, 0.06030233764648438, 0.06030847930908203, 0.06020505523681641, 0.060186622619628906, 0.06035763168334961, 0.06024192047119141, 0.06024806213378906, 0.060271648406982424, 0.06025212860107422, 0.060375038146972655, 0.06031155014038086, 0.06063417434692383, 0.06038214492797851, 0.1700823059082031, 0.060647422790527344, 0.06041907119750976, 0.06032281494140625, 0.060295169830322265, 0.060175361633300783, 0.06024911880493164, 0.060349407196044924, 0.06031564712524414, 0.06074367904663086, 0.061982719421386716, 0.060865535736083984, 0.060165119171142575, 0.060216320037841796, 0.06033817672729492, 0.0603422737121582, 0.060859390258789066, 0.060728321075439455, 0.06251219177246094, 0.060615615844726564, 0.060788734436035156, 0.060470272064208984, 0.060249088287353515, 0.06025932693481445, 0.060249088287353515, 0.06035763168334961, 0.060903423309326174, 0.06058086395263672, 0.06045286560058594, 0.06036275100708008, 0.06034124755859375, 0.06034636688232422, 0.06025625610351563, 0.060252159118652344, 0.0603422737121582, 0.0599818229675293, 0.06021017456054688, 0.060375038146972655, 0.06026342391967773, 0.060400737762451175, 0.060367774963378903, 
0.0603351058959961, 0.060217342376708984, 0.06029926300048828, 0.060418048858642576, 0.06038118362426758, 0.06060748672485351, 0.06020710372924805, 0.06031772613525391, 0.060523487091064455, 0.06047129440307617, 0.060273662567138675, 0.06057164764404297, 0.060641281127929686, 0.06106623840332031, 0.06032998275756836, 0.06033817672729492, 0.06018252944946289, 0.060181503295898435, 0.06055321502685547, 0.06041190338134766, 0.060381214141845704, 0.0603616943359375, 0.16966554260253905, 0.06024192047119141, 0.060243968963623044, 0.06023884963989258, 0.06034329605102539, 0.06096384048461914, 0.06052249526977539, 0.06031769561767578, 0.06015999984741211, 0.060237823486328126, 0.060175361633300783, 0.06040678405761719, 0.060265472412109375, 0.060388351440429686, 0.06024499130249023, 0.060388351440429686, 0.06059622573852539, 0.06225612640380859, 0.06142566299438477, 0.06144716644287109, 0.06152703857421875, 0.06159360122680664, 0.061505535125732425, 0.06107033538818359, 0.06093414306640625, 0.060711936950683595, 0.06042316818237305, 0.060611583709716796, 0.0607825927734375, 0.06037094497680664, 0.061341697692871094, 0.060352512359619144, 0.06077337646484375, 0.0627322883605957, 0.06178406524658203, 0.06041702270507812, 0.06040678405761719, 0.06035968017578125, 0.06044979095458984, 0.06058905410766602, 0.06035763168334961, 0.06024294281005859, 0.060158977508544924, 0.060368896484375, 0.06032896041870117, 0.060336128234863284, 0.060368896484375, 0.06030745697021484, 0.060303359985351565, 0.060382209777832034, 0.06030438232421875, 0.06035148620605469, 0.06030643081665039, 0.06042316818237305, 0.06047641754150391, 0.06032998275756836, 0.0603351058959961, 0.06040371322631836, 0.06028083038330078, 0.06037913513183594, 0.06031872177124024, 0.06038118362426758, 0.06036070251464844, 0.17123532104492187, 0.060281856536865235, 0.06057984161376953, 0.06063616180419922, 0.06180249786376953, 0.06084710311889648, 0.06022655868530274, 0.06032588958740234, 0.060262401580810546, 0.06033919906616211, 0.06041702270507812, 0.06036896133422852, 0.06052345657348633, 0.06038323211669922, 0.06023891067504883, 0.0606627197265625, 0.06034739303588867, 0.060308544158935544, 0.06044255828857422, 0.060418048858642576, 0.060464126586914066, 0.06022553634643555, 0.06031052780151367, 0.060252159118652344, 0.06023987197875977, 0.06025932693481445, 0.0603351058959961, 0.06032793426513672, 0.0605030403137207, 0.06064441680908203, 0.06087366485595703, 0.060816383361816405, 0.060864513397216796, 0.06024499130249023, 0.06022655868530274, 0.06037913513183594, 0.06034022521972656, 0.06054707336425781, 0.060508159637451174, 0.060450817108154295, 0.060184574127197264, 0.061416446685791014, 0.06073548889160156, 0.06056345748901367, 0.06003609466552735, 0.060402687072753904, 0.06054608154296875, 0.060288993835449216, 0.06035865783691406, 0.06022348785400391, 0.06042726516723633, 0.06077132797241211, 0.060939262390136716, 0.061104129791259766, 0.061758464813232425, 0.06065459060668945, 0.060249088287353515, 0.0605747184753418, 0.060864513397216796, 0.06015488052368164, 0.06032281494140625, 0.06022252655029297, 0.060238784790039065, 0.1697935333251953, 0.0602531852722168, 0.060286975860595705, 0.06022655868530274, 0.060260353088378904, 0.06086860656738281, 0.060252159118652344, 0.06023884963989258, 0.0603135986328125, 0.06023372650146484, 0.06019174575805664, 0.060286975860595705, 0.060268543243408204, 0.060355583190917966, 0.06023680114746094, 0.06026444625854492, 0.06043852615356445, 0.060418048858642576, 0.06031052780151367, 
0.06030847930908203, 0.06016716766357422, 0.060314624786376954, 0.06029312133789062, 0.06028287887573242, 0.060281856536865235, 0.06035763168334961, 0.06027980804443359, 0.060300289154052736, 0.060252159118652344, 0.06033919906616211, 0.06029926300048828, 0.06051942443847656, 0.060240894317626956, 0.06065971374511719, 0.06016204833984375, 0.06014976119995117, 0.06032793426513672, 0.06025523376464844, 0.060230655670166014, 0.06022348785400391, 0.060249088287353515, 0.06029209518432617, 0.06046515274047851, 0.06028595352172852, 0.060286975860595705, 0.06018048095703125, 0.060129280090332034, 0.06018867111206055, 0.06028083038330078, 0.05994188690185547, 0.06065356826782226, 0.06021945571899414, 0.06165190505981445, 0.06201753616333008, 0.061628414154052735, 0.06147686386108398, 0.061459457397460934, 0.061538303375244144, 0.06137855911254883, 0.0613939208984375, 0.0615464973449707, 0.06094131088256836, 0.060267520904541017, 0.17014790344238281, 0.060321727752685544, 0.060237823486328126, 0.06024399948120117, 0.06152291107177734, 0.06187519836425781, 0.060644351959228515, 0.060270591735839846, 0.06026956939697266, 0.06032281494140625, 0.06021222305297851, 0.06032486343383789, 0.060352512359619144, 0.06030131149291992, 0.06027775955200195, 0.06031769561767578, 0.06046515274047851, 0.06033715057373047, 0.06044467163085938, 0.06029414367675781, 0.060194816589355465, 0.06018252944946289, 0.06015590286254883, 0.06026956939697266, 0.06026137542724609, 0.0602501106262207, 0.06033715057373047, 0.0602347526550293, 0.06036684799194336, 0.060249088287353515, 0.0602716178894043, 0.06039961624145508, 0.061306880950927733, 0.06060134506225586, 0.061873214721679684, 0.06122796630859375, 0.06044876861572265, 0.060346431732177734, 0.060787647247314454, 0.06117171096801758, 0.06175948715209961, 0.06170828628540039, 0.06187724685668945, 0.06137753677368164, 0.06173593521118164, 0.06162124633789062, 0.061459457397460934, 0.0616519660949707, 0.06177382278442383, 0.06181990432739258, 0.061661182403564455, 0.06186700820922852, 0.061615169525146486, 0.0616242561340332, 0.0617625617980957, 0.06176563262939453, 0.0616099853515625, 0.061827072143554686, 0.06152601623535156, 0.06144307327270508, 0.06170624160766602, 0.06178406524658203, 0.06160179138183594, 0.17100697326660155, 0.06032896041870117, 0.06029926300048828, 0.06025113677978516, 0.060382209777832034, 0.060409854888916016, 0.060777473449707034, 0.06056755065917969, 0.060181503295898435, 0.060453887939453124, 0.06197043228149414, 0.061328384399414064, 0.061316158294677736, 0.0614901123046875, 0.06159872055053711, 0.061369342803955076, 0.060572673797607425, 0.06111641693115234, 0.06030438232421875, 0.060590080261230465, 0.06036787033081055, 0.06038118362426758, 0.06074982452392578, 0.061195262908935545, 0.061925376892089844, 0.061521919250488284, 0.06075596618652344, 0.060524543762207034, 0.060224510192871096, 0.06027468872070312, 0.06028083038330078, 0.06027167892456055, 0.06030227279663086, 0.06042931365966797, 0.06030649566650391, 0.06048147201538086, 0.06070169448852539, 0.06052556610107422, 0.060793857574462894, 0.06031769561767578, 0.06141132736206055, 0.06138265609741211, 0.06138374328613281, 0.060510208129882816, 0.06024800109863281, 0.06029312133789062, 0.06010060882568359, 0.060268543243408204, 0.06032486343383789, 0.06125260925292969, 0.061052928924560546, 0.060303359985351565, 0.0606668815612793, 0.06055833435058594, 0.06085222244262695, 0.06076316833496094, 0.06034735870361328, 0.06028902435302735, 0.06038425445556641, 0.060426239013671876, 
0.06058598327636719, 0.06035456085205078, 0.06030233764648438, 0.16998503112792968, 0.060443679809570314, 0.060333023071289064, 0.060104705810546874, 0.060641281127929686, 0.06022860717773437, 0.06033820724487305, 0.06049276733398438, 0.061276161193847656, 0.06052249526977539, 0.06084505462646484, 0.06114713668823242, 0.06113385772705078, 0.06115331268310547, 0.06088185501098633, 0.06097715377807617, 0.06134988784790039, 0.06095667266845703, 0.06141747283935547, 0.06155263900756836, 0.060527614593505856, 0.06099148941040039, 0.06169804763793945, 0.061142017364501956, 0.060662784576416016, 0.06107955169677735, 0.06043859100341797, 0.06115116882324219, 0.06056755065917969, 0.06081126403808594, 0.06098227310180664, 0.060351585388183596, 0.06113987350463867, 0.06030137634277344, 0.061045696258544925, 0.06038937759399414, 0.060767230987548826, 0.06071603012084961, 0.06045798492431641, 0.06154751968383789, 0.0620840950012207, 0.06156595230102539, 0.06044467163085938, 0.06040063858032227, 0.0603873291015625, 0.060421119689941405, 0.060385280609130856, 0.06074265670776367, 0.06043852615356445, 0.060486656188964844, 0.061080577850341794, 0.06103859329223633, 0.06031769561767578, 0.06070374298095703, 0.06041292953491211, 0.06031769561767578, 0.06035968017578125, 0.06057984161376953, 0.06048255920410156, 0.06036172866821289, 0.06051430511474609, 0.060652542114257815, 0.06068121719360352]",tokens/s,16.075240458955562,,,main,False,False @@ -10827,7 +10827,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e937b-678e485f3eff88907f99d4d7;74a86022-e745-4a28-a7f0-a1ba64eb71a8) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe52d-28d8a4b42165936d17ab2d6c;b1d91452-3b5d-4844-a7bc-d0b21e5a124a) Repository Not Found for url: https://huggingface.co/s/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -10883,7 +10883,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1560, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpg8ojyqro/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp_r4o2fz4/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-350m,,cuda,0,42,,,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,d35829e539df8480b726c647eeabf91e41eae047,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,MB,1175.089152,1455.947776,0.0,809.500672,723.533824,s,15,0.17660559940338133,0.011773706626892089,0.0005060809352526505,0.01188105583190918,0.011992940521240234,0.012405353450775146,0.013093141155242918,"[0.013265088081359863, 0.011219743728637696, 0.011262335777282716, 0.011187264442443848, 0.011892576217651368, 0.01188105583190918, 0.011240384101867677, 0.012036895751953125, 0.011927007675170898, 0.011889760017395019, 0.011867008209228515, 0.011883199691772461, 0.011868415832519531, 0.011281344413757324, 0.01190351963043213]",tokens/s,21743.364949766583,kWh,1.3865128598706948e-07,7.597172755684907e-08,3.956406399342899e-07,6.102636534782083e-07,tokens/kWh,419490819.32198244,MB,1175.089152,1455.947776,0.0,809.500672,749.216256,s,15,10.484396118164064,0.6989597412109376,0.010546266198019412,0.6962911987304687,0.7025856811523438,0.7139740173339844,0.7317457800292969,"[0.736188720703125, 0.6892533569335938, 0.69624951171875, 0.6921040649414063, 0.6986005249023437, 0.698226806640625, 0.6946048583984376, 0.6997840576171875, 0.6971741943359375, 0.6962911987304687, 0.694924072265625, 0.6990907592773438, 0.6946989135742188, 0.6927516479492187, 0.7044534301757812]",tokens/s,90.13394661451233,kWh,7.764596984365612e-06,4.254614382124614e-06,1.4961130904734307e-05,2.6980342271224535e-05,tokens/kWh,2335033.3871483784,,s,944,10.635618295669536,0.011266544804734699,0.0014591324737404984,0.01107148838043213,0.011258982372283935,0.011755068683624267,0.02223625762939453,"[0.012138496398925782, 0.012057600021362304, 0.011867136001586913, 0.01165721607208252, 0.011790335655212402, 0.011788288116455077, 0.011667455673217773, 0.011686911582946777, 0.012061696052551269, 0.011962368011474609, 0.011684864044189454, 0.011777024269104003, 0.011752511978149414, 0.01197868824005127, 0.011803647994995118, 0.012000255584716797, 0.011875328063964843, 0.011687935829162598, 0.011875328063964843, 0.01174937629699707, 0.011942912101745605, 0.011983872413635254, 0.011774016380310058, 0.0118721923828125, 0.01233407974243164, 0.011925503730773926, 0.011716608047485352, 0.011866111755371094, 0.011828224182128906, 0.011709440231323242, 0.011905023574829102, 0.01174835205078125, 0.011743231773376465, 0.012090368270874024, 0.012113920211791992, 0.01176576042175293, 0.011701312065124512, 0.011774911880493164, 0.011737088203430175, 0.011653120040893555, 0.011552767753601074, 0.011234304428100587, 0.011653120040893555, 0.011614208221435546, 0.011580415725708008, 0.01183027172088623, 0.011583488464355468, 0.011491328239440919, 0.011707391738891602, 
0.011467776298522948, 0.011268095970153809, 0.01115340805053711, 0.011082752227783203, 0.011146240234375, 0.011140095710754394, 0.011147263526916504, 0.011131903648376466, 0.01115340805053711, 0.011222016334533692, 0.011147263526916504, 0.011168800354003906, 0.01108784008026123, 0.02211942481994629, 0.010397695541381835, 0.010449919700622558, 0.010497023582458496, 0.010392576217651368, 0.010460160255432128, 0.010432512283325195, 0.01045094394683838, 0.010445823669433594, 0.010406911849975586, 0.010437631607055664, 0.010470399856567383, 0.01042636775970459, 0.010480640411376953, 0.010445823669433594, 0.010461183547973632, 0.010458111763000488, 0.010431488037109376, 0.010424320220947265, 0.01043455982208252, 0.010455039978027344, 0.01093017578125, 0.011171839714050292, 0.011189248085021973, 0.011116543769836425, 0.011207679748535156, 0.011160575866699218, 0.011148287773132324, 0.011110400199890137, 0.011133952140808106, 0.011155455589294434, 0.011123711585998536, 0.011090944290161133, 0.011088895797729491, 0.011189248085021973, 0.011101183891296386, 0.011117568016052246, 0.011094016075134277, 0.011119615554809571, 0.011193344116210938, 0.01113804817199707, 0.011147263526916504, 0.011320320129394532, 0.011206656455993653, 0.01112166404724121, 0.011126784324645997, 0.011096063613891602, 0.01107148838043213, 0.011132927894592285, 0.011144191741943359, 0.011135999679565429, 0.011383808135986329, 0.01175551986694336, 0.01153331184387207, 0.011256832122802735, 0.011196415901184082, 0.0111595516204834, 0.011420672416687011, 0.011188223838806152, 0.011104255676269532, 0.01116262435913086, 0.01116262435913086, 0.011131903648376466, 0.02213580894470215, 0.010478591918945313, 0.010484736442565918, 0.011142144203186035, 0.011163647651672364, 0.011189248085021973, 0.011179007530212403, 0.011090944290161133, 0.011057151794433593, 0.01115443229675293, 0.011074560165405273, 0.011171839714050292, 0.01111244773864746, 0.011139072418212891, 0.011106304168701172, 0.011200511932373047, 0.011057151794433593, 0.010911744117736816, 0.011289600372314454, 0.010860544204711914, 0.01103052806854248, 0.011191295623779298, 0.011142144203186035, 0.011233280181884766, 0.01112377643585205, 0.011103167533874511, 0.011147263526916504, 0.011070528030395509, 0.011084735870361328, 0.011485183715820312, 0.011078656196594238, 0.011074560165405273, 0.011172863960266113, 0.011123711585998536, 0.01103667163848877, 0.011100159645080567, 0.011033599853515624, 0.011095040321350098, 0.01107148838043213, 0.010899456024169921, 0.010878975868225099, 0.010903552055358886, 0.010842111587524414, 0.010879008293151856, 0.010818528175354005, 0.01083084774017334, 0.010789888381958008, 0.01083903980255127, 0.010860544204711914, 0.011130880355834961, 0.011092991828918456, 0.011107328414916993, 0.01112166404724121, 0.011177984237670899, 0.011076607704162598, 0.011125760078430176, 0.011097087860107421, 0.011111424446105958, 0.0111278076171875, 0.01112883186340332, 0.011160575866699218, 0.011094016075134277, 0.0110448637008667, 0.02204979133605957, 0.010391551971435547, 0.01081651210784912, 0.011019264221191406, 0.01102847957611084, 0.011114496231079102, 0.011080703735351562, 0.011111424446105958, 0.011027520179748534, 0.01104684829711914, 0.011084799766540527, 0.011072544097900391, 0.011072480201721191, 0.011060223579406739, 0.011164671897888183, 0.011094016075134277, 0.011081727981567382, 0.011073535919189453, 0.011040767669677735, 0.011068415641784669, 0.011085824012756347, 0.011061247825622558, 0.011024383544921875, 0.011081727981567382, 
0.010852352142333984, 0.010819583892822266, 0.010806271553039551, 0.010788864135742187, 0.010807295799255372, 0.010853407859802246, 0.011113439559936523, 0.01116262435913086, 0.011104255676269532, 0.011017215728759766, 0.011003904342651367, 0.011041791915893554, 0.011025407791137695, 0.011066368103027344, 0.0110448637008667, 0.01103769588470459, 0.01102950382232666, 0.011049983978271484, 0.011041791915893554, 0.01104691219329834, 0.011077631950378418, 0.011032575607299805, 0.011010047912597656, 0.010844160079956054, 0.010813440322875977, 0.010787839889526368, 0.010875904083251953, 0.011091967582702637, 0.011053055763244628, 0.011053055763244628, 0.011111424446105958, 0.01101414394378662, 0.010912768363952637, 0.010781696319580078, 0.010753024101257324, 0.010797056198120117, 0.01083187198638916, 0.011004927635192872, 0.011039744377136231, 0.023410688400268553, 0.011052032470703126, 0.01105510425567627, 0.011003904342651367, 0.011023360252380371, 0.011087871551513672, 0.011041791915893554, 0.011075584411621094, 0.011066399574279785, 0.011058143615722656, 0.011043840408325196, 0.011074560165405273, 0.011161600112915039, 0.011259903907775879, 0.01107968044281006, 0.011074560165405273, 0.011040767669677735, 0.011058176040649414, 0.011141119956970215, 0.011090944290161133, 0.011088895797729491, 0.011089920043945312, 0.011066368103027344, 0.01115443229675293, 0.01107968044281006, 0.01114521598815918, 0.011061247825622558, 0.011072511672973634, 0.011019264221191406, 0.01107049560546875, 0.011074527740478516, 0.011058176040649414, 0.011052032470703126, 0.011066368103027344, 0.010970111846923827, 0.011059200286865235, 0.01102847957611084, 0.011056127548217774, 0.011078656196594238, 0.01115135955810547, 0.011070464134216309, 0.0111278076171875, 0.011064319610595704, 0.01104691219329834, 0.011062272071838379, 0.011057151794433593, 0.011061247825622558, 0.011083776473999024, 0.011062272071838379, 0.011045887947082519, 0.011051008224487305, 0.011097087860107421, 0.011019264221191406, 0.011114496231079102, 0.011066368103027344, 0.011139072418212891, 0.011181056022644043, 0.011150336265563965, 0.011089920043945312, 0.011372544288635255, 0.011087871551513672, 0.011062272071838379, 0.011053055763244628, 0.023417856216430662, 0.011049983978271484, 0.011092991828918456, 0.01105510425567627, 0.011049983978271484, 0.011139072418212891, 0.011018239974975585, 0.011054080009460449, 0.011051008224487305, 0.01103052806854248, 0.011158592224121093, 0.011132863998413086, 0.011143168449401856, 0.011181056022644043, 0.01115135955810547, 0.011072511672973634, 0.011059200286865235, 0.011190272331237794, 0.01112063980102539, 0.0110448637008667, 0.011105279922485351, 0.011070464134216309, 0.011113471984863281, 0.011107328414916993, 0.011034624099731445, 0.011074560165405273, 0.011105279922485351, 0.011081727981567382, 0.011089920043945312, 0.011096063613891602, 0.011076607704162598, 0.011095040321350098, 0.011051008224487305, 0.011147263526916504, 0.011059200286865235, 0.011070464134216309, 0.01103667163848877, 0.011065343856811523, 0.011130880355834961, 0.011054080009460449, 0.011077631950378418, 0.011048959732055665, 0.011065343856811523, 0.011052032470703126, 0.011113471984863281, 0.011072511672973634, 0.011115519523620606, 0.011100159645080567, 0.011015168190002441, 0.011144191741943359, 0.0110448637008667, 0.011076607704162598, 0.011132927894592285, 0.011057151794433593, 0.011021311759948731, 0.010954751968383789, 0.010874879837036134, 0.01084620761871338, 0.011087871551513672, 0.011068415641784669, 
0.011119615554809571, 0.01111244773864746, 0.011054080009460449, 0.022202367782592772, 0.010402815818786621, 0.010449919700622558, 0.011032575607299805, 0.011088895797729491, 0.01104691219329834, 0.011006976127624512, 0.011085824012756347, 0.011072511672973634, 0.011025407791137695, 0.011027456283569336, 0.01103667163848877, 0.011070464134216309, 0.011086848258972168, 0.010819583892822266, 0.010833984375, 0.01081439971923828, 0.01095372772216797, 0.01083187198638916, 0.011095104217529296, 0.011084735870361328, 0.0110448637008667, 0.01112985610961914, 0.011302911758422851, 0.011109375953674316, 0.01101414394378662, 0.011039744377136231, 0.011113471984863281, 0.011100159645080567, 0.011259903907775879, 0.011041791915893554, 0.011059200286865235, 0.011110400199890137, 0.011118592262268067, 0.011064319610595704, 0.011016223907470703, 0.011090911865234376, 0.011254783630371093, 0.011076607704162598, 0.011010047912597656, 0.01116262435913086, 0.011091967582702637, 0.011072511672973634, 0.011025407791137695, 0.0110448637008667, 0.01107148838043213, 0.011069439888000488, 0.011080767631530762, 0.011090880393981933, 0.011061247825622558, 0.01092403221130371, 0.010891263961791992, 0.010870783805847169, 0.010827775955200195, 0.01084108829498291, 0.011094016075134277, 0.011086879730224609, 0.011106271743774414, 0.011158528327941895, 0.011142144203186035, 0.011083776473999024, 0.011052032470703126, 0.01102847957611084, 0.023323648452758788, 0.011080767631530762, 0.010983455657958984, 0.011079584121704102, 0.011096063613891602, 0.011982848167419433, 0.011134976387023926, 0.010999808311462403, 0.011023360252380371, 0.011230208396911622, 0.011049983978271484, 0.011078656196594238, 0.010974207878112792, 0.011049983978271484, 0.011017215728759766, 0.01213644790649414, 0.011421695709228515, 0.01214566421508789, 0.011182080268859864, 0.011249664306640626, 0.011106304168701172, 0.011099136352539063, 0.011016192436218262, 0.010991616249084473, 0.011004927635192872, 0.010983424186706543, 0.01100595188140869, 0.011000831604003907, 0.011051008224487305, 0.011083776473999024, 0.01101414394378662, 0.010982399940490722, 0.011083776473999024, 0.011109375953674316, 0.01105510425567627, 0.011027456283569336, 0.010991616249084473, 0.010999808311462403, 0.01102233600616455, 0.011200511932373047, 0.011125760078430176, 0.011000831604003907, 0.011080703735351562, 0.011008000373840332, 0.011061247825622558, 0.010998784065246582, 0.010796031951904296, 0.010761216163635253, 0.010984448432922364, 0.011100159645080567, 0.0111278076171875, 0.01108176040649414, 0.011106271743774414, 0.011104255676269532, 0.011049983978271484, 0.010997759819030761, 0.011039744377136231, 0.011043840408325196, 0.011081727981567382, 0.011049983978271484, 0.011004927635192872, 0.011086848258972168, 0.011081727981567382, 0.02349772834777832, 0.010971136093139648, 0.011004927635192872, 0.011001855850219726, 0.011040767669677735, 0.011041791915893554, 0.010982399940490722, 0.011059200286865235, 0.011111424446105958, 0.011010047912597656, 0.011004927635192872, 0.011035648345947266, 0.011043904304504394, 0.011024319648742675, 0.010991616249084473, 0.011039744377136231, 0.011060223579406739, 0.011073535919189453, 0.011016192436218262, 0.011066399574279785, 0.011066335678100586, 0.011020288467407227, 0.010982399940490722, 0.011085824012756347, 0.011023360252380371, 0.011041791915893554, 0.01103052806854248, 0.011102208137512207, 0.011045887947082519, 0.011048959732055665, 0.011117568016052246, 0.011016192436218262, 0.011064319610595704, 0.011072511672973634, 
0.01103769588470459, 0.011317248344421387, 0.011150336265563965, 0.011147263526916504, 0.011049983978271484, 0.011048959732055665, 0.010994688034057617, 0.011033599853515624, 0.011058176040649414, 0.011073535919189453, 0.011021311759948731, 0.011062272071838379, 0.011096063613891602, 0.011086848258972168, 0.01101414394378662, 0.01102233600616455, 0.01104793643951416, 0.01103667163848877, 0.01103872013092041, 0.011034624099731445, 0.01122815990447998, 0.01126912021636963, 0.011086848258972168, 0.011060223579406739, 0.011107328414916993, 0.011076607704162598, 0.01103052806854248, 0.011092991828918456, 0.011011072158813476, 0.023358463287353515, 0.010990592002868652, 0.011009023666381837, 0.011004927635192872, 0.011003904342651367, 0.010980352401733399, 0.010906623840332032, 0.011048959732055665, 0.011114496231079102, 0.011043840408325196, 0.010972160339355469, 0.010981375694274903, 0.011039744377136231, 0.011023360252380371, 0.010992639541625977, 0.010978303909301757, 0.010966015815734862, 0.011045887947082519, 0.011019264221191406, 0.01096396827697754, 0.011002880096435547, 0.011033599853515624, 0.011007040023803712, 0.011027392387390136, 0.011003904342651367, 0.01103052806854248, 0.01103052806854248, 0.011103232383728028, 0.011019264221191406, 0.011002880096435547, 0.010986559867858887, 0.010992575645446777, 0.011026432037353515, 0.011149312019348144, 0.011064319610595704, 0.01103769588470459, 0.011196415901184082, 0.011034624099731445, 0.01100595188140869, 0.01100595188140869, 0.011006976127624512, 0.01102950382232666, 0.01103667163848877, 0.011004927635192872, 0.011056127548217774, 0.011084799766540527, 0.011019264221191406, 0.011095040321350098, 0.011042816162109375, 0.011066368103027344, 0.011100159645080567, 0.011077664375305175, 0.011090911865234376, 0.011150336265563965, 0.01111244773864746, 0.011092991828918456, 0.011107328414916993, 0.011163647651672364, 0.011117568016052246, 0.011105279922485351, 0.011048959732055665, 0.011288576126098633, 0.01112166404724121, 0.023423999786376954, 0.011000831604003907, 0.010979328155517578, 0.011001855850219726, 0.011026432037353515, 0.011033599853515624, 0.010976256370544434, 0.010976256370544434, 0.01102847957611084, 0.011041791915893554, 0.011056127548217774, 0.01105510425567627, 0.010995712280273438, 0.010989567756652833, 0.01100595188140869, 0.010988544464111329, 0.010991616249084473, 0.011126784324645997, 0.011016192436218262, 0.010951711654663085, 0.011007967948913575, 0.011049983978271484, 0.010992639541625977, 0.010916864395141602, 0.010827775955200195, 0.010815520286560059, 0.01099465560913086, 0.011035648345947266, 0.010984448432922364, 0.011013119697570802, 0.01095680046081543, 0.010994688034057617, 0.01102847957611084, 0.011065376281738282, 0.011089887619018555, 0.011064319610595704, 0.011024383544921875, 0.011042816162109375, 0.011025407791137695, 0.010976256370544434, 0.011019264221191406, 0.01104691219329834, 0.011012096405029297, 0.011039744377136231, 0.011031552314758301, 0.011027456283569336, 0.011076640129089356, 0.01110422420501709, 0.011056127548217774, 0.01102847957611084, 0.011065343856811523, 0.011042816162109375, 0.011115519523620606, 0.011028544425964355, 0.011113408088684083, 0.011076607704162598, 0.01105510425567627, 0.011061280250549317, 0.01109603214263916, 0.011019264221191406, 0.011023360252380371, 0.011065343856811523, 0.011039744377136231, 0.023407615661621094, 0.010978303909301757, 0.010981375694274903, 0.011058176040649414, 0.011073535919189453, 0.011033599853515624, 0.0109486083984375, 0.011002911567687988, 
0.010992608070373534, 0.010993663787841796, 0.011011072158813476, 0.0110632963180542, 0.011070464134216309, 0.011092991828918456, 0.011156479835510253, 0.01144217586517334, 0.011598848342895507, 0.011254783630371093, 0.011147263526916504, 0.011066368103027344, 0.011118592262268067, 0.011103232383728028, 0.01112063980102539, 0.011068415641784669, 0.01103052806854248, 0.011039744377136231, 0.011169792175292969, 0.011075615882873535, 0.011022303581237793, 0.011060223579406739, 0.011160575866699218, 0.011082752227783203, 0.011065343856811523, 0.011053055763244628, 0.011033599853515624, 0.011076607704162598, 0.011049983978271484, 0.011026432037353515, 0.011016223907470703, 0.011038687705993653, 0.011024383544921875, 0.011085824012756347, 0.01111251163482666, 0.011107263565063476, 0.011247615814208984, 0.011100159645080567, 0.011042816162109375, 0.011062335968017579, 0.011111359596252441, 0.011152383804321288, 0.011080703735351562, 0.011119615554809571, 0.011057151794433593, 0.011068415641784669, 0.011021311759948731, 0.01104793643951416, 0.011161600112915039, 0.011126784324645997, 0.011076607704162598, 0.011054080009460449, 0.011202560424804688, 0.011080703735351562, 0.011062272071838379, 0.023365631103515624, 0.010972160339355469, 0.01102950382232666, 0.011017215728759766, 0.011016192436218262, 0.01102847957611084, 0.011011072158813476, 0.011003904342651367, 0.011016192436218262, 0.011031552314758301, 0.011053088188171387, 0.011050975799560547, 0.011039744377136231, 0.011001855850219726, 0.011066368103027344, 0.01112985610961914, 0.010855423927307128, 0.01092300796508789, 0.010872832298278809, 0.010899456024169921, 0.010778623580932617, 0.010829824447631836, 0.010994688034057617, 0.011018239974975585, 0.011035648345947266, 0.011034624099731445, 0.011143168449401856, 0.011077664375305175, 0.011027423858642579, 0.01105510425567627, 0.011064319610595704, 0.01108790397644043, 0.011046879768371582, 0.01101414394378662, 0.011060223579406739, 0.011064319610595704, 0.011098112106323242, 0.011034624099731445, 0.011096063613891602, 0.01104793643951416, 0.011021311759948731, 0.011065343856811523, 0.011182080268859864, 0.01112166404724121, 0.011107328414916993, 0.011080703735351562, 0.011080703735351562, 0.01113702392578125, 0.011103232383728028, 0.011082752227783203, 0.011018303871154786, 0.011081664085388184, 0.011163647651672364, 0.011080767631530762, 0.011104191780090332, 0.011074560165405273, 0.010855423927307128, 0.010910719871520995, 0.01083903980255127, 0.010877951622009278, 0.010870783805847169, 0.01080832004547119, 0.010946559906005859, 0.022261823654174805, 0.010372032165527343, 0.01041305637359619, 0.010391551971435547, 0.010406911849975586, 0.0104017915725708, 0.010338303565979003, 0.010470399856567383, 0.010395648002624512, 0.010359807968139649, 0.010357760429382324, 0.010380288124084473, 0.010528767585754394, 0.011116543769836425, 0.011110400199890137, 0.011115519523620606, 0.011082752227783203, 0.01115443229675293, 0.011058176040649414, 0.011031552314758301, 0.011048959732055665, 0.011095040321350098, 0.011101183891296386, 0.0110632963180542, 0.011089920043945312, 0.01107148838043213, 0.011062272071838379, 0.011088895797729491, 0.01105510425567627, 0.011023360252380371, 0.011111424446105958, 0.011047967910766602, 0.011032544136047363, 0.011105279922485351, 0.011359231948852539, 0.011072511672973634, 0.01105510425567627, 0.011084799766540527, 0.011080703735351562, 0.011078656196594238, 0.012894207954406739, 0.01135206413269043, 0.011204607963562012, 0.011106304168701172, 
0.011083776473999024, 0.011175935745239257, 0.011152383804321288, 0.01111244773864746, 0.0110448637008667, 0.011191295623779298, 0.011116543769836425, 0.011083776473999024, 0.01107968044281006, 0.011076640129089356, 0.011076576232910156, 0.011108351707458495, 0.01112063980102539, 0.011068415641784669, 0.011074560165405273, 0.011169792175292969, 0.011084832191467286, 0.011101152420043945, 0.011115519523620606, 0.023419904708862304, 0.011096063613891602, 0.011101183891296386, 0.011091967582702637, 0.011140095710754394, 0.01112063980102539, 0.011058176040649414, 0.011149312019348144, 0.011003904342651367, 0.011060223579406739, 0.011060223579406739, 0.011188223838806152, 0.011062272071838379, 0.01140121555328369, 0.01126912021636963, 0.011206720352172852, 0.011110336303710937, 0.011169792175292969, 0.011081727981567382, 0.011107328414916993, 0.011073535919189453, 0.011057151794433593, 0.011122688293457032, 0.011110400199890137, 0.011061247825622558, 0.011032575607299805, 0.011084799766540527, 0.011122688293457032, 0.011084863662719727, 0.01113593578338623, 0.011147263526916504, 0.01306214427947998, 0.012184576034545898, 0.011279359817504882, 0.011052032470703126, 0.011067392349243164, 0.011080703735351562, 0.011072511672973634, 0.011158592224121093, 0.011073472023010254, 0.011085824012756347, 0.011311103820800781, 0.011406335830688476, 0.011143168449401856, 0.0111278076171875, 0.011105279922485351, 0.011146240234375, 0.011101183891296386, 0.011067392349243164, 0.01120358371734619, 0.011150336265563965, 0.011087871551513672, 0.011042816162109375, 0.011045887947082519, 0.011133952140808106, 0.011110400199890137, 0.011117568016052246, 0.011411456108093262, 0.011166720390319825, 0.011073535919189453, 0.011205632209777832, 0.011075584411621094, 0.011076607704162598]",tokens/s,88.75835647320693,,,main,False,False @@ -11111,7 +11111,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e930e-009d4676249c1a075160ff0a;d4e3af37-e7f8-4ec1-8ad1-253998909850) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe4c3-6b8fe7762f16ebe170956106;17a714e4-8f67-4512-b7ad-ae117a03fc48) Repository Not Found for url: https://huggingface.co/m/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -11215,7 +11215,7 @@ Cannot access gated repo for url https://huggingface.co/mistralai/Mixtral-8x22B- Access to model mistralai/Mixtral-8x22B-v0.1 is restricted and you are not in the authorized list. Visit https://huggingface.co/mistralai/Mixtral-8x22B-v0.1 to ask for access. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,1,1,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,1,1,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -11254,7 +11254,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d472c-4eb897a756be812c77614ba1;c2323adf-5b66-4145-ac2e-90dfd38401fa) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fea8e-6c164fc6651f804825f19e67;155e3af3-25aa-46e4-b99b-26b8c681c2dd) Repository Not Found for url: https://huggingface.co/1/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -11285,7 +11285,7 @@ OSError: 1 is not a local folder and is not a valid model identifier listed on ' If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,0,0,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,0,0,cuda,0,42,,,True,True,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -11324,7 +11324,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d46c2-540660f162f2af4b55e787f9;f7798759-fdfa-4aef-879f-32efe682e3e9) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fea1d-1adf52573d4344bb6eca05a1;93304495-9e0e-4ec8-ab9d-633d08abee1c) Repository Not Found for url: https://huggingface.co/0/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -11356,7 +11356,7 @@ If this is a private repository, make sure to pass a token having permission to ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-6.7b,,cuda,0,42,,,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,217063f5c507ed7cc255df7e1f64c4333a0b4dfe,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,MB,1301.676032,15125.184512,0.0,14478.737408,14237.625344,s,10,0.8224414138793944,0.08224414138793945,0.00023158737850503415,0.08215292739868164,0.08237123489379883,0.08264656257629394,0.08286682472229004,"[0.08292189025878906, 0.0821987533569336, 0.08213279724121093, 0.08231005096435547, 0.0821382064819336, 0.08213455963134765, 0.08213699340820313, 0.08216111755371094, 0.08214473724365234, 0.08216230773925781]",tokens/s,3112.683720442374,kWh,9.732241499727975e-07,5.332684545643789e-07,5.104102443934566e-06,6.610595048471742e-06,tokens/kWh,38725712.000644006,MB,1301.676032,15125.184512,0.0,14478.737408,14440.977408,s,10,21.55573461914063,2.155573461914062,0.003128784082911401,2.1546517333984374,2.1563899902343753,2.160598193359375,2.163964755859375,"[2.164806396484375, 2.155454833984375, 2.155191162109375, 2.154422119140625, 2.153969482421875, 2.15465673828125, 2.155087646484375, 2.153739501953125, 2.154646728515625, 2.153760009765625]",tokens/s,29.22656133651717,kWh,2.539409045693219e-05,1.3916760766133236e-05,0.00011640616143086489,0.00015571701265393033,tokens/kWh,404580.0707724396,,s,629,22.260371433258044,0.03539009766813682,0.009800577973651038,0.03420876693725586,0.034320641326904294,0.034373221588134766,0.11664900054931641,"[0.034751487731933595, 0.03479142379760742, 0.03473206329345703, 0.03477910232543945, 0.03483855819702148, 0.03484259033203125, 0.03480575942993164, 0.034825214385986326, 0.03483443069458008, 0.034772991180419925, 0.034618366241455076, 0.0346060791015625, 0.03485184097290039, 0.0344453125, 0.03424870300292969, 0.03413094329833984, 0.034099262237548826, 0.03411040115356445, 0.03410227203369141, 0.03416883087158203, 0.03420569610595703, 0.034121726989746096, 0.03416064071655273, 0.03413094329833984, 0.03420467376708984, 0.034146305084228515, 0.0341923828125, 0.03416883087158203, 0.03421184158325195, 0.034146305084228515, 0.03413094329833984, 0.03419750213623047, 0.034179073333740234, 0.03422208023071289, 0.03421699142456055, 0.03435107040405273, 0.034342910766601564, 0.03427635192871094, 0.03420876693725586, 0.03420159912109375, 0.034253822326660154, 0.034236415863037106, 0.034260990142822266, 0.03425791931152344, 0.03429171371459961, 0.0342476806640625, 0.034293758392333985, 0.03426201629638672, 0.03425484848022461, 0.03429683303833008, 0.03427635192871094, 0.03431731033325195, 0.03426508712768555, 0.03428044891357422, 0.03424563217163086, 0.03429683303833008, 0.03427532958984375, 0.034283519744873044, 0.03433881759643555, 0.03430297470092773, 0.034367488861083983, 0.03433574295043945, 0.11684864044189452, 0.03409107208251953, 0.03410835266113281, 0.0340684814453125, 0.034070526123046875, 0.034078720092773435, 
0.034105342864990236, 0.03407564926147461, 0.03406438446044922, 0.0340582389831543, 0.03409305572509766, 0.0340684814453125, 0.034132991790771484, 0.03428966522216797, 0.03419340896606445, 0.0341319694519043, 0.0341104621887207, 0.03411251068115234, 0.034149375915527344, 0.03407974243164062, 0.03413094329833984, 0.03412684631347656, 0.03414527893066406, 0.034127872467041014, 0.034127872467041014, 0.034187263488769534, 0.03425484848022461, 0.03421286392211914, 0.03422822570800781, 0.03421184158325195, 0.03422316741943359, 0.03420358276367187, 0.03418931198120117, 0.03420467376708984, 0.034200576782226565, 0.03420774459838867, 0.03424051284790039, 0.03422003173828125, 0.03422003173828125, 0.03421798324584961, 0.03425177764892578, 0.034206718444824216, 0.034233345031738284, 0.03421286392211914, 0.034255870819091795, 0.03424460983276367, 0.03428147125244141, 0.03421286392211914, 0.03427123260498047, 0.03437977600097656, 0.034531326293945314, 0.03434188842773438, 0.03427942276000977, 0.03427942276000977, 0.03428659057617187, 0.03436851119995117, 0.034372608184814454, 0.03430809783935547, 0.03434905624389648, 0.034283519744873044, 0.034304000854492187, 0.03430096054077148, 0.03432956695556641, 0.11668479919433594, 0.03402035140991211, 0.034113536834716796, 0.034344959259033206, 0.034233345031738284, 0.034086910247802735, 0.03409100723266602, 0.034138111114501955, 0.03405721664428711, 0.03408281707763672, 0.03409622573852539, 0.034100128173828126, 0.0341104621887207, 0.034124801635742184, 0.03410227203369141, 0.03410227203369141, 0.034141185760498044, 0.03410739135742188, 0.034113536834716796, 0.03413401412963867, 0.034118656158447266, 0.03419136047363281, 0.034132991790771484, 0.034165760040283204, 0.03415654373168946, 0.034154495239257815, 0.034165760040283204, 0.03419033432006836, 0.034149375915527344, 0.03422003173828125, 0.03416678237915039, 0.034223102569580076, 0.03419852828979492, 0.03422822570800781, 0.03417292785644531, 0.03422208023071289, 0.03421184158325195, 0.034285568237304685, 0.034301952362060545, 0.03424051284790039, 0.03424460983276367, 0.03424256134033203, 0.03421388626098633, 0.03427532958984375, 0.03423436737060547, 0.03428966522216797, 0.03422003173828125, 0.03429785537719727, 0.034320384979248046, 0.03427328109741211, 0.03424665451049805, 0.034304000854492187, 0.034298881530761716, 0.03428044891357422, 0.03427840042114258, 0.0342999038696289, 0.03428761672973633, 0.03429280090332031, 0.03427731323242188, 0.034367488861083983, 0.03428659057617187, 0.03435007858276367, 0.034288639068603514, 0.11707904052734375, 0.03401529693603516, 0.034060222625732425, 0.03401113510131836, 0.03401932907104492, 0.03405619049072266, 0.03409100723266602, 0.03403059387207031, 0.034088958740234376, 0.034062335968017575, 0.034081790924072264, 0.034080768585205076, 0.034136062622070314, 0.03410432052612305, 0.03412070465087891, 0.03410124969482422, 0.03408588790893555, 0.03412377548217774, 0.03409408187866211, 0.03404800033569336, 0.034121726989746096, 0.034119678497314454, 0.034100223541259765, 0.0341739501953125, 0.03414527893066406, 0.03419443130493164, 0.034187263488769534, 0.03416371154785156, 0.03420876693725586, 0.034184192657470705, 0.03418316650390625, 0.03414220809936523, 0.03417292785644531, 0.034206718444824216, 0.0342108154296875, 0.0342108154296875, 0.034200576782226565, 0.03419551849365234, 0.03425273513793945, 0.03422617721557617, 0.03421388626098633, 0.03424051284790039, 0.03423436737060547, 0.03421798324584961, 0.03424256134033203, 0.03423846435546875, 0.034216960906982424, 
0.034301952362060545, 0.034272254943847655, 0.03424256134033203, 0.03426303863525391, 0.034298881530761716, 0.03427123260498047, 0.034285568237304685, 0.03439923095703125, 0.034536449432373044, 0.03436236953735351, 0.03432755279541016, 0.03426201629638672, 0.034339839935302735, 0.03430604934692383, 0.034272254943847655, 0.034315265655517575, 0.11667865753173828, 0.034067455291748046, 0.034028545379638675, 0.03402956771850586, 0.0340469741821289, 0.03402751922607422, 0.03409408187866211, 0.03408998489379883, 0.03406643295288086, 0.034114593505859374, 0.03410940933227539, 0.034062335968017575, 0.0341104621887207, 0.03403776168823242, 0.034121726989746096, 0.034049022674560545, 0.034111488342285154, 0.03409305572509766, 0.03410739135742188, 0.03408793640136719, 0.034105342864990236, 0.03410739135742188, 0.034118656158447266, 0.03410943984985351, 0.03415039825439453, 0.034121726989746096, 0.034165760040283204, 0.03410851287841797, 0.034177982330322265, 0.03413808059692383, 0.03417190551757812, 0.03414425659179687, 0.034184192657470705, 0.034154495239257815, 0.034260990142822266, 0.03418009567260742, 0.03423436737060547, 0.03415859222412109, 0.03424870300292969, 0.03418214416503906, 0.03425689697265625, 0.034202625274658206, 0.03434393692016602, 0.03455692672729492, 0.034293758392333985, 0.03424153518676758, 0.03426201629638672, 0.03425996780395508, 0.034260990142822266, 0.03426713562011719, 0.03428147125244141, 0.03424358367919922, 0.034344959259033206, 0.03424665451049805, 0.03430809783935547, 0.03425177764892578, 0.03431628799438476, 0.03433065414428711, 0.03431216049194336, 0.03428761672973633, 0.03431321716308594, 0.03427532958984375, 0.03433779144287109, 0.11667250823974609, 0.03399168014526367, 0.03406335830688476, 0.03410739135742188, 0.034149375915527344, 0.03417190551757812, 0.034105342864990236, 0.034078720092773435, 0.034080768585205076, 0.03407564926147461, 0.03413401412963867, 0.034044929504394535, 0.03409612655639648, 0.03410124969482422, 0.034119678497314454, 0.03410432052612305, 0.03413094329833984, 0.0341473274230957, 0.034095104217529294, 0.034136062622070314, 0.03410739135742188, 0.034116607666015625, 0.03413913726806641, 0.034132991790771484, 0.03415859222412109, 0.03416883087158203, 0.03421593475341797, 0.03419033432006836, 0.034233345031738284, 0.0341923828125, 0.03420569610595703, 0.03415961456298828, 0.03422719955444336, 0.03420774459838867, 0.034200576782226565, 0.03416678237915039, 0.03421286392211914, 0.03421491241455078, 0.034229248046875, 0.03426713562011719, 0.034219009399414066, 0.03421798324584961, 0.03425689697265625, 0.03422003173828125, 0.03423440170288086, 0.03426710510253906, 0.034331649780273435, 0.03427635192871094, 0.03429177474975586, 0.034270145416259765, 0.034255870819091795, 0.03422617721557617, 0.03427532958984375, 0.03428761672973633, 0.03429683303833008, 0.034256961822509764, 0.03433465576171875, 0.03434188842773438, 0.03427532958984375, 0.034334720611572264, 0.034374656677246096, 0.034285568237304685, 0.03434905624389648, 0.1166223373413086, 0.034045951843261715, 0.034049022674560545, 0.034088958740234376, 0.03411763381958008, 0.034146305084228515, 0.03407564926147461, 0.0341288948059082, 0.03422412872314453, 0.03416166305541992, 0.034083839416503905, 0.034167808532714845, 0.034098175048828124, 0.034108417510986325, 0.034136062622070314, 0.034252799987792966, 0.03415654373168946, 0.03413708877563477, 0.03408793640136719, 0.034118656158447266, 0.034121726989746096, 0.034132991790771484, 0.03410739135742188, 0.034141185760498044, 
0.03416678237915039, 0.03412275314331055, 0.034157569885253904, 0.03419340896606445, 0.03417705535888672, 0.03416572952270508, 0.03416678237915039, 0.03415961456298828, 0.03418316650390625, 0.03429177474975586, 0.03432236862182617, 0.03438284683227539, 0.034229248046875, 0.03425689697265625, 0.034233345031738284, 0.034239486694335936, 0.03424870300292969, 0.03423846435546875, 0.03420876693725586, 0.03423436737060547, 0.03424460983276367, 0.03426611328125, 0.034236415863037106, 0.034237438201904294, 0.03427635192871094, 0.034290687561035156, 0.03423955154418945, 0.034319297790527344, 0.03427942276000977, 0.03432243347167969, 0.034260990142822266, 0.03427945709228516, 0.034317279815673826, 0.034282497406005856, 0.034301952362060545, 0.034298881530761716, 0.034298881530761716, 0.034304000854492187, 0.034310142517089845, 0.11665100860595704, 0.03404800033569336, 0.03415552139282227, 0.034195457458496094, 0.03405926513671875, 0.03407974243164062, 0.034050048828125, 0.03405926513671875, 0.03406643295288086, 0.03404390335083008, 0.034044929504394535, 0.03410636901855469, 0.034098175048828124, 0.03405414581298828, 0.03412384033203125, 0.034119617462158205, 0.034121726989746096, 0.03410432052612305, 0.034098175048828124, 0.03407974243164062, 0.03412684631347656, 0.034095104217529294, 0.03411455917358398, 0.03413401412963867, 0.034165760040283204, 0.03410636901855469, 0.034179073333740234, 0.034148353576660156, 0.034132991790771484, 0.03417702484130859, 0.034200576782226565, 0.034233345031738284, 0.03420979309082031, 0.03422719955444336, 0.03417599868774414, 0.03421798324584961, 0.034170879364013675, 0.03416678237915039, 0.034187297821044925, 0.034245601654052736, 0.03422515106201172, 0.03424153518676758, 0.03425689697265625, 0.0342108154296875, 0.03424153518676758, 0.034239486694335936, 0.03422515106201172, 0.03426406478881836, 0.03427532958984375, 0.03425075149536133, 0.03427635192871094, 0.03428659057617187, 0.03424358367919922, 0.03429683303833008, 0.0342476806640625, 0.034275360107421875, 0.03432444763183594, 0.034305023193359374, 0.034290687561035156, 0.03428764724731445, 0.03430806350708008, 0.03426713562011719, 0.034321407318115234, 0.11664383697509766, 0.03402956771850586, 0.03400396728515625, 0.03403776168823242, 0.03405926513671875, 0.03405516815185547, 0.03407257461547852, 0.03407462310791016, 0.03410227203369141, 0.03407155227661133, 0.034162689208984375, 0.03407974243164062, 0.03409305572509766, 0.03406950378417969, 0.03409612655639648, 0.03406438446044922, 0.03415244674682617, 0.034105342864990236, 0.03418828964233398, 0.0340582389831543, 0.03437363052368164, 0.03469219207763672, 0.03427318572998047, 0.034111488342285154, 0.034200576782226565, 0.03415039825439453, 0.03415244674682617, 0.03416883087158203, 0.034195457458496094, 0.034154495239257815, 0.034219009399414066, 0.03416371154785156, 0.034200576782226565, 0.03416678237915039, 0.034203647613525394, 0.03419443130493164, 0.03426508712768555, 0.03418828964233398, 0.03423539352416992, 0.03419443130493164, 0.03427635192871094, 0.03419340896606445, 0.03424460983276367, 0.03423846435546875, 0.034255870819091795, 0.03425996780395508, 0.03426003265380859, 0.034242496490478516, 0.03429478454589844, 0.0342476806640625, 0.0342927360534668, 0.03421184158325195, 0.034275390625, 0.03422304153442383, 0.03430092620849609, 0.03425996780395508, 0.034283519744873044, 0.03424870300292969, 0.03431935882568359, 0.03425791931152344, 0.03431423950195313, 0.034282497406005856, 0.03431219100952149, 0.11666841888427734, 0.034004032135009764, 
0.03404281616210938, 0.034045951843261715, 0.034044929504394535, 0.03406854248046875, 0.03409196853637695, 0.03405311965942383, 0.034095104217529294, 0.034051071166992186, 0.034113536834716796, 0.034032638549804685, 0.0341104621887207, 0.034086910247802735, 0.03411558532714844, 0.0340469741821289, 0.034138111114501955, 0.034141185760498044, 0.0340766716003418, 0.03410432052612305, 0.03411455917358398, 0.03409100723266602, 0.034148353576660156, 0.034141185760498044, 0.034136062622070314, 0.034127872467041014, 0.03415654373168946, 0.03419852828979492, 0.03413708877563477, 0.03415552139282227, 0.03416371154785156, 0.03413913726806641, 0.034203647613525394, 0.03420876693725586, 0.03427328109741211, 0.03417292785644531, 0.034223102569580076, 0.034253822326660154, 0.03431628799438476, 0.03421798324584961, 0.03423539352416992, 0.03420979309082031, 0.03422003173828125, 0.034233345031738284, 0.03423846435546875, 0.03421798324584961, 0.034277374267578126, 0.03427328109741211, 0.03425996780395508, 0.03428761672973633, 0.03426611328125, 0.03426303863525391, 0.034269184112548826, 0.034260990142822266, 0.03430297470092773, 0.03426406478881836, 0.03432044982910156, 0.034289600372314454, 0.034285568237304685, 0.03431219100952149, 0.03433881759643555, 0.034277374267578126, 0.03433779144287109]",tokens/s,28.256491671124767,,,main,False,False -bfloat16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,.,.,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +bfloat16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,.,.,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -11424,7 +11424,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e96e2-2735c455326ea85f7872d1be;f113bcc7-56c2-4d20-adef-79ac1b6e4a5a) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-664fe8a3-3f622002574a67884e335be6;2ef565a9-5cd0-42ef-b1e1-0ef4a6f5fda6) Repository Not Found for url: https://huggingface.co/8/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -11456,7 +11456,7 @@ If this is a private repository, make sure to pass a token having permission to ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.0,,,MB,1257.570304,3852.992512,0.0,3206.545408,3105.82784,s,10,0.1930878410339356,0.019308784103393553,0.0002982267366102465,0.019219376564025878,0.019381788444519044,0.01978655824661255,0.020110374088287353,"[0.020191328048706055, 0.019291839599609374, 0.019276735305786132, 0.01921321678161621, 0.01916364860534668, 0.01917958450317383, 0.019126815795898436, 0.019245439529418946, 0.01922553634643555, 0.019173696517944337]",tokens/s,13258.214428686244,kWh,2.2930128148513863e-07,1.255967355000605e-07,1.0614075028720838e-06,1.416305519857283e-06,tokens/kWh,180751960.93692863,MB,1257.570304,3852.992512,0.0,3206.545408,3189.19168,s,10,10.936780883789062,1.0936780883789061,0.01630772174915674,1.0941107177734375,1.1115546997070311,1.1175878967285156,1.122414454345703,"[1.12362109375, 1.1102139892578125, 1.09401953125, 1.0732847900390625, 1.08555517578125, 1.0743533935546874, 1.074810791015625, 1.1100771484375, 1.094201904296875, 1.09664306640625]",tokens/s,57.603787320436446,kWh,1.3152869895716662e-05,7.204484050677799e-06,3.6702956041928975e-05,5.706030998832343e-05,tokens/kWh,1104094.9481853857,,s,629,11.091198944091792,0.017633066683770744,0.0023944620599391686,0.017116159439086915,0.017849549102783205,0.01795563507080078,0.03670208465576172,"[0.018229215621948243, 0.018549760818481444, 0.017408000946044923, 0.017102848052978514, 0.017147903442382813, 0.017110015869140623, 0.017257471084594727, 0.018291711807250977, 0.017951744079589844, 0.017933311462402343, 0.017864704132080078, 0.01785651206970215, 0.0177838077545166, 0.017855487823486327, 0.017953792572021485, 0.017913856506347657, 0.017951744079589844, 0.01780633544921875, 0.017902591705322265, 0.01785139274597168, 0.01780121612548828, 0.017910783767700195, 0.017886207580566405, 0.01785856056213379, 0.01803980827331543, 0.017917951583862304, 0.017895423889160156, 0.017977344512939454, 0.01785036849975586, 0.017992704391479493, 0.018061311721801757, 0.018448383331298827, 0.01819340705871582, 0.018375680923461913, 0.018152448654174806, 0.017811456680297853, 0.01784934425354004, 0.01785753631591797, 0.017934335708618163, 0.017708032608032227, 0.01780121612548828, 0.017750015258789064, 0.017701887130737306, 0.017633279800415038, 0.0176363525390625, 0.017761280059814453, 0.017696767807006835, 0.017795072555541993, 0.017719327926635744, 0.017763296127319337, 0.017741823196411134, 0.017702911376953127, 0.017727487564086913, 0.01759539222717285, 0.017497087478637697, 
0.017490943908691405, 0.017516544342041016, 0.017794048309326172, 0.017703935623168944, 0.017696767807006835, 0.017746944427490235, 0.01806438446044922, 0.038354942321777344, 0.017970176696777345, 0.01780735969543457, 0.017812480926513673, 0.017769472122192383, 0.017735679626464843, 0.017709056854248048, 0.01767628860473633, 0.017687551498413084, 0.017733631134033204, 0.017695743560791014, 0.01775923156738281, 0.017435647964477538, 0.01744691276550293, 0.017467391967773437, 0.017694719314575197, 0.017770496368408203, 0.01766912078857422, 0.017729536056518554, 0.017715200424194336, 0.017704959869384765, 0.01780633544921875, 0.017680383682250975, 0.01770086479187012, 0.01742131233215332, 0.017605632781982423, 0.017557504653930665, 0.017727487564086913, 0.017717248916625978, 0.017753087997436523, 0.017693695068359376, 0.01768448066711426, 0.017746944427490235, 0.017753087997436523, 0.017786880493164063, 0.01760972785949707, 0.01762303924560547, 0.01769267272949219, 0.017777664184570312, 0.017779712677001954, 0.017726463317871095, 0.017743871688842772, 0.01746329689025879, 0.017124351501464845, 0.017110015869140623, 0.017118207931518553, 0.01713363265991211, 0.01714067268371582, 0.01701478385925293, 0.0170700798034668, 0.017059839248657227, 0.017005632400512696, 0.01714681625366211, 0.017054719924926756, 0.01703628730773926, 0.01705881690979004, 0.01726470375061035, 0.017803199768066408, 0.018060287475585936, 0.018311168670654295, 0.018736127853393555, 0.017971200942993162, 0.01798041534423828, 0.03761056137084961, 0.01788307189941406, 0.017714176177978515, 0.017531904220581054, 0.017548288345336914, 0.0176312313079834, 0.017663999557495116, 0.01767628860473633, 0.0175994873046875, 0.01717452812194824, 0.017043455123901367, 0.017135616302490234, 0.017088512420654296, 0.017139711380004884, 0.017073152542114257, 0.017084415435791016, 0.01705369567871094, 0.017082368850708008, 0.01705881690979004, 0.017102848052978514, 0.017081344604492187, 0.017098751068115235, 0.017138687133789063, 0.01691961669921875, 0.01686419105529785, 0.016903167724609376, 0.01702707290649414, 0.017031167984008787, 0.017102848052978514, 0.017107967376708985, 0.017155071258544922, 0.017126399993896483, 0.017105920791625977, 0.017156095504760743, 0.017155071258544922, 0.017076223373413087, 0.017083391189575196, 0.017118207931518553, 0.017176576614379883, 0.017192960739135742, 0.017133600234985353, 0.017084384918212892, 0.01710086441040039, 0.01713657569885254, 0.017050655364990234, 0.01709667205810547, 0.017385471343994142, 0.017781759262084963, 0.017729536056518554, 0.017755136489868165, 0.017765375137329103, 0.017789951324462892, 0.017768447875976562, 0.01796403121948242, 0.017794048309326172, 0.017901567459106444, 0.017799200057983397, 0.017763296127319337, 0.01777561569213867, 0.017900543212890627, 0.01767628860473633, 0.017939456939697264, 0.01772960090637207, 0.03676153564453125, 0.01713667106628418, 0.017050592422485352, 0.017043455123901367, 0.01706188774108887, 0.017006591796875, 0.01704243278503418, 0.017047552108764647, 0.017046592712402345, 0.017060800552368163, 0.017036352157592773, 0.01705465507507324, 0.01706188774108887, 0.016924671173095703, 0.01686425590515137, 0.01685196876525879, 0.017171455383300782, 0.01706188774108887, 0.01702809524536133, 0.017079296112060546, 0.01700966453552246, 0.01701375961303711, 0.01701273536682129, 0.01703628730773926, 0.01701171112060547, 0.0170700798034668, 0.017150976181030272, 0.016921600341796874, 0.016855039596557618, 0.016867328643798828, 0.01681407928466797, 
0.016857088088989256, 0.017037376403808594, 0.017106880187988283, 0.0170700798034668, 0.017093631744384767, 0.017100799560546876, 0.0170250244140625, 0.01702911949157715, 0.017063936233520507, 0.016974847793579103, 0.01700556755065918, 0.01700966453552246, 0.01702400016784668, 0.017074176788330078, 0.017004575729370117, 0.017019872665405274, 0.017072128295898437, 0.01701785659790039, 0.017126399993896483, 0.01701580810546875, 0.01701375961303711, 0.017077247619628907, 0.01706598472595215, 0.01703731155395508, 0.017039360046386717, 0.017068031311035157, 0.01700556755065918, 0.017133567810058595, 0.017047552108764647, 0.017039360046386717, 0.017073152542114257, 0.01705881690979004, 0.036703231811523435, 0.017076223373413087, 0.016969728469848632, 0.017027103424072265, 0.0170229434967041, 0.01701785659790039, 0.016950271606445313, 0.01700147247314453, 0.01704140853881836, 0.01705881690979004, 0.01702195167541504, 0.017073152542114257, 0.017082368850708008, 0.017115135192871094, 0.017113088607788086, 0.017035263061523438, 0.017077247619628907, 0.017112064361572265, 0.01705779266357422, 0.017094655990600584, 0.017035263061523438, 0.01701478385925293, 0.01707423973083496, 0.017034175872802735, 0.017117183685302736, 0.017089536666870117, 0.01701171112060547, 0.017090560913085938, 0.01699839973449707, 0.017085439682006837, 0.017087488174438475, 0.017122304916381836, 0.01704960060119629, 0.017059839248657227, 0.017161216735839844, 0.016950271606445313, 0.016908287048339844, 0.016870399475097657, 0.016880640029907225, 0.016849920272827147, 0.016917503356933594, 0.016842752456665038, 0.01682329559326172, 0.016858112335205077, 0.017116159439086915, 0.01768550491333008, 0.017736703872680663, 0.017755136489868165, 0.017727487564086913, 0.01782374382019043, 0.017672191619873046, 0.017710079193115236, 0.017710079193115236, 0.017671199798583986, 0.017700895309448243, 0.01774995231628418, 0.017737728118896484, 0.017778688430786133, 0.017732608795166017, 0.017682432174682617, 0.017697792053222656, 0.01775923156738281, 0.01770086479187012, 0.03675651168823242, 0.01709667205810547, 0.01705881690979004, 0.017040384292602538, 0.01700556755065918, 0.01703731155395508, 0.017185792922973633, 0.0170199031829834, 0.01704652786254883, 0.01700966453552246, 0.01706188774108887, 0.01704550361633301, 0.0169932804107666, 0.01703321647644043, 0.017060863494873048, 0.01698918342590332, 0.017071104049682616, 0.01701683235168457, 0.017031167984008787, 0.017125375747680666, 0.017085439682006837, 0.017022975921630858, 0.016879615783691407, 0.01684480094909668, 0.016871423721313478, 0.017044479370117188, 0.017147903442382813, 0.01702195167541504, 0.017073152542114257, 0.017062911987304686, 0.017063936233520507, 0.017052671432495118, 0.01703628730773926, 0.01702911949157715, 0.017097728729248047, 0.017052671432495118, 0.017052671432495118, 0.01702707290649414, 0.017076223373413087, 0.017068031311035157, 0.017007680892944337, 0.017015743255615234, 0.01702195167541504, 0.017054719924926756, 0.016990207672119142, 0.017092607498168946, 0.017091583251953125, 0.017130495071411133, 0.01703219223022461, 0.01704652786254883, 0.017101823806762697, 0.0169932804107666, 0.017097728729248047, 0.017079296112060546, 0.017085439682006837, 0.017064960479736328, 0.01704550361633301, 0.01700454330444336, 0.017067007064819336, 0.01703731155395508, 0.017083423614501953, 0.017076192855834962, 0.017073152542114257, 0.03666534423828125, 0.017054719924926756, 0.01701580810546875, 0.01699737548828125, 0.01701785659790039, 0.01698406410217285, 
0.01701478385925293, 0.01696051216125488, 0.01700249671936035, 0.017031167984008787, 0.01699839973449707, 0.017047552108764647, 0.01698099136352539, 0.0170383358001709, 0.016995328903198242, 0.01702604866027832, 0.017060863494873048, 0.016998464584350587, 0.017014720916748046, 0.01704243278503418, 0.016991231918334963, 0.01700044822692871, 0.017119232177734374, 0.01696051216125488, 0.016877567291259766, 0.01683558464050293, 0.01683456039428711, 0.01683558464050293, 0.017042463302612304, 0.01697587203979492, 0.017061920166015626, 0.017036224365234377, 0.0169932804107666, 0.017068031311035157, 0.017142784118652343, 0.01706188774108887, 0.01700761604309082, 0.017069055557250978, 0.01700351905822754, 0.017074176788330078, 0.017135616302490234, 0.017006591796875, 0.01704140853881836, 0.017095680236816405, 0.01700454330444336, 0.01703014373779297, 0.017105920791625977, 0.0170250244140625, 0.01702911949157715, 0.017112096786499022, 0.017001440048217773, 0.017257471084594727, 0.01775103950500488, 0.017670143127441407, 0.017475584030151366, 0.017163263320922852, 0.016849920272827147, 0.016915456771850586, 0.017067007064819336, 0.017088512420654296, 0.01702400016784668, 0.017076223373413087, 0.017040384292602538, 0.03828736114501953, 0.01795686340332031, 0.017936384201049805, 0.017796096801757814, 0.017743871688842772, 0.017718271255493166, 0.01774284744262695, 0.017711103439331053, 0.017745920181274414, 0.017721343994140625, 0.017699840545654297, 0.01779199981689453, 0.01704960060119629, 0.01702911949157715, 0.01700556755065918, 0.01701273536682129, 0.017117183685302736, 0.017110015869140623, 0.017073152542114257, 0.017067007064819336, 0.017063936233520507, 0.017076223373413087, 0.01705062484741211, 0.017048576354980468, 0.01701683235168457, 0.017090560913085938, 0.017129472732543945, 0.017894399642944335, 0.01790771293640137, 0.017769472122192383, 0.017679359436035155, 0.017559551239013673, 0.017468416213989257, 0.017479679107666016, 0.017914880752563478, 0.017819648742675782, 0.01778892707824707, 0.017777664184570312, 0.017718271255493166, 0.01761587142944336, 0.017488895416259767, 0.01784217643737793, 0.017911808013916015, 0.017744895935058593, 0.017735679626464843, 0.017711103439331053, 0.017797119140625, 0.01780019187927246, 0.017739776611328126, 0.01771321678161621, 0.01774176025390625, 0.017657855987548828, 0.017821695327758787, 0.01844534492492676, 0.017991647720336915, 0.0177838077545166, 0.017703935623168944, 0.017689599990844726, 0.017736703872680663, 0.017702911376953127, 0.017743871688842772, 0.017736703872680663, 0.017960992813110352, 0.037421024322509766, 0.01785036849975586, 0.01764761543273926, 0.017670143127441407, 0.017686527252197267, 0.017675264358520508, 0.017625087738037108, 0.017694719314575197, 0.01764352035522461, 0.017839103698730468, 0.0176629753112793, 0.01775103950500488, 0.017680383682250975, 0.017765375137329103, 0.017729536056518554, 0.01787494468688965, 0.01780940818786621, 0.017709056854248048, 0.017698816299438477, 0.017839103698730468, 0.017739776611328126, 0.017663999557495116, 0.017716224670410157, 0.017731584548950196, 0.01769267272949219, 0.01781760025024414, 0.017522687911987304, 0.017731584548950196, 0.01720627212524414, 0.016941055297851563, 0.017051647186279297, 0.017103872299194335, 0.01700249671936035, 0.017087488174438475, 0.017083423614501953, 0.017078239440917967, 0.017039360046386717, 0.017112064361572265, 0.01702809524536133, 0.0170199031829834, 0.01707827186584473, 0.017031167984008787, 0.017074176788330078, 0.017120256423950195, 
0.01701888084411621, 0.01705887985229492, 0.0170761604309082, 0.01701785659790039, 0.01703321647644043, 0.017093631744384767, 0.01700966453552246, 0.017006591796875, 0.01705881690979004, 0.01716531181335449, 0.017303552627563477, 0.017102848052978514, 0.01704652786254883, 0.017072160720825194, 0.01709974479675293, 0.017110015869140623, 0.017093631744384767, 0.017156095504760743, 0.01700966453552246, 0.03669913482666016, 0.017085439682006837, 0.01701375961303711, 0.01701785659790039, 0.01701683235168457, 0.01721446418762207, 0.017079296112060546, 0.017060863494873048, 0.01707526397705078, 0.01710483169555664, 0.01703219223022461, 0.017082368850708008, 0.01696767997741699, 0.01706598472595215, 0.01701068878173828, 0.01701481628417969, 0.01707209587097168, 0.01702195167541504, 0.01698406410217285, 0.016966655731201173, 0.016978944778442383, 0.01705062484741211, 0.017083391189575196, 0.0169932804107666, 0.017012767791748047, 0.01703727912902832, 0.017089536666870117, 0.017022975921630858, 0.017048576354980468, 0.017031167984008787, 0.017154048919677735, 0.01779916763305664, 0.01785651206970215, 0.017703935623168944, 0.017731584548950196, 0.01775103950500488, 0.017712127685546874, 0.017710079193115236, 0.017723392486572266, 0.01783500862121582, 0.017762304306030274, 0.017665023803710937, 0.01767628860473633, 0.017745920181274414, 0.017752063751220702, 0.017717248916625978, 0.017730560302734375, 0.017872896194458008, 0.017682432174682617, 0.017737728118896484, 0.017763328552246094, 0.017656831741333007, 0.01782476806640625, 0.0178155517578125, 0.01776639938354492, 0.017732608795166017, 0.017755136489868165, 0.017681407928466796, 0.017770496368408203, 0.017688575744628905, 0.017688575744628905, 0.017702911376953127, 0.017737728118896484]",tokens/s,56.7116326350871,,,,, -bfloat16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,2,2,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +bfloat16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,2,2,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -11495,7 +11495,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in 
hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d45f5-6cfd027b27c05292057cc517;16a8f42d-54bd-4c2d-9cff-2c30fb8929dc) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe947-001212e46dc5adc77282e1fc;a3a08374-f35b-4db6-bc14-ebc1dc6c266e) Repository Not Found for url: https://huggingface.co/2/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -11550,7 +11550,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1560, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmplpq39cur/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp3yffhja0/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-70m,,cuda,0,42,,,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,217063f5c507ed7cc255df7e1f64c4333a0b4dfe,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,MB,1250.996224,885.522432,0.0,239.075328,215.486464,s,33,0.1769663357734681,0.0053626162355596375,0.0002570235649559515,0.00526259183883667,0.005536326313018799,0.005578227043151855,0.006252962608337402,"[0.0065446081161499026, 0.005397280216217041, 0.005633215904235839, 0.005541567802429199, 0.005203455924987793, 0.005216959953308105, 0.0052919678688049315, 0.005539455890655518, 0.005497536182403564, 0.0055116481781005856, 0.005508543968200684, 0.005171135902404785, 0.005177504062652588, 0.005198751926422119, 0.005169536113739014, 0.005292768001556396, 0.005238239765167236, 0.005190879821777344, 0.005200255870819092, 0.00551526403427124, 0.005498079776763916, 0.005522528171539307, 0.005209440231323242, 0.005263679981231689, 0.005157504081726074, 0.00525708818435669, 0.00526259183883667, 0.005179391860961914, 0.005231840133666992, 0.005466207981109619, 0.005160255908966065, 0.0051933441162109375, 0.0055238080024719236]",tokens/s,47737.89299007783,kWh,6.423990868072943e-08,3.51987831490572e-08,1.1726808366394197e-07,2.167067754937286e-07,tokens/kWh,1181319778.3814032,MB,1250.996224,885.522432,0.0,239.075328,220.896256,s,33,10.074363525390625,0.3052837431936553,0.008342521184346705,0.30772052001953126,0.31419124145507815,0.31469938964843747,0.31493685302734375,"[0.3145581665039062, 0.30772052001953126, 0.3149112243652344, 
0.312678955078125, 0.29504302978515623, 0.31121762084960936, 0.3128113098144531, 0.313947998046875, 0.3149489135742187, 0.31358535766601564, 0.3130281982421875, 0.29540716552734375, 0.2955582275390625, 0.295082275390625, 0.29649365234375, 0.3095794982910156, 0.295734130859375, 0.2946538696289063, 0.3122529907226563, 0.3142242431640625, 0.31405923461914065, 0.3140561218261719, 0.30607077026367185, 0.29541567993164064, 0.2955962829589844, 0.29640576171875, 0.29568914794921874, 0.304894287109375, 0.2955860290527344, 0.31265093994140625, 0.29499560546875, 0.30248748779296875, 0.31301882934570313]",tokens/s,206.36539417703696,kWh,3.599791314735198e-06,1.9725323771527164e-06,5.574361048760254e-06,1.1146684740648168e-05,tokens/kWh,5651904.711206232,,s,2078,10.24010649585725,0.004927866456139191,0.0007231453694833516,0.004890640020370483,0.005005289602279663,0.005042329406738281,0.01032829954147339,"[0.005499904155731201, 0.005431295871734619, 0.00538316822052002, 0.005511231899261474, 0.005427135944366455, 0.005411839962005615, 0.00535756778717041, 0.005387263774871826, 0.005327871799468994, 0.005294079780578613, 0.005265408039093018, 0.005604351997375488, 0.00603545618057251, 0.005058559894561767, 0.004873216152191162, 0.004924416065216064, 0.004873216152191162, 0.004868095874786377, 0.004929535865783692, 0.0048895998001098635, 0.004844543933868409, 0.004865024089813232, 0.004894720077514648, 0.004839424133300781, 0.004888576030731201, 0.004899839878082276, 0.0048568320274353025, 0.004831232070922851, 0.004867167949676514, 0.004913055896759033, 0.004837376117706299, 0.00481382417678833, 0.004835328102111816, 0.004921343803405762, 0.004843520164489746, 0.004834303855895996, 0.00486297607421875, 0.004884479999542236, 0.004859903812408447, 0.0048220157623291016, 0.004880383968353271, 0.004827136039733886, 0.004858880043029785, 0.004835328102111816, 0.004918272018432617, 0.004798463821411133, 0.004741119861602783, 0.0048220157623291016, 0.004884479999542236, 0.004828159809112549, 0.0047226881980895995, 0.0048455681800842285, 0.004901887893676758, 0.004798463821411133, 0.00481279993057251, 0.0048527359962463375, 0.004838399887084961, 0.0048189439773559575, 0.004831232070922851, 0.0048527359962463375, 0.004820991992950439, 0.004820991992950439, 0.0107325439453125, 0.004819968223571777, 0.004838399887084961, 0.004832255840301514, 0.0048527359962463375, 0.0048189439773559575, 0.0048056960105895994, 0.004883391857147217, 0.004858880043029785, 0.00481382417678833, 0.00481279993057251, 0.004873216152191162, 0.00485478401184082, 0.0048189439773559575, 0.004799488067626953, 0.00487014389038086, 0.0048148479461669925, 0.004811776161193848, 0.004828159809112549, 0.0048261117935180665, 0.00487120008468628, 0.004923359870910644, 0.004980735778808594, 0.004897856235504151, 0.004856768131256103, 0.004845664024353027, 0.004908959865570068, 0.0048455681800842285, 0.004867072105407715, 0.004957183837890625, 0.0050728960037231445, 0.0049530878067016604, 0.004930560111999512, 0.00481279993057251, 0.004874239921569825, 0.005253119945526123, 0.004974592208862305, 0.004901887893676758, 0.004824063777923584, 0.00485478401184082, 0.004835328102111816, 0.004834303855895996, 0.004819968223571777, 0.0048527359962463375, 0.0048230400085449215, 0.0048496642112731934, 0.0048220157623291016, 0.004788224220275879, 0.004817920207977295, 0.004724800109863281, 0.0047134079933166505, 0.004698112010955811, 0.004704256057739258, 0.004717567920684815, 0.004741119861602783, 0.005054463863372802, 0.005022719860076904, 0.0050360321998596195, 
0.005044223785400391, 0.0051036162376403805, 0.005037055969238281, 0.005049344062805176, 0.005074944019317627, 0.01113804817199707, 0.00499507188796997, 0.0050032639503479, 0.004965375900268554, 0.005058559894561767, 0.00501145601272583, 0.005102591991424561, 0.004989952087402344, 0.005040128231048584, 0.005040192127227784, 0.0049642882347106934, 0.004977663993835449, 0.005035007953643799, 0.0049725441932678225, 0.004965375900268554, 0.00501964807510376, 0.0049797120094299315, 0.004978687763214112, 0.004946944236755371, 0.0049725441932678225, 0.00506982421875, 0.004982880115509033, 0.004983712196350098, 0.004969471931457519, 0.004978687763214112, 0.004992000102996826, 0.00501043176651001, 0.004976640224456787, 0.004992000102996826, 0.004963327884674072, 0.004977727890014648, 0.004983744144439697, 0.0049613118171691895, 0.005017568111419677, 0.004976640224456787, 0.004963327884674072, 0.004977663993835449, 0.004998144149780274, 0.004988927841186524, 0.004988927841186524, 0.004952064037322998, 0.0053432960510253905, 0.004991936206817627, 0.004948031902313232, 0.00498476791381836, 0.00495411205291748, 0.004992000102996826, 0.004959231853485108, 0.004944896221160889, 0.004976640224456787, 0.0049572157859802245, 0.004960224151611328, 0.004968448162078858, 0.004994048118591309, 0.00496230411529541, 0.004978687763214112, 0.004952064037322998, 0.004977663993835449, 0.004971519947052002, 0.004963327884674072, 0.0049500160217285155, 0.005015552043914795, 0.004989952087402344, 0.01095372772216797, 0.004964352130889893, 0.004955135822296143, 0.00496127986907959, 0.004971519947052002, 0.004981760025024414, 0.004963327884674072, 0.004943871974945068, 0.004965375900268554, 0.004980735778808594, 0.004976704120635987, 0.004959167957305909, 0.004990015983581543, 0.004945856094360351, 0.0049827837944030765, 0.00496127986907959, 0.004895743846893311, 0.004903935909271241, 0.00491315221786499, 0.004877312183380127, 0.004922368049621582, 0.004915200233459473, 0.0049192957878112795, 0.004889632225036621, 0.004949984073638916, 0.004987904071807861, 0.0049459199905395506, 0.004987904071807861, 0.004999167919158935, 0.004998144149780274, 0.004980735778808594, 0.0049827837944030765, 0.004975615978240967, 0.00499507188796997, 0.004978687763214112, 0.0049725441932678225, 0.00496230411529541, 0.004986879825592041, 0.004988927841186524, 0.004956160068511963, 0.004947968006134033, 0.00496127986907959, 0.004981760025024414, 0.004971519947052002, 0.004959231853485108, 0.004956160068511963, 0.0049909758567810054, 0.004968448162078858, 0.004952064037322998, 0.005001215934753418, 0.004998144149780274, 0.004981791973114014, 0.004954080104827881, 0.00497049617767334, 0.0050145602226257325, 0.004914144039154053, 0.0049192957878112795, 0.004902912139892578, 0.0049500160217285155, 0.004883456230163574, 0.004921343803405762, 0.004907008171081543, 0.004892672061920166, 0.010329088211059571, 0.0046694397926330565, 0.004678656101226806, 0.004690944194793701, 0.00467251205444336, 0.004685823917388916, 0.004670464038848877, 0.004663296222686767, 0.004666368007659912, 0.00469708776473999, 0.00466534423828125, 0.004675583839416504, 0.00467251205444336, 0.004696063995361328, 0.004682752132415771, 0.00469708776473999, 0.004679679870605469, 0.004683775901794434, 0.004663296222686767, 0.00467251205444336, 0.004712448120117187, 0.004685823917388916, 0.00465715217590332, 0.004668416023254395, 0.004675583839416504, 0.004677631855010986, 0.004734975814819336, 0.004694015979766846, 0.004708384037017822, 0.0046878399848937985, 0.004675615787506104, 
0.004689888000488281, 0.004668447971343994, 0.0047144641876220705, 0.004701183795928955, 0.0046694397926330565, 0.004753407955169678, 0.0046694397926330565, 0.004701183795928955, 0.004677631855010986, 0.0046561279296875, 0.004666368007659912, 0.0046254081726074215, 0.004626431941986084, 0.004654079914093018, 0.004618239879608154, 0.004628511905670166, 0.004619232177734375, 0.004628511905670166, 0.004636640071868896, 0.004675583839416504, 0.004675583839416504, 0.004689919948577881, 0.004667391777038574, 0.00466534423828125, 0.004677631855010986, 0.0046888961791992185, 0.004709375858306885, 0.0046929922103881834, 0.004690944194793701, 0.004680704116821289, 0.004674560070037842, 0.00470630407333374, 0.01032806396484375, 0.004671487808227539, 0.0046581759452819825, 0.004675583839416504, 0.004671487808227539, 0.004712448120117187, 0.0046888961791992185, 0.004668416023254395, 0.00466534423828125, 0.0046929922103881834, 0.004682752132415771, 0.0046991357803344725, 0.004853759765625, 0.004770815849304199, 0.004801536083221435, 0.004729856014251709, 0.005141503810882568, 0.005125120162963868, 0.0050657281875610355, 0.004988927841186524, 0.004998208045959473, 0.004996064186096191, 0.005023712158203125, 0.004985856056213379, 0.004974592208862305, 0.005014527797698975, 0.005037055969238281, 0.005047296047210693, 0.004992000102996826, 0.004994048118591309, 0.005000192165374756, 0.0050032639503479, 0.004958208084106445, 0.004984831809997559, 0.005001215934753418, 0.004998144149780274, 0.004946944236755371, 0.004952064037322998, 0.005041152000427246, 0.005030911922454834, 0.004992000102996826, 0.0049511041641235355, 0.004987840175628662, 0.005055488109588623, 0.004964352130889893, 0.004957183837890625, 0.004955135822296143, 0.004985856056213379, 0.004942880153656006, 0.004987872123718261, 0.004988927841186524, 0.00497049617767334, 0.004969471931457519, 0.005315584182739258, 0.005313536167144775, 0.00501043176651001, 0.004973567962646484, 0.004968448162078858, 0.005018623828887939, 0.004956160068511963, 0.0049530878067016604, 0.004958208084106445, 0.0050022401809692385, 0.01041817569732666, 0.004678656101226806, 0.004678656101226806, 0.0046622719764709475, 0.004676608085632325, 0.004665408134460449, 0.00502675199508667, 0.004980735778808594, 0.004988927841186524, 0.004963327884674072, 0.004971519947052002, 0.0050432000160217285, 0.005030911922454834, 0.004988927841186524, 0.005026815891265869, 0.0050421757698059086, 0.005039167881011963, 0.005014463901519776, 0.005008416175842285, 0.005006303787231445, 0.004968448162078858, 0.004975615978240967, 0.0049530878067016604, 0.0050165758132934574, 0.004974624156951904, 0.004963295936584473, 0.005091328144073487, 0.005024767875671386, 0.004981760025024414, 0.0049725441932678225, 0.0049807682037353515, 0.004980703830718994, 0.004975615978240967, 0.004967423915863037, 0.0049797120094299315, 0.004977663993835449, 0.004977663993835449, 0.004996096134185791, 0.0049530878067016604, 0.00501964807510376, 0.004983808040618896, 0.0049725441932678225, 0.0049500160217285155, 0.005023744106292725, 0.004988927841186524, 0.00496230411529541, 0.0049797120094299315, 0.00502784013748169, 0.004963327884674072, 0.005005311965942383, 0.004965375900268554, 0.004958208084106445, 0.005007359981536865, 0.004986879825592041, 0.004930560111999512, 0.004980735778808594, 0.004975615978240967, 0.00496230411529541, 0.004965375900268554, 0.004967423915863037, 0.005044223785400391, 0.0049725441932678225, 0.004932608127593994, 0.010950655937194824, 0.004984831809997559, 0.004969471931457519, 
0.005023744106292725, 0.004992000102996826, 0.0050032639503479, 0.004968448162078858, 0.004993023872375488, 0.004994048118591309, 0.004964352130889893, 0.004968448162078858, 0.005007359981536865, 0.00496230411529541, 0.004939839839935302, 0.004944831848144531, 0.004971519947052002, 0.0050432319641113285, 0.004955103874206543, 0.0049489917755126955, 0.0049827837944030765, 0.004974656105041504, 0.005074880123138428, 0.004981760025024414, 0.0049541440010070804, 0.004952032089233399, 0.004978752136230469, 0.004955103874206543, 0.00502678394317627, 0.0050063362121582035, 0.004977663993835449, 0.004968448162078858, 0.005014527797698975, 0.00495411205291748, 0.0049725441932678225, 0.0049489917755126955, 0.0049489917755126955, 0.005033984184265137, 0.00496230411529541, 0.004973567962646484, 0.005026815891265869, 0.004977663993835449, 0.004976640224456787, 0.004994080066680908, 0.004943840026855469, 0.004973567962646484, 0.004937727928161621, 0.004959231853485108, 0.004947968006134033, 0.004973567962646484, 0.00495411205291748, 0.004951039791107178, 0.004965375900268554, 0.005014527797698975, 0.004964352130889893, 0.004963327884674072, 0.004951039791107178, 0.004991040229797363, 0.005002175807952881, 0.0049530878067016604, 0.004936704158782959, 0.004977663993835449, 0.004977663993835449, 0.004968480110168457, 0.010910688400268554, 0.004977663993835449, 0.004957248210906982, 0.0050001277923583986, 0.004966400146484375, 0.004984831809997559, 0.004963327884674072, 0.004947968006134033, 0.005006368160247803, 0.00495305585861206, 0.004962399959564209, 0.004974495887756347, 0.005032959938049316, 0.005007359981536865, 0.004974592208862305, 0.005009439945220948, 0.005253087997436523, 0.005024767875671386, 0.005025792121887207, 0.005073919773101807, 0.005023744106292725, 0.00501145601272583, 0.0050063362121582035, 0.00501964807510376, 0.0050032639503479, 0.004946944236755371, 0.004964352130889893, 0.004985856056213379, 0.004966432094573974, 0.005014495849609375, 0.004976640224456787, 0.005004288196563721, 0.004988927841186524, 0.005054463863372802, 0.00496230411529541, 0.004983808040618896, 0.00501964807510376, 0.004975615978240967, 0.004967423915863037, 0.0050124797821044925, 0.004989952087402344, 0.004968448162078858, 0.004947968006134033, 0.004963327884674072, 0.0050206718444824215, 0.0049797120094299315, 0.00496127986907959, 0.004973567962646484, 0.005055488109588623, 0.004993023872375488, 0.004943871974945068, 0.004975615978240967, 0.005015552043914795, 0.004975615978240967, 0.005039103984832764, 0.004957183837890625, 0.004994048118591309, 0.004956160068511963, 0.004981760025024414, 0.0049530878067016604, 0.004999167919158935, 0.0050032639503479, 0.004947968006134033, 0.010899456024169921, 0.004956160068511963, 0.005339136123657226, 0.004977663993835449, 0.004944896221160889, 0.0049459199905395506, 0.004975615978240967, 0.0049500160217285155, 0.004955135822296143, 0.0049469761848449706, 0.004966368198394775, 0.00496127986907959, 0.004967423915863037, 0.004941823959350586, 0.0049387521743774416, 0.004975615978240967, 0.004955135822296143, 0.004935679912567138, 0.004940800189971924, 0.004968448162078858, 0.004942912101745606, 0.004944831848144531, 0.004957183837890625, 0.004935679912567138, 0.004963327884674072, 0.004973567962646484, 0.004981760025024414, 0.004947968006134033, 0.004965375900268554, 0.004944896221160889, 0.0049725441932678225, 0.004968448162078858, 0.0049459199905395506, 0.004939775943756103, 0.0049387521743774416, 0.004959231853485108, 0.004952064037322998, 0.004964352130889893, 
0.004922368049621582, 0.004946944236755371, 0.004955135822296143, 0.004947968006134033, 0.004941823959350586, 0.004968448162078858, 0.004951039791107178, 0.004951039791107178, 0.004958208084106445, 0.005014527797698975, 0.005008384227752686, 0.0053053760528564455, 0.00501961612701416, 0.004959231853485108, 0.004993023872375488, 0.00495411205291748, 0.0049530878067016604, 0.0049530878067016604, 0.004937727928161621, 0.004996096134185791, 0.004956192016601563, 0.0050052800178527835, 0.004957183837890625, 0.004971519947052002, 0.004952064037322998, 0.010943488121032715, 0.004969471931457519, 0.004958208084106445, 0.004952064037322998, 0.004971519947052002, 0.004953120231628418, 0.004945888042449951, 0.004965375900268554, 0.004978687763214112, 0.0049909758567810054, 0.004960256099700928, 0.004955135822296143, 0.004973567962646484, 0.004984831809997559, 0.004951039791107178, 0.004955135822296143, 0.00496127986907959, 0.0050022401809692385, 0.004971519947052002, 0.0049500160217285155, 0.004969471931457519, 0.004958208084106445, 0.004959231853485108, 0.004963327884674072, 0.004947968006134033, 0.004978687763214112, 0.0049530878067016604, 0.004960256099700928, 0.004988927841186524, 0.0049725441932678225, 0.005004288196563721, 0.004957183837890625, 0.004946944236755371, 0.004976640224456787, 0.004969471931457519, 0.004966400146484375, 0.0049530878067016604, 0.004976640224456787, 0.004966400146484375, 0.004958208084106445, 0.004967423915863037, 0.004927487850189209, 0.004940800189971924, 0.004959231853485108, 0.004929535865783692, 0.004959231853485108, 0.004932608127593994, 0.004964416027069092, 0.004933568000793457, 0.004967423915863037, 0.00495411205291748, 0.004966400146484375, 0.004933631896972656, 0.004965407848358154, 0.004966368198394775, 0.0049428482055664065, 0.004939775943756103, 0.004945951938629151, 0.0050124478340148925, 0.004957183837890625, 0.004965375900268554, 0.004958208084106445, 0.004975615978240967, 0.010309632301330567, 0.004661248207092285, 0.004689919948577881, 0.004666368007659912, 0.004683775901794434, 0.004651072025299072, 0.004675519943237305, 0.004666368007659912, 0.004701183795928955, 0.0047288317680358885, 0.004684800148010254, 0.004674560070037842, 0.004679679870605469, 0.004649983882904053, 0.004682752132415771, 0.004661248207092285, 0.00466431999206543, 0.004660223960876465, 0.004668416023254395, 0.004673535823822021, 0.004698112010955811, 0.004671487808227539, 0.004660223960876465, 0.00464793586730957, 0.004661248207092285, 0.004670464038848877, 0.004675583839416504, 0.004677696228027344, 0.004669375896453857, 0.004677631855010986, 0.004670464038848877, 0.004671487808227539, 0.004698112010955811, 0.004674560070037842, 0.0046929922103881834, 0.0046694397926330565, 0.004743167877197266, 0.004678656101226806, 0.004701183795928955, 0.004677696228027344, 0.004694975852966308, 0.004681727886199951, 0.004681727886199951, 0.004690944194793701, 0.004739071846008301, 0.004680704116821289, 0.004676608085632325, 0.0046694397926330565, 0.004678656101226806, 0.004708352088928222, 0.004675583839416504, 0.0046991357803344725, 0.004668416023254395, 0.004682752132415771, 0.004674560070037842, 0.004695040225982666, 0.004667391777038574, 0.0046929922103881834, 0.004695040225982666, 0.004689919948577881, 0.004726784229278564, 0.004747263908386231, 0.004675583839416504, 0.010272768020629883, 0.004667391777038574, 0.004685823917388916, 0.004677631855010986, 0.004673535823822021, 0.004708352088928222, 0.004666368007659912, 0.004690944194793701, 0.004684800148010254, 0.0046694397926330565, 
0.004674560070037842, 0.004666368007659912, 0.004674560070037842, 0.005073919773101807, 0.004741119861602783, 0.004696063995361328, 0.004681727886199951, 0.004684800148010254, 0.00469708776473999, 0.004673535823822021, 0.004673535823822021, 0.004674560070037842, 0.004663296222686767, 0.004666368007659912, 0.004695040225982666, 0.004681727886199951, 0.004686848163604736, 0.00467251205444336, 0.0046622719764709475, 0.004681727886199951, 0.004689919948577881, 0.004676608085632325, 0.004676608085632325, 0.004682752132415771, 0.004679679870605469, 0.004696063995361328, 0.0046991357803344725, 0.004671487808227539, 0.004667391777038574, 0.004671487808227539, 0.004666368007659912, 0.004683775901794434, 0.00471449613571167, 0.004660223960876465, 0.004687871932983399, 0.0046592001914978025, 0.0046622719764709475, 0.004686848163604736, 0.0046694397926330565, 0.004675615787506104, 0.0047103681564331056, 0.004660287857055664, 0.004666304111480713, 0.004675583839416504, 0.004678656101226806, 0.00465715217590332, 0.004686848163604736, 0.004663296222686767, 0.004663296222686767, 0.004667391777038574, 0.004703231811523437, 0.0046622719764709475, 0.004679679870605469, 0.010308608055114746, 0.004680704116821289, 0.004694015979766846, 0.00467251205444336, 0.004668416023254395, 0.004666368007659912, 0.004689919948577881, 0.004663296222686767, 0.0046888961791992185, 0.004654111862182617, 0.00467145586013794, 0.00466431999206543, 0.0046991357803344725, 0.004660223960876465, 0.00465715217590332, 0.0046592001914978025, 0.004660223960876465, 0.004723711967468262, 0.004698112010955811, 0.004667391777038574, 0.004667391777038574, 0.004695040225982666, 0.004678656101226806, 0.004667391777038574, 0.004678656101226806, 0.00468179178237915, 0.0046724481582641605, 0.004663296222686767, 0.004666368007659912, 0.004708352088928222, 0.0046888961791992185, 0.004686848163604736, 0.00466534423828125, 0.004670464038848877, 0.004702208042144776, 0.004663296222686767, 0.004685823917388916, 0.004660223960876465, 0.004684800148010254, 0.0046694397926330565, 0.00466431999206543, 0.004683775901794434, 0.004723711967468262, 0.004679679870605469, 0.0046694397926330565, 0.004674560070037842, 0.00466534423828125, 0.004664383888244629, 0.004686848163604736, 0.004668352127075196, 0.004679679870605469, 0.0046919679641723635, 0.004680704116821289, 0.0046888961791992185, 0.00467251205444336, 0.004683775901794434, 0.00466431999206543, 0.004671487808227539, 0.004675583839416504, 0.004724736213684082, 0.0046592001914978025, 0.0046694397926330565, 0.00466534423828125, 0.010238975524902343, 0.004681727886199951, 0.004683775901794434, 0.004646912097930909, 0.0046694397926330565, 0.004671487808227539, 0.004682752132415771, 0.00466431999206543, 0.004667391777038574, 0.00467251205444336, 0.004666431903839112, 0.0046908798217773435, 0.004677631855010986, 0.004666368007659912, 0.004679679870605469, 0.0046581759452819825, 0.004660223960876465, 0.004676608085632325, 0.004689919948577881, 0.004679679870605469, 0.004685855865478515, 0.004684768199920654, 0.00467251205444336, 0.004689919948577881, 0.004683775901794434, 0.00469708776473999, 0.004671487808227539, 0.00468179178237915, 0.004666304111480713, 0.004685823917388916, 0.004700160026550293, 0.004696063995361328, 0.004673535823822021, 0.00467251205444336, 0.004687871932983399, 0.004684800148010254, 0.004671487808227539, 0.0046888961791992185, 0.004687871932983399, 0.004675583839416504, 0.004683775901794434, 0.00470527982711792, 0.004673535823822021, 0.004683775901794434, 0.004690976142883301, 
0.0046806721687316895, 0.004701183795928955, 0.004927487850189209, 0.004762688159942627, 0.004704192161560059, 0.0047226881980895995, 0.0046991357803344725, 0.004758528232574463, 0.0046929922103881834, 0.0046694397926330565, 0.004680704116821289, 0.00467251205444336, 0.0047523841857910155, 0.004759552001953125, 0.0047185921669006346, 0.004741119861602783, 0.0048865280151367185, 0.004988927841186524, 0.010466303825378418, 0.004708352088928222, 0.004667391777038574, 0.004715519905090332, 0.004671487808227539, 0.004667391777038574, 0.004675583839416504, 0.004675583839416504, 0.004668416023254395, 0.004681727886199951, 0.004679679870605469, 0.004694015979766846, 0.004682752132415771, 0.004671487808227539, 0.004673535823822021, 0.00471449613571167, 0.004663296222686767, 0.005022719860076904, 0.00501964807510376, 0.005008384227752686, 0.004973567962646484, 0.004971519947052002, 0.004971519947052002, 0.0049725441932678225, 0.005005311965942383, 0.004981760025024414, 0.004964352130889893, 0.004997119903564453, 0.005005311965942383, 0.00499507188796997, 0.004981760025024414, 0.004976640224456787, 0.005396480083465576, 0.004939839839935302, 0.004965312004089355, 0.005051392078399658, 0.004981760025024414, 0.004957183837890625, 0.004968448162078858, 0.0050063362121582035, 0.004973567962646484, 0.004967423915863037, 0.00497049617767334, 0.004996096134185791, 0.00496230411529541, 0.004964352130889893, 0.0049797120094299315, 0.004967423915863037, 0.00496127986907959, 0.004981760025024414, 0.004943871974945068, 0.0049623680114746094, 0.004978623867034912, 0.004967423915863037, 0.004967423915863037, 0.004952127933502197, 0.005006271839141846, 0.0050032639503479, 0.004969471931457519, 0.004956192016601563, 0.005013472080230713, 0.004978687763214112, 0.005000192165374756, 0.010371071815490723, 0.004695040225982666, 0.0047185921669006346, 0.004679711818695068, 0.004683743953704834, 0.004731904029846192, 0.004683775901794434, 0.004679679870605469, 0.004703231811523437, 0.004663296222686767, 0.004685823917388916, 0.0046991357803344725, 0.004680704116821289, 0.004716544151306152, 0.004674560070037842, 0.004694015979766846, 0.00471347188949585, 0.004679679870605469, 0.0046888961791992185, 0.004683775901794434, 0.0046888961791992185, 0.0046919679641723635, 0.004694015979766846, 0.0046919679641723635, 0.004703231811523437, 0.004685823917388916, 0.004701183795928955, 0.004687903881072998, 0.004727776050567627, 0.0046888961791992185, 0.0046888961791992185, 0.004685823917388916, 0.004681727886199951, 0.004695040225982666, 0.004703231811523437, 0.004724768161773682, 0.004707295894622803, 0.004676608085632325, 0.00466534423828125, 0.004696063995361328, 0.004670464038848877, 0.004690944194793701, 0.0046694397926330565, 0.004678656101226806, 0.004678656101226806, 0.004709375858306885, 0.004681727886199951, 0.004682752132415771, 0.004676608085632325, 0.004670464038848877, 0.00465715217590332, 0.004695040225982666, 0.004677631855010986, 0.0046694397926330565, 0.0046622719764709475, 0.0046622719764709475, 0.004685823917388916, 0.00471449613571167, 0.004674560070037842, 0.004676608085632325, 0.004685823917388916, 0.00467251205444336, 0.00466431999206543, 0.01028819179534912, 0.004674496173858643, 0.004673535823822021, 0.004695040225982666, 0.0046888961791992185, 0.004679679870605469, 0.004674560070037842, 0.004660223960876465, 0.004665408134460449, 0.004706240177154541, 0.004668416023254395, 0.004679679870605469, 0.0046561279296875, 0.0046561279296875, 0.00466534423828125, 0.004700160026550293, 0.004658239841461182, 
0.004682688236236572, 0.004687871932983399, 0.004645952224731445, 0.0046663999557495115, 0.004707231998443604, 0.004675648212432861, 0.004669375896453857, 0.004670464038848877, 0.004675583839416504, 0.004661248207092285, 0.00469814395904541, 0.004681695938110351, 0.0046929922103881834, 0.004711423873901367, 0.004680704116821289, 0.004666368007659912, 0.004678656101226806, 0.004668416023254395, 0.004682752132415771, 0.004803584098815918, 0.004682752132415771, 0.004653056144714355, 0.004696063995361328, 0.0046919679641723635, 0.004673535823822021, 0.00466534423828125, 0.00463974380493164, 0.004631552219390869, 0.004644864082336426, 0.004636672019958496, 0.0046284799575805665, 0.004631552219390869, 0.004626431941986084, 0.0046356477737426755, 0.004688928127288819, 0.0046560959815979, 0.00469708776473999, 0.004623360157012939, 0.004632575988769531, 0.0046254081726074215, 0.00463974380493164, 0.0046694397926330565, 0.004702208042144776, 0.004667391777038574, 0.004676608085632325, 0.004671487808227539, 0.010304512023925782, 0.004685823917388916, 0.004689919948577881, 0.004676608085632325, 0.004686848163604736, 0.004702208042144776, 0.004680704116821289, 0.004667391777038574, 0.0046561279296875, 0.0046561279296875, 0.004683775901794434, 0.0047226881980895995, 0.005391359806060791, 0.005395455837249756, 0.005496831893920898, 0.005044223785400391, 0.004983808040618896, 0.005035039901733399, 0.004988895893096924, 0.0049827837944030765, 0.004996096134185791, 0.005032959938049316, 0.005000192165374756, 0.004996096134185791, 0.004989952087402344, 0.004996096134185791, 0.004955135822296143, 0.0049725441932678225, 0.004958208084106445, 0.004997119903564453, 0.004957183837890625, 0.005006464004516602, 0.004940671920776367, 0.00496127986907959, 0.005092351913452148, 0.004964352130889893, 0.004960256099700928, 0.004983808040618896, 0.004971519947052002, 0.004966400146484375, 0.004955135822296143, 0.004987904071807861, 0.0049500160217285155, 0.004951039791107178, 0.004985856056213379, 0.004974688053131103, 0.005358496189117432, 0.004964352130889893, 0.004943871974945068, 0.0049827837944030765, 0.00496127986907959, 0.0049459199905395506, 0.004946944236755371, 0.0050135040283203125, 0.004973567962646484, 0.004967423915863037, 0.004963327884674072, 0.004971519947052002, 0.004955135822296143, 0.004975679874420166, 0.00497657585144043, 0.004976640224456787, 0.0049500160217285155, 0.010941439628601075, 0.00497049617767334, 0.004964352130889893, 0.0049500160217285155, 0.004947008132934571, 0.005011392116546631, 0.005238783836364746, 0.00501964807510376, 0.0050135040283203125, 0.004999167919158935, 0.0050135040283203125, 0.0050022401809692385, 0.004996096134185791, 0.0049500160217285155, 0.004967423915863037, 0.004964352130889893, 0.00497049617767334, 0.004973567962646484, 0.005004288196563721, 0.004947968006134033, 0.004965375900268554, 0.00501964807510376, 0.004977663993835449, 0.004956160068511963, 0.0049428482055664065, 0.004980735778808594, 0.0049489917755126955, 0.004966432094573974, 0.00496124792098999, 0.004993023872375488, 0.004963327884674072, 0.004966400146484375, 0.004988927841186524, 0.004967423915863037, 0.004971519947052002, 0.004966400146484375, 0.004939775943756103, 0.004988927841186524, 0.004994048118591309, 0.004947968006134033, 0.0049469761848449706, 0.005011424064636231, 0.0050022401809692385, 0.0049725441932678225, 0.004961343765258789, 0.004974527835845947, 0.004957183837890625, 0.004996096134185791, 0.004956160068511963, 0.004975615978240967, 0.004956160068511963, 0.004981823921203613, 
0.004953023910522461, 0.0049489917755126955, 0.005021696090698242, 0.004991040229797363, 0.005002175807952881, 0.004964352130889893, 0.005024767875671386, 0.004981760025024414, 0.004977663993835449, 0.004973567962646484, 0.004992000102996826, 0.010962944030761718, 0.005007359981536865, 0.00497049617767334, 0.004943871974945068, 0.004962336063385009, 0.00498582410812378, 0.0049428482055664065, 0.0049459199905395506, 0.004980735778808594, 0.0049725441932678225, 0.004994048118591309, 0.004947968006134033, 0.004956160068511963, 0.004969471931457519, 0.004966400146484375, 0.0049387521743774416, 0.004966400146484375, 0.0049827837944030765, 0.00494598388671875, 0.004952000141143799, 0.004947968006134033, 0.004983808040618896, 0.004944896221160889, 0.004949088096618652, 0.0049622077941894535, 0.0049500160217285155, 0.00499510383605957, 0.004972511768341064, 0.004959231853485108, 0.004958208084106445, 0.004980735778808594, 0.004956223964691162, 0.004955071926116944, 0.0049428482055664065, 0.004969471931457519, 0.0049827837944030765, 0.004958208084106445, 0.004981760025024414, 0.00496127986907959, 0.004999167919158935, 0.005646336078643799, 0.005000192165374756, 0.0049500160217285155, 0.0049725441932678225, 0.0049725441932678225, 0.004955135822296143, 0.004999167919158935, 0.00496127986907959, 0.004963327884674072, 0.004971519947052002, 0.005002336025238037, 0.0049407038688659664, 0.004978687763214112, 0.004967423915863037, 0.004951039791107178, 0.0050022401809692385, 0.00497049617767334, 0.004958208084106445, 0.004984831809997559, 0.00496230411529541, 0.004958208084106445, 0.004964352130889893, 0.004960256099700928, 0.01093120002746582, 0.004985856056213379, 0.004956160068511963, 0.004959231853485108, 0.004964352130889893, 0.0049909758567810054, 0.0049500160217285155, 0.005033984184265137, 0.004980735778808594, 0.00517632007598877, 0.0050135040283203125, 0.004992000102996826, 0.00497049617767334, 0.00502784013748169, 0.00501964807510376, 0.004993023872375488, 0.00496127986907959, 0.005008384227752686, 0.004987904071807861, 0.004984831809997559, 0.0049316477775573735, 0.004972479820251465, 0.004956160068511963, 0.004951039791107178, 0.004960256099700928, 0.004994048118591309, 0.00499513578414917, 0.004973504066467285, 0.0049459199905395506, 0.00495411205291748, 0.004994048118591309, 0.0049797120094299315, 0.0049428482055664065, 0.004959231853485108, 0.004924416065216064, 0.00491212797164917, 0.004874239921569825, 0.004888576030731201, 0.00491212797164917, 0.004946944236755371, 0.004894720077514648, 0.004904960155487061, 0.005119999885559082, 0.004955135822296143, 0.004970560073852539, 0.004945856094360351, 0.004975615978240967, 0.004952064037322998, 0.004958208084106445, 0.004966400146484375, 0.004952064037322998, 0.0050063362121582035, 0.004992000102996826, 0.004965375900268554, 0.004986879825592041, 0.004980735778808594, 0.004977663993835449, 0.004964352130889893, 0.00495411205291748, 0.005389311790466309, 0.004977663993835449, 0.0049879360198974605, 0.0049786558151245115, 0.01033625602722168, 0.004702208042144776, 0.004678656101226806, 0.004690944194793701, 0.004661248207092285, 0.004683775901794434, 0.004661248207092285, 0.0046592001914978025, 0.004661248207092285, 0.004703231811523437, 0.004674560070037842, 0.004671487808227539, 0.00466534423828125, 0.004683775901794434, 0.0046694397926330565, 0.004723711967468262, 0.004724736213684082, 0.004767744064331054, 0.004716544151306152, 0.0047185921669006346, 0.004746240139007568, 0.004702271938323975, 0.004668384075164795, 0.004682720184326172, 
0.004678656101226806, 0.0046561279296875, 0.004690944194793701, 0.00489577579498291, 0.004986847877502441, 0.004984831809997559, 0.005001215934753418, 0.0049530878067016604, 0.004968448162078858, 0.0049725441932678225, 0.0050063362121582035, 0.004997119903564453, 0.0049909758567810054, 0.004937727928161621, 0.004955135822296143, 0.004980735778808594, 0.0049797120094299315, 0.004963391780853271, 0.004947904109954834, 0.0049797120094299315, 0.004960256099700928, 0.004955135822296143, 0.004951039791107178, 0.004967423915863037, 0.004956160068511963, 0.004985856056213379, 0.004937727928161621, 0.00496127986907959, 0.004935679912567138, 0.004963327884674072, 0.004936704158782959, 0.004987904071807861, 0.0049695358276367185, 0.004972479820251465, 0.005102591991424561, 0.005063680171966553, 0.004999167919158935, 0.004966400146484375, 0.004966400146484375, 0.010396736145019531, 0.004684735774993896, 0.004666368007659912, 0.00466431999206543, 0.004674560070037842, 0.004703231811523437, 0.0046919679641723635, 0.00466534423828125, 0.004677631855010986, 0.00466534423828125, 0.004708352088928222, 0.0046888961791992185, 0.0046622719764709475, 0.0046581759452819825, 0.004683775901794434, 0.0046581759452819825, 0.0046929922103881834, 0.004671487808227539, 0.004666431903839112, 0.00467142391204834, 0.004673535823822021, 0.004682752132415771, 0.004686848163604736, 0.004682752132415771, 0.004679679870605469, 0.004689919948577881, 0.004709375858306885, 0.004702208042144776, 0.004690944194793701, 0.004687871932983399, 0.004689919948577881, 0.00467251205444336, 0.004668416023254395, 0.004680704116821289, 0.004684800148010254, 0.004668416023254395, 0.004678688049316406, 0.004682720184326172, 0.004690944194793701, 0.004685823917388916, 0.004717567920684815, 0.00467251205444336, 0.004682752132415771, 0.004677696228027344, 0.004673471927642822, 0.004681727886199951, 0.0046919679641723635, 0.004673535823822021, 0.004716544151306152, 0.0046622719764709475, 0.004673535823822021, 0.004700160026550293, 0.004680704116821289, 0.004681727886199951, 0.00467251205444336, 0.0046561279296875, 0.004678656101226806, 0.004710400104522705, 0.004704256057739258, 0.004698112010955811, 0.004676608085632325, 0.004665408134460449, 0.0047400321960449215, 0.010270719528198241, 0.004685823917388916, 0.00467251205444336, 0.004701183795928955, 0.004676608085632325, 0.004710400104522705, 0.004678656101226806, 0.004681727886199951, 0.004667391777038574, 0.004702208042144776, 0.004678656101226806, 0.004673535823822021, 0.004676608085632325, 0.0046694397926330565, 0.0046694397926330565, 0.004689919948577881, 0.004683775901794434, 0.0046694397926330565, 0.00468281602859497, 0.004673471927642822, 0.004670464038848877, 0.004685823917388916, 0.0046715521812438964, 0.004660160064697266, 0.004662335872650147, 0.004670400142669678, 0.004724736213684082, 0.004695040225982666, 0.004712448120117187, 0.00467251205444336, 0.004679679870605469, 0.004693056106567383, 0.004698048114776612, 0.004684800148010254, 0.0046622719764709475, 0.00467251205444336, 0.00466431999206543, 0.0046694397926330565, 0.0046929922103881834, 0.004678656101226806, 0.004667391777038574, 0.00471347188949585, 0.004675583839416504, 0.004661248207092285, 0.004696063995361328, 0.0046622719764709475, 0.004682752132415771, 0.004675615787506104, 0.00467964792251587, 0.0046694397926330565, 0.0046888961791992185, 0.00464896011352539, 0.004680704116821289, 0.0046561279296875, 0.004663296222686767, 0.004652031898498535, 0.004866047859191895, 0.004696063995361328, 0.0046694397926330565, 
0.0046592001914978025, 0.004873216152191162, 0.0047185921669006346, 0.004674560070037842, 0.010381312370300292, 0.0046929922103881834, 0.004715519905090332, 0.004690944194793701, 0.004695040225982666, 0.004683775901794434, 0.004685823917388916, 0.004704256057739258, 0.004679679870605469, 0.004682752132415771, 0.004710400104522705, 0.004685823917388916, 0.00505241584777832, 0.0046991357803344725, 0.00467251205444336, 0.0046694397926330565, 0.004670464038848877, 0.004680704116821289, 0.004693056106567383, 0.004693952083587647, 0.004708352088928222, 0.004681727886199951, 0.004679679870605469, 0.004667391777038574, 0.004699168205261231, 0.004682720184326172, 0.004687871932983399, 0.0046919679641723635, 0.00467251205444336, 0.004677696228027344, 0.004711359977722168, 0.004685855865478515, 0.004684768199920654, 0.004681727886199951, 0.004700160026550293, 0.004687871932983399, 0.004698112010955811, 0.0046929922103881834, 0.004710400104522705, 0.004701183795928955, 0.004695040225982666, 0.0046929922103881834, 0.004746240139007568, 0.004689919948577881, 0.004677631855010986, 0.004681727886199951, 0.004683775901794434, 0.004679679870605469, 0.004685823917388916, 0.00469708776473999, 0.004689919948577881, 0.004724736213684082, 0.004733952045440673, 0.004770815849304199, 0.00471347188949585, 0.004731904029846192, 0.004695072174072266, 0.00467964792251587, 0.00467251205444336, 0.004702208042144776, 0.004675583839416504, 0.004675583839416504, 0.00466431999206543, 0.01036083221435547, 0.0046929922103881834, 0.004674560070037842, 0.004666368007659912, 0.00467251205444336, 0.00470527982711792, 0.004666368007659912, 0.004685823917388916, 0.004676608085632325, 0.004674560070037842, 0.004673535823822021, 0.0046929922103881834, 0.004674560070037842, 0.00470630407333374, 0.004680704116821289, 0.004677631855010986, 0.004675583839416504, 0.004719615936279297, 0.004680704116821289, 0.004682752132415771, 0.00471449613571167, 0.004679679870605469, 0.004663296222686767, 0.004710400104522705, 0.004674560070037842, 0.00467251205444336, 0.004682752132415771, 0.004681727886199951, 0.004711423873901367, 0.004681727886199951, 0.004685887813568115, 0.004688831806182861, 0.004689919948577881, 0.00470527982711792, 0.00471449613571167, 0.004681727886199951, 0.004671487808227539, 0.0046715521812438964, 0.004675519943237305, 0.00471449613571167, 0.004683775901794434, 0.004712448120117187, 0.004680704116821289, 0.004716544151306152, 0.004685823917388916, 0.004683775901794434, 0.004697152137756348, 0.004722623825073242, 0.004668416023254395, 0.0046888961791992185, 0.004675583839416504, 0.004682752132415771, 0.004696063995361328, 0.004687871932983399, 0.004681727886199951, 0.004676608085632325, 0.004681727886199951, 0.0047257599830627444, 0.004681727886199951, 0.004685855865478515, 0.004689888000488281, 0.004684800148010254, 0.004683775901794434, 0.010320927619934082, 0.004685791969299316, 0.004675583839416504, 0.004682784080505371, 0.004712416172027588, 0.004682752132415771, 0.004687871932983399, 0.004663296222686767, 0.004726784229278564, 0.00470630407333374, 0.004694015979766846, 0.004684800148010254, 0.004683807849884033, 0.004673503875732422, 0.0046919679641723635, 0.004712448120117187, 0.004684800148010254, 0.004631552219390869, 0.004652031898498535, 0.004638720035552979, 0.004643839836120605, 0.004674560070037842, 0.004634624004364014, 0.00464793586730957, 0.004645887851715088, 0.004642816066741944, 0.0046356477737426755, 0.004689919948577881, 0.004663296222686767, 0.00471449613571167, 0.004652031898498535, 
0.004646912097930909, 0.0046592001914978025, 0.004702208042144776, 0.004700160026550293, 0.005682176113128662, 0.00515993595123291, 0.0050432000160217285, 0.0049909758567810054, 0.004976640224456787, 0.00496127986907959, 0.005007359981536865, 0.005014527797698975, 0.004955135822296143, 0.005272575855255127, 0.005090303897857666, 0.005007359981536865, 0.005004288196563721, 0.00501145601272583, 0.0050032639503479, 0.004977663993835449, 0.005002272129058838, 0.004973536014556885, 0.004975615978240967, 0.005008384227752686, 0.00496230411529541, 0.004964352130889893, 0.004988927841186524, 0.004975615978240967, 0.0049428482055664065, 0.004923391819000244, 0.004996096134185791, 0.004959231853485108, 0.010363903999328614, 0.004710400104522705, 0.004719615936279297, 0.004661248207092285, 0.0047226881980895995, 0.004681727886199951, 0.004683775901794434, 0.004660223960876465, 0.004679679870605469, 0.004680704116821289, 0.004679679870605469, 0.00470527982711792, 0.004700160026550293, 0.004670464038848877, 0.00469708776473999, 0.004968448162078858, 0.0046787199974060055, 0.004639679908752441, 0.004667391777038574, 0.004631552219390869, 0.004719615936279297, 0.004671487808227539, 0.004673535823822021, 0.004666431903839112, 0.004714431762695312, 0.004723711967468262, 0.004690944194793701, 0.004682752132415771, 0.0046704959869384765, 0.005066751956939697, 0.0047247037887573244, 0.004682752132415771, 0.004682752132415771, 0.00466431999206543, 0.00466431999206543, 0.004696063995361328, 0.004681727886199951, 0.004696063995361328, 0.00467251205444336, 0.004686848163604736, 0.0046254081726074215, 0.004653056144714355, 0.004638720035552979, 0.0046254081726074215, 0.004636672019958496, 0.0046284799575805665, 0.0046295042037963864, 0.0046663999557495115, 0.00461513614654541, 0.004649983882904053, 0.004620287895202637, 0.004633600234985351, 0.004680704116821289, 0.004709375858306885, 0.0047226881980895995, 0.0047298879623413085, 0.0047144641876220705, 0.004686848163604736, 0.00466537618637085, 0.004689888000488281, 0.004651008129119873, 0.004624383926391602, 0.0046254081726074215, 0.010812416076660156, 0.004984831809997559, 0.004967455863952637, 0.00496943998336792, 0.0048855037689208985, 0.004897791862487793, 0.004894720077514648, 0.0049530878067016604, 0.0049203200340270994, 0.004903935909271241, 0.004932608127593994, 0.004895743846893311, 0.004931583881378174, 0.004960256099700928, 0.004947968006134033, 0.004939775943756103, 0.004963327884674072, 0.004967423915863037, 0.004966400146484375, 0.00497049617767334, 0.004958208084106445, 0.004966400146484375, 0.0050063362121582035, 0.004960256099700928, 0.0049459199905395506, 0.004988927841186524, 0.004976640224456787, 0.004967423915863037, 0.004965375900268554, 0.004999199867248535, 0.004988895893096924, 0.004984831809997559, 0.004966400146484375, 0.004985856056213379, 0.004956160068511963, 0.004968448162078858, 0.004940800189971924, 0.00496230411529541, 0.0049500160217285155, 0.004944896221160889, 0.0049428482055664065, 0.004958303928375244, 0.004982687950134277, 0.004951039791107178, 0.004957183837890625, 0.004993023872375488, 0.00495411205291748, 0.0049428482055664065, 0.004940832138061523, 0.005028831958770752, 0.004944896221160889, 0.004976640224456787, 0.004960256099700928, 0.0049428482055664065, 0.004975615978240967, 0.004960256099700928, 0.004936704158782959, 0.004930560111999512, 0.004975615978240967, 0.00495411205291748, 0.004946944236755371, 0.004966400146484375, 0.004974592208862305, 0.010241024017333985, 0.004670464038848877, 0.004703328132629394, 
0.0046683201789855956, 0.004679679870605469, 0.00467251205444336, 0.00466534423828125, 0.004676608085632325, 0.004686848163604736, 0.004673535823822021, 0.004678656101226806, 0.004675583839416504, 0.004670464038848877, 0.004666368007659912, 0.004673535823822021, 0.00467251205444336, 0.00466534423828125, 0.004677631855010986, 0.0047084479331970215, 0.004663199901580811, 0.004684832096099854, 0.00467248010635376, 0.0046356477737426755, 0.004633600234985351, 0.0046284799575805665, 0.0046284799575805665, 0.004690944194793701, 0.004631552219390869, 0.004667391777038574, 0.00466431999206543, 0.004627456188201904, 0.004621312141418457, 0.004642816066741944, 0.004622335910797119, 0.004646912097930909, 0.00461516809463501, 0.004616191864013672, 0.004610047817230224, 0.004623360157012939, 0.0046991357803344725, 0.004689919948577881, 0.0046694397926330565, 0.0046694397926330565, 0.004690944194793701, 0.004679679870605469, 0.0047288317680358885, 0.004686848163604736, 0.004680704116821289, 0.004634624004364014, 0.00463974380493164, 0.004637695789337158, 0.004644864082336426, 0.004626431941986084, 0.004630527973175049, 0.00465715217590332, 0.004656159877777099, 0.004662240028381347, 0.004712448120117187, 0.0055203838348388675, 0.004751423835754395, 0.004674496173858643, 0.00465715217590332, 0.004637695789337158, 0.01031987190246582, 0.004689919948577881, 0.004703231811523437, 0.004700160026550293, 0.004671487808227539, 0.004673535823822021, 0.004690944194793701, 0.004673535823822021, 0.004696063995361328, 0.004686848163604736, 0.004674592018127441, 0.004696032047271728, 0.0046888961791992185, 0.0046888961791992185, 0.00466534423828125, 0.004680704116821289, 0.004671487808227539, 0.004670464038848877, 0.004680704116821289, 0.004690944194793701, 0.004684800148010254, 0.00467251205444336, 0.004671487808227539, 0.004660223960876465, 0.004679679870605469, 0.004744192123413086, 0.004680736064910889, 0.004701151847839356, 0.004704256057739258, 0.004689919948577881, 0.004698112010955811, 0.0046929922103881834, 0.0049192957878112795, 0.004748288154602051, 0.00469708776473999, 0.004689919948577881, 0.004695040225982666, 0.0046929922103881834, 0.0046919679641723635, 0.004675583839416504, 0.004745215892791748, 0.004859903812408447, 0.004769792079925537, 0.004729856014251709, 0.005243904113769531, 0.00515174388885498, 0.00520908784866333, 0.005025792121887207, 0.004997119903564453, 0.005040128231048584, 0.004974592208862305, 0.004997119903564453, 0.0049428482055664065, 0.004944896221160889, 0.00499513578414917, 0.004932544231414795, 0.0049489917755126955, 0.004955135822296143, 0.004975615978240967, 0.0049530878067016604, 0.004957183837890625, 0.004959231853485108, 0.004980735778808594, 0.010954751968383789, 0.00502784013748169, 0.004978687763214112, 0.005044223785400391, 0.004977663993835449, 0.0050032639503479, 0.004963327884674072, 0.004956160068511963, 0.004940800189971924, 0.004947968006134033, 0.004969471931457519, 0.004996096134185791, 0.004957183837890625, 0.004978687763214112, 0.004895743846893311, 0.004901887893676758, 0.004877312183380127, 0.004895743846893311, 0.004905983924865722, 0.004891647815704346, 0.004931615829467773, 0.004930592060089111, 0.005009344100952148, 0.005026815891265869, 0.0050022401809692385, 0.004996096134185791, 0.005074944019317627, 0.005021696090698242, 0.004974592208862305, 0.004897791862487793, 0.004923391819000244, 0.004918272018432617, 0.0049489917755126955, 0.00496230411529541, 0.004985856056213379, 0.005030911922454834, 0.004921343803405762, 0.0048895998001098635, 
0.004898816108703613, 0.004981760025024414, 0.004973567962646484, 0.004960256099700928, 0.004987904071807861, 0.00497049617767334, 0.0049827837944030765, 0.0049469761848449706, 0.0049417920112609865, 0.004987904071807861, 0.0049459199905395506, 0.0049909758567810054, 0.004940800189971924, 0.004957183837890625, 0.004966400146484375, 0.0049500160217285155, 0.004935679912567138, 0.004992000102996826, 0.004973567962646484, 0.0049459199905395506, 0.004967423915863037, 0.004981760025024414, 0.004956192016601563, 0.004973536014556885, 0.004957183837890625]",tokens/s,202.92757705602773,,,main,False,False @@ -11700,7 +11700,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664e904f-247958d235db3ad86f4a4090;530399dd-b0ea-4581-a0d6-30c665b8a9db) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664fe1ef-1f503fd340fc45b20bdac4e6;b815d7c1-7009-4c77-8ef1-b55c3bc47b9e) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json. @@ -11825,7 +11825,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e959a-116f0b0d7e37be8f582a69a4;0a0a47a2-d704-48b6-b592-8887aa3685a7) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe8d9-4ba9f19e2d9e2e1009ad7dee;e33ed643-a2da-46a9-a662-56b02da9ced9) Repository Not Found for url: https://huggingface.co/x/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -11885,7 +11885,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664e901a-54095ad94daa049850c60eea;a1ee87e6-53fd-44d2-aa9f-7f28d7e85bc4) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664fe1b6-7f4843ba3b6ec9a450fdf0eb;78e73276-a343-4de3-b086-502c2d728645) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json. @@ -12140,7 +12140,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1560, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp_bymrd4u/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp8qyky8le/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): @@ -12329,7 +12329,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664e8e0e-6db72e166c82d8c100046c49;24f8b157-aff4-4190-ab69-cd083bbbc7e7) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664fdfa0-04c0f2f71e25f4d0279296a4;ad282b09-34f3-4bcc-a455-2cc4872a2841) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json. @@ -12414,7 +12414,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e9679-0340cd7a7b6e487e027634a6;8ad6409e-dc6b-405d-b5b8-c0c94559e3ab) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe832-1f484706056308f4469d72da;f3ed302a-c8c9-4a7f-aba8-0a821095483a) Repository Not Found for url: https://huggingface.co/l/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -12446,7 +12446,7 @@ If this is a private repository, make sure to pass a token having permission to ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,MB,1246.285824,14529.593344,0.0,13883.14624,13705.186304,s,10,0.938450180053711,0.09384501800537108,0.00019577791226699223,0.09376971435546874,0.09399090576171874,0.09418457412719726,0.09433950881958007,"[0.09437824249267578, 0.09368576049804687, 0.09371097564697266, 0.09381791687011719, 0.09378534698486328, 0.09375408172607422, 0.09391398620605469, 0.09371485137939453, 0.09394786834716796, 0.09374114990234375]",tokens/s,2727.9018688594447,kWh,1.1102459435027211e-06,6.082857795076409e-07,5.9449606230467145e-06,7.663492346057078e-06,tokens/kWh,33405135.470868427,MB,1246.285824,14529.593344,0.0,13883.14624,13856.22528,s,10,22.596658203125,2.2596658203124997,0.0014668920688748867,2.25974267578125,2.2615420166015623,2.261759655761719,2.2619337670898436,"[2.2594091796875, 2.258534423828125, 2.25855029296875, 2.260200439453125, 2.258156005859375, 2.260076171875, 2.25726806640625, 2.26099267578125, 2.261977294921875, 2.26149365234375]",tokens/s,27.880228763776866,kWh,2.661313820835339e-05,1.45850039414841e-05,0.000113523023840154,0.0001547211659899915,tokens/kWh,407184.10824331106,,s,629,23.40419585037232,0.037208578458461546,0.011207920818503291,0.03585740661621094,0.035990732574462884,0.03602493438720703,0.13015347290039062,"[0.03576633453369141, 0.03570272064208985, 0.03567923355102539, 0.03569868850708008, 0.03576319885253906, 0.03578777694702148, 0.035759105682373046, 0.035757057189941405, 0.03573964691162109, 0.03572531127929687, 0.03573555374145508, 0.035722240447998044, 0.03572633743286133, 0.035746814727783204, 0.03583590316772461, 0.0357918701171875, 0.035767295837402346, 0.03577142333984375, 0.03578774261474609, 0.03594137573242188, 0.03580928039550781, 0.03577139282226562, 0.03576319885253906, 0.035796993255615236, 0.0358656005859375, 0.035850238800048825, 0.03580313491821289, 0.03581644821166992, 0.03580723190307617, 0.03582156753540039, 0.035829761505126956, 0.035789825439453124, 0.035806209564208984, 0.035860481262207033, 0.03585126495361328, 0.03587583923339844, 0.03589023971557617, 0.03598944091796875, 0.035827713012695314, 0.03584511947631836, 0.03585433578491211, 0.03586969757080078, 0.0358809585571289, 0.035884033203125, 0.03584000015258789, 0.0358922233581543, 0.03591680145263672, 0.03594137573242188, 0.03593830490112305, 0.035888126373291016, 0.03603046417236328, 0.036225025177001956, 0.03597619247436523, 0.03592396926879883, 0.035937278747558594, 0.03602431869506836, 0.035939327239990236, 0.035937278747558594, 0.035939327239990236, 0.035932159423828124, 0.03596799850463867, 0.035972095489501955, 0.13008793640136718, 0.035683326721191407, 0.03566387176513672, 0.035664894104003905, 0.035745792388916016, 0.035748863220214845, 0.035710975646972655, 
0.035740673065185545, 0.035768318176269534, 0.03575091171264649, 0.035773441314697264, 0.03581644821166992, 0.03583283233642578, 0.03596492767333984, 0.035958782196044925, 0.03571507263183594, 0.03576115036010742, 0.035740673065185545, 0.03575807952880859, 0.03577139282226562, 0.0357283821105957, 0.03580518341064453, 0.03584000015258789, 0.035773441314697264, 0.03575500869750976, 0.03581542587280274, 0.03581644821166992, 0.03581951904296875, 0.03583488082885742, 0.0358737907409668, 0.035877918243408205, 0.0358502082824707, 0.03582668685913086, 0.03582156753540039, 0.035806209564208984, 0.03584102249145508, 0.03588608169555664, 0.03583488082885742, 0.03588915252685547, 0.03587583923339844, 0.03582156753540039, 0.035866622924804685, 0.03589836883544922, 0.03585433578491211, 0.035862529754638675, 0.03589017486572266, 0.0359444465637207, 0.03592396926879883, 0.0359106559753418, 0.03592396926879883, 0.0358809585571289, 0.03593011093139648, 0.035937278747558594, 0.03591372680664062, 0.03588198471069336, 0.035915775299072264, 0.03594035339355469, 0.0359444465637207, 0.03592601776123047, 0.03593625640869141, 0.0359106559753418, 0.03600281524658203, 0.035985408782958986, 0.13015347290039062, 0.035710975646972655, 0.03567718505859375, 0.03569049453735352, 0.035932159423828124, 0.03577036666870117, 0.035727359771728515, 0.0357283821105957, 0.03576422500610352, 0.035678207397460936, 0.035740673065185545, 0.035724288940429685, 0.03574169540405273, 0.03572326278686523, 0.03567001724243164, 0.035691520690917966, 0.03571712112426758, 0.03574169540405273, 0.035796993255615236, 0.035759105682373046, 0.03581542587280274, 0.035759105682373046, 0.03575296020507813, 0.03583692932128906, 0.03579289627075195, 0.03579084777832031, 0.03580108642578125, 0.035810302734375, 0.03581644821166992, 0.03579904174804688, 0.03584511947631836, 0.035917823791503906, 0.036669441223144535, 0.036071422576904294, 0.03590041732788086, 0.03585945510864258, 0.03585740661621094, 0.035860481262207033, 0.03585433578491211, 0.03581542587280274, 0.03584102249145508, 0.03582259368896484, 0.03581644821166992, 0.035796993255615236, 0.03583488082885742, 0.03584921646118164, 0.03591884613037109, 0.035899391174316404, 0.03589427185058594, 0.03586969757080078, 0.035885055541992186, 0.035929088592529294, 0.03589120101928711, 0.035846145629882815, 0.03587788772583008, 0.035920894622802735, 0.03599871826171875, 0.036001792907714845, 0.036004863739013675, 0.03591680145263672, 0.03592704010009766, 0.03590348815917969, 0.03588915252685547, 0.13025074768066405, 0.03566796875, 0.03567001724243164, 0.03565363311767578, 0.035676158905029294, 0.035737598419189456, 0.035789825439453124, 0.0357283821105957, 0.035730430603027344, 0.035740673065185545, 0.035765281677246095, 0.035783679962158206, 0.035772384643554686, 0.03574476623535156, 0.03571200180053711, 0.03577036666870117, 0.03575500869750976, 0.03575807952880859, 0.03576422500610352, 0.03579391860961914, 0.03581644821166992, 0.0358205451965332, 0.03580316925048828, 0.03578979110717773, 0.03577139282226562, 0.035796993255615236, 0.03582156753540039, 0.03582463836669922, 0.03584307098388672, 0.03584000015258789, 0.03588608169555664, 0.03588710403442383, 0.03585740661621094, 0.03585228729248047, 0.035846145629882815, 0.035896320343017575, 0.03586867141723633, 0.03589120101928711, 0.03587071990966797, 0.035844097137451174, 0.035942401885986325, 0.03601408004760742, 0.03593011093139648, 0.035931137084960936, 0.03589734268188476, 0.035901439666748046, 0.03593523025512695, 0.035952640533447267, 
0.035929088592529294, 0.03589427185058594, 0.036087806701660154, 0.036286495208740235, 0.03608572769165039, 0.03600896072387695, 0.0359741439819336, 0.03601510238647461, 0.03602742385864258, 0.03600073623657227, 0.036034561157226565, 0.03601408004760742, 0.0360447998046875, 0.03609395217895508, 0.03596083068847656, 0.13020672607421874, 0.03569868850708008, 0.03577958297729492, 0.03568435287475586, 0.035732479095458985, 0.03572633743286133, 0.03571814346313477, 0.03574784088134766, 0.03570483016967774, 0.03570073699951172, 0.03576115036010742, 0.03582668685913086, 0.03580416107177734, 0.03570278549194336, 0.035743743896484374, 0.03577753448486328, 0.03571507263183594, 0.035794944763183595, 0.03578675079345703, 0.03588915252685547, 0.035796993255615236, 0.035784702301025394, 0.03576319885253906, 0.035765247344970705, 0.03575807952880859, 0.03578572845458984, 0.035817470550537106, 0.03578060913085938, 0.03580108642578125, 0.035810302734375, 0.0358175048828125, 0.0357979850769043, 0.03582668685913086, 0.03579084777832031, 0.03582668685913086, 0.035855358123779296, 0.03586150360107422, 0.035871742248535156, 0.03583180618286133, 0.035937278747558594, 0.035912704467773435, 0.03589427185058594, 0.035915775299072264, 0.03592499160766602, 0.03591884613037109, 0.03590553665161133, 0.03588915252685547, 0.03588608169555664, 0.03590041732788086, 0.03590655899047852, 0.03590553665161133, 0.03592499160766602, 0.03599359893798828, 0.03588198471069336, 0.03594854354858398, 0.03594342422485351, 0.035915775299072264, 0.03591372680664062, 0.035917823791503906, 0.03593318557739258, 0.03594649505615234, 0.0359628791809082, 0.03594342422485351, 0.13020057678222657, 0.035688449859619144, 0.035784702301025394, 0.03594854354858398, 0.0358389778137207, 0.0358389778137207, 0.03575091171264649, 0.03574169540405273, 0.035813377380371096, 0.035707904815673826, 0.035727359771728515, 0.03570483016967774, 0.03576115036010742, 0.03575398254394531, 0.03574272155761719, 0.035727359771728515, 0.03572019195556641, 0.03575603103637695, 0.03575296020507813, 0.03578572845458984, 0.0357724494934082, 0.035773406982421874, 0.03587276840209961, 0.035814399719238284, 0.03580316925048828, 0.03581539154052735, 0.0358656005859375, 0.035862529754638675, 0.03585433578491211, 0.03582156753540039, 0.03606016159057617, 0.03600691223144531, 0.03589734268188476, 0.03590041732788086, 0.03591167831420899, 0.035888126373291016, 0.03592396926879883, 0.035947521209716796, 0.03601203155517578, 0.03584819030761719, 0.035888126373291016, 0.035947521209716796, 0.03592499160766602, 0.035855358123779296, 0.035884033203125, 0.03590553665161133, 0.03591884613037109, 0.03590655899047852, 0.035921985626220704, 0.035943359375, 0.03598438262939453, 0.03597824096679687, 0.035937278747558594, 0.03588710403442383, 0.03595161437988281, 0.03598233413696289, 0.035970046997070314, 0.0359035530090332, 0.03594953536987305, 0.035911647796630856, 0.03595673751831055, 0.0359813117980957, 0.0359659538269043, 0.1303214111328125, 0.03565465545654297, 0.035659774780273434, 0.03564748764038086, 0.035661823272705076, 0.035672065734863284, 0.03572326278686523, 0.03567411041259766, 0.03570483016967774, 0.035675136566162106, 0.03572019195556641, 0.035732479095458985, 0.03579596710205078, 0.03568947219848633, 0.03570073699951172, 0.035765247344970705, 0.03582668685913086, 0.0359024658203125, 0.03593011093139648, 0.035796993255615236, 0.03584511947631836, 0.035748863220214845, 0.03573657608032227, 0.03576422500610352, 0.03569664001464844, 0.035789825439453124, 0.035813377380371096, 
0.03577756881713867, 0.03578569412231445, 0.035781631469726564, 0.035844097137451174, 0.03585945510864258, 0.03580518341064453, 0.03577446365356445, 0.03578675079345703, 0.03581542587280274, 0.0358205451965332, 0.03583590316772461, 0.03581951904296875, 0.03579596710205078, 0.03588710403442383, 0.03584000015258789, 0.035833854675292966, 0.03582156753540039, 0.03580313491821289, 0.03583795166015625, 0.035922943115234376, 0.03589836883544922, 0.03601919937133789, 0.036324352264404294, 0.03612057495117187, 0.035912704467773435, 0.03590041732788086, 0.03585228729248047, 0.0358737907409668, 0.03588915252685547, 0.03589734268188476, 0.035972095489501955, 0.03592192077636719, 0.03586355209350586, 0.03591167831420899, 0.035899391174316404, 0.035896320343017575, 0.13015347290039062, 0.03570073699951172, 0.03575500869750976, 0.035707904815673826, 0.03578777694702148, 0.03571712112426758, 0.03575603103637695, 0.03577958297729492, 0.03589734268188476, 0.035814399719238284, 0.035846145629882815, 0.03584819030761719, 0.035781631469726564, 0.035727359771728515, 0.03577958297729492, 0.035775489807128906, 0.03577139282226562, 0.035784702301025394, 0.03577753448486328, 0.03580518341064453, 0.03577753448486328, 0.035879936218261715, 0.035942401885986325, 0.03586764907836914, 0.03588608169555664, 0.035934207916259765, 0.03580108642578125, 0.03582361602783203, 0.035860481262207033, 0.035866622924804685, 0.035945472717285154, 0.0359024658203125, 0.03594342422485351, 0.03587686538696289, 0.03592806243896484, 0.0359024658203125, 0.035896320343017575, 0.035862529754638675, 0.035884033203125, 0.03594956970214844, 0.03592396926879883, 0.03592396926879883, 0.035917823791503906, 0.03590553665161133, 0.03591680145263672, 0.03594137573242188, 0.03592499160766602, 0.03594342422485351, 0.03589324951171875, 0.03595980834960937, 0.036018177032470705, 0.035934207916259765, 0.035942401885986325, 0.03591987228393555, 0.03594342422485351, 0.03597721481323242, 0.035999744415283204, 0.03602329635620117, 0.036050945281982424, 0.036040702819824216, 0.0359628791809082, 0.036013057708740234, 0.03595673751831055, 0.130482177734375, 0.03599155044555664, 0.03595468902587891, 0.03578060913085938, 0.035811328887939455, 0.03574787139892578, 0.03580617523193359, 0.03582566452026367, 0.03579289627075195, 0.03575603103637695, 0.03578879928588867, 0.035813377380371096, 0.035762176513671876, 0.0358287353515625, 0.03574272155761719, 0.035807262420654296, 0.0358502082824707, 0.035880992889404294, 0.035803104400634764, 0.035844097137451174, 0.03581542587280274, 0.035794944763183595, 0.03591680145263672, 0.03589529418945313, 0.03589427185058594, 0.035800064086914066, 0.03591372680664062, 0.035846145629882815, 0.035937278747558594, 0.03590041732788086, 0.03591372680664062, 0.03587481689453125, 0.03584307098388672, 0.03586969757080078, 0.03586764907836914, 0.035961856842041014, 0.03591680145263672, 0.035931137084960936, 0.0358922233581543, 0.035947521209716796, 0.03599052810668945, 0.035985408782958986, 0.03598233413696289, 0.035899391174316404, 0.03587071990966797, 0.03593318557739258, 0.03593523025512695, 0.035920894622802735, 0.035931137084960936, 0.03599564743041992, 0.035983360290527344, 0.03594649505615234, 0.03600896072387695, 0.035952640533447267, 0.035995712280273436, 0.036061119079589844, 0.035997726440429687, 0.03592393493652344, 0.03594342422485351, 0.03594342422485351, 0.03602022552490235, 0.035939327239990236, 0.03595161437988281, 0.1302087707519531, 0.03575296020507813, 0.03567923355102539, 0.03570483016967774, 0.03579391860961914, 
0.03578777694702148, 0.03575091171264649, 0.03570278549194336, 0.03577040100097656, 0.03576931381225586, 0.03575500869750976, 0.03574784088134766, 0.03573657608032227, 0.03573350524902344, 0.03577958297729492, 0.035781631469726564, 0.03585638427734375, 0.03585228729248047, 0.03584819030761719, 0.03624448013305664, 0.03592192077636719, 0.0358021125793457, 0.035806209564208984, 0.035850238800048825, 0.0358021125793457, 0.035801120758056644, 0.03581744003295898, 0.03585740661621094, 0.03589529418945313, 0.035783679962158206, 0.035846145629882815, 0.03585228729248047, 0.03590041732788086, 0.03590963363647461, 0.035884033203125, 0.035915775299072264, 0.03589120101928711, 0.035937278747558594, 0.03595775985717774, 0.03597107315063477, 0.03594854354858398, 0.03599462509155273, 0.03586150360107422, 0.035843135833740235, 0.03588294219970703, 0.03596492767333984, 0.03611443328857422, 0.036127742767333985, 0.035958782196044925, 0.03597721481323242, 0.036004863739013675, 0.03599871826171875, 0.035958782196044925, 0.0360263671875, 0.0359659538269043, 0.03599769592285156, 0.03604582214355469, 0.03602431869506836, 0.03596083068847656, 0.03599462509155273, 0.03602534484863281, 0.03600588989257812, 0.035955711364746096]",tokens/s,26.87552283450892,,,,, -bfloat16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +bfloat16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -12560,7 +12560,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e9564-6f9c144c281022b13d61f7c6;7af5de42-3d59-4b50-b7aa-68f109412df4) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-664fe722-7e9362630d6e6656257171a0;7088d0f6-d2c9-41d2-80dd-5d3f968a1b0b) Repository Not Found for url: https://huggingface.co/i/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -12629,7 +12629,7 @@ torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 448.00 MiB. G ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-1.8B,,cuda,0,42,,,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,217063f5c507ed7cc255df7e1f64c4333a0b4dfe,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,MB,1280.77824,5222.432768,0.0,4575.985664,4408.404992,s,10,0.2996208972930908,0.02996208972930909,0.0006406597139741031,0.029801631927490232,0.03034066619873047,0.031043501091003418,0.03160576900482178,"[0.03174633598327637, 0.03018448066711426, 0.02932364845275879, 0.029996639251708986, 0.029662208557128908, 0.029786144256591797, 0.02980108833312988, 0.029802175521850587, 0.029428159713745117, 0.029890016555786134]",tokens/s,8544.130343137558,kWh,3.491530141764803e-07,1.9131910481615884e-07,1.3412940917168101e-06,1.8817662107094493e-06,tokens/kWh,136042404.49374676,MB,1280.77824,5222.432768,0.0,4575.985664,4408.407552,s,10,14.066130737304686,1.4066130737304687,0.0129631800192974,1.4080363159179687,1.4199089721679687,1.4203238708496093,1.4206557897949217,"[1.4068297119140625, 1.398411865234375, 1.4057283935546876, 1.409242919921875, 1.42073876953125, 1.4197501220703126, 1.4104517822265625, 1.3749361572265626, 1.4002242431640626, 1.4198167724609374]",tokens/s,44.78843626336995,kWh,1.6028081063939408e-05,8.783234179226324e-06,3.890990477588561e-05,6.372122001905135e-05,tokens/kWh,988681.6351156534,,s,629,14.310509576797493,0.022751207594272634,0.003601284367660396,0.02248294448852539,0.022652845001220702,0.02277171211242676,0.051737850646972665,"[0.022715391159057616, 0.022351871490478514, 0.02245427131652832, 0.022495231628417968, 0.022346752166748047, 0.022502399444580077, 0.02242355155944824, 0.02225663948059082, 0.022139904022216796, 0.02265190315246582, 0.022495231628417968, 0.023756799697875978, 0.02314348793029785, 0.022495168685913086, 0.02230784034729004, 0.022492160797119142, 0.022768640518188478, 0.02249728012084961, 0.022336511611938475, 0.02209280014038086, 0.021994495391845705, 0.022839296340942384, 0.023358463287353515, 0.023001087188720702, 0.022676479339599608, 0.021539840698242187, 0.021612543106079102, 0.021598207473754884, 0.021549055099487305, 0.021599231719970705, 0.021565439224243164, 0.021593088150024413, 0.02163609504699707, 0.021554176330566405, 0.021563392639160156, 0.021565439224243164, 0.021630048751831055, 0.021596063613891603, 0.021597183227539063, 0.02162892723083496, 0.022115327835083007, 0.022598655700683593, 0.02246348762512207, 0.02247270393371582, 0.02253004837036133, 0.022410240173339844, 0.02246553611755371, 0.022561792373657227, 0.0224532470703125, 0.022406143188476564, 0.0225167350769043, 0.02246451187133789, 0.022452224731445314, 0.02249318313598633, 0.022460416793823244, 
0.022445056915283205, 0.022608896255493165, 0.02263039970397949, 0.02243891143798828, 0.022518783569335937, 0.02253004837036133, 0.022520864486694336, 0.05312918472290039, 0.022384639739990234, 0.022483968734741212, 0.022458368301391602, 0.0224716796875, 0.02241433525085449, 0.022475776672363282, 0.0224901123046875, 0.022459392547607423, 0.022458368301391602, 0.02234783935546875, 0.022313919067382813, 0.021922815322875978, 0.02142617607116699, 0.021525503158569336, 0.02150399971008301, 0.021515264511108398, 0.021531648635864258, 0.02147225570678711, 0.021498880386352538, 0.02150912094116211, 0.021501951217651367, 0.02233241653442383, 0.022379520416259766, 0.022388736724853517, 0.022346752166748047, 0.02247987174987793, 0.022435840606689454, 0.021498880386352538, 0.02150092887878418, 0.02147532844543457, 0.02142720031738281, 0.021531648635864258, 0.021498880386352538, 0.021550079345703126, 0.021772287368774415, 0.021788703918457032, 0.0216790714263916, 0.0225218563079834, 0.022510591506958007, 0.022525951385498046, 0.02251468849182129, 0.022633472442626954, 0.022552576065063477, 0.02254643249511719, 0.0226375675201416, 0.022676479339599608, 0.022518783569335937, 0.02254643249511719, 0.022541311264038084, 0.022568960189819336, 0.02247987174987793, 0.0224768009185791, 0.02249625587463379, 0.022509567260742186, 0.022494207382202147, 0.02250752067565918, 0.022533119201660155, 0.022451200485229493, 0.022432832717895507, 0.022519775390625, 0.02253206443786621, 0.022553600311279298, 0.05137510299682617, 0.021578752517700195, 0.021590015411376954, 0.021659648895263672, 0.021695487976074217, 0.021901311874389647, 0.021865535736083984, 0.02178144073486328, 0.021763071060180664, 0.02208665657043457, 0.02170163154602051, 0.02166988754272461, 0.02164531135559082, 0.022152191162109376, 0.022773759841918945, 0.022584320068359375, 0.02250752067565918, 0.02251366424560547, 0.02282700729370117, 0.022526975631713866, 0.022533119201660155, 0.022525951385498046, 0.02251571273803711, 0.02247372817993164, 0.02252288055419922, 0.022568960189819336, 0.022542335510253905, 0.02254745674133301, 0.022608896255493165, 0.02253107261657715, 0.022634496688842775, 0.022675455093383787, 0.022602752685546876, 0.022689855575561524, 0.022552511215209962, 0.022611967086791994, 0.022607872009277344, 0.022584320068359375, 0.022517759323120116, 0.022552576065063477, 0.02249830436706543, 0.02254643249511719, 0.022540288925170897, 0.022569984436035157, 0.02244915199279785, 0.022549503326416014, 0.021626880645751953, 0.02206105613708496, 0.021769216537475586, 0.02165760040283203, 0.02163711929321289, 0.02168115234375, 0.021651456832885742, 0.022657024383544923, 0.02271232032775879, 0.022576128005981445, 0.02254745674133301, 0.022581247329711913, 0.022580223083496095, 0.02249728012084961, 0.022625280380249024, 0.02285158348083496, 0.022625280380249024, 0.05304217529296875, 0.022627328872680662, 0.022492160797119142, 0.022543359756469726, 0.022617088317871094, 0.022536191940307617, 0.02262015914916992, 0.022578176498413087, 0.022579200744628908, 0.022510591506958007, 0.022639616012573242, 0.02249932861328125, 0.022580223083496095, 0.022502399444580077, 0.022529024124145508, 0.022566911697387695, 0.02250752067565918, 0.02248192024230957, 0.02253926467895508, 0.023560192108154295, 0.02304921531677246, 0.02271129608154297, 0.022500352859497072, 0.02246143913269043, 0.02242355155944824, 0.022562816619873048, 0.0225218563079834, 0.022591487884521484, 0.022467584609985353, 0.021570560455322265, 0.02163711929321289, 0.021541887283325196, 
0.021567487716674806, 0.021560319900512694, 0.021580799102783203, 0.021576704025268553, 0.02165350341796875, 0.021573631286621094, 0.021580799102783203, 0.021710847854614256, 0.021651456832885742, 0.021577728271484374, 0.021606399536132814, 0.021575712203979493, 0.022620128631591796, 0.022480928421020507, 0.02253206443786621, 0.022524927139282228, 0.022495231628417968, 0.02251366424560547, 0.022786048889160155, 0.02392678451538086, 0.022965248107910157, 0.022518783569335937, 0.022549503326416014, 0.02246451187133789, 0.022519807815551757, 0.02248192024230957, 0.02249830436706543, 0.022529024124145508, 0.022533119201660155, 0.022047744750976563, 0.022481983184814452, 0.052620223999023434, 0.02247987174987793, 0.02248192024230957, 0.022459392547607423, 0.022503423690795898, 0.022504512786865234, 0.022506431579589845, 0.02242460823059082, 0.022255584716796874, 0.02225971221923828, 0.02249113655090332, 0.02248908805847168, 0.022501375198364256, 0.022509567260742186, 0.022540288925170897, 0.02274406433105469, 0.023601152420043944, 0.022785024642944338, 0.02262015914916992, 0.022579200744628908, 0.022608896255493165, 0.022592512130737305, 0.02249932861328125, 0.022609920501708985, 0.02267750358581543, 0.022486015319824217, 0.022588415145874022, 0.022556671142578123, 0.02271027183532715, 0.022520832061767578, 0.022458368301391602, 0.022740991592407226, 0.022557695388793944, 0.022495231628417968, 0.022519807815551757, 0.022494207382202147, 0.022460416793823244, 0.022584320068359375, 0.02250752067565918, 0.022674432754516603, 0.022386688232421875, 0.022236160278320313, 0.022572032928466795, 0.02251571273803711, 0.02246963119506836, 0.022549503326416014, 0.022500415802001954, 0.022565824508666992, 0.022542335510253905, 0.02253926467895508, 0.02253107261657715, 0.02244607925415039, 0.022544384002685547, 0.0225218563079834, 0.02247372817993164, 0.022424575805664062, 0.022803455352783202, 0.022466560363769532, 0.02248089599609375, 0.022543359756469726, 0.02264678382873535, 0.022483968734741212, 0.02248806381225586, 0.052775936126708986, 0.02246963119506836, 0.023412736892700195, 0.023617536544799804, 0.02287001609802246, 0.022684671401977538, 0.022562816619873048, 0.022571008682250978, 0.02253926467895508, 0.022508544921875, 0.022507551193237305, 0.022638559341430664, 0.022658048629760744, 0.022542335510253905, 0.022508544921875, 0.022587392807006838, 0.022213632583618165, 0.022483968734741212, 0.02226278305053711, 0.02226688003540039, 0.02227097511291504, 0.02223411178588867, 0.022232063293457033, 0.022487039566040038, 0.022510591506958007, 0.02255462455749512, 0.022458368301391602, 0.022537216186523438, 0.022559743881225586, 0.022441984176635742, 0.02246553611755371, 0.02243891143798828, 0.022490144729614258, 0.02251158332824707, 0.02244812774658203, 0.0224768009185791, 0.022497312545776367, 0.02251158332824707, 0.0224849910736084, 0.022502399444580077, 0.022534143447875975, 0.02253004837036133, 0.022214656829833986, 0.022436864852905275, 0.02261299133300781, 0.022565887451171874, 0.022510591506958007, 0.022508544921875, 0.022526975631713866, 0.02264575958251953, 0.02251366424560547, 0.022622207641601562, 0.022556671142578123, 0.02244710350036621, 0.02250444793701172, 0.022550527572631835, 0.02243071937561035, 0.02259971237182617, 0.022450143814086915, 0.022478847503662108, 0.022536191940307617, 0.022534143447875975, 0.02247372817993164, 0.05276774215698242, 0.02241535949707031, 0.022396928787231447, 0.022406240463256837, 0.022459295272827147, 0.022486015319824217, 0.02248294448852539, 
0.022467584609985353, 0.02251366424560547, 0.022411264419555665, 0.022486080169677736, 0.02238355255126953, 0.022477823257446287, 0.022492160797119142, 0.022486015319824217, 0.022487039566040038, 0.022437887191772463, 0.02259660720825195, 0.022445056915283205, 0.022445056915283205, 0.022509567260742186, 0.02253107261657715, 0.022537216186523438, 0.02247987174987793, 0.02245734405517578, 0.022509567260742186, 0.021600255966186522, 0.021563392639160156, 0.02322640037536621, 0.022596576690673827, 0.02248806381225586, 0.022571008682250978, 0.0225218563079834, 0.022540288925170897, 0.022395904541015626, 0.02248192024230957, 0.021597183227539063, 0.021573631286621094, 0.021533695220947266, 0.021567487716674806, 0.022444032669067384, 0.02249625587463379, 0.022990848541259764, 0.0224768009185791, 0.021507072448730468, 0.021772287368774415, 0.02249113655090332, 0.022445056915283205, 0.02253004837036133, 0.022420480728149415, 0.02247372817993164, 0.022343679428100584, 0.02240716743469238, 0.022391807556152343, 0.02240716743469238, 0.022418432235717774, 0.022673408508300782, 0.02244710350036621, 0.022495231628417968, 0.02244812774658203, 0.02244095993041992, 0.02246963119506836, 0.022459392547607423, 0.05181958389282226, 0.021584831237792968, 0.021586944580078125, 0.021565439224243164, 0.021608448028564452, 0.021562368392944335, 0.021711872100830077, 0.021618688583374023, 0.021625856399536132, 0.021588991165161133, 0.02165247917175293, 0.021811199188232423, 0.022715391159057616, 0.022563840866088865, 0.02248806381225586, 0.02245020866394043, 0.022469600677490233, 0.022502399444580077, 0.02254745674133301, 0.02249830436706543, 0.02250649642944336, 0.022583328247070312, 0.022413280487060545, 0.022537216186523438, 0.022483968734741212, 0.02167807960510254, 0.021610496520996093, 0.021600255966186522, 0.02168934440612793, 0.02161664009094238, 0.021560319900512694, 0.021545984268188476, 0.021599231719970705, 0.021543935775756837, 0.021642240524291992, 0.02152448081970215, 0.021622783660888673, 0.021827583312988282, 0.021625856399536132, 0.021646335601806642, 0.02166169548034668, 0.021590015411376954, 0.021613567352294923, 0.021577728271484374, 0.021573631286621094, 0.021572608947753907, 0.021604352951049805, 0.021775360107421874, 0.021618688583374023, 0.02163199996948242, 0.021568511962890623, 0.021604352951049805, 0.021630975723266603, 0.02142720031738281, 0.021769216537475586, 0.0216944637298584, 0.021609472274780273, 0.02166783905029297, 0.021393407821655275, 0.02142207908630371, 0.02172313690185547, 0.021596160888671875, 0.02246553611755371, 0.05152767944335938, 0.021556224822998047, 0.021604352951049805, 0.021659648895263672, 0.021533695220947266, 0.021557247161865235, 0.021612607955932617, 0.021518272399902345, 0.021511167526245118, 0.021368831634521485, 0.02129817581176758, 0.021336063385009766, 0.0212674560546875, 0.02126233673095703, 0.021362688064575194, 0.02148044776916504, 0.021552127838134767, 0.022743040084838868, 0.022487039566040038, 0.022403135299682617, 0.02242246437072754, 0.022452224731445314, 0.022380544662475587, 0.022477823257446287, 0.022443008422851563, 0.022433792114257813, 0.022435840606689454, 0.0224768009185791, 0.02250444793701172, 0.02243177604675293, 0.022405088424682616, 0.022466560363769532, 0.02245631980895996, 0.02244915199279785, 0.022468608856201173, 0.02246246337890625, 0.022459392547607423, 0.0226693115234375, 0.022466560363769532, 0.022569984436035157, 0.02230067253112793, 0.022441984176635742, 0.02247270393371582, 0.02245734405517578, 0.02241433525085449, 
0.02251468849182129, 0.022466560363769532, 0.022387712478637696, 0.022434816360473633, 0.022178815841674804, 0.022377471923828125, 0.022726655960083008, 0.02271753692626953, 0.022612895965576172, 0.022610944747924806, 0.022582271575927734, 0.022536191940307617, 0.022627328872680662, 0.022610944747924806, 0.022758399963378906, 0.022577152252197266, 0.022500352859497072, 0.022563840866088865, 0.052776958465576174, 0.02245734405517578, 0.022587392807006838, 0.02244710350036621, 0.02254745674133301, 0.02248089599609375, 0.022640640258789063, 0.022603776931762694, 0.02230169677734375, 0.022387712478637696, 0.022592512130737305, 0.02228428840637207, 0.022072320938110353, 0.022564863204956053, 0.022470720291137697, 0.022454208374023437, 0.022749183654785156, 0.022617088317871094, 0.02260479927062988, 0.02259660720825195, 0.02270412826538086, 0.022557695388793944, 0.022668319702148436, 0.022660064697265624, 0.02268262481689453, 0.022648832321166993, 0.02249932861328125, 0.022681600570678712, 0.022684671401977538, 0.02259660720825195, 0.022603776931762694, 0.02263043212890625, 0.022645727157592773, 0.02255462455749512, 0.022381568908691408, 0.022427648544311524, 0.02244915199279785, 0.022444032669067384, 0.022208511352539064, 0.022519807815551757, 0.022527999877929687, 0.022549503326416014, 0.022468608856201173, 0.02226278305053711, 0.022576128005981445, 0.022622207641601562, 0.022564863204956053, 0.022208511352539064, 0.022409215927124023, 0.022624256134033204, 0.022239231109619142, 0.02232524871826172, 0.02230988883972168, 0.022397951126098634, 0.022588415145874022, 0.02254745674133301, 0.02303385543823242, 0.02294169616699219, 0.022591583251953123, 0.02265283203125, 0.022559743881225586, 0.02259971237182617, 0.022652896881103515]",tokens/s,43.953710846176755,,,main,False,False -bfloat16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +bfloat16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -12653,7 +12653,7 @@ ChildProcessError: Traceback (most recent call last): 
cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1560, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp8yv8ny1a/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpwm9lz6de/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): @@ -12684,7 +12684,7 @@ ImportError: This modeling file requires the following packages that were not fo ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.0,,,MB,1281.41312,3041.394688,0.0,2394.947584,2279.417856,s,10,0.23879766464233396,0.023879766464233395,0.0019186688840044529,0.023459343910217284,0.024200454521179197,0.026859299564361564,0.02898637559890747,"[0.029518144607543945, 0.02255036735534668, 0.023508031845092772, 0.022488000869750977, 0.023500608444213866, 0.02360960006713867, 0.023418079376220702, 0.023249856948852538, 0.023357599258422852, 0.023597375869750976]",tokens/s,10720.372847172997,kWh,2.755448700726511e-07,1.5098588436768086e-07,1.0324639184335585e-06,1.4589946728738905e-06,tokens/kWh,175463286.30229864,MB,1281.41312,3041.394688,0.0,2394.947584,2279.420416,s,10,14.219128295898438,1.4219128295898438,0.017921205502425246,1.4310108642578125,1.4332750732421875,1.4344568725585938,1.4354023120117188,"[1.432612060546875, 1.3843092041015626, 1.435638671875, 1.42259716796875, 1.433012451171875, 1.42940966796875, 1.389419921875, 1.4264703369140626, 1.43274169921875, 
1.4329171142578125]",tokens/s,44.30651351403348,kWh,1.681395282529389e-05,9.213966185877955e-06,3.6267677032765595e-05,6.229559604393743e-05,tokens/kWh,1011307.4438771843,,s,629,14.40284059143066,0.022897997760621087,0.002853396107912129,0.022707199096679686,0.022890904998779295,0.023071335220336914,0.04606173294067386,"[0.023362560272216795, 0.02249113655090332, 0.022731775283813475, 0.022437887191772463, 0.02246143913269043, 0.022460416793823244, 0.022551551818847656, 0.022419456481933595, 0.022502399444580077, 0.022548479080200197, 0.022486015319824217, 0.022541311264038084, 0.023034879684448242, 0.02268876838684082, 0.02254240036010742, 0.02255558395385742, 0.02253209686279297, 0.022450176239013672, 0.022443008422851563, 0.022403072357177735, 0.022277151107788086, 0.021957599639892578, 0.02185523223876953, 0.02195769691467285, 0.023056320190429688, 0.023190528869628906, 0.02307276725769043, 0.022871040344238282, 0.022906879425048828, 0.022733823776245117, 0.022961151123046874, 0.02283417510986328, 0.022779903411865234, 0.022882303237915038, 0.022823936462402345, 0.022898687362670898, 0.022755327224731444, 0.023018495559692383, 0.022730752944946288, 0.022622207641601562, 0.022715391159057616, 0.022692895889282225, 0.022666208267211913, 0.022689792633056642, 0.022798336029052735, 0.02269491195678711, 0.02272153663635254, 0.02275328063964844, 0.022786048889160155, 0.022626304626464845, 0.022771711349487304, 0.0228351993560791, 0.022767616271972657, 0.02273689651489258, 0.022757375717163086, 0.022278144836425783, 0.022647808074951172, 0.022813695907592774, 0.022773759841918945, 0.023772159576416017, 0.02286591911315918, 0.022733823776245117, 0.04492390441894531, 0.02185420799255371, 0.02185420799255371, 0.021777408599853516, 0.02188390350341797, 0.02148044776916504, 0.02164735984802246, 0.021833728790283204, 0.021857280731201172, 0.021805055618286134, 0.02187571144104004, 0.021824512481689453, 0.021768192291259765, 0.02187059211730957, 0.021825536727905274, 0.021836799621582033, 0.021857280731201172, 0.02187264060974121, 0.021778432846069336, 0.02170675277709961, 0.02149478340148926, 0.02150297546386719, 0.02149580764770508, 0.021712896347045898, 0.02182143974304199, 0.02192793655395508, 0.021801984786987305, 0.02184806442260742, 0.021817344665527344, 0.02186240005493164, 0.021901376724243166, 0.021828544616699218, 0.02186649513244629, 0.02186751937866211, 0.02165350341796875, 0.021812223434448243, 0.021592063903808592, 0.021549055099487305, 0.021523456573486328, 0.021542911529541017, 0.02184499168395996, 0.021808128356933593, 0.02187468719482422, 0.02227507209777832, 0.0218470401763916, 0.02185318374633789, 0.021798912048339843, 0.02185318374633789, 0.021798912048339843, 0.022038528442382813, 0.022045759201049803, 0.021880767822265626, 0.02189619255065918, 0.021935104370117187, 0.022813695907592774, 0.02350796890258789, 0.02306662368774414, 0.022940671920776368, 0.02307072067260742, 0.02286489677429199, 0.022813695907592774, 0.02271027183532715, 0.022791168212890626, 0.046709758758544925, 0.022777856826782225, 0.022684671401977538, 0.02352639961242676, 0.022934560775756837, 0.022803424835205078, 0.02241433525085449, 0.022648832321166993, 0.022815744400024415, 0.022734848022460938, 0.022767616271972657, 0.02286796760559082, 0.022768640518188478, 0.02286899185180664, 0.02251366424560547, 0.022581247329711913, 0.02272972869873047, 0.02287718391418457, 0.022759424209594727, 0.022723583221435546, 0.022758399963378906, 0.022784000396728517, 0.02268262481689453, 0.023182336807250976, 
0.022725631713867187, 0.0228853759765625, 0.02284851264953613, 0.022384639739990234, 0.02244095993041992, 0.022840320587158205, 0.022906879425048828, 0.022793216705322264, 0.022791168212890626, 0.022610944747924806, 0.022487039566040038, 0.0228351993560791, 0.022799360275268556, 0.022820863723754883, 0.02281062316894531, 0.022930431365966796, 0.022794240951538085, 0.022779903411865234, 0.022725631713867187, 0.02287308883666992, 0.022794240951538085, 0.022777856826782225, 0.02283622360229492, 0.02282803153991699, 0.022697984695434572, 0.02234982490539551, 0.022738943099975584, 0.022774784088134766, 0.02285260772705078, 0.022773759841918945, 0.022821887969970703, 0.023173120498657225, 0.022930431365966796, 0.022800384521484376, 0.02271232032775879, 0.02287615966796875, 0.022839296340942384, 0.022839296340942384, 0.022759424209594727, 0.04511948776245117, 0.022977535247802734, 0.02328371238708496, 0.022558719635009765, 0.022576128005981445, 0.022527999877929687, 0.022632448196411133, 0.02246451187133789, 0.022587423324584962, 0.02348441505432129, 0.022849599838256837, 0.022773664474487306, 0.02265395164489746, 0.022495231628417968, 0.022129663467407225, 0.022198272705078126, 0.022073375701904298, 0.022494304656982423, 0.02254323196411133, 0.022435840606689454, 0.022429695129394533, 0.022487039566040038, 0.022543359756469726, 0.022477888107299806, 0.02256278419494629, 0.022457311630249024, 0.02239897537231445, 0.02244607925415039, 0.022386688232421875, 0.022451200485229493, 0.022406143188476564, 0.022302879333496093, 0.02272751998901367, 0.022502399444580077, 0.022467584609985353, 0.021970943450927736, 0.021485567092895508, 0.022544384002685547, 0.02253107261657715, 0.02247475242614746, 0.021906431198120118, 0.022520832061767578, 0.022528032302856445, 0.022576095581054688, 0.022777856826782225, 0.022726655960083008, 0.022755327224731444, 0.023173120498657225, 0.022930431365966796, 0.022750207901000977, 0.02273689651489258, 0.022737920761108397, 0.022761472702026365, 0.02249830436706543, 0.022364160537719727, 0.022716415405273437, 0.022766592025756836, 0.022773759841918945, 0.022760448455810548, 0.022846464157104493, 0.02275328063964844, 0.023029760360717775, 0.022812671661376953, 0.04679884719848633, 0.022707199096679686, 0.022794240951538085, 0.02272870445251465, 0.02272051239013672, 0.022742015838623047, 0.022366207122802736, 0.022768640518188478, 0.022740991592407226, 0.022757375717163086, 0.02272051239013672, 0.022764543533325195, 0.02268262481689453, 0.02282803153991699, 0.022773759841918945, 0.022920223236083986, 0.023918560028076172, 0.023290880203247072, 0.023020544052124024, 0.02292633628845215, 0.022974464416503908, 0.022872064590454103, 0.02271334457397461, 0.02267852783203125, 0.022708223342895507, 0.022812671661376953, 0.02279225540161133, 0.023296991348266603, 0.022676479339599608, 0.022615007400512695, 0.022527999877929687, 0.022212608337402344, 0.02231091117858887, 0.022579200744628908, 0.022795328140258787, 0.022712255477905275, 0.022737920761108397, 0.02272153663635254, 0.022750207901000977, 0.022370304107666016, 0.022379520416259766, 0.022626304626464845, 0.022742015838623047, 0.022814720153808594, 0.022648832321166993, 0.022609920501708985, 0.022768640518188478, 0.02243891143798828, 0.02292633628845215, 0.022381568908691408, 0.022460416793823244, 0.022855680465698244, 0.022772768020629882, 0.02273583984375, 0.02274508857727051, 0.022743040084838868, 0.022421503067016603, 0.022591487884521484, 0.02288844871520996, 0.022746112823486327, 0.02274406433105469, 
0.022837247848510742, 0.02285670471191406, 0.04677836990356445, 0.0227194881439209, 0.022603776931762694, 0.022898687362670898, 0.022821887969970703, 0.02274406433105469, 0.022702112197875976, 0.023112672805786133, 0.022724607467651366, 0.022762496948242186, 0.022746112823486327, 0.02273587226867676, 0.02281062316894531, 0.02287820816040039, 0.022768640518188478, 0.022734848022460938, 0.022358015060424806, 0.022375423431396483, 0.022350847244262697, 0.022346752166748047, 0.022593536376953126, 0.02286591911315918, 0.022774784088134766, 0.022815744400024415, 0.02267852783203125, 0.022743072509765625, 0.02273072052001953, 0.02265497589111328, 0.022746112823486327, 0.02272051239013672, 0.02270310401916504, 0.022689792633056642, 0.02274406433105469, 0.022789119720458984, 0.0227061767578125, 0.022796287536621093, 0.02275542449951172, 0.02264259147644043, 0.02262118339538574, 0.022563840866088865, 0.022441984176635742, 0.022408191680908202, 0.022353055953979493, 0.022639455795288085, 0.022743040084838868, 0.0227194881439209, 0.022687744140625, 0.022760448455810548, 0.022658048629760744, 0.022676479339599608, 0.022755327224731444, 0.023144447326660156, 0.02266214370727539, 0.022600704193115235, 0.022657024383544923, 0.02233241653442383, 0.022551551818847656, 0.022384639739990234, 0.022495231628417968, 0.022747135162353514, 0.02271232032775879, 0.022805503845214844, 0.022817792892456053, 0.046637054443359374, 0.02272051239013672, 0.02269081687927246, 0.0227194881439209, 0.02274406433105469, 0.022716415405273437, 0.022730752944946288, 0.022673408508300782, 0.022730752944946288, 0.022718463897705078, 0.022718463897705078, 0.022790143966674805, 0.022756351470947265, 0.02268671989440918, 0.02275225639343262, 0.02271129608154297, 0.022725631713867187, 0.02285260772705078, 0.02287308883666992, 0.02265395164489746, 0.02184499168395996, 0.021757951736450197, 0.021745664596557617, 0.021933055877685546, 0.021767168045043944, 0.021810176849365235, 0.021711872100830077, 0.021535743713378908, 0.021582847595214845, 0.021521408081054686, 0.02145075225830078, 0.02187468719482422, 0.02143436813354492, 0.02145484733581543, 0.021525503158569336, 0.021622783660888673, 0.021687295913696288, 0.021629951477050782, 0.02147737693786621, 0.021622783660888673, 0.021679103851318358, 0.021388288497924804, 0.02166783905029297, 0.0214835205078125, 0.02166988754272461, 0.021483615875244142, 0.021405664443969727, 0.021476287841796875, 0.021429376602172853, 0.021533567428588866, 0.02146201515197754, 0.02153267288208008, 0.0214517765045166, 0.021493824005126953, 0.02159916877746582, 0.021519359588623048, 0.021604352951049805, 0.021626880645751953, 0.022758399963378906, 0.02306355285644531, 0.022715391159057616, 0.022569984436035157, 0.02266214370727539, 0.04642816162109375, 0.02273587226867676, 0.022692863464355468, 0.022771711349487304, 0.022775808334350587, 0.02277894401550293, 0.02258937644958496, 0.0226693115234375, 0.022746112823486327, 0.022718463897705078, 0.022742015838623047, 0.022755327224731444, 0.022738943099975584, 0.023169023513793945, 0.022759424209594727, 0.022755327224731444, 0.02285670471191406, 0.022759424209594727, 0.022822912216186524, 0.022793216705322264, 0.022733823776245117, 0.022709247589111328, 0.022754304885864256, 0.022792192459106447, 0.022759424209594727, 0.022779903411865234, 0.02269900894165039, 0.02264575958251953, 0.022820863723754883, 0.02268262481689453, 0.02250547218322754, 0.022483968734741212, 0.022610944747924806, 0.022633472442626954, 0.022542335510253905, 0.022031360626220704, 
0.022311935424804686, 0.02267955207824707, 0.022475776672363282, 0.02250649642944336, 0.022452224731445314, 0.022567935943603516, 0.0224849910736084, 0.022397951126098634, 0.02250351905822754, 0.022749088287353517, 0.022381568908691408, 0.02243174362182617, 0.022571008682250978, 0.02248089599609375, 0.022640640258789063, 0.022564863204956053, 0.02251263999938965, 0.022549503326416014, 0.022584320068359375, 0.022519807815551757, 0.022467584609985353, 0.02282700729370117, 0.022468639373779298, 0.022391935348510743, 0.02254332733154297, 0.02267430305480957, 0.022750207901000977, 0.04662988662719727, 0.022409215927124023, 0.022370367050170897, 0.02230076789855957, 0.02207027244567871, 0.02236911964416504, 0.022517759323120116, 0.022584352493286133, 0.0226856632232666, 0.02247987174987793, 0.02240716743469238, 0.02245631980895996, 0.022478847503662108, 0.02252288055419922, 0.022935552597045897, 0.02267238426208496, 0.02272870445251465, 0.022811647415161132, 0.02268671989440918, 0.022700031280517577, 0.022730752944946288, 0.022831104278564454, 0.022714368820190428, 0.022751232147216797, 0.02275328063964844, 0.02272972869873047, 0.02289254379272461, 0.022788095474243163, 0.02294790458679199, 0.022905792236328125, 0.022761472702026365, 0.022801408767700194, 0.022777856826782225, 0.02285977554321289, 0.022767616271972657, 0.022790143966674805, 0.022833152770996092, 0.02310553550720215, 0.022748159408569335, 0.02265395164489746, 0.022647808074951172, 0.02270515251159668, 0.02272051239013672, 0.022722560882568358, 0.022766592025756836, 0.0226375675201416, 0.022649856567382814, 0.022717439651489257, 0.02285158348083496, 0.02291814422607422, 0.024172544479370117, 0.023150592803955077, 0.022890495300292968, 0.02286591911315918, 0.02267852783203125, 0.022701055526733398, 0.022684671401977538, 0.022740991592407226, 0.02281062316894531, 0.022697984695434572, 0.02268262481689453, 0.022972415924072266, 0.022773759841918945, 0.04684697723388672, 0.022648832321166993, 0.02265907287597656, 0.022709247589111328, 0.022737920761108397, 0.022675455093383787, 0.02269593620300293, 0.02263039970397949, 0.022725664138793945, 0.022663135528564454, 0.022797311782836914, 0.02272870445251465, 0.022543359756469726, 0.022708223342895507, 0.02269388771057129, 0.02305638313293457, 0.023341056823730468, 0.02262835121154785, 0.022754304885864256, 0.02268262481689453, 0.022762496948242186, 0.02271129608154297, 0.02268262481689453, 0.022586368560791017, 0.022822912216186524, 0.022692863464355468, 0.022929407119750975, 0.022788095474243163, 0.022575103759765625, 0.0227194881439209, 0.022781951904296875, 0.02287308883666992, 0.022782976150512696, 0.02267955207824707, 0.022747135162353514, 0.022739967346191405, 0.02265497589111328, 0.02277894401550293, 0.022729663848876952, 0.022737920761108397, 0.0226375675201416, 0.02307174491882324, 0.02280243110656738, 0.022692863464355468, 0.0226693115234375, 0.022649856567382814, 0.022598655700683593, 0.022725631713867187, 0.022765567779541016, 0.022796287536621093, 0.02269696044921875, 0.02262835121154785, 0.022590463638305663, 0.022832128524780275, 0.022590463638305663, 0.022388736724853517, 0.02285260772705078, 0.022785024642944338, 0.02274508857727051, 0.022776832580566408, 0.022692863464355468, 0.023166976928710937, 0.0227194881439209]",tokens/s,43.67194068468962,,,,, 
-bfloat16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,v,v,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +bfloat16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,v,v,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -12723,7 +12723,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d4694-667fbf2b444a6eac3017876b;7de1532a-3519-4ac9-81bf-3a8b6a6f369d) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe9ee-316227f45874b5bc5d72622f;c3bbb8d2-4b58-4e72-9d41-c224c686cccd) Repository Not Found for url: https://huggingface.co/v/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -12778,7 +12778,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1560, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp1w09c0h3/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpyg4_od93/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,M,M,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): @@ -12820,7 +12820,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e952f-73bb692e627879e46dd6c2d1;c75c4cfa-5434-498d-afcd-484e1b7f6a28) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe6ec-7feaa55734c062764e7aee50;b2621aff-c058-4753-a320-4f9f36558b71) Repository Not Found for url: https://huggingface.co/M/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -12890,7 +12890,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e9606-0980065772bf7bd87da18847;912ddd0d-268a-4b0f-9867-b776263f08db) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe7c5-677859ee6a3afef67aaaefd6;4183cbb0-f3f6-4ec0-b227-fb2e5e765b4b) Repository Not Found for url: https://huggingface.co/r/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -12922,7 +12922,7 @@ If this is a private repository, make sure to pass a token having permission to ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-7B,,cuda,0,42,,,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,217063f5c507ed7cc255df7e1f64c4333a0b4dfe,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,MB,1292.10368,17102.798848,0.0,16456.351744,16359.853056,s,10,1.0287446746826174,0.10287446746826172,0.001574352666792247,0.1023089599609375,0.1031449493408203,0.10536452407836913,0.1071401838684082,"[0.10758409881591798, 0.10240496063232422, 0.10230368041992187, 0.1022434539794922, 0.10224969482421875, 0.10231423950195312, 0.10226022338867187, 0.10228489685058594, 0.10244771575927734, 0.1026517105102539]",tokens/s,2488.4697466743123,kWh,1.2122054962344181e-06,6.642319068268247e-07,5.7926718676940615e-06,7.669109270755305e-06,tokens/kWh,33380669.248801485,MB,1292.10368,17102.798848,0.0,16456.351744,16416.242176,s,10,29.410879882812498,2.94108798828125,0.004424772369057995,2.9409384765624997,2.9470216064453125,2.9476450805664065,2.9481438598632814,"[2.9482685546875, 2.946883056640625, 2.935814697265625, 2.939655517578125, 2.93367822265625, 2.9402763671875, 2.943142822265625, 2.937574951171875, 2.9416005859375, 2.943985107421875]",tokens/s,21.42064441833198,kWh,3.477936609903701e-05,1.9060741952693285e-05,0.00012457632131630225,0.00017841642936803254,tokens/kWh,353106.4948623387,,s,629,30.28513584136962,0.0481480697001107,0.012214817356114677,0.04664934539794922,0.046851686096191406,0.04733276138305664,0.14937149353027343,"[0.04658995056152344, 0.04657766342163086, 0.046698558807373045, 0.046656448364257815, 0.046625823974609376, 0.04664214324951172, 0.04668928146362305, 0.04677427291870117, 0.04667903900146484, 0.04730060958862305, 0.047685630798339845, 0.047546367645263675, 0.047473663330078124, 0.046508033752441405, 0.046502910614013675, 0.04651520156860352, 0.04653363037109375, 0.04659814453125, 0.0465203857421875, 0.046744510650634764, 0.04672819137573242, 0.04664012908935547, 0.04670361709594727, 0.0478474235534668, 0.04763033676147461, 0.04754227066040039, 0.047580158233642575, 0.04751462554931641, 0.04651520156860352, 0.04645580673217774, 0.046604286193847655, 0.04659302520751953, 0.04655718231201172, 0.046581760406494144, 0.046568447113037106, 0.04662681579589844, 0.046615550994873044, 0.046622718811035156, 0.04671897506713867, 0.046635009765625, 0.04675993728637695, 0.046663681030273435, 0.046671871185302735, 0.046639102935791016, 0.04694015884399414, 0.04679270553588867, 0.04659609603881836, 0.04648755264282227, 0.04719619369506836, 0.047478816986083985, 0.04671583938598633, 0.046545921325683595, 0.046712833404541014, 0.046562305450439455, 0.046691326141357424, 0.046581760406494144, 0.046690303802490236, 0.04663603210449219, 0.04666572952270508, 0.04665651321411133, 0.04668009567260742, 0.04654691314697266, 0.14947225952148438, 0.046685184478759766, 0.04649881744384766, 0.04677119827270508, 0.04667801666259765, 0.04733030319213867, 
0.046312446594238284, 0.04638412857055664, 0.04655615997314453, 0.046706687927246096, 0.04676095962524414, 0.04660326385498047, 0.04668108749389648, 0.046676990509033206, 0.046637054443359374, 0.046688255310058595, 0.046682113647460936, 0.04665958404541016, 0.04665446472167969, 0.04666470336914062, 0.04671692657470703, 0.04721667098999023, 0.0465305290222168, 0.04648857498168945, 0.046688255310058595, 0.047391742706298826, 0.04675686264038086, 0.04659711837768555, 0.046637054443359374, 0.046690303802490236, 0.04670873641967774, 0.046870529174804686, 0.046693374633789066, 0.04668415832519531, 0.04679679870605469, 0.046548992156982424, 0.04660326385498047, 0.04666163253784179, 0.04666777420043945, 0.04665958404541016, 0.0465530891418457, 0.04662169647216797, 0.04656947326660156, 0.04639129638671875, 0.046317569732666014, 0.046427135467529294, 0.04659199905395508, 0.04667084884643555, 0.04727500915527344, 0.04753919982910156, 0.04715315246582031, 0.04726476669311523, 0.04731903839111328, 0.047230976104736325, 0.04743884658813476, 0.047612926483154294, 0.04740300750732422, 0.04728422546386719, 0.04637388610839844, 0.04665446472167969, 0.04674764633178711, 0.04660326385498047, 0.0465530891418457, 0.14944256591796876, 0.04665651321411133, 0.04665139389038086, 0.04667289733886719, 0.046685184478759766, 0.046666751861572264, 0.046604286193847655, 0.04664831924438476, 0.046650367736816405, 0.046611457824707034, 0.04656742477416992, 0.0464721908569336, 0.04624486541748047, 0.04662793731689453, 0.046518241882324216, 0.04654892730712891, 0.046483455657958986, 0.04661043167114258, 0.04659814453125, 0.04659814453125, 0.046227455139160156, 0.0463012466430664, 0.046314430236816403, 0.04667903900146484, 0.04665446472167969, 0.04671180725097656, 0.04667596817016602, 0.04950223922729492, 0.04683055877685547, 0.046429183959960936, 0.04655104064941406, 0.04668108749389648, 0.046615615844726566, 0.04664825439453125, 0.04652851104736328, 0.04653772735595703, 0.0461998405456543, 0.04648137664794922, 0.0464640007019043, 0.04653055953979492, 0.04647116851806641, 0.04653772735595703, 0.04628377532958984, 0.04633804702758789, 0.046306304931640625, 0.04667084884643555, 0.04674662399291992, 0.04675481414794922, 0.04651827239990235, 0.04665651321411133, 0.046676990509033206, 0.046714881896972656, 0.047148033142089846, 0.046496768951416016, 0.04640256118774414, 0.046532608032226565, 0.04649574279785156, 0.046255104064941405, 0.046281726837158206, 0.04637388610839844, 0.046496768951416016, 0.04657356643676758, 0.046311424255371096, 0.14914764404296876, 0.04664012908935547, 0.046483455657958986, 0.04638412857055664, 0.04694940948486328, 0.0466728630065918, 0.04648448181152344, 0.046637054443359374, 0.04677529525756836, 0.04654489517211914, 0.04660019302368164, 0.046690303802490236, 0.04721664047241211, 0.04734668731689453, 0.046809120178222655, 0.04668412780761719, 0.04654489517211914, 0.04669440078735351, 0.04667494583129883, 0.04680089569091797, 0.046740478515625, 0.0468592643737793, 0.0464721908569336, 0.04657664108276367, 0.046663681030273435, 0.04656947326660156, 0.04665657424926758, 0.046613441467285154, 0.046676990509033206, 0.046615585327148434, 0.04660323333740234, 0.04671692657470703, 0.04668115234375, 0.04666259384155273, 0.046532608032226565, 0.04660326385498047, 0.046332992553710935, 0.04632160186767578, 0.04636876678466797, 0.046581760406494144, 0.046584831237792966, 0.046669822692871094, 0.04667903900146484, 0.046733375549316405, 0.046623680114746095, 0.04668108749389648, 0.0466063346862793, 
0.04657356643676758, 0.04671590423583984, 0.046720001220703126, 0.046721023559570314, 0.046696449279785154, 0.046581760406494144, 0.04684492874145508, 0.04660531234741211, 0.04657254409790039, 0.04667494583129883, 0.04660224151611328, 0.04659814453125, 0.046690303802490236, 0.046721023559570314, 0.04664422225952149, 0.04674969482421875, 0.14936781311035155, 0.046698558807373045, 0.0464967041015625, 0.04665958404541016, 0.046565376281738284, 0.04652851104736328, 0.04612607955932617, 0.04642303848266602, 0.04643840026855469, 0.04656332778930664, 0.046639102935791016, 0.04674764633178711, 0.046644287109375, 0.04670969772338867, 0.04668928146362305, 0.04668108749389648, 0.046647296905517575, 0.04663296127319336, 0.04651827239990235, 0.0466319351196289, 0.04624486541748047, 0.046437374114990236, 0.046513214111328124, 0.046523326873779296, 0.046496768951416016, 0.04671897506713867, 0.0466431999206543, 0.046622718811035156, 0.04674460983276367, 0.04663292694091797, 0.04659609603881836, 0.046650367736816405, 0.04655923080444336, 0.0465715217590332, 0.046192638397216795, 0.04637900924682617, 0.04635443115234375, 0.04660940933227539, 0.04667193603515625, 0.046634944915771484, 0.04691251373291016, 0.04655411148071289, 0.04658182525634766, 0.046705600738525394, 0.04657766342163086, 0.046532608032226565, 0.046318622589111326, 0.04634006500244141, 0.04625305557250976, 0.046529537200927736, 0.046615550994873044, 0.04675174331665039, 0.046496768951416016, 0.04628070449829102, 0.04632166290283203, 0.04637696075439453, 0.046491649627685545, 0.04671897506713867, 0.04667494583129883, 0.046992385864257816, 0.0464640007019043, 0.046620670318603515, 0.046650367736816405, 0.1494958038330078, 0.04671590423583984, 0.046483455657958986, 0.04697804641723633, 0.046701568603515625, 0.046617599487304685, 0.046876670837402344, 0.04656025695800781, 0.04643635177612305, 0.0465530891418457, 0.04657459259033203, 0.04664115142822266, 0.046683135986328124, 0.04667801666259765, 0.04664524841308594, 0.046706687927246096, 0.04666777420043945, 0.046635009765625, 0.04670159912109375, 0.04681622314453125, 0.0465797119140625, 0.04664524841308594, 0.046655487060546875, 0.04663603210449219, 0.04665446472167969, 0.046668800354003906, 0.04650905609130859, 0.04662783813476563, 0.046840831756591796, 0.04665139389038086, 0.04664422225952149, 0.04681625747680664, 0.04722687911987305, 0.04669440078735351, 0.046342144012451174, 0.04629913711547851, 0.04659097671508789, 0.04660531234741211, 0.04656742477416992, 0.04653772735595703, 0.046461952209472655, 0.04648448181152344, 0.04646092987060547, 0.04656639862060547, 0.0465049934387207, 0.04683055877685547, 0.047608833312988284, 0.04683366394042969, 0.046835712432861325, 0.04674355316162109, 0.046410751342773435, 0.04667596817016602, 0.04659404754638672, 0.04681932830810547, 0.04665961456298828, 0.04665135955810547, 0.04656335830688477, 0.04672201538085938, 0.04672716903686523, 0.04662886428833008, 0.04666163253784179, 0.046676990509033206, 0.04654796981811524, 0.1495665283203125, 0.04660831832885742, 0.04655001449584961, 0.04666470336914062, 0.04655104064941406, 0.04733440017700195, 0.04753100967407226, 0.04713369750976563, 0.046584831237792966, 0.04646912002563477, 0.04659609603881836, 0.04654182434082031, 0.04656639862060547, 0.046635009765625, 0.04662886428833008, 0.04675174331665039, 0.04667494583129883, 0.04662988662719727, 0.046668800354003906, 0.046704639434814454, 0.046622718811035156, 0.04669440078735351, 0.04665139389038086, 0.046653438568115234, 0.046663681030273435, 
0.046653438568115234, 0.04661248016357422, 0.046744640350341794, 0.04667180633544922, 0.046660606384277346, 0.04667398452758789, 0.04658476638793945, 0.04784537506103516, 0.04724428939819336, 0.04643942260742188, 0.04662169647216797, 0.0466319351196289, 0.04667084884643555, 0.04673535919189453, 0.04664524841308594, 0.046599166870117184, 0.04664934539794922, 0.04667596817016602, 0.046881790161132815, 0.04650700759887695, 0.04687257766723633, 0.04653363037109375, 0.046806015014648435, 0.046450687408447267, 0.046581760406494144, 0.04668415832519531, 0.04667903900146484, 0.046620670318603515, 0.04671184158325195, 0.04664009475708008, 0.04669747161865234, 0.04664217758178711, 0.04663296127319336, 0.046676990509033206, 0.04667903900146484, 0.046630912780761716, 0.046734336853027345, 0.0467322883605957, 0.1493729248046875, 0.04665958404541016, 0.04658073425292969, 0.04659609603881836, 0.046723072052001956, 0.04662988662719727, 0.04681727981567383, 0.046669822692871094, 0.04658380889892578, 0.04666572952270508, 0.04672204971313477, 0.04682547378540039, 0.046524417877197265, 0.046562305450439455, 0.04651007843017578, 0.04664934539794922, 0.04657254409790039, 0.04631961441040039, 0.04642508697509766, 0.046489601135253904, 0.04651724624633789, 0.04665958404541016, 0.04655615997314453, 0.046683135986328124, 0.04667494583129883, 0.04654182434082031, 0.04669440078735351, 0.046685184478759766, 0.046752769470214846, 0.04653363037109375, 0.046653438568115234, 0.0466165771484375, 0.04659711837768555, 0.04657049560546875, 0.046317569732666014, 0.04642508697509766, 0.04642816162109375, 0.046440448760986325, 0.04673535919189453, 0.046595073699951174, 0.04671590423583984, 0.046650367736816405, 0.04665753555297852, 0.046655487060546875, 0.04668620681762695, 0.04669235229492188, 0.04664115142822266, 0.04665862274169922, 0.046642112731933597, 0.046680065155029295, 0.04661862564086914, 0.0466431999206543, 0.04664115142822266, 0.046611457824707034, 0.04676403045654297, 0.04673126220703125, 0.0466165771484375, 0.04662988662719727, 0.04675174331665039, 0.04685823822021484, 0.04659711837768555, 0.04656435012817383, 0.046615550994873044, 0.149465087890625, 0.046635009765625, 0.0465715217590332, 0.046706687927246096, 0.04679065704345703, 0.046611457824707034, 0.046601215362548826, 0.04664422225952149, 0.0466431999206543, 0.04664012908935547, 0.04672512054443359, 0.04656332778930664, 0.04672716903686523, 0.046723072052001956, 0.04667391967773438, 0.04664934539794922, 0.04663603210449219, 0.04660019302368164, 0.04676095962524414, 0.04679372787475586, 0.04657561492919922, 0.04669440078735351, 0.046620670318603515, 0.04666265487670898, 0.0466431999206543, 0.04668620681762695, 0.04655110549926758, 0.046789569854736326, 0.04666777420043945, 0.046691326141357424, 0.04660019302368164, 0.04668620681762695, 0.046614528656005856, 0.04667289733886719, 0.04666163253784179, 0.04668620681762695, 0.046699520111083984, 0.04664934539794922, 0.04661862564086914, 0.04671692657470703, 0.046717952728271485, 0.04668928146362305, 0.04659711837768555, 0.04671590423583984, 0.046614528656005856, 0.046611457824707034, 0.04657254409790039, 0.046720001220703126, 0.046682113647460936, 0.046720001220703126, 0.0468326416015625, 0.04770611190795898, 0.04695449447631836, 0.04664627075195313, 0.04650188827514649, 0.047132671356201174, 0.046467071533203126, 0.046578689575195314, 0.04655001449584961, 0.046739456176757815, 0.04655411148071289, 0.046827518463134765, 0.046588958740234374, 0.1502340850830078, 0.046944255828857424, 0.04677734375, 
0.046630912780761716, 0.046676990509033206, 0.04668012619018555, 0.0466677131652832, 0.046707710266113284, 0.04665753555297852, 0.04670361709594727, 0.04655718231201172, 0.046690303802490236, 0.04661964797973633, 0.04659814453125, 0.04674457550048828, 0.04671897506713867, 0.046699520111083984, 0.04664115142822266, 0.046653438568115234, 0.04670259094238281, 0.04666572952270508, 0.046699520111083984, 0.046631999969482425, 0.04676396942138672, 0.04763238525390625, 0.04698726272583008, 0.046691326141357424, 0.04662681579589844, 0.04671078491210937, 0.04660326385498047, 0.04668316650390625, 0.04659811019897461, 0.046653438568115234, 0.0466954231262207, 0.046723072052001956, 0.04668723297119141, 0.046653438568115234, 0.04678144073486328, 0.04657664108276367, 0.046843902587890625, 0.04685004806518555, 0.04661043167114258, 0.04671180725097656, 0.04659609603881836, 0.04652646255493164, 0.04692582321166992, 0.04676505661010742, 0.046535678863525394, 0.046565376281738284, 0.04657766342163086, 0.04649369430541992, 0.04792934417724609, 0.04684809494018555, 0.04660931015014649, 0.046568447113037106, 0.04664422225952149, 0.04667903900146484, 0.04660838317871094, 0.04666777420043945, 0.04661862564086914, 0.046700607299804686, 0.04675683212280273, 0.0466728630065918]",tokens/s,20.76926460870561,,,main,False,False -bfloat16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +bfloat16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -12946,7 +12946,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1560, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpp4h_o3ey/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpy3nibjjn/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): @@ -12986,7 +12986,7 @@ ChildProcessError: Traceback (most recent call last): torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -bfloat16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +bfloat16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -13015,7 +13015,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: 
Root=1-664d4890-53911e6a1e0dbe06772490fb;e0ca4e29-a2db-4890-8da8-4ed4d7edf234) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664fec08-758962a56651d01e21a3afef;8a15757d-4755-4536-b581-761ea814ed65) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json. @@ -13100,7 +13100,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e9640-754eb1514798e7e555af02b7;bee288bc-140f-4840-a991-80aa29586a43) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe7fb-59d49c87395bc315629d8b91;341e57d9-23a7-4017-bc60-19e67a19a632) Repository Not Found for url: https://huggingface.co/a/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -13162,7 +13162,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664e8fe5-5a58c75703c90c092950c225;ee4cb37c-810b-472e-a1a3-f62d2274ddf7) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664fe180-03d31f85084e05c266c68090;8c3d2ac8-313d-4a1d-91e2-00e2127ca3df) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json. @@ -13288,7 +13288,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e95ce-3bbfad4c53bd948e4aa8f5d9;5c9c9813-a5e9-44f2-8f1a-a6305af201ca) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe790-434117026b23953441b27a43;78a07968-cb9b-4c25-8acc-849db13e338a) Repository Not Found for url: https://huggingface.co/t/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -13433,7 +13433,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e9085-18aee6dd102a40f70e5b6cc7;c6ebb1ea-9065-44c6-99ef-2bae6e86dc10) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe228-74a3384643e7443d2cc5c39b;1d624e9e-a045-40f5-8682-0273d15011fc) Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -13526,10 +13526,10 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1560, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpyqyqr2d1/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpa6emsqws/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -bfloat16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,B,B,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +bfloat16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,B,B,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -13568,7 +13568,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d462a-470e06bf3ea470c83d9d2639;5f7a7c57-72a0-4036-a4ae-7f3a34d0a14f) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe981-3bdce18e1d1abdb97ae0af5f;4b7c4320-bc60-44df-b715-f48b4d4df771) Repository Not Found for url: https://huggingface.co/B/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -13599,7 +13599,7 @@ OSError: B is not a local folder and is not a valid model identifier listed on ' If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -bfloat16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +bfloat16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -13623,7 +13623,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1560, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp5qj8f_pp/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpv2t_xgxb/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,TencentARC/Mistral_Pro_8B_v0.1,,cuda,0,42,,,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,d35829e539df8480b726c647eeabf91e41eae047,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,MB,1273.831424,19665.518592,0.0,19019.071488,18747.268096,s,10,1.1004881896972656,0.11004881896972656,0.0023048996004499615,0.10927312088012694,0.11063434829711914,0.11377157325744627,0.116281353225708,"[0.11690879821777343, 0.10935049438476563, 0.10919574737548828, 0.10904892730712891, 0.10894719696044922, 0.10919388580322266, 0.10957100677490235, 0.10940447998046875, 0.10893046569824219, 0.10993718719482422]",tokens/s,2326.2403213107023,kWh,1.2923166515746556e-06,7.081279742657858e-07,7.338004058803867e-06,9.338448684644309e-06,tokens/kWh,27413546.79401451,MB,1273.831424,19665.518592,0.0,19019.071488,18774.938112,s,10,38.08989453125,3.808989453125,0.019078616113550908,3.8025064697265627,3.8398253906249997,3.840421728515625,3.8408987988281247,"[3.83969287109375, 3.817088623046875, 3.84101806640625, 3.788548583984375, 3.787248779296875, 3.803858154296875, 3.792437255859375, 3.796133544921875, 3.80115478515625, 3.8227138671875]",tokens/s,16.539820016648527,kWh,4.4770425936018206e-05,2.4536668305228256e-05,0.0001569185995685957,0.0002262256938098422,tokens/kWh,278482.9562859279,,s,629,39.01295923614504,0.06202378256938794,0.013030398047888114,0.06020505523681641,0.061349887847900395,0.06157086715698242,0.1696836053466797,"[0.06014566421508789, 0.060393470764160156, 0.06012211227416992, 0.06210662460327149, 0.061674495697021485, 0.06131814575195312, 0.06132633590698242, 0.06183731079101563, 0.061502464294433595, 0.061158401489257816, 0.0613939208984375, 0.061315071105957034, 0.06130995178222656, 0.06123212814331055, 0.061502464294433595, 0.06128742218017578, 0.061246463775634766, 0.06126489639282227, 0.061238273620605466, 0.061445121765136716, 0.061418495178222655, 0.06131097412109375, 0.06129151916503906, 0.061282302856445314, 0.061238273620605466, 0.06134783935546875, 0.06123519897460938, 0.06125260925292969, 0.06193561553955078, 0.061625343322753906, 0.061363201141357425, 0.061295616149902345, 0.06102220916748047, 0.06139084625244141, 0.061284351348876956, 0.06127718353271484, 0.0612567024230957, 0.06181785583496094, 0.06134067153930664, 0.06156288146972656, 0.061243392944335937, 0.06116454315185547, 0.06133964920043945, 0.06128947067260742, 0.060061695098876954, 0.060083198547363284, 0.060060672760009766, 0.05997158432006836, 0.06000435256958008, 0.059845630645751956, 0.05991526412963867, 0.05991731262207031, 0.06007807922363281, 0.06015180969238281, 0.06000435256958008, 0.06011699295043945, 0.06089932632446289, 0.06028799819946289, 0.05994393539428711, 0.060091392517089844, 0.060091392517089844, 
0.059921409606933596, 0.16969625854492187, 0.060061695098876954, 0.06082252883911133, 0.061591552734375, 0.06101196670532227, 0.06013030242919922, 0.06010572814941406, 0.0602501106262207, 0.06028799819946289, 0.05997158432006836, 0.06132735824584961, 0.06047436904907227, 0.0601077766418457, 0.05990092849731445, 0.0601077766418457, 0.05982617568969727, 0.060012542724609375, 0.05986918258666992, 0.059991039276123044, 0.06011801528930664, 0.060034046173095705, 0.06138163375854492, 0.06113075256347656, 0.0602081298828125, 0.06157619094848633, 0.061553665161132816, 0.06057164764404297, 0.06133964920043945, 0.060126209259033205, 0.060037120819091794, 0.06013542556762695, 0.06003507232666016, 0.060055553436279295, 0.0602347526550293, 0.06018048095703125, 0.060273662567138675, 0.06016921615600586, 0.06019071960449219, 0.0602081298828125, 0.060012542724609375, 0.05991424179077148, 0.06002483367919922, 0.06015385437011719, 0.06015488052368164, 0.06020403289794922, 0.06049894332885742, 0.06033715057373047, 0.060829696655273435, 0.06012723159790039, 0.06236671829223633, 0.06161612701416016, 0.061259777069091796, 0.0615464973449707, 0.06137036895751953, 0.06137036895751953, 0.06138163375854492, 0.06123929595947265, 0.061475841522216794, 0.061618175506591794, 0.061358081817626954, 0.061193214416503904, 0.06094643020629883, 0.06036275100708008, 0.1696942138671875, 0.059963390350341796, 0.060080127716064455, 0.06005759811401367, 0.05998387145996094, 0.06013132858276367, 0.059937793731689455, 0.06092595291137695, 0.060170238494873046, 0.06026342391967773, 0.06095052719116211, 0.061537281036376956, 0.0614389762878418, 0.061521919250488284, 0.06150143814086914, 0.061532161712646485, 0.06149631881713867, 0.061521919250488284, 0.061491199493408207, 0.061297664642333986, 0.06147481536865235, 0.061431808471679686, 0.06186086273193359, 0.061652992248535154, 0.061462528228759764, 0.06092390441894531, 0.060642303466796874, 0.0603955192565918, 0.060916736602783204, 0.06016614532470703, 0.061292545318603515, 0.06056447982788086, 0.06122393417358398, 0.060611583709716796, 0.06010060882568359, 0.06057779312133789, 0.06003302383422852, 0.06037606430053711, 0.060478462219238284, 0.06204723358154297, 0.06168166351318359, 0.06137036895751953, 0.06162432098388672, 0.06139084625244141, 0.061276161193847656, 0.0609699821472168, 0.06122086334228516, 0.061429759979248044, 0.06141952133178711, 0.060104705810546874, 0.06034636688232422, 0.060295169830322265, 0.060012542724609375, 0.06083174514770508, 0.060141632080078125, 0.06118598556518555, 0.06067507171630859, 0.06174822235107422, 0.06235136032104492, 0.0645560302734375, 0.06109593582153321, 0.06043648147583008, 0.06032179260253906, 0.1696563262939453, 0.06014771270751953, 0.06008524703979492, 0.0599736328125, 0.05992038345336914, 0.06009142303466797, 0.0601712646484375, 0.060379104614257814, 0.06002790451049805, 0.05975040054321289, 0.06011391830444336, 0.060055553436279295, 0.060083198547363284, 0.06000230407714844, 0.059878398895263675, 0.06011391830444336, 0.05972582244873047, 0.05986713409423828, 0.059979774475097655, 0.05998188781738281, 0.060198848724365234, 0.060044288635253906, 0.060235774993896485, 0.06028799819946289, 0.06016204833984375, 0.060219390869140625, 0.06016204833984375, 0.060167263031005856, 0.06033295822143555, 0.059843582153320314, 0.05988044738769531, 0.060273662567138675, 0.06010265731811523, 0.06018355178833008, 0.0602347526550293, 0.060148735046386716, 0.05998796844482422, 0.06057369613647461, 0.06069760131835938, 0.06060441589355469, 
0.06010265731811523, 0.06028595352172852, 0.060276737213134764, 0.060007423400878904, 0.059924480438232425, 0.05986918258666992, 0.05994291305541992, 0.059979774475097655, 0.06023372650146484, 0.06008422470092773, 0.059840511322021485, 0.06009548950195313, 0.06000435256958008, 0.060025856018066405, 0.05995622253417969, 0.06044672012329102, 0.06012211227416992, 0.060260353088378904, 0.05996134567260742, 0.06030847930908203, 0.06129151916503906, 0.06032998275756836, 0.05996646499633789, 0.16963388061523438, 0.05992643356323242, 0.05969305419921875, 0.06001766586303711, 0.060075008392333984, 0.06026342391967773, 0.06012211227416992, 0.060006401062011716, 0.06002175903320312, 0.060114944458007816, 0.06008729553222656, 0.05980876922607422, 0.05993267059326172, 0.06014879989624024, 0.06008211135864258, 0.060050430297851565, 0.06004940795898438, 0.06002995300292969, 0.06032588958740234, 0.060096511840820314, 0.060633087158203126, 0.060158977508544924, 0.05991116714477539, 0.059924480438232425, 0.060093441009521485, 0.06010163116455078, 0.06045491027832031, 0.06027468872070312, 0.06011391830444336, 0.06007295989990234, 0.06010265731811523, 0.05996236801147461, 0.05997568130493164, 0.05994905471801758, 0.060020736694335934, 0.05989376068115235, 0.06005350494384765, 0.060093441009521485, 0.06013030242919922, 0.06020505523681641, 0.06084505462646484, 0.06030847930908203, 0.059930622100830076, 0.0600002555847168, 0.06010060882568359, 0.06002175903320312, 0.06013337707519531, 0.060058624267578124, 0.059937793731689455, 0.060128257751464846, 0.060175361633300783, 0.05999411010742187, 0.06016716766357422, 0.060082176208496096, 0.0601712646484375, 0.06014054489135742, 0.060316673278808595, 0.06019276809692383, 0.06014976119995117, 0.06023168182373047, 0.06025932693481445, 0.06012723159790039, 0.060148735046386716, 0.1697617950439453, 0.06006272125244141, 0.06000537490844727, 0.05998591995239258, 0.06001971054077149, 0.05995929718017578, 0.060022785186767576, 0.06013132858276367, 0.060252159118652344, 0.06019583892822265, 0.06055833435058594, 0.06031568145751953, 0.06125052642822266, 0.060652542114257815, 0.06033715057373047, 0.06034534454345703, 0.060827648162841794, 0.060903423309326174, 0.060485633850097656, 0.06008627319335937, 0.06022041702270508, 0.06029926300048828, 0.06068838500976562, 0.06021836853027344, 0.060548095703125, 0.06034739303588867, 0.06013542556762695, 0.06061670303344727, 0.060484638214111326, 0.06049174499511719, 0.06041702270507812, 0.06026444625854492, 0.06048767852783203, 0.060142593383789064, 0.06002175903320312, 0.06013849639892578, 0.05992755126953125, 0.06064332962036133, 0.06011084747314453, 0.06056447982788086, 0.06036684799194336, 0.0600709114074707, 0.06015488052368164, 0.0601343994140625, 0.059947006225585936, 0.06070272064208984, 0.06040371322631836, 0.06091475296020508, 0.06036678314208985, 0.06094028854370117, 0.06049075317382813, 0.06029209518432617, 0.060216320037841796, 0.060012542724609375, 0.06012108612060547, 0.06060441589355469, 0.06030438232421875, 0.06061568069458008, 0.06065971374511719, 0.0607303695678711, 0.061126655578613284, 0.06054195022583008, 0.060440574645996094, 0.17018675231933594, 0.05994803237915039, 0.05994291305541992, 0.060028926849365234, 0.06000128173828125, 0.05982822418212891, 0.06005452728271484, 0.06007910537719727, 0.060104705810546874, 0.061291584014892576, 0.060910526275634765, 0.06029721450805664, 0.060080127716064455, 0.060333057403564455, 0.060047359466552735, 0.06012723159790039, 0.06020403289794922, 0.06009548950195313, 
0.06017740631103516, 0.06015084838867187, 0.06011795043945312, 0.060171295166015625, 0.060176414489746095, 0.06022649765014648, 0.060063743591308595, 0.06020505523681641, 0.06025939178466797, 0.06018348693847656, 0.06049894332885742, 0.06037299346923828, 0.060227584838867185, 0.06006476974487305, 0.06196022415161133, 0.06052246475219727, 0.06003507232666016, 0.05996646499633789, 0.060111873626708986, 0.06007398223876953, 0.06016716766357422, 0.06012723159790039, 0.0600709114074707, 0.060077056884765626, 0.060047359466552735, 0.06002687835693359, 0.05998080062866211, 0.05997158432006836, 0.060020801544189456, 0.06005651092529297, 0.060028926849365234, 0.05994905471801758, 0.05987737655639649, 0.06006784057617188, 0.060055553436279295, 0.060009471893310545, 0.06000336074829102, 0.05989782333374023, 0.060572673797607425, 0.0606484489440918, 0.060281856536865235, 0.0601016960144043, 0.05993056106567383, 0.060265472412109375, 0.060668926239013675, 0.17047654724121095, 0.06005964660644531, 0.06021324920654297, 0.06000537490844727, 0.059859966278076174, 0.05996748733520508, 0.060028926849365234, 0.06006476974487305, 0.06010572814941406, 0.060333057403564455, 0.06016204833984375, 0.06027571105957031, 0.060951553344726565, 0.06043545532226562, 0.06069760131835938, 0.060633087158203126, 0.06048668670654297, 0.06047126388549805, 0.06047334289550781, 0.06032486343383789, 0.06001356887817383, 0.06045798492431641, 0.060009471893310545, 0.06001663970947266, 0.05995315170288086, 0.05999513626098633, 0.060249088287353515, 0.0600709114074707, 0.06002790451049805, 0.06007398223876953, 0.06011699295043945, 0.06021529769897461, 0.06023372650146484, 0.06009446334838867, 0.060061695098876954, 0.06016614532470703, 0.06054297637939453, 0.06076620864868164, 0.06059622573852539, 0.060075008392333984, 0.060022785186767576, 0.060042240142822265, 0.060028926849365234, 0.060142593383789064, 0.060120094299316404, 0.06017839813232422, 0.06066592025756836, 0.06103545761108398, 0.06056249618530273, 0.060310462951660156, 0.06016409683227539, 0.060246017456054686, 0.060115966796875, 0.060055553436279295, 0.060265472412109375, 0.06010265731811523, 0.06011603164672852, 0.0601701774597168, 0.06032998275756836, 0.06029312133789062, 0.06021222305297851, 0.06017433547973633, 0.06041497421264649, 0.17006285095214843, 0.06015999984741211, 0.06017433547973633, 0.06015590286254883, 0.060249088287353515, 0.06020198440551758, 0.060199935913085936, 0.060189697265625, 0.05984460830688477, 0.060247039794921874, 0.06025932693481445, 0.060037120819091794, 0.06019276809692383, 0.05994710540771484, 0.06027356719970703, 0.060109825134277345, 0.060034046173095705, 0.060265472412109375, 0.06053887939453125, 0.06002483367919922, 0.06007295989990234, 0.05989068984985352, 0.060042240142822265, 0.06014156723022461, 0.06014156723022461, 0.05990399932861328, 0.06002380752563476, 0.06008524703979492, 0.06018867111206055, 0.0601569938659668, 0.06012409591674805, 0.06041190338134766, 0.060224510192871096, 0.06030950546264648, 0.06030847930908203, 0.06073855972290039, 0.06051737594604492, 0.060063743591308595, 0.06013132858276367, 0.060243968963623044, 0.06011084747314453, 0.060028926849365234, 0.06010879898071289, 0.06014668655395508, 0.06014566421508789, 0.060561439514160154, 0.06110614395141602, 0.06026342391967773, 0.060352512359619144, 0.060194816589355465, 0.06028287887573242, 0.06029312133789062, 0.06018355178833008, 0.060142593383789064, 0.06020505523681641, 0.060900352478027345, 0.06121062469482422, 0.06106009674072266, 0.0612567024230957, 
0.06125260925292969, 0.06122393417358398, 0.06104166412353516, 0.06120550537109375, 0.17150157165527344, 0.061088768005371094, 0.061110271453857425, 0.06109395217895508, 0.061724609375, 0.0612782096862793, 0.06109798431396484, 0.06099353790283203, 0.06109183883666992, 0.06110310363769531, 0.06113792037963867, 0.06126899337768555, 0.061300735473632816, 0.06097715377807617, 0.06128947067260742, 0.06111334228515625, 0.061257728576660155, 0.061067264556884764, 0.06129971313476563, 0.06122598266601562, 0.0609617919921875, 0.06116454315185547, 0.061058048248291016, 0.06101094436645508, 0.061093887329101565, 0.06106316757202149, 0.06106009674072266, 0.0616703987121582, 0.06128844833374023, 0.061018112182617185, 0.06104166412353516, 0.06119014358520508, 0.06163046264648438, 0.06039961624145508, 0.060224510192871096, 0.060115966796875, 0.06006579208374024, 0.06017433547973633, 0.05986406326293945, 0.06012211227416992, 0.05998387145996094, 0.060061695098876954, 0.06016614532470703, 0.06012313461303711, 0.06006784057617188, 0.06012723159790039, 0.060082176208496096, 0.06021017456054688, 0.060165119171142575, 0.060142593383789064, 0.06000332641601563, 0.060243968963623044, 0.06011084747314453, 0.059991039276123044, 0.06011084747314453, 0.060037120819091794, 0.06013644790649414, 0.06024192047119141, 0.06015795135498047, 0.060148735046386716, 0.06015078353881836, 0.05998591995239258, 0.06001152038574219]",tokens/s,16.122847697675788,,,main,False,False @@ -13666,7 +13666,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e9382-2e22807013547fd41308e6e7;e8f0f6af-521b-4671-bad2-ac9968ac0a10) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe534-49c3c575649eff0456a8cfe4;023919e1-2d0b-4e95-a19b-712a7ad7c824) Repository Not Found for url: https://huggingface.co/s/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -13722,7 +13722,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1560, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpj1spz7c2/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpjnffrq6r/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-350m,,cuda,0,42,,,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,d35829e539df8480b726c647eeabf91e41eae047,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,MB,1155.096576,1455.947776,0.0,809.500672,723.533824,s,15,0.173844352722168,0.011589623514811197,0.0003799565019044181,0.01169200038909912,0.011827808189392091,0.012035174560546875,0.012418913116455078,"[0.012514847755432129, 0.011829600334167481, 0.011780799865722657, 0.011762816429138184, 0.011121055603027345, 0.011691712379455566, 0.011717568397521973, 0.011177023887634278, 0.011095871925354004, 0.011142815589904785, 0.011825119972229004, 0.01169200038909912, 0.011690336227416992, 0.011700032234191894, 0.011102751731872558]",tokens/s,22088.72442429554,kWh,1.3687280103232357e-07,7.499941463656173e-08,3.880742128667657e-07,5.99946428535651e-07,tokens/kWh,426704765.3318726,MB,1155.096576,1455.947776,0.0,809.500672,749.216256,s,15,10.518155212402345,0.7012103474934896,0.010939446017162301,0.7029287719726562,0.7118413452148438,0.7139651794433594,0.7139718444824219,"[0.7139735107421875, 0.7086609497070312, 0.7139616088867188, 0.7029287719726562, 0.7076468505859375, 0.7025215454101562, 0.70141259765625, 0.6947970581054688, 0.6885543212890625, 0.6679688720703125, 0.7027149047851563, 0.7057315063476562, 0.7060153198242187, 0.7033875732421875, 0.6978798217773438]",tokens/s,89.84465250006157,kWh,8.281585413480256e-06,4.537992137930547e-06,1.557408670500015e-05,2.839366425641095e-05,tokens/kWh,2218804.85135959,,s,944,10.667562987327564,0.011300384520474126,0.0014620249435332547,0.011165696144104004,0.011293696403503419,0.01140469717979431,0.022238607692718505,"[0.011946016311645508, 0.011907039642333984, 0.012149760246276856, 0.010962944030761718, 0.01061888027191162, 0.010612735748291016, 0.010579968452453613, 0.010592255592346191, 0.010800127983093261, 0.011176959991455078, 0.011200511932373047, 0.011199487686157226, 0.011238431930541993, 0.011479007720947265, 0.01143398380279541, 0.011431936264038087, 0.011265024185180664, 0.011222016334533692, 0.01135103988647461, 0.01123737621307373, 0.011149312019348144, 0.011206656455993653, 0.01123635196685791, 0.01124454402923584, 0.011256832122802735, 0.011200511932373047, 0.011184127807617187, 0.011299936294555665, 0.011191200256347657, 0.011272192001342773, 0.011169792175292969, 0.011192319869995117, 0.011144191741943359, 0.011204607963562012, 0.01117903995513916, 0.011198431968688964, 0.011172863960266113, 0.01124556827545166, 0.011199487686157226, 0.011174912452697755, 0.011217920303344727, 0.011229184150695801, 0.011172863960266113, 0.01120358371734619, 0.011223039627075194, 0.011226112365722657, 0.011174912452697755, 0.011188223838806152, 0.011195391654968261, 
0.011210751533508301, 0.011783167839050293, 0.013306879997253418, 0.013592576026916504, 0.012147711753845216, 0.011752448081970214, 0.011341823577880859, 0.011339776039123535, 0.011367424011230469, 0.011053055763244628, 0.010977279663085938, 0.011018239974975585, 0.011346943855285644, 0.02332876777648926, 0.01091481590270996, 0.010882047653198243, 0.011111455917358399, 0.011453408241271973, 0.011422719955444336, 0.011257856369018555, 0.011213824272155762, 0.011287551879882812, 0.01124665641784668, 0.011330495834350586, 0.011282431602478027, 0.011202560424804688, 0.011185152053833008, 0.011208703994750976, 0.011286527633666991, 0.01124454402923584, 0.0112424955368042, 0.011131903648376466, 0.011344896316528321, 0.011210751533508301, 0.0112424955368042, 0.011263999938964844, 0.011302911758422851, 0.011213824272155762, 0.011325440406799316, 0.011231231689453124, 0.011256832122802735, 0.01124556827545166, 0.011246591567993165, 0.011188223838806152, 0.011262975692749023, 0.011303936004638672, 0.011294719696044921, 0.01122208023071289, 0.011206591606140136, 0.0112424955368042, 0.011308032035827637, 0.011212800025939941, 0.011331583976745606, 0.011241472244262696, 0.011345919609069824, 0.011208703994750976, 0.011266048431396485, 0.011198464393615723, 0.011206656455993653, 0.011239423751831054, 0.011283455848693847, 0.01124454402923584, 0.01136844825744629, 0.011285504341125489, 0.011257856369018555, 0.01122815990447998, 0.01116982364654541, 0.011263968467712403, 0.011196415901184082, 0.011222016334533692, 0.011268095970153809, 0.011254783630371093, 0.011423744201660157, 0.011216896057128906, 0.011208703994750976, 0.011232255935668945, 0.02350592041015625, 0.0112609281539917, 0.012005375862121583, 0.011383808135986329, 0.011304960250854493, 0.011283455848693847, 0.011226112365722657, 0.011202560424804688, 0.011263999938964844, 0.011246591567993165, 0.011610112190246581, 0.011381759643554687, 0.011267071723937988, 0.01233510398864746, 0.011413503646850585, 0.011245632171630859, 0.011271103858947754, 0.011197440147399903, 0.011265024185180664, 0.011243519783020019, 0.011241472244262696, 0.011213855743408203, 0.011274208068847656, 0.011226112365722657, 0.011177984237670899, 0.01124556827545166, 0.01125376033782959, 0.011276288032531738, 0.011593728065490723, 0.012101632118225097, 0.011258879661560058, 0.011426848411560058, 0.011340767860412597, 0.011250687599182128, 0.011226112365722657, 0.01131827163696289, 0.011140095710754394, 0.011276288032531738, 0.011244576454162597, 0.011317215919494628, 0.011682815551757812, 0.01140121555328369, 0.01131827163696289, 0.011275263786315918, 0.011298815727233886, 0.011229184150695801, 0.01125273609161377, 0.011293696403503419, 0.011226112365722657, 0.011249664306640626, 0.011280384063720703, 0.011209728240966797, 0.011223039627075194, 0.011274239540100098, 0.011243552207946777, 0.01130288028717041, 0.011293696403503419, 0.011228192329406738, 0.011246560096740722, 0.011202560424804688, 0.01121177577972412, 0.01123737621307373, 0.011276288032531738, 0.02346188735961914, 0.011205632209777832, 0.011248640060424805, 0.011196415901184082, 0.011185152053833008, 0.011194368362426758, 0.011103232383728028, 0.011118592262268067, 0.010899456024169921, 0.010898431777954102, 0.010889216423034668, 0.011025407791137695, 0.0111595516204834, 0.011188223838806152, 0.011126784324645997, 0.011218943595886231, 0.011179007530212403, 0.011150336265563965, 0.011134976387023926, 0.011125760078430176, 0.011155455589294434, 0.011191295623779298, 0.011165696144104004, 0.01113804817199707, 
0.011150336265563965, 0.01121996784210205, 0.011215871810913085, 0.011169792175292969, 0.01112985610961914, 0.011187199592590333, 0.011195391654968261, 0.011234304428100587, 0.011191295623779298, 0.011135999679565429, 0.011169792175292969, 0.011190272331237794, 0.011175935745239257, 0.011165696144104004, 0.010827775955200195, 0.010895359992980956, 0.010907648086547851, 0.011126784324645997, 0.011133952140808106, 0.011169792175292969, 0.011165696144104004, 0.011185152053833008, 0.011100159645080567, 0.011106304168701172, 0.011142144203186035, 0.011210751533508301, 0.011107328414916993, 0.011194368362426758, 0.011146240234375, 0.011167743682861327, 0.011464703559875488, 0.011312128067016602, 0.011296768188476563, 0.011158528327941895, 0.01112166404724121, 0.011174912452697755, 0.01123532772064209, 0.011336704254150391, 0.011124735832214355, 0.022219776153564453, 0.011018239974975585, 0.011139072418212891, 0.01112985610961914, 0.011140095710754394, 0.011172863960266113, 0.0111278076171875, 0.011171872138977051, 0.011145183563232422, 0.011186176300048829, 0.011166720390319825, 0.011091967582702637, 0.011150336265563965, 0.011148287773132324, 0.011134976387023926, 0.011150336265563965, 0.011160575866699218, 0.011149312019348144, 0.0111278076171875, 0.01112063980102539, 0.01131007957458496, 0.01112985610961914, 0.011163647651672364, 0.01115340805053711, 0.01120358371734619, 0.011196415901184082, 0.011167743682861327, 0.011165696144104004, 0.011246591567993165, 0.011224063873291015, 0.011179007530212403, 0.011167743682861327, 0.011182080268859864, 0.011175935745239257, 0.011095040321350098, 0.011188223838806152, 0.01114521598815918, 0.011164671897888183, 0.011201536178588867, 0.01122815990447998, 0.01115340805053711, 0.011130880355834961, 0.011168767929077148, 0.011240447998046875, 0.01115340805053711, 0.01117081642150879, 0.011123711585998536, 0.011975680351257324, 0.014036992073059081, 0.011767807960510255, 0.011272192001342773, 0.011220992088317871, 0.011161600112915039, 0.011171839714050292, 0.011180031776428222, 0.011186176300048829, 0.011100159645080567, 0.01124454402923584, 0.011142144203186035, 0.011199487686157226, 0.011167776107788087, 0.011124704360961914, 0.011181056022644043, 0.023325727462768554, 0.01110524845123291, 0.011058176040649414, 0.010943488121032715, 0.010982399940490722, 0.010989567756652833, 0.011012096405029297, 0.011171839714050292, 0.011198464393615723, 0.011144224166870118, 0.011127776145935058, 0.011077695846557618, 0.011132863998413086, 0.011165696144104004, 0.011160575866699218, 0.011229184150695801, 0.01121177577972412, 0.011184127807617187, 0.011172863960266113, 0.01111244773864746, 0.011144191741943359, 0.011133952140808106, 0.011180031776428222, 0.011130880355834961, 0.011205632209777832, 0.011103232383728028, 0.011167743682861327, 0.01122713565826416, 0.011155455589294434, 0.01112063980102539, 0.011210751533508301, 0.011217920303344727, 0.011107328414916993, 0.011163647651672364, 0.01111244773864746, 0.011090944290161133, 0.011166720390319825, 0.011087871551513672, 0.011205632209777832, 0.01115340805053711, 0.011155455589294434, 0.01112063980102539, 0.011190272331237794, 0.01112985610961914, 0.011212800025939941, 0.01121177577972412, 0.011147263526916504, 0.011180031776428222, 0.011126784324645997, 0.011220992088317871, 0.011133983612060546, 0.01121174430847168, 0.011220992088317871, 0.011192319869995117, 0.011101183891296386, 0.011101183891296386, 0.011166751861572266, 0.011146240234375, 0.011119584083557128, 0.011091967582702637, 0.011132927894592285, 
0.011150336265563965, 0.011187199592590333, 0.023353343963623048, 0.011106304168701172, 0.011140095710754394, 0.01115340805053711, 0.01093120002746582, 0.01091481590270996, 0.01095372772216797, 0.011004927635192872, 0.01112985610961914, 0.011101183891296386, 0.011104255676269532, 0.010960895538330078, 0.01094758415222168, 0.010938367843627929, 0.011023360252380371, 0.011216896057128906, 0.01113702392578125, 0.011067392349243164, 0.010962944030761718, 0.010912768363952637, 0.010910719871520995, 0.010944512367248535, 0.010978303909301757, 0.011214847564697266, 0.01115135955810547, 0.011186176300048829, 0.011133952140808106, 0.011190272331237794, 0.011031552314758301, 0.010978303909301757, 0.010934271812438966, 0.0111278076171875, 0.011119615554809571, 0.011147263526916504, 0.011101183891296386, 0.011167743682861327, 0.011180031776428222, 0.011135999679565429, 0.011117568016052246, 0.011113471984863281, 0.011185152053833008, 0.01125376033782959, 0.011240447998046875, 0.011155455589294434, 0.011187199592590333, 0.011194368362426758, 0.011183103561401368, 0.011171839714050292, 0.011149312019348144, 0.011119615554809571, 0.011183103561401368, 0.011135999679565429, 0.011386879920959473, 0.011439104080200196, 0.01154355239868164, 0.011313152313232423, 0.0112609281539917, 0.011216896057128906, 0.011262975692749023, 0.011165696144104004, 0.011180031776428222, 0.011125823974609376, 0.011188159942626954, 0.02225049591064453, 0.01052569580078125, 0.010568703651428223, 0.010543104171752929, 0.010622976303100586, 0.010621952056884766, 0.010568703651428223, 0.010541055679321289, 0.010622976303100586, 0.010554368019104005, 0.010582015991210938, 0.010565631866455078, 0.010530816078186036, 0.010882047653198243, 0.011097087860107421, 0.01112985610961914, 0.01112985610961914, 0.011177984237670899, 0.011173888206481934, 0.011111424446105958, 0.011111488342285156, 0.011157440185546876, 0.011141119956970215, 0.01115135955810547, 0.011143168449401856, 0.01112883186340332, 0.011140095710754394, 0.01136844825744629, 0.01112166404724121, 0.011107328414916993, 0.011119615554809571, 0.011123711585998536, 0.011150336265563965, 0.01120358371734619, 0.01112063980102539, 0.011146271705627441, 0.011225055694580079, 0.011163647651672364, 0.011206656455993653, 0.011156479835510253, 0.011208703994750976, 0.011188223838806152, 0.011194368362426758, 0.011114496231079102, 0.01114521598815918, 0.011217920303344727, 0.011176959991455078, 0.01121177577972412, 0.011148287773132324, 0.010975232124328613, 0.010976256370544434, 0.010999808311462403, 0.011003904342651367, 0.011169792175292969, 0.011200511932373047, 0.011224063873291015, 0.011240447998046875, 0.011239423751831054, 0.010964991569519043, 0.010970111846923827, 0.011025407791137695, 0.011190272331237794, 0.011210783958435059, 0.022187999725341797, 0.010556415557861328, 0.010572799682617188, 0.010587136268615722, 0.010652671813964844, 0.01060969638824463, 0.010614751815795898, 0.010569727897644043, 0.010598400115966796, 0.010579999923706054, 0.010570719718933105, 0.010619903564453125, 0.010581024169921875, 0.010570752143859862, 0.010605536460876464, 0.010647551536560058, 0.010609663963317872, 0.010669055938720704, 0.010605567932128907, 0.010513407707214355, 0.010582015991210938, 0.010595328330993652, 0.010590208053588868, 0.010577919960021973, 0.010600447654724121, 0.01055846405029297, 0.011377663612365722, 0.011400192260742188, 0.011234304428100587, 0.011131903648376466, 0.011103232383728028, 0.011122688293457032, 0.011140095710754394, 0.011132927894592285, 
0.01112985610961914, 0.01115135955810547, 0.011166720390319825, 0.011167743682861327, 0.011173888206481934, 0.011142144203186035, 0.011146240234375, 0.011175935745239257, 0.01126195240020752, 0.011149312019348144, 0.011132927894592285, 0.011160575866699218, 0.01123840045928955, 0.011169792175292969, 0.011114496231079102, 0.011144191741943359, 0.011139072418212891, 0.010988544464111329, 0.010944512367248535, 0.010891263961791992, 0.010995712280273438, 0.011147263526916504, 0.011293696403503419, 0.011174912452697755, 0.011168767929077148, 0.011181056022644043, 0.01122815990447998, 0.011097087860107421, 0.011144191741943359, 0.022222848892211915, 0.010568703651428223, 0.010520640373229981, 0.010560447692871094, 0.010555392265319824, 0.010669055938720704, 0.010604543685913086, 0.010606592178344726, 0.010740736007690429, 0.010578944206237792, 0.010570752143859862, 0.010628095626831055, 0.010579968452453613, 0.010572799682617188, 0.010601471900939942, 0.01063424015045166, 0.010566656112670898, 0.010643551826477051, 0.010620832443237305, 0.010530816078186036, 0.010621952056884766, 0.010595328330993652, 0.010600447654724121, 0.010604543685913086, 0.010577919960021973, 0.0105830717086792, 0.010615776062011719, 0.010611712455749512, 0.010565631866455078, 0.010570752143859862, 0.010562560081481934, 0.01053593635559082, 0.010546175956726075, 0.01061888027191162, 0.010591232299804687, 0.010600447654724121, 0.010589183807373047, 0.010599424362182617, 0.010596351623535156, 0.010602527618408204, 0.010623968124389649, 0.010583040237426757, 0.010593279838562012, 0.010605567932128907, 0.010557439804077149, 0.01064140796661377, 0.010597375869750977, 0.010564607620239258, 0.010590208053588868, 0.010691583633422852, 0.010604543685913086, 0.010619903564453125, 0.01062604808807373, 0.010612735748291016, 0.010584063529968261, 0.010546175956726075, 0.010605567932128907, 0.010622976303100586, 0.010594304084777833, 0.010560511589050293, 0.010608639717102051, 0.010576895713806152, 0.010612768173217773, 0.023476192474365234, 0.011000831604003907, 0.010892288208007812, 0.011049983978271484, 0.011250687599182128, 0.011179007530212403, 0.01115443229675293, 0.011082847595214844, 0.01117686367034912, 0.011156479835510253, 0.011122688293457032, 0.011104255676269532, 0.011077631950378418, 0.011192319869995117, 0.011141119956970215, 0.011192319869995117, 0.01115340805053711, 0.011197471618652343, 0.011192288398742676, 0.0111278076171875, 0.011124735832214355, 0.011157504081726074, 0.011213824272155762, 0.01112883186340332, 0.0111595516204834, 0.011113471984863281, 0.011140128135681153, 0.011154399871826173, 0.01114521598815918, 0.011107328414916993, 0.011158528327941895, 0.01115340805053711, 0.011140095710754394, 0.011147263526916504, 0.011156479835510253, 0.011115519523620606, 0.011163647651672364, 0.011156479835510253, 0.01115135955810547, 0.01116262435913086, 0.011130880355834961, 0.011122688293457032, 0.011018239974975585, 0.010986495971679687, 0.011015232086181641, 0.011019200325012206, 0.0112609281539917, 0.011164671897888183, 0.011179007530212403, 0.011146240234375, 0.01121177577972412, 0.01115135955810547, 0.011171839714050292, 0.011141119956970215, 0.011281408309936524, 0.01120358371734619, 0.011191295623779298, 0.011168767929077148, 0.011190272331237794, 0.011324480056762695, 0.011292608261108398, 0.011140095710754394, 0.011161600112915039, 0.023377920150756838, 0.011315199851989746, 0.011220992088317871, 0.011183103561401368, 0.011199487686157226, 0.011177984237670899, 0.011420672416687011, 0.011294783592224121, 
0.011265983581542969, 0.011155455589294434, 0.011230208396911622, 0.011168767929077148, 0.011123711585998536, 0.01124556827545166, 0.011197440147399903, 0.011199487686157226, 0.01115443229675293, 0.011144191741943359, 0.01113804817199707, 0.011110400199890137, 0.011140095710754394, 0.011111424446105958, 0.011141119956970215, 0.011123711585998536, 0.0111595516204834, 0.011122688293457032, 0.011305983543395997, 0.01131929588317871, 0.011192319869995117, 0.011145248413085937, 0.011159520149230957, 0.011119615554809571, 0.011196415901184082, 0.01122815990447998, 0.011187199592590333, 0.011146240234375, 0.0111595516204834, 0.011174976348876953, 0.011206591606140136, 0.011270144462585448, 0.01117081642150879, 0.011179007530212403, 0.011171839714050292, 0.01115443229675293, 0.011140095710754394, 0.011191295623779298, 0.011165696144104004, 0.011374591827392578, 0.01121996784210205, 0.011220992088317871, 0.011185152053833008, 0.011156479835510253, 0.011411456108093262, 0.011210751533508301, 0.01112166404724121, 0.011126784324645997, 0.011168767929077148, 0.011199487686157226, 0.011240447998046875, 0.011131903648376466, 0.011180031776428222, 0.01113702392578125, 0.011262975692749023, 0.023784448623657226, 0.011406335830688476, 0.011194368362426758, 0.010918911933898925, 0.010921983718872071, 0.011123711585998536, 0.01111244773864746, 0.011032575607299805, 0.011065343856811523, 0.011059200286865235, 0.011974656105041503, 0.01143295955657959, 0.011126784324645997, 0.011075648307800293, 0.01116870403289795, 0.011183103561401368, 0.011117568016052246, 0.011180031776428222, 0.011175935745239257, 0.011152383804321288, 0.011309056282043458, 0.011322367668151855, 0.011193344116210938, 0.011293696403503419, 0.011202560424804688, 0.011185152053833008, 0.011208703994750976, 0.011263999938964844, 0.01115135955810547, 0.011171839714050292, 0.011182080268859864, 0.011140095710754394, 0.011177984237670899, 0.011115519523620606, 0.011192352294921874, 0.011250656127929687, 0.011204607963562012, 0.011172863960266113, 0.011143168449401856, 0.011181056022644043, 0.011270144462585448, 0.011250687599182128, 0.01125376033782959, 0.011148287773132324, 0.011201536178588867, 0.011172863960266113, 0.01122815990447998, 0.011149312019348144, 0.011142144203186035, 0.011180031776428222, 0.011180031776428222, 0.011220992088317871, 0.011209728240966797, 0.011199487686157226, 0.011199487686157226, 0.011239423751831054, 0.011174912452697755, 0.01120358371734619, 0.011176992416381835, 0.011191264152526855, 0.011183103561401368, 0.01112063980102539, 0.011161600112915039, 0.02329599952697754, 0.011206656455993653, 0.011105279922485351, 0.011160575866699218, 0.011181056022644043, 0.011124735832214355, 0.010936320304870606, 0.01092300796508789, 0.011149312019348144, 0.01091481590270996, 0.010949631690979005, 0.01113702392578125, 0.011117568016052246, 0.011149312019348144, 0.01125376033782959, 0.011171839714050292, 0.011158528327941895, 0.011171839714050292, 0.01114521598815918, 0.011100159645080567, 0.011187264442443848, 0.011203519821166992, 0.011156479835510253, 0.011087871551513672, 0.011157504081726074, 0.011142144203186035, 0.011171839714050292, 0.011243519783020019, 0.011124735832214355, 0.011180031776428222, 0.011176959991455078, 0.011116543769836425, 0.011148287773132324, 0.011171839714050292, 0.01115443229675293, 0.01115340805053711, 0.011131903648376466, 0.011108415603637695, 0.011206591606140136, 0.011194368362426758, 0.011298815727233886, 0.01116262435913086, 0.011177984237670899, 0.011129920005798339, 0.01116051197052002, 
0.011156479835510253, 0.011405311584472656, 0.011185152053833008, 0.011182080268859864, 0.011201536178588867, 0.011184127807617187, 0.011123711585998536, 0.011195391654968261, 0.01122713565826416, 0.011183199882507324, 0.011243424415588378, 0.011175935745239257, 0.01132748794555664, 0.011174912452697755, 0.011171839714050292, 0.011160575866699218, 0.011133952140808106, 0.011172863960266113, 0.022174720764160157, 0.010556415557861328, 0.01095372772216797, 0.011195391654968261, 0.011160575866699218, 0.011092032432556152, 0.01113491153717041, 0.01117084789276123, 0.011182047843933106, 0.011116543769836425, 0.011103232383728028, 0.011118592262268067, 0.011139072418212891, 0.011116543769836425, 0.011149312019348144, 0.011268095970153809, 0.011103232383728028, 0.011250687599182128, 0.011189248085021973, 0.011140159606933594, 0.011080639839172363, 0.011131903648376466, 0.011133952140808106, 0.011148287773132324, 0.01115443229675293, 0.011152383804321288, 0.011223039627075194, 0.011332608222961426, 0.011132927894592285, 0.010942463874816894, 0.010919936180114746, 0.010942463874816894, 0.010935296058654785, 0.01093734359741211, 0.011166720390319825, 0.010996800422668457, 0.010977215766906738, 0.010974207878112792, 0.010911744117736816, 0.010969087600708008, 0.01095577621459961, 0.011114496231079102, 0.011009023666381837, 0.01102233600616455, 0.010984448432922364, 0.0109619197845459, 0.0109486083984375, 0.010946559906005859, 0.010987520217895508, 0.011146240234375, 0.01111244773864746, 0.011174912452697755, 0.011181056022644043, 0.011263999938964844, 0.011266048431396485, 0.011032575607299805, 0.010979328155517578, 0.011013119697570802, 0.010957823753356934, 0.010988544464111329, 0.011087871551513672, 0.011174943923950196, 0.011246560096740722]",tokens/s,88.49256396436706,,,main,False,False @@ -13950,7 +13950,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e9314-68233d0f7d76135220f1c501;1c4daaab-009a-4a5e-8b2a-3aab90bd181b) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe4ca-3deadcc655e8627a232863d1;ac2204bf-ebcf-4623-9b4b-46357ea85d59) Repository Not Found for url: https://huggingface.co/m/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -14054,7 +14054,7 @@ Cannot access gated repo for url https://huggingface.co/mistralai/Mixtral-8x22B- Access to model mistralai/Mixtral-8x22B-v0.1 is restricted and you are not in the authorized list. Visit https://huggingface.co/mistralai/Mixtral-8x22B-v0.1 to ask for access. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -bfloat16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,1,1,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +bfloat16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,1,1,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -14093,7 +14093,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d4732-1e13a2097ff0cadd438ab75b;02cd02b6-1cfa-493f-828e-dada64dd3c88) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fea95-480c4bd71e96d5e75897eae6;0f2b223a-e4be-4c87-936a-4172c8afe65a) Repository Not Found for url: https://huggingface.co/1/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -14124,7 +14124,7 @@ OSError: 1 is not a local folder and is not a valid model identifier listed on ' If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -bfloat16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,0,0,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +bfloat16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,0,0,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -14163,7 +14163,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d46c9-1144823a1cabc3f57b7af293;3982d6d7-498e-479c-9f71-691c31f6055d) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fea24-2328f7e8428389fe653185e1;8fff63d3-0a06-4e84-96cd-636f5184f9fd) Repository Not Found for url: https://huggingface.co/0/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -14195,7 +14195,7 @@ If this is a private repository, make sure to pass a token having permission to ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-6.7b,,cuda,0,42,,,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,217063f5c507ed7cc255df7e1f64c4333a0b4dfe,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,MB,1280.765952,15125.184512,0.0,14478.737408,14237.625344,s,10,0.7475148468017578,0.07475148468017578,0.000259700632932575,0.07466180801391602,0.07482695083618164,0.07517518653869629,0.075453775100708,"[0.07552342224121093, 0.07474956512451172, 0.07465363311767578, 0.07465500640869141, 0.0746383056640625, 0.07464227294921875, 0.07466860961914062, 0.07467030334472656, 0.0746146240234375, 0.07469910430908203]",tokens/s,3424.6811430608495,kWh,8.849784571238814e-07,4.849176747850821e-07,4.5043361076120084e-06,5.874232239520972e-06,tokens/kWh,43580163.255662516,MB,1280.765952,15125.184512,0.0,14478.737408,14440.977408,s,10,21.599800048828126,2.1599800048828124,0.003288007360278083,2.1589039306640627,2.160863452148438,2.1653070922851563,2.1688620043945313,"[2.169750732421875, 2.159219482421875, 2.158219970703125, 2.158555419921875, 2.1598759765625, 2.158583740234375, 2.1588251953125, 2.158982666015625, 2.158514404296875, 2.1592724609375]",tokens/s,29.166936664961398,kWh,2.54379255402444e-05,1.3940759493216047e-05,0.00011370850846258598,0.0001530871934960464,tokens/kWh,411530.1780721914,,s,629,22.236759033203107,0.035352558081404015,0.008907020984191575,0.034277374267578126,0.03439431686401367,0.03449446487426758,0.10923319305419922,"[0.03484364700317383, 0.03491123199462891, 0.03477503967285156, 0.03484262466430664, 0.03492764663696289, 0.03489379119873047, 0.03489894485473633, 0.034857982635498046, 0.03505254364013672, 0.034895870208740236, 0.03494400024414063, 0.03482112121582031, 0.034786304473876956, 0.03454771041870117, 0.03423539352416992, 0.03422208023071289, 0.034301952362060545, 0.034326526641845705, 0.03417292785644531, 0.034239486694335936, 0.03416883087158203, 0.03415961456298828, 0.03419443130493164, 0.034206718444824216, 0.03422412872314453, 0.0342108154296875, 0.03422719955444336, 0.03420569610595703, 0.034260990142822266, 0.03424358367919922, 0.034269184112548826, 0.034260990142822266, 0.0342845458984375, 0.03430092620849609, 0.0343040657043457, 0.03431622314453125, 0.034269184112548826, 0.034328575134277346, 0.03425075149536133, 0.03425996780395508, 0.0342999038696289, 0.03428659057617187, 0.03431423950195313, 0.03431628799438476, 0.03445248031616211, 0.03434905624389648, 0.03437363052368164, 0.03434700775146484, 0.034304000854492187, 0.03437158584594727, 0.034331649780273435, 0.03437363052368164, 0.03433779144287109, 0.03443711853027344, 0.03450368118286133, 0.034371646881103515, 0.034324417114257814, 0.03433779144287109, 0.03438079833984375, 0.0344279670715332, 0.03435103988647461, 0.03438387298583984, 0.10949734497070313, 0.034098175048828124, 0.034119678497314454, 0.034127872467041014, 0.034119678497314454, 0.03415961456298828, 
0.0341473274230957, 0.034149375915527344, 0.03415347290039063, 0.03410739135742188, 0.03418931198120117, 0.03413913726806641, 0.034184192657470705, 0.03417190551757812, 0.03415347290039063, 0.03418316650390625, 0.03415961456298828, 0.034135040283203126, 0.03418828964233398, 0.03415552139282227, 0.03419340896606445, 0.03426303863525391, 0.034184192657470705, 0.034239486694335936, 0.03422719955444336, 0.03423129653930664, 0.03420774459838867, 0.03429580688476563, 0.03430604934692383, 0.034223102569580076, 0.034277374267578126, 0.034253822326660154, 0.034290687561035156, 0.034283519744873044, 0.03424563217163086, 0.03425689697265625, 0.034277374267578126, 0.03430809783935547, 0.03433779144287109, 0.03437977600097656, 0.03430297470092773, 0.03441766357421875, 0.034576385498046876, 0.0342999038696289, 0.034336769104003906, 0.034310142517089845, 0.034339839935302735, 0.03437363052368164, 0.03433884811401367, 0.0343592643737793, 0.03433881759643555, 0.03434086227416992, 0.03431731033325195, 0.034352191925048826, 0.034393024444580075, 0.034358272552490236, 0.034402305603027344, 0.034440193176269535, 0.03434086227416992, 0.03433062362670898, 0.034344959259033206, 0.03437977600097656, 0.034377727508544925, 0.10925465393066407, 0.03410128021240234, 0.03414934539794922, 0.034146305084228515, 0.03410739135742188, 0.03412582397460937, 0.034097152709960936, 0.03413302230834961, 0.034131935119628906, 0.03416883087158203, 0.0341258544921875, 0.034161632537841796, 0.03415654373168946, 0.034179073333740234, 0.03414527893066406, 0.034187263488769534, 0.03417804718017578, 0.03420569610595703, 0.03415859222412109, 0.0342108154296875, 0.03414527893066406, 0.03425689697265625, 0.03423027038574219, 0.03420159912109375, 0.03419750213623047, 0.034252799987792966, 0.03422719955444336, 0.03422515106201172, 0.034239486694335936, 0.03424563217163086, 0.03421804809570313, 0.03430495834350586, 0.034277374267578126, 0.03427020645141601, 0.03420876693725586, 0.03426508712768555, 0.03425484848022461, 0.034290687561035156, 0.0342845458984375, 0.03427328109741211, 0.03429478454589844, 0.03430912017822266, 0.034253822326660154, 0.03430912017822266, 0.03429683303833008, 0.034339839935302735, 0.0342999038696289, 0.03440639877319336, 0.03439206314086914, 0.03434188842773438, 0.03433168029785156, 0.03437257766723633, 0.03434598541259765, 0.03435417556762695, 0.034336769104003906, 0.034372608184814454, 0.03435520172119141, 0.03435724639892578, 0.03435212707519531, 0.034344959259033206, 0.03438284683227539, 0.0344002571105957, 0.03437055969238281, 0.10925772857666016, 0.03405516815185547, 0.03412275314331055, 0.034149375915527344, 0.03407257461547852, 0.0341739501953125, 0.03413094329833984, 0.03417292785644531, 0.03415654373168946, 0.034184192657470705, 0.03414425659179687, 0.03417497634887695, 0.034149375915527344, 0.0341104621887207, 0.034157569885253904, 0.03418828964233398, 0.034170879364013675, 0.03418316650390625, 0.03415859222412109, 0.03421491241455078, 0.03420774459838867, 0.03424563217163086, 0.034162689208984375, 0.03419750213623047, 0.03425075149536133, 0.03430297470092773, 0.03423542404174805, 0.03424764633178711, 0.0342210578918457, 0.0342927360534668, 0.03427328109741211, 0.03421286392211914, 0.034239486694335936, 0.03428147125244141, 0.03426508712768555, 0.03433267211914062, 0.03425075149536133, 0.034272254943847655, 0.03429785537719727, 0.03430092620849609, 0.03426611328125, 0.034318336486816405, 0.034282497406005856, 0.0342999038696289, 0.03435007858276367, 0.034305023193359374, 0.03428966522216797, 
0.03432550430297852, 0.034369537353515625, 0.03428966522216797, 0.03436550521850586, 0.03433158493041992, 0.03431731033325195, 0.03437158584594727, 0.034318336486816405, 0.03435520172119141, 0.034405376434326174, 0.03436544036865234, 0.03432447814941406, 0.03438079833984375, 0.03449856185913086, 0.034459648132324217, 0.03441254425048828, 0.10924339294433594, 0.03409100723266602, 0.03411763381958008, 0.034100223541259765, 0.034113536834716796, 0.03410230255126953, 0.03408995056152344, 0.03415961456298828, 0.03414220809936523, 0.03415961456298828, 0.03418316650390625, 0.034103294372558594, 0.034164737701416016, 0.034200576782226565, 0.03439513778686523, 0.03429683303833008, 0.03422003173828125, 0.03417497634887695, 0.034249729156494144, 0.034216960906982424, 0.034377727508544925, 0.03458150482177735, 0.03419852828979492, 0.034200576782226565, 0.0342210578918457, 0.034203647613525394, 0.034336769104003906, 0.034202625274658206, 0.034301952362060545, 0.034229248046875, 0.03423231887817383, 0.03418521499633789, 0.03428761672973633, 0.034290687561035156, 0.034301952362060545, 0.03427840042114258, 0.034334720611572264, 0.034282497406005856, 0.034304000854492187, 0.03425894546508789, 0.03433574295043945, 0.03424665451049805, 0.034334720611572264, 0.03429584121704102, 0.0343408317565918, 0.03429580688476563, 0.03433574295043945, 0.034543617248535156, 0.03470131301879883, 0.03439411163330078, 0.03433779144287109, 0.03429478454589844, 0.03436236953735351, 0.03438489532470703, 0.03437158584594727, 0.03430809783935547, 0.034348033905029295, 0.034377727508544925, 0.03435212707519531, 0.03435520172119141, 0.03437875366210937, 0.034339839935302735, 0.0344002571105957, 0.10922803497314452, 0.03407360076904297, 0.03411251068115234, 0.03411558532714844, 0.034118656158447266, 0.03413913726806641, 0.03412275314331055, 0.03416064071655273, 0.034132991790771484, 0.03417292785644531, 0.03413913726806641, 0.034162689208984375, 0.03419443130493164, 0.03419647979736328, 0.034200576782226565, 0.03415039825439453, 0.03416579055786133, 0.03418211364746094, 0.03421798324584961, 0.03418316650390625, 0.034233345031738284, 0.034239486694335936, 0.03422719955444336, 0.03421286392211914, 0.03424051284790039, 0.034200576782226565, 0.03427020645141601, 0.03426303863525391, 0.03424563217163086, 0.03425689697265625, 0.03426201629638672, 0.03427328109741211, 0.034277374267578126, 0.03427328109741211, 0.03429785537719727, 0.03423235321044922, 0.03430499267578125, 0.03429580688476563, 0.034293758392333985, 0.03426508712768555, 0.034310142517089845, 0.03430604934692383, 0.034298881530761716, 0.034323455810546875, 0.034328575134277346, 0.03432447814941406, 0.03431731033325195, 0.034339839935302735, 0.03434188842773438, 0.03436544036865234, 0.034328575134277346, 0.0343111686706543, 0.034339839935302735, 0.0343818244934082, 0.03433881759643555, 0.034336769104003906, 0.034356224060058595, 0.03438079833984375, 0.03439513778686523, 0.03433779144287109, 0.03442176055908203, 0.03434188842773438, 0.03440127944946289, 0.10923519897460937, 0.03415244674682617, 0.03411049652099609, 0.03409814453125, 0.03412998580932617, 0.03419027328491211, 0.03410636901855469, 0.03419033432006836, 0.0344637451171875, 0.03425075149536133, 0.034164737701416016, 0.03414425659179687, 0.03419136047363281, 0.03421798324584961, 0.03420774459838867, 0.034146305084228515, 0.03416371154785156, 0.03421491241455078, 0.034111488342285154, 0.03423436737060547, 0.03419033432006836, 0.034239486694335936, 0.034184192657470705, 0.03421798324584961, 0.034202625274658206, 
0.03422515106201172, 0.034181121826171876, 0.03425791931152344, 0.03425689697265625, 0.03424051284790039, 0.03420569610595703, 0.03425177764892578, 0.034252799987792966, 0.03426508712768555, 0.034226207733154296, 0.03426095962524414, 0.03429171371459961, 0.034318336486816405, 0.03422412872314453, 0.03430297470092773, 0.034282497406005856, 0.03431935882568359, 0.03426611328125, 0.034320384979248046, 0.034288639068603514, 0.03436851119995117, 0.034377761840820316, 0.034314208984375, 0.0343296012878418, 0.034385921478271485, 0.034304000854492187, 0.034374656677246096, 0.03436236953735351, 0.03439513778686523, 0.03433267211914062, 0.03433267211914062, 0.034358272552490236, 0.03436441421508789, 0.03431731033325195, 0.03436441421508789, 0.034369537353515625, 0.034423809051513675, 0.034418689727783204, 0.10927206420898437, 0.034062335968017575, 0.034129920959472655, 0.03410124969482422, 0.03409612655639648, 0.03416985702514649, 0.03413094329833984, 0.03412070465087891, 0.03414425659179687, 0.034088958740234376, 0.03414527893066406, 0.03418521499633789, 0.034181121826171876, 0.034148353576660156, 0.03416371154785156, 0.034187263488769534, 0.03418521499633789, 0.03425996780395508, 0.03419647979736328, 0.03418009567260742, 0.03420979309082031, 0.03440332794189453, 0.034533374786376955, 0.0341923828125, 0.03419443130493164, 0.03425996780395508, 0.03437670516967774, 0.03431423950195313, 0.03422208023071289, 0.03428761672973633, 0.034249729156494144, 0.03426406478881836, 0.03428659057617187, 0.03425894546508789, 0.03421593475341797, 0.034285568237304685, 0.03426611328125, 0.034237438201904294, 0.03430092620849609, 0.034321407318115234, 0.034260990142822266, 0.03431731033325195, 0.03428966522216797, 0.03427328109741211, 0.034269184112548826, 0.034307071685791016, 0.0343818244934082, 0.03433779144287109, 0.03434188842773438, 0.034334720611572264, 0.03437055969238281, 0.034334720611572264, 0.034323455810546875, 0.034339839935302735, 0.034342910766601564, 0.034348033905029295, 0.03448831939697266, 0.03439206314086914, 0.034339839935302735, 0.0343633918762207, 0.03435007858276367, 0.034356224060058595, 0.03442892837524414, 0.10916352081298829, 0.03413401412963867, 0.034078720092773435, 0.034121726989746096, 0.03408486557006836, 0.034146305084228515, 0.034103294372558594, 0.03413913726806641, 0.034135040283203126, 0.0341401596069336, 0.03416371154785156, 0.03412684631347656, 0.03415961456298828, 0.034119678497314454, 0.03416166305541992, 0.03416169738769531, 0.034210784912109375, 0.034141185760498044, 0.03420159912109375, 0.03414527893066406, 0.03422412872314453, 0.034195457458496094, 0.03421184158325195, 0.034195457458496094, 0.034249729156494144, 0.034216960906982424, 0.034344959259033206, 0.034405376434326174, 0.03427942276000977, 0.03424563217163086, 0.034290687561035156, 0.03421388626098633, 0.03424256134033203, 0.034277374267578126, 0.034310142517089845, 0.034245697021484375, 0.034280384063720706, 0.03423027038574219, 0.0342845458984375, 0.03423436737060547, 0.034299934387207034, 0.034287582397460936, 0.034326526641845705, 0.03426508712768555, 0.03439308929443359, 0.03458662414550781, 0.034525184631347655, 0.03434905624389648, 0.034339839935302735, 0.0342999038696289, 0.03434188842773438, 0.03433267211914062, 0.03434086227416992, 0.034339839935302735, 0.034359294891357424, 0.034307071685791016, 0.034348033905029295, 0.034318336486816405, 0.034348033905029295, 0.03433881759643555, 0.0344002571105957, 0.03432550430297852, 0.034375679016113284, 0.10929971313476562, 0.03405414581298828, 
0.03410432052612305, 0.03409612655639648, 0.034135040283203126, 0.03409920120239258, 0.034154495239257815, 0.03415859222412109, 0.03416985702514649, 0.03415654373168946, 0.03416678237915039, 0.03412684631347656, 0.03419955062866211, 0.03427123260498047, 0.0343818244934082, 0.03427020645141601, 0.034170879364013675, 0.03421491241455078, 0.03416678237915039, 0.0342108154296875, 0.03421491241455078, 0.03421798324584961, 0.03422208023071289, 0.03418931198120117, 0.03422617721557617, 0.03422003173828125, 0.03427123260498047, 0.03423539352416992, 0.03428659057617187, 0.03430198287963867, 0.03428860855102539, 0.03420979309082031, 0.034282497406005856, 0.03431423950195313, 0.034305023193359374, 0.03427328109741211, 0.034255870819091795, 0.03426816177368164, 0.034274303436279296, 0.03429171371459961, 0.034290687561035156, 0.034260990142822266, 0.03443097686767578, 0.034353153228759765, 0.034328575134277346, 0.03427328109741211, 0.03432447814941406, 0.03437363052368164, 0.03430604934692383, 0.034323455810546875, 0.03433779144287109, 0.03434092712402344, 0.03447187042236328, 0.03434393692016602, 0.03442073440551758, 0.03433881759643555, 0.03439513778686523, 0.03445452880859375, 0.034353153228759765, 0.03435212707519531, 0.03438387298583984, 0.03437977600097656, 0.03439923095703125]",tokens/s,28.286496204811137,,,main,False,False -float32-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,.,.,cuda,0,42,,,True,True,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float32-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,.,.,cuda,0,42,,,True,True,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -14263,7 +14263,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e96b4-5d39cc4e550a55ed3888caf7;e2f1dc3b-ab97-4099-8ec3-3dd2ea582f20) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe870-3373b632274deddc7d833417;dae7deaa-cb1c-49ff-8303-7a08dda4ef81) Repository Not Found for url: https://huggingface.co/8/resolve/main/config.json. 
Please make sure you specified the correct `repo_id` and `repo_type`. @@ -14295,7 +14295,7 @@ If this is a private repository, make sure to pass a token having permission to ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.0,,,MB,1140.355072,6809.976832,0.0,6169.821184,6038.342656,s,10,0.47966221237182616,0.04796622123718262,0.0007691327798441897,0.04779440116882324,0.0483730770111084,0.04923428325653076,0.04992324825286865,"[0.050095489501953125, 0.04746470260620117, 0.04725696182250977, 0.04799817657470703, 0.04747948837280273, 0.04759062576293945, 0.04818169784545898, 0.048028766632080076, 0.04752729415893555, 0.048039009094238284]",tokens/s,5337.089172276783,kWh,5.837309862918781e-07,3.1982100842241164e-07,4.071502983527083e-06,4.975054978241371e-06,tokens/kWh,51456717.78897472,MB,1140.355072,6809.976832,0.0,6169.821184,6138.651648,s,10,12.003288085937498,1.2003288085937498,0.014982751552041281,1.2024702758789063,1.21298896484375,1.2213482421875,1.2280356640624999,"[1.22970751953125, 1.19094677734375, 1.16836669921875, 1.2027806396484375, 1.189383056640625, 1.2023114013671874, 1.21113134765625, 1.202629150390625, 1.2047396240234376, 1.2012918701171875]",tokens/s,52.48561856463973,kWh,1.3905145442577158e-05,7.617866285702359e-06,4.862481695127448e-05,7.014782867955399e-05,tokens/kWh,898103.3509646269,,s,629,12.41336014938355,0.019735071779624073,0.005729428753984044,0.019175424575805664,0.019344998931884766,0.019746816635131836,0.06659940368652345,"[0.019190784454345702, 0.01903206443786621, 0.01924710464477539, 0.019734527587890623, 0.01991993522644043, 0.020001728057861327, 0.019952640533447266, 0.019772415161132813, 0.019746816635131836, 0.01982361602783203, 0.020435968399047853, 0.019950592041015625, 0.019336191177368164, 0.01921433639526367, 0.01925632095336914, 0.019985408782958985, 0.01987583923339844, 0.019915775299072267, 0.020152351379394532, 0.01988809585571289, 0.01982464027404785, 0.019672063827514647, 0.019746816635131836, 0.01985843276977539, 0.019722240447998047, 0.019893247604370116, 0.019744768142700195, 0.019619903564453124, 0.019469247817993165, 0.019283967971801756, 0.019358720779418945, 0.019043392181396484, 0.01917535972595215, 0.018775039672851563, 0.019187711715698243, 0.019219455718994142, 0.019171327590942384, 0.019117055892944337, 0.019396608352661132, 0.0191016960144043, 0.019147775650024415, 0.01923072052001953, 0.019106815338134766, 0.019176448822021484, 0.01922662353515625, 0.019144704818725586, 0.0193832950592041, 0.019203071594238282, 0.019137535095214844, 0.019120128631591796, 0.01924505615234375, 0.02048409652709961, 0.020222976684570314, 0.019615743637084963, 0.019252223968505858, 0.01924505615234375, 0.019174400329589843, 0.019109888076782225, 0.019150848388671874, 0.019487775802612305, 0.020645856857299805, 0.019575807571411134, 0.06711910247802734, 0.019195903778076173, 0.019215360641479492, 
0.019172351837158205, 0.019143680572509765, 0.01944063949584961, 0.019182592391967773, 0.019156991958618166, 0.019131391525268555, 0.019547136306762695, 0.019174400329589843, 0.019290111541748048, 0.019175424575805664, 0.019135488510131835, 0.019293184280395507, 0.019290111541748048, 0.019173376083374022, 0.01921433639526367, 0.019150848388671874, 0.019113983154296875, 0.019171327590942384, 0.019188735961914064, 0.01908940887451172, 0.019182592391967773, 0.019183647155761718, 0.019126239776611327, 0.01919692802429199, 0.01918976020812988, 0.019108863830566408, 0.019219455718994142, 0.01920102310180664, 0.019150848388671874, 0.019169279098510742, 0.019178495407104493, 0.019175424575805664, 0.01864806365966797, 0.01846272087097168, 0.0186060791015625, 0.018534400939941405, 0.01856716728210449, 0.018527231216430663, 0.018584640502929687, 0.01856812858581543, 0.018501632690429686, 0.01858358383178711, 0.018563039779663087, 0.018520063400268554, 0.01855897521972656, 0.018503679275512695, 0.018491392135620118, 0.0186060791015625, 0.018506752014160157, 0.018490367889404297, 0.018549760818481444, 0.01844223976135254, 0.018413568496704103, 0.018508832931518556, 0.0185067195892334, 0.018455551147460936, 0.018541568756103514, 0.01846681594848633, 0.01843507194519043, 0.018479103088378905, 0.0661934051513672, 0.018535423278808593, 0.0184586238861084, 0.018482175827026368, 0.018380800247192384, 0.01858252716064453, 0.018457599639892578, 0.018480127334594726, 0.01841049575805664, 0.018535423278808593, 0.018331647872924805, 0.01838387107849121, 0.018325504302978517, 0.018293760299682618, 0.018292800903320312, 0.018249664306640625, 0.018300928115844727, 0.018390016555786134, 0.01848320007324219, 0.018455551147460936, 0.018495487213134765, 0.018491392135620118, 0.0184453125, 0.018482175827026368, 0.018465791702270508, 0.018487295150756835, 0.0184453125, 0.01845248031616211, 0.0184453125, 0.018499584197998048, 0.01845145606994629, 0.018481151580810547, 0.018472959518432617, 0.018455551147460936, 0.018421760559082033, 0.01850060844421387, 0.018530303955078126, 0.018450464248657227, 0.01846063995361328, 0.018481151580810547, 0.018471935272216796, 0.018496511459350586, 0.018497535705566406, 0.018381824493408205, 0.018480127334594726, 0.01849241638183594, 0.018412544250488282, 0.018562047958374024, 0.019050495147705078, 0.018512895584106445, 0.018498559951782227, 0.018498559951782227, 0.018526208877563476, 0.01945088005065918, 0.01924198341369629, 0.019125247955322267, 0.019204095840454103, 0.019188735961914064, 0.019111936569213867, 0.018516992568969725, 0.01847500801086426, 0.018479103088378905, 0.0185262393951416, 0.06707913970947266, 0.018968576431274413, 0.019175424575805664, 0.019307519912719725, 0.01920512008666992, 0.018541568756103514, 0.018550783157348632, 0.018515968322753908, 0.018501632690429686, 0.018508800506591795, 0.018482175827026368, 0.018752511978149415, 0.01922969627380371, 0.019151872634887695, 0.019171327590942384, 0.019207168579101562, 0.019224576950073242, 0.01914886474609375, 0.01892755126953125, 0.01941094398498535, 0.019192832946777344, 0.01919696044921875, 0.01914467239379883, 0.018750463485717773, 0.019192832946777344, 0.019351551055908203, 0.0192174072265625, 0.019324928283691405, 0.019170303344726563, 0.019178495407104493, 0.01923072052001953, 0.019147775650024415, 0.0184770565032959, 0.018513919830322266, 0.01929113578796387, 0.020198400497436524, 0.019627071380615233, 0.019247039794921875, 0.019216384887695313, 0.01904640007019043, 0.019088384628295898, 0.01923379135131836, 
0.018512895584106445, 0.01849241638183594, 0.018593791961669923, 0.01927475166320801, 0.019358720779418945, 0.01917750358581543, 0.019226591110229493, 0.019138559341430664, 0.01916624069213867, 0.019272672653198243, 0.019141632080078123, 0.019184640884399414, 0.01924710464477539, 0.019449920654296876, 0.019213247299194335, 0.01929523277282715, 0.019184640884399414, 0.019170303344726563, 0.019172351837158205, 0.019185663223266602, 0.01922969627380371, 0.0664268798828125, 0.018554880142211915, 0.018568191528320312, 0.018553855895996094, 0.018498559951782227, 0.018561023712158203, 0.01929523277282715, 0.019198976516723632, 0.019094528198242186, 0.019132415771484376, 0.019208192825317383, 0.01923276710510254, 0.0192993278503418, 0.019224576950073242, 0.019160064697265625, 0.01927884864807129, 0.019165184020996092, 0.019311616897583008, 0.01850060844421387, 0.018518016815185546, 0.018497535705566406, 0.01904128074645996, 0.01919795227050781, 0.019317760467529296, 0.019212287902832033, 0.019500032424926757, 0.01963520050048828, 0.019307519912719725, 0.019137535095214844, 0.019377151489257814, 0.01923379135131836, 0.018913280487060546, 0.019182592391967773, 0.018577407836914063, 0.018481151580810547, 0.018524160385131837, 0.018522111892700196, 0.01850060844421387, 0.018515968322753908, 0.018530303955078126, 0.0184586238861084, 0.018530303955078126, 0.018511871337890624, 0.01848422431945801, 0.01849456024169922, 0.018499488830566405, 0.01846784019470215, 0.01999667167663574, 0.019162111282348633, 0.019125247955322267, 0.019148799896240236, 0.019140607833862306, 0.019143680572509765, 0.01923481559753418, 0.018471935272216796, 0.018449407577514648, 0.0184453125, 0.018507776260375978, 0.01859686470031738, 0.018580480575561522, 0.018486272811889647, 0.01849241638183594, 0.018508800506591795, 0.06666649627685547, 0.01880985641479492, 0.018586624145507814, 0.018561023712158203, 0.018482175827026368, 0.018540544509887694, 0.018499584197998048, 0.01850060844421387, 0.018518016815185546, 0.018547712326049806, 0.018464767456054687, 0.018611200332641603, 0.018494464874267577, 0.019162111282348633, 0.019224576950073242, 0.019195999145507812, 0.01922960090637207, 0.01924608039855957, 0.019219455718994142, 0.01906073570251465, 0.019216384887695313, 0.01925632095336914, 0.01927475166320801, 0.01921331214904785, 0.019219455718994142, 0.019163135528564454, 0.019169279098510742, 0.01927884864807129, 0.019163135528564454, 0.01928294372558594, 0.01916524887084961, 0.019212223052978514, 0.01918976020812988, 0.019293184280395507, 0.019162111282348633, 0.019296255111694336, 0.0192174072265625, 0.019174400329589843, 0.01923583984375, 0.01920614433288574, 0.01924710464477539, 0.01923174476623535, 0.01924608039855957, 0.019120128631591796, 0.01923583984375, 0.01924198341369629, 0.01937919998168945, 0.01927168083190918, 0.01922150421142578, 0.019169279098510742, 0.019158016204833983, 0.01920102310180664, 0.019215360641479492, 0.01925324821472168, 0.019123199462890626, 0.019168256759643554, 0.019212287902832033, 0.01928294372558594, 0.01922559928894043, 0.019162111282348633, 0.019215360641479492, 0.019108928680419923, 0.019227584838867186, 0.06794547271728515, 0.019092479705810548, 0.019204095840454103, 0.019195903778076173, 0.019207168579101562, 0.019124223709106446, 0.019212287902832033, 0.019188735961914064, 0.019142656326293944, 0.0192174072265625, 0.019116031646728517, 0.01922764778137207, 0.019160064697265625, 0.01923072052001953, 0.019190784454345702, 0.01926144027709961, 0.019156991958618166, 0.019284000396728517, 
0.0191866569519043, 0.01927065658569336, 0.01932089614868164, 0.019302335739135743, 0.019302400588989257, 0.01921843147277832, 0.01923379135131836, 0.019169279098510742, 0.019268608093261717, 0.019203071594238282, 0.01924403190612793, 0.019154943466186524, 0.0192675838470459, 0.019260416030883788, 0.019184640884399414, 0.01924710464477539, 0.019322879791259767, 0.019310592651367187, 0.019179519653320314, 0.01923788833618164, 0.019107839584350587, 0.0192993278503418, 0.01923891258239746, 0.01918156814575195, 0.019293184280395507, 0.01918976020812988, 0.0192225284576416, 0.01920614433288574, 0.0192542724609375, 0.019185663223266602, 0.01927577590942383, 0.019176448822021484, 0.01924198341369629, 0.01923379135131836, 0.01923583984375, 0.0192194881439209, 0.01924300765991211, 0.01991881561279297, 0.01925836753845215, 0.019343360900878907, 0.01923993682861328, 0.0192225284576416, 0.019110912322998046, 0.018712575912475587, 0.018685951232910156, 0.06779904174804688, 0.01921433639526367, 0.01925119972229004, 0.01922047996520996, 0.01887027168273926, 0.019135488510131835, 0.019141632080078123, 0.019074047088623047, 0.01928294372558594, 0.01924812889099121, 0.01920204734802246, 0.01925529670715332, 0.019225664138793945, 0.019174335479736328, 0.01925939178466797, 0.01922867202758789, 0.01918976020812988, 0.01919491195678711, 0.01916208076477051, 0.019372032165527343, 0.01922764778137207, 0.01925734329223633, 0.019122175216674805, 0.01921843147277832, 0.01922559928894043, 0.01914886474609375, 0.019252159118652343, 0.019169279098510742, 0.019147775650024415, 0.019211263656616212, 0.019178495407104493, 0.01918976020812988, 0.019216384887695313, 0.019208192825317383, 0.019006464004516603, 0.018542591094970702, 0.018714624404907225, 0.01920204734802246, 0.019180543899536134, 0.019195903778076173, 0.019150848388671874, 0.019155967712402345, 0.01924198341369629, 0.019088384628295898, 0.019159040451049804, 0.019155967712402345, 0.01924608039855957, 0.019307519912719725, 0.019112960815429687, 0.018725887298583984, 0.019111936569213867, 0.019170303344726563, 0.019165184020996092, 0.01924505615234375, 0.01901670455932617, 0.018856960296630858, 0.018390016555786134, 0.018386943817138672, 0.018903039932250978, 0.018559999465942383, 0.018363391876220703, 0.018537471771240235, 0.018541568756103514, 0.06720716857910156, 0.0192675838470459, 0.019211263656616212, 0.019331071853637697, 0.019002368927001953, 0.018947071075439453, 0.019384319305419923, 0.01923379135131836, 0.019186752319335938, 0.019213247299194335, 0.019169279098510742, 0.01926655960083008, 0.019194879531860352, 0.01920512008666992, 0.019183616638183593, 0.019175424575805664, 0.01924300765991211, 0.019277824401855468, 0.0192491512298584, 0.019160064697265625, 0.01903718376159668, 0.018949119567871094, 0.019116031646728517, 0.018591808319091796, 0.0185599365234375, 0.018569215774536133, 0.019075071334838867, 0.019233888626098632, 0.01920604705810547, 0.01923788833618164, 0.019155967712402345, 0.019166208267211913, 0.019192832946777344, 0.019204095840454103, 0.019149824142456053, 0.01922764778137207, 0.019188735961914064, 0.019192832946777344, 0.019186687469482423, 0.01922867202758789, 0.019179519653320314, 0.019260416030883788, 0.01920614433288574, 0.01917344093322754, 0.019166208267211913, 0.01926652717590332, 0.019009504318237305, 0.019173376083374022, 0.019134464263916014, 0.019088384628295898, 0.018495487213134765, 0.018493440628051756, 0.01847305679321289, 0.018502559661865235, 0.018965503692626954, 0.01887846374511719, 0.020139072418212892, 
0.01914566421508789, 0.0192225284576416, 0.01921023941040039, 0.019177471160888672, 0.019117055892944337, 0.01921843147277832, 0.0681707534790039, 0.01927475166320801, 0.019087360382080077, 0.019156991958618166, 0.019145727157592773, 0.019141632080078123, 0.0192542724609375, 0.019323904037475585, 0.01926348876953125, 0.01922047996520996, 0.019136512756347656, 0.019175424575805664, 0.01921023941040039, 0.01922150421142578, 0.019194879531860352, 0.019208192825317383, 0.019159040451049804, 0.019177471160888672, 0.01924300765991211, 0.019292160034179686, 0.019139583587646485, 0.01927577590942383, 0.019110912322998046, 0.01923174476623535, 0.01904025650024414, 0.018912256240844725, 0.018592767715454102, 0.018565120697021483, 0.01862451171875, 0.018610176086425782, 0.018569215774536133, 0.01864192008972168, 0.018528255462646484, 0.018564096450805666, 0.018548736572265623, 0.01863680076599121, 0.018507776260375978, 0.018538496017456055, 0.018569215774536133, 0.018565120697021483, 0.018548736572265623, 0.018577472686767578, 0.018594751358032225, 0.01922559928894043, 0.01920204734802246, 0.019272703170776367, 0.01968332862854004, 0.01919388771057129, 0.01923683166503906, 0.019126304626464842, 0.019258335113525392, 0.019292192459106447, 0.019409887313842772, 0.019215360641479492, 0.019388416290283202, 0.019564544677734375, 0.01923993682861328, 0.01926246452331543, 0.019219455718994142, 0.019192832946777344, 0.01918976020812988, 0.01930342483520508, 0.019342336654663086]",tokens/s,50.67121169695835,,,,, -float32-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,2,2,cuda,0,42,,,True,True,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float32-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,2,2,cuda,0,42,,,True,True,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -14334,7 +14334,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d45c0-5ee5fc90676edd8d16e48bce;5db9635b-f72d-46fe-8e66-facc02889ae4) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-664fe919-23d2908a73c47c1d67272d63;003cd8bd-b690-4cdf-ac1a-4c400bc9d1b2) Repository Not Found for url: https://huggingface.co/2/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -14519,7 +14519,7 @@ ChildProcessError: Traceback (most recent call last): TypeError: DeciCoderAttention.forward() got an unexpected keyword argument 'cache_position' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float32-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float32-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -14585,7 +14585,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664e9020-3c31bdb2623b4a84008888a1;0a701014-45d4-43a0-a4d6-1120f10abd1a) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664fe1be-6fb91e1015d1b9282516dba4;7dff9c25-4a06-46bc-9fd5-4cc1f89e6d28) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json. @@ -14669,7 +14669,7 @@ torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 270.00 MiB. 
G ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/opt-2.7b,,cuda,0,42,,,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,d35829e539df8480b726c647eeabf91e41eae047,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,MB,1053.85984,11903.95904,0.0,11263.803392,10971.005952,s,10,0.8630337219238281,0.08630337219238281,0.0005855146901819462,0.08610564804077149,0.08686894912719727,0.08726029930114745,0.08757337944030762,"[0.08765164947509765, 0.08608377838134766, 0.08595763397216796, 0.08605913543701171, 0.08641343688964843, 0.0866407699584961, 0.0853704605102539, 0.08612751770019532, 0.08594735717773437, 0.086781982421875]",tokens/s,2966.280383915227,kWh,1.0394877029789819e-06,5.695905701989097e-07,7.260797718964633e-06,8.869875992142523e-06,tokens/kWh,28861733.831090808,MB,1053.85984,11903.95904,0.0,11263.803392,11168.211456,s,10,17.5690556640625,1.75690556640625,0.006780376558086965,1.7566282348632813,1.7650084350585937,1.7657110656738282,1.7662731701660155,"[1.761355224609375, 1.756353515625, 1.747863525390625, 1.752947998046875, 1.7560301513671874, 1.7569029541015626, 1.7442252197265624, 1.762111083984375, 1.7664136962890624, 1.764852294921875]",tokens/s,35.858500994374154,kWh,2.0598839637306e-05,1.1288458643625504e-05,7.888091008323623e-05,0.00011076820836416775,tokens/kWh,568755.2496369507,,s,629,18.31616716384887,0.02911950264522874,0.010287310475246484,0.027885568618774413,0.028152269363403322,0.028269132614135745,0.11421908935546875,"[0.028080127716064454, 0.027995136260986327, 0.028368896484375, 0.02839347267150879, 0.02799001693725586, 0.028070911407470703, 0.027966463088989257, 0.028026880264282225, 0.02802182388305664, 0.028065727233886718, 0.028064767837524415, 0.028641279220581056, 0.02867715263366699, 0.0284169921875, 0.02838835144042969, 0.02857164764404297, 0.028508159637451173, 0.02832896041870117, 0.028240896224975585, 0.027785215377807617, 0.028045312881469726, 0.02755072021484375, 0.027585535049438475, 0.02756096076965332, 0.027708415985107423, 0.02756915283203125, 0.027617279052734374, 0.027578367233276366, 0.027618303298950195, 0.027610111236572265, 0.02773196792602539, 0.027800575256347656, 0.027603967666625977, 0.027655168533325194, 0.027626495361328125, 0.027642879486083984, 0.027821056365966795, 0.028091392517089843, 0.028318719863891603, 0.028218368530273437, 0.028124160766601562, 0.028131391525268553, 0.02826848030090332, 0.02800230407714844, 0.02775654411315918, 0.027699199676513672, 0.02776473617553711, 0.02796544075012207, 0.027848703384399414, 0.027853824615478515, 0.027711519241333006, 0.027854816436767578, 0.027869184494018553, 0.027896831512451172, 0.02792755126953125, 0.027784191131591796, 0.02775654411315918, 0.0277708797454834, 0.027860992431640624, 0.02772585678100586, 0.027850719451904298, 0.027810815811157227, 0.11423027038574218, 0.02753331184387207, 0.027620351791381836, 0.027468864440917968, 0.027832256317138673, 0.027971584320068358, 0.027495424270629884, 0.027634687423706054, 0.027620351791381836, 0.02800230407714844, 0.027837440490722655, 
0.027676671981811524, 0.028087392807006836, 0.028052383422851563, 0.028108800888061523, 0.028116992950439453, 0.027871231079101562, 0.02751283264160156, 0.027627519607543945, 0.027826175689697266, 0.02779648017883301, 0.02800230407714844, 0.027983871459960938, 0.027635711669921875, 0.02758143997192383, 0.027603967666625977, 0.02811903953552246, 0.028081151962280275, 0.027715583801269532, 0.027572223663330078, 0.027619327545166016, 0.027833343505859375, 0.028087295532226563, 0.027603967666625977, 0.027674623489379883, 0.027653120040893556, 0.027707391738891602, 0.028151872634887696, 0.027927488327026368, 0.02772787284851074, 0.027650047302246093, 0.027679744720458983, 0.02814361572265625, 0.028027904510498046, 0.02778726387023926, 0.027650047302246093, 0.02878976058959961, 0.028808191299438478, 0.028521472930908204, 0.028207103729248048, 0.028184640884399415, 0.02820908737182617, 0.02775040054321289, 0.027708415985107423, 0.02819993591308594, 0.02775449562072754, 0.027806720733642577, 0.02775040054321289, 0.02776166343688965, 0.027802623748779298, 0.027888639450073242, 0.027884544372558592, 0.028231679916381838, 0.11419033813476563, 0.02750876808166504, 0.027622367858886717, 0.027703296661376952, 0.027897855758666993, 0.027864063262939453, 0.028017663955688478, 0.028047359466552735, 0.027889663696289063, 0.02795212745666504, 0.027907072067260744, 0.02795929527282715, 0.027817983627319336, 0.027843584060668947, 0.027896831512451172, 0.027691007614135742, 0.027636768341064454, 0.027590623855590822, 0.02752105522155762, 0.027622432708740235, 0.027602880477905274, 0.027635711669921875, 0.02752921676635742, 0.027657215118408202, 0.02753945541381836, 0.027639808654785155, 0.02757427215576172, 0.027637760162353517, 0.02755276870727539, 0.027637760162353517, 0.027554815292358398, 0.027634687423706054, 0.027580415725708008, 0.027640832901000976, 0.027588607788085938, 0.02772889518737793, 0.027675647735595704, 0.02768998336791992, 0.02774527931213379, 0.027831296920776367, 0.027857919692993165, 0.02796441650390625, 0.02795929527282715, 0.02798182487487793, 0.027687936782836913, 0.0277391357421875, 0.027684864044189454, 0.027775999069213866, 0.027695104598999022, 0.02771968078613281, 0.02773606491088867, 0.027675647735595704, 0.02771455955505371, 0.027806720733642577, 0.02772275161743164, 0.02777292823791504, 0.027828224182128908, 0.027852800369262694, 0.02773094367980957, 0.027786239624023438, 0.027786239624023438, 0.027842559814453126, 0.027743295669555666, 0.11437049865722657, 0.02759884834289551, 0.027520000457763674, 0.02754457664489746, 0.027513856887817382, 0.027692031860351563, 0.027481088638305663, 0.027661312103271486, 0.027546623229980468, 0.027687936782836913, 0.02755788803100586, 0.027672576904296874, 0.02750464057922363, 0.028065792083740236, 0.027636735916137696, 0.02757427215576172, 0.027621376037597657, 0.0275281925201416, 0.027672607421875, 0.02757219123840332, 0.027630592346191408, 0.02755686378479004, 0.027653120040893556, 0.027576320648193358, 0.0276112003326416, 0.027572160720825196, 0.027641855239868163, 0.027586559295654296, 0.027615232467651366, 0.027613183975219727, 0.027760639190673828, 0.027885568618774413, 0.027585535049438475, 0.027711488723754882, 0.028269567489624024, 0.028498943328857423, 0.028224512100219725, 0.02795212745666504, 0.027715583801269532, 0.028291072845458985, 0.028035072326660155, 0.028121088027954103, 0.028033023834228517, 0.028049407958984376, 0.027807743072509765, 0.027720703125, 0.02786204719543457, 0.028103744506835938, 0.028073888778686523, 
0.028128255844116212, 0.028084224700927734, 0.028052480697631835, 0.02791116714477539, 0.02778726387023926, 0.028025856018066408, 0.02813132858276367, 0.02816819190979004, 0.028096511840820314, 0.028024896621704102, 0.027795391082763674, 0.027777023315429687, 0.02798182487487793, 0.02826144027709961, 0.11479341125488281, 0.027822080612182616, 0.027892736434936522, 0.027877376556396483, 0.028070911407470703, 0.027888639450073242, 0.027676671981811524, 0.02755891227722168, 0.02791731262207031, 0.0277258243560791, 0.0275230712890625, 0.02769715118408203, 0.027509759902954102, 0.02768998336791992, 0.02753945541381836, 0.027665407180786132, 0.02759065628051758, 0.02795008087158203, 0.027845632553100585, 0.02795622444152832, 0.027983871459960938, 0.02800127983093262, 0.02799308776855469, 0.02799001693725586, 0.028019712448120116, 0.028019712448120116, 0.028061695098876953, 0.02813542366027832, 0.027975679397583008, 0.02802895927429199, 0.027966432571411133, 0.02770227241516113, 0.027586591720581054, 0.027748319625854494, 0.027966463088989257, 0.02815488052368164, 0.028047359466552735, 0.028047359466552735, 0.028028928756713867, 0.02791219139099121, 0.02771968078613281, 0.027638784408569338, 0.027666431427001953, 0.02778112030029297, 0.027665407180786132, 0.027741184234619142, 0.02778009605407715, 0.028085248947143555, 0.028247039794921876, 0.028030975341796875, 0.027797504425048827, 0.02775551986694336, 0.02773401641845703, 0.02776678466796875, 0.027868160247802733, 0.027741184234619142, 0.027757568359375, 0.028112895965576173, 0.027979776382446288, 0.027806720733642577, 0.027995136260986327, 0.02819174385070801, 0.028076032638549804, 0.11479551696777343, 0.02749951934814453, 0.027642879486083984, 0.027485183715820313, 0.027657215118408202, 0.027830272674560546, 0.027696128845214843, 0.027802623748779298, 0.028034048080444338, 0.028077056884765625, 0.027992063522338868, 0.027915264129638673, 0.02795417594909668, 0.027961376190185547, 0.027890655517578126, 0.02775859260559082, 0.027798528671264647, 0.02770534324645996, 0.02778112030029297, 0.02775961685180664, 0.027708415985107423, 0.02753740882873535, 0.027652095794677735, 0.02775961685180664, 0.027890687942504884, 0.02791935920715332, 0.028314624786376953, 0.027650047302246093, 0.027888639450073242, 0.028021760940551758, 0.02798182487487793, 0.02800230407714844, 0.028000255584716797, 0.028031999588012696, 0.028006399154663086, 0.027999231338500977, 0.028012544631958007, 0.028005376815795898, 0.028091392517089843, 0.02794291114807129, 0.02810572814941406, 0.028227584838867188, 0.02772275161743164, 0.02774630355834961, 0.02815385627746582, 0.028048416137695313, 0.028056543350219728, 0.028008447647094727, 0.028041215896606447, 0.028006399154663086, 0.028056575775146485, 0.028035072326660155, 0.028014623641967773, 0.027736032485961914, 0.02779136085510254, 0.02775449562072754, 0.027752447128295898, 0.02778009605407715, 0.027870208740234374, 0.02771353530883789, 0.027786239624023438, 0.027807743072509765, 0.027907072067260744, 0.11358617401123047, 0.027505664825439452, 0.027651071548461914, 0.02753638458251953, 0.027662336349487306, 0.02751487922668457, 0.027646976470947264, 0.02772585678100586, 0.027819007873535157, 0.027647968292236327, 0.0275230712890625, 0.02772889518737793, 0.02755891227722168, 0.027657215118408202, 0.027631616592407225, 0.027588607788085938, 0.02750668716430664, 0.027673599243164062, 0.027520000457763674, 0.027676671981811524, 0.02755891227722168, 0.027657215118408202, 0.02754560089111328, 0.027630592346191408, 
0.027554815292358398, 0.027662336349487306, 0.02754252815246582, 0.027655168533325194, 0.027675647735595704, 0.027632640838623046, 0.027583488464355467, 0.027668479919433595, 0.027633663177490234, 0.027667455673217774, 0.027792383193969726, 0.027634687423706054, 0.027644927978515626, 0.027576320648193358, 0.027676671981811524, 0.027851776123046876, 0.027604991912841798, 0.027715583801269532, 0.027693056106567384, 0.027683839797973633, 0.027720703125, 0.027669536590576173, 0.027687904357910156, 0.027885568618774413, 0.027825151443481445, 0.02769715118408203, 0.027709440231323244, 0.027793407440185547, 0.027676671981811524, 0.02775551986694336, 0.027836416244506838, 0.02771251106262207, 0.02775551986694336, 0.02790399932861328, 0.027841535568237305, 0.02776780891418457, 0.02777190399169922, 0.027844608306884764, 0.027865087509155274, 0.11437670135498047, 0.027510784149169923, 0.027650047302246093, 0.027494400024414063, 0.027682815551757813, 0.02752204895019531, 0.027663360595703124, 0.02753228759765625, 0.027656192779541015, 0.02816716766357422, 0.029268991470336913, 0.028785663604736327, 0.02800230407714844, 0.027871231079101562, 0.027876352310180662, 0.02792038345336914, 0.028018688201904295, 0.028121088027954103, 0.027979776382446288, 0.027982847213745117, 0.028009471893310548, 0.028060672760009765, 0.028021760940551758, 0.02791731262207031, 0.028020736694335937, 0.027992063522338868, 0.027987968444824218, 0.02794803237915039, 0.027696128845214843, 0.027620351791381836, 0.027665407180786132, 0.02812620735168457, 0.028035072326660155, 0.028004352569580077, 0.02775859260559082, 0.027633663177490234, 0.027885568618774413, 0.027736095428466796, 0.028077024459838867, 0.02810982322692871, 0.028082176208496092, 0.02833612823486328, 0.028010496139526365, 0.028064767837524415, 0.028115999221801757, 0.028073951721191405, 0.02814259147644043, 0.028062719345092774, 0.02774835205078125, 0.02775142478942871, 0.027711488723754882, 0.027894784927368164, 0.028168224334716798, 0.028183519363403322, 0.02815488052368164, 0.028136447906494142, 0.02817024040222168, 0.028242944717407226, 0.028076032638549804, 0.027825151443481445, 0.027842559814453126, 0.027852800369262694, 0.028216320037841795, 0.11450265502929688, 0.028048383712768556, 0.027884544372558592, 0.027856895446777344, 0.02790809631347656, 0.02796031951904297, 0.02794803237915039, 0.02815590476989746, 0.02799411201477051, 0.0279552001953125, 0.02795827293395996, 0.02798899269104004, 0.02791628837585449, 0.027922431945800782, 0.027850751876831056, 0.027883520126342775, 0.027906047821044923, 0.02798182487487793, 0.02795008087158203, 0.027967487335205078, 0.02794905662536621, 0.02802079963684082, 0.02800223922729492, 0.02816716766357422, 0.027996160507202147, 0.02797772789001465, 0.028047359466552735, 0.028013568878173828, 0.02795212745666504, 0.02796441650390625, 0.02800127983093262, 0.028025856018066408, 0.028037120819091797, 0.028014591217041016, 0.02795110321044922, 0.028019712448120116, 0.027967487335205078, 0.028011520385742186, 0.027987968444824218, 0.028019712448120116, 0.028009471893310548, 0.02797875213623047, 0.02792959976196289, 0.02777292823791504, 0.027667455673217774, 0.027839487075805663, 0.02896691131591797, 0.02836787223815918, 0.028201984405517577, 0.028120063781738282, 0.028007423400878906, 0.028030975341796875, 0.028034048080444338, 0.028049407958984376, 0.028076032638549804, 0.02807913589477539, 0.028106719970703124, 0.028218368530273437, 0.02815078353881836, 0.02819174385070801, 0.028201984405517577, 0.028220415115356445, 
0.0283637752532959, 0.11547647857666016, 0.02796544075012207, 0.02817535972595215, 0.027922431945800782, 0.027983871459960938, 0.0279418888092041, 0.027894784927368164, 0.027890687942504884, 0.027836416244506838, 0.027876352310180662, 0.027872255325317383, 0.02794905662536621, 0.02793164825439453, 0.028009471893310548, 0.028054527282714844, 0.02793574333190918, 0.027974655151367187, 0.028014591217041016, 0.028051456451416015, 0.027974655151367187, 0.028073984146118162, 0.027971584320068358, 0.028007423400878906, 0.027975679397583008, 0.028025856018066408, 0.028090368270874022, 0.028021760940551758, 0.028028928756713867, 0.02809343910217285, 0.027991039276123047, 0.027966463088989257, 0.0279552001953125, 0.0281395206451416, 0.028059648513793944, 0.02794803237915039, 0.027969568252563477, 0.028091360092163085, 0.02792755126953125, 0.028051456451416015, 0.02794905662536621, 0.028045312881469726, 0.0279685115814209, 0.02797670364379883, 0.028100608825683594, 0.028039167404174805, 0.027966463088989257, 0.028047359466552735, 0.028009471893310548, 0.028026880264282225, 0.028008447647094727, 0.028077056884765625, 0.028064767837524415, 0.028096511840820314, 0.028144639968872072, 0.02813542366027832, 0.028060672760009765, 0.028059648513793944, 0.028063743591308594, 0.027874336242675782, 0.02803913688659668, 0.028051456451416015, 0.028053504943847656, 0.027853824615478515]",tokens/s,34.34124587165128,,,main,False,False -float32-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float32-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -14746,7 +14746,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-664e96e9-73db0da824ee0b025cdc19cc;6d2547a8-fe90-4f43-80d3-ce0a409645f7) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe8aa-3611bcbb2baeaad010cd81e6;19dee499-7009-4981-9fb5-038c3e0835e4) Repository Not Found for url: https://huggingface.co/x/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -14806,7 +14806,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664e8feb-621758a849ea43df3f673164;4c43d2d6-c7ea-44db-9871-f1450c5ba383) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664fe187-7c7f5eb949ee01737da9554a;a2527e79-1956-44f5-a143-51cde2a244cd) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json. @@ -15328,7 +15328,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664e8dde-790e373e478285053a1d6142;4dd9e47f-b475-47bc-a109-c25f66cd365f) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664fdf71-1343b38b2dc0e01c2a566e8c;5b50b1ba-e4b2-4014-9eff-f21f56301a58) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json. @@ -15413,7 +15413,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e9648-568a8ad417b5983b247050be;e60445f5-6d9f-497f-8146-8f67796e325a) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe801-6ae48bd76a362cfc08939a61;4f2a8f2c-302e-4f68-8718-af492d70c445) Repository Not Found for url: https://huggingface.co/l/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -15481,7 +15481,7 @@ ChildProcessError: Traceback (most recent call last): torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 172.00 MiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float32-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float32-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -15595,7 +15595,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e9535-5b6a0a346a992df755d019fc;997de569-4665-414d-ae35-3f51125708c9) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe6f3-69231342001aa2825f56db4d;6c9f84d6-7002-4189-9095-f5b5af57439f) Repository Not Found for url: https://huggingface.co/i/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -15693,7 +15693,7 @@ ImportError: This modeling file requires the following packages that were not fo ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.0,,,MB,1130.88512,5172.10112,0.0,4531.945472,4478.990336,s,10,0.42166092681884765,0.042166092681884766,0.0005249509913911508,0.0420281925201416,0.04232391090393066,0.0430108190536499,0.043560345573425294,"[0.04369772720336914, 0.042159454345703125, 0.041919521331787106, 0.0421712646484375, 0.04211916732788086, 0.04204617691040039, 0.04201020812988281, 0.04183244705200195, 0.04185776138305664, 0.041847198486328126]",tokens/s,6071.228888371289,kWh,5.16059840697215e-07,2.8277386164204495e-07,3.61884568983405e-06,4.4176793921733096e-06,tokens/kWh,57948976.66262262,MB,1130.88512,5172.10112,0.0,4531.945472,4492.920832,s,10,13.891888793945311,1.3891888793945313,0.023342960546545024,1.3916763305664062,1.42256240234375,1.4231272094726561,1.4235790551757812,"[1.4224368896484374, 1.4236920166015625, 1.3934423828125, 1.3551798095703125, 1.381556396484375, 1.3899102783203126, 1.349650390625, 1.3772557373046874, 1.401121337890625, 1.3976435546875]",tokens/s,45.35020466580335,kWh,1.6424137673465907e-05,9.000330906268072e-06,4.601633290716629e-05,7.144080148690028e-05,tokens/kWh,881849.0090925419,,s,629,14.2456176738739,0.022648040816969636,0.005014991498451189,0.022172672271728516,0.02259086685180664,0.02295295944213867,0.06372139022827149,"[0.023433216094970705, 0.02329599952697754, 0.023392255783081056, 0.02315673637390137, 0.023367679595947266, 0.02327859115600586, 0.022940671920776368, 0.021988351821899413, 0.02204979133605957, 0.022413375854492188, 0.021933023452758788, 0.021866464614868165, 0.021921791076660157, 0.021787647247314454, 0.021924863815307616, 0.02206719970703125, 0.02194534492492676, 0.02182143974304199, 0.021769216537475586, 0.021787647247314454, 0.02168320083618164, 0.023205888748168944, 0.02326527976989746, 0.023174144744873046, 0.022991872787475585, 0.022542335510253905, 0.022572032928466795, 0.022590463638305663, 0.02248089599609375, 0.022627328872680662, 0.022587392807006838, 0.022598655700683593, 0.022595584869384764, 0.022563840866088865, 0.023376895904541017, 0.022808576583862306, 0.0226375675201416, 0.02262118339538574, 0.022571008682250978, 0.0225167350769043, 0.0228351993560791, 0.0225218563079834, 0.02248089599609375, 0.022601728439331056, 0.02248908805847168, 0.022535167694091796, 0.022483968734741212, 0.022526975631713866, 0.0224901123046875, 0.022560768127441407, 0.022564863204956053, 0.02252390480041504, 0.022427648544311524, 0.02250547218322754, 0.02255564880371094, 0.022435840606689454, 0.022697984695434572, 0.02259660720825195, 0.02248089599609375, 0.02245529556274414, 0.02246348762512207, 0.022494207382202147, 0.06446387481689453, 0.022527999877929687, 0.02255564880371094, 0.022553600311279298, 0.022487039566040038, 0.022534143447875975, 
0.022207487106323243, 0.02268876838684082, 0.02250444793701172, 0.022905855178833007, 0.02249728012084961, 0.022769664764404295, 0.02271129608154297, 0.02248089599609375, 0.022451200485229493, 0.023824384689331055, 0.02342710494995117, 0.02288739204406738, 0.02251263999938965, 0.02246246337890625, 0.02249625587463379, 0.02255462455749512, 0.022475776672363282, 0.022518783569335937, 0.022562816619873048, 0.022486015319824217, 0.02252288055419922, 0.022609920501708985, 0.02249113655090332, 0.02251263999938965, 0.022071296691894532, 0.02248806381225586, 0.022527999877929687, 0.022623231887817383, 0.022558719635009765, 0.02242870330810547, 0.02251158332824707, 0.022393856048583984, 0.02251263999938965, 0.022587392807006838, 0.02242355155944824, 0.022402048110961914, 0.02260688018798828, 0.022592479705810548, 0.022432767868041992, 0.022462495803833006, 0.022575071334838867, 0.022599679946899414, 0.022401023864746093, 0.022397951126098634, 0.022389759063720704, 0.022466560363769532, 0.022452224731445314, 0.022417407989501953, 0.022502399444580077, 0.022445056915283205, 0.022498367309570312, 0.022459327697753908, 0.022780927658081054, 0.023590911865234376, 0.02341379165649414, 0.023615455627441405, 0.022580223083496095, 0.06491545867919922, 0.02242252731323242, 0.022483968734741212, 0.022397951126098634, 0.022509567260742186, 0.02246348762512207, 0.02244095993041992, 0.022428735733032227, 0.022558656692504883, 0.022467584609985353, 0.022509567260742186, 0.02244710350036621, 0.022419456481933595, 0.02247372817993164, 0.022503423690795898, 0.022433792114257813, 0.02249318313598633, 0.02247987174987793, 0.02245734405517578, 0.022510591506958007, 0.022436864852905275, 0.022433792114257813, 0.022566911697387695, 0.022502399444580077, 0.022500352859497072, 0.022477888107299806, 0.022497215270996095, 0.022535167694091796, 0.022508544921875, 0.022589439392089843, 0.022608896255493165, 0.022737920761108397, 0.022572032928466795, 0.02273587226867676, 0.022585344314575196, 0.023017471313476562, 0.022789119720458984, 0.02249728012084961, 0.02284236717224121, 0.022635520935058592, 0.022640640258789063, 0.02270412826538086, 0.021995519638061522, 0.02122854423522949, 0.02125312042236328, 0.02122035217285156, 0.021156864166259767, 0.021190656661987304, 0.021332992553710937, 0.021195775985717775, 0.021193727493286133, 0.021159936904907226, 0.02123776054382324, 0.02105446434020996, 0.02104729652404785, 0.0211824951171875, 0.021224416732788087, 0.02126438331604004, 0.021192703247070312, 0.021243904113769533, 0.021335039138793945, 0.02126643180847168, 0.021214208602905273, 0.06389555358886718, 0.02123676872253418, 0.021221343994140624, 0.021242879867553712, 0.02125004768371582, 0.02122137641906738, 0.02127359962463379, 0.021167104721069335, 0.0212541446685791, 0.021274688720703126, 0.021263296127319337, 0.02125721549987793, 0.02127257537841797, 0.021226495742797852, 0.021227519989013673, 0.021332992553710937, 0.02123161506652832, 0.0212490234375, 0.02119987106323242, 0.021219327926635743, 0.02125721549987793, 0.021238784790039062, 0.021269535064697264, 0.021258207321166993, 0.02125823974609375, 0.02125004768371582, 0.02128486442565918, 0.021296127319335938, 0.021197887420654298, 0.021256128311157228, 0.02126131248474121, 0.0212541446685791, 0.02128486442565918, 0.021259263992309572, 0.021404672622680664, 0.022982656478881838, 0.022576128005981445, 0.02231705665588379, 0.02217065620422363, 0.02207535934448242, 0.0220897274017334, 0.02207027244567871, 0.022012928009033202, 0.02207744026184082, 0.022168575286865236, 
0.02206208038330078, 0.022099967956542968, 0.022071296691894532, 0.022128639221191407, 0.022072351455688477, 0.02212451171875, 0.0220579833984375, 0.02125312042236328, 0.021207040786743164, 0.021200895309448242, 0.021202943801879884, 0.02124083137512207, 0.021191680908203125, 0.021329919815063478, 0.02123676872253418, 0.021241823196411134, 0.02119987106323242, 0.021359615325927735, 0.06389759826660156, 0.021198848724365234, 0.021219327926635743, 0.021164031982421876, 0.02119987106323242, 0.021203968048095705, 0.021217279434204102, 0.021166080474853514, 0.021175296783447265, 0.020980735778808594, 0.02104012870788574, 0.021206016540527343, 0.021214208602905273, 0.021130239486694336, 0.021212160110473634, 0.021157888412475585, 0.022006784439086914, 0.021816320419311523, 0.021991424560546875, 0.021747711181640626, 0.021974016189575195, 0.022148096084594726, 0.021772287368774415, 0.021735424041748046, 0.021770240783691407, 0.02209382438659668, 0.02211123275756836, 0.022115327835083007, 0.022162431716918944, 0.022156288146972656, 0.022164480209350586, 0.022132736206054687, 0.022128639221191407, 0.02211737632751465, 0.022167552947998048, 0.022164480209350586, 0.022165504455566407, 0.022138879776000975, 0.022198272705078126, 0.022200351715087892, 0.022202335357666015, 0.022195199966430663, 0.02253209686279297, 0.022961151123046874, 0.02304614448547363, 0.0224716796875, 0.022180864334106445, 0.02217068862915039, 0.022125503540039063, 0.022129663467407225, 0.022178815841674804, 0.022114368438720704, 0.022077375411987305, 0.022152191162109376, 0.02226790428161621, 0.022255615234375, 0.022260736465454102, 0.022194175720214843, 0.022205440521240235, 0.02226585578918457, 0.022222848892211915, 0.02265088081359863, 0.022260736465454102, 0.06377062225341797, 0.021202943801879884, 0.021223424911499023, 0.02125721549987793, 0.021204992294311522, 0.021222400665283202, 0.021181440353393553, 0.021941247940063476, 0.02229043197631836, 0.022040576934814454, 0.02212761688232422, 0.02223411178588867, 0.022210559844970702, 0.02227609634399414, 0.022223871231079103, 0.022189056396484375, 0.02226585578918457, 0.022099967956542968, 0.02228121566772461, 0.022188032150268554, 0.022153215408325197, 0.02211123275756836, 0.022214656829833986, 0.022185983657836913, 0.022136831283569337, 0.022477823257446287, 0.022183935165405275, 0.02211123275756836, 0.022304800033569334, 0.022066144943237304, 0.022098943710327147, 0.02223411178588867, 0.022170623779296874, 0.02215020751953125, 0.022253503799438478, 0.022131711959838866, 0.022137855529785155, 0.022365184783935548, 0.02209689521789551, 0.022013952255249023, 0.02204979133605957, 0.022167552947998048, 0.021985279083251954, 0.02211123275756836, 0.022173696517944336, 0.022013952255249023, 0.022131711959838866, 0.022112255096435548, 0.022167552947998048, 0.02210508728027344, 0.02209791946411133, 0.022198272705078126, 0.02208665657043457, 0.02215116882324219, 0.02220134353637695, 0.022133760452270508, 0.022160383224487306, 0.022168575286865236, 0.022137855529785155, 0.022143999099731446, 0.022147071838378905, 0.022152191162109376, 0.022178815841674804, 0.06375321578979493, 0.02152448081970215, 0.02125721549987793, 0.02152448081970215, 0.02128281593322754, 0.021408767700195314, 0.021296127319335938, 0.02122547149658203, 0.021251071929931642, 0.021207040786743164, 0.021218303680419923, 0.0212807674407959, 0.0219238395690918, 0.021343231201171875, 0.021255168914794922, 0.021151744842529296, 0.021201919555664063, 0.021195775985717775, 0.02124595260620117, 0.021204992294311522, 
0.0212992000579834, 0.02130227279663086, 0.02122035217285156, 0.02128691291809082, 0.021212160110473634, 0.02126438331604004, 0.02128895950317383, 0.021345279693603517, 0.021293088912963866, 0.02132169532775879, 0.021303295135498047, 0.02126950454711914, 0.02128281593322754, 0.02129715156555176, 0.021126144409179686, 0.021585920333862304, 0.022161407470703123, 0.022082559585571288, 0.022195199966430663, 0.02208460807800293, 0.022185983657836913, 0.022150144577026368, 0.022120447158813478, 0.02208051109313965, 0.02208153533935547, 0.02205286407470703, 0.021357568740844726, 0.021193727493286133, 0.02126131248474121, 0.021262367248535155, 0.02118550491333008, 0.021201919555664063, 0.021315584182739256, 0.02126643180847168, 0.021283840179443358, 0.021272640228271484, 0.021241792678833006, 0.021191680908203125, 0.021230592727661132, 0.021186559677124024, 0.021215232849121093, 0.021168127059936523, 0.021204992294311522, 0.0636119041442871, 0.021196800231933592, 0.021167104721069335, 0.021202943801879884, 0.0211712646484375, 0.02117318344116211, 0.02127667236328125, 0.021226495742797852, 0.0212807674407959, 0.021295103073120117, 0.021334016799926758, 0.02145280075073242, 0.021219327926635743, 0.021193727493286133, 0.021218303680419923, 0.021234687805175782, 0.02123263931274414, 0.02121625518798828, 0.02124492835998535, 0.02122854423522949, 0.021222400665283202, 0.021212160110473634, 0.021174272537231444, 0.02131046485900879, 0.021389312744140625, 0.022411264419555665, 0.022288383483886717, 0.022079488754272462, 0.02206003189086914, 0.022288383483886717, 0.02230169677734375, 0.022982656478881838, 0.022477823257446287, 0.022369279861450195, 0.0223242244720459, 0.022389759063720704, 0.02229248046875, 0.022296640396118166, 0.022214591979980467, 0.022177791595458983, 0.022210559844970702, 0.02208153533935547, 0.022189056396484375, 0.02230067253112793, 0.02185932731628418, 0.021811199188232423, 0.022211584091186523, 0.022174720764160157, 0.02229350471496582, 0.022171648025512695, 0.022213632583618165, 0.02233344078063965, 0.02223308753967285, 0.022164480209350586, 0.022404096603393556, 0.022326271057128907, 0.022221824645996095, 0.022175743103027345, 0.02239897537231445, 0.0223242244720459, 0.022138879776000975, 0.022239231109619142, 0.022213632583618165, 0.06426214599609376, 0.021881856918334962, 0.021737472534179687, 0.022112255096435548, 0.022108160018920898, 0.022162431716918944, 0.02210508728027344, 0.02215936088562012, 0.022253568649291993, 0.022240255355834963, 0.022163455963134765, 0.022204416275024414, 0.022139904022216796, 0.022106111526489256, 0.022389759063720704, 0.02223308753967285, 0.022116352081298828, 0.02225049591064453, 0.022107135772705077, 0.02345881652832031, 0.022524927139282228, 0.022189056396484375, 0.02169856071472168, 0.022386688232421875, 0.022495264053344725, 0.022146015167236327, 0.022190080642700196, 0.022185983657836913, 0.02213580894470215, 0.022255615234375, 0.02234060859680176, 0.022230016708374024, 0.02209382438659668, 0.02226380729675293, 0.022303743362426756, 0.022344703674316405, 0.022222848892211915, 0.022303743362426756, 0.02228428840637207, 0.022107135772705077, 0.022152191162109376, 0.02205900764465332, 0.022172672271728516, 0.02209791946411133, 0.02211737632751465, 0.022273056030273436, 0.02215011215209961, 0.02224332809448242, 0.0221214714050293, 0.023085056304931642, 0.02227097511291504, 0.022254592895507814, 0.022328319549560546, 0.022204416275024414, 0.02224127960205078, 0.022228992462158204, 0.022433792114257813, 0.02225868797302246, 0.02230169677734375, 
0.022394880294799805, 0.02233344078063965, 0.022226943969726562, 0.022157312393188477, 0.06363955307006836, 0.021313535690307618, 0.02150399971008301, 0.02230169677734375, 0.022204416275024414, 0.022148096084594726, 0.02211020851135254, 0.022236160278320313, 0.022192127227783204, 0.022130687713623046, 0.022344703674316405, 0.022268928527832032, 0.022185983657836913, 0.02221059226989746, 0.022237152099609376, 0.022334463119506837, 0.021828607559204103, 0.022014976501464844, 0.022213632583618165, 0.022231039047241212, 0.022199296951293947, 0.022237184524536133, 0.02227712059020996, 0.022168575286865236, 0.022215679168701173, 0.022239231109619142, 0.022149120330810547, 0.022130687713623046, 0.022120447158813478, 0.022199296951293947, 0.022175743103027345, 0.022183967590332032, 0.022308832168579103, 0.022339584350585938, 0.02228223991394043, 0.022328319549560546, 0.022294559478759766, 0.022185951232910155, 0.022208511352539064, 0.022426624298095704, 0.02245427131652832, 0.02226380729675293, 0.022167552947998048, 0.022177791595458983, 0.022152191162109376, 0.022106143951416017, 0.02216649627685547, 0.022374399185180666, 0.022219776153564453, 0.022164480209350586, 0.022149120330810547, 0.022133760452270508, 0.022179840087890625, 0.02211020851135254, 0.022221824645996095, 0.022182912826538087, 0.02225766372680664, 0.022106111526489256, 0.022401023864746093, 0.022260736465454102, 0.022359039306640623, 0.022246400833129884, 0.022360063552856444]",tokens/s,44.15392960837142,,,,, -float32-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,v,v,cuda,0,42,,,True,True,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float32-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,v,v,cuda,0,42,,,True,True,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -15732,7 +15732,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d4664-71abc30f1b87c3601dc90295;1a12f0d7-aeac-46b8-96a6-fc55be403b02) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-664fe9bd-09b3a3db1f58bc97189ca737;9d4f020f-58b3-4833-9179-fe51a2fc3e3d) Repository Not Found for url: https://huggingface.co/v/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -15837,7 +15837,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e9500-041d1a9d52e6c8c625a22ddd;741ceee9-1251-4c5d-b54e-36d37a1ecf70) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe6be-2885ba6a26a1ffab678bc5a8;13e5e623-21fa-46cf-888b-a0480359ccd7) Repository Not Found for url: https://huggingface.co/M/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -15907,7 +15907,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e95d5-0423a533021c5b8c5bd4e110;9558c737-ac29-45fe-aaa6-472434a86ecf) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe797-11e9c39962b26a6c2df2b5c0;aff2dabf-0116-40ed-959f-4c8d32c78873) Repository Not Found for url: https://huggingface.co/r/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -15975,7 +15975,7 @@ ChildProcessError: Traceback (most recent call last): torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 172.00 MiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float32-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float32-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -16049,7 +16049,7 @@ ChildProcessError: Traceback (most recent call last): torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 560.00 MiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float32-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float32-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -16078,7 +16078,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664d4861-2eb12b721be7b4ad096f124f;91e9e10f-f4c8-4c5d-a617-60ab5bb1e7cf) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664febd7-7385e7f461d3d80179e89b4b;55689d37-4988-42d6-b0f7-990af951d694) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json. @@ -16163,7 +16163,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e960d-37bb5a366bf2b54831323543;71247c70-2ff8-4a20-868b-4b8fa06f7a79) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe7cc-72353ce733dae4a520f29f4f;166f9b4a-f62c-46d2-b72b-771125c80a9e) Repository Not Found for url: https://huggingface.co/a/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -16225,7 +16225,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664e8fb5-4a9c42360a4fc3893da1aac8;c8ec34c6-d489-4a9b-a96d-4e5964478f0b) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664fe14f-258ae3b41206b0af316ac7ad;249579e4-c620-45f3-9bf4-37b1fa53d3eb) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json. @@ -16351,7 +16351,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e95a0-7e723a67501723f841109eea;3080a888-e9a1-47fb-8a24-3e143a2cb257) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe762-002b1d2828dfa8852f15f262;253680ee-42a9-474f-a8ce-e289e25ba4f3) Repository Not Found for url: https://huggingface.co/t/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -16496,7 +16496,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e9055-2c7cc87902c1693616875309;99612c8b-156d-41ee-aee0-75fad45a3f3f) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe1f5-0a5725ef247390925955370a;db1d1913-25ad-47b0-a074-cf0953bb3151) Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -16566,7 +16566,7 @@ torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 536.00 MiB. 
G ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,facebook/xglm-4.5B,,cuda,0,42,,,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,d35829e539df8480b726c647eeabf91e41eae047,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,MB,1267.949568,20984.6272,0.0,20344.471552,18704.26112,s,10,1.5023166809082031,0.1502316680908203,0.0006441345506977998,0.1502587661743164,0.1509025588989258,0.1511650703430176,0.15137507949829102,"[0.15142758178710938, 0.149262939453125, 0.150144287109375, 0.15084422302246095, 0.15062396240234374, 0.15017861938476562, 0.1504398651123047, 0.14921498107910156, 0.14984130859375, 0.1503389129638672]",tokens/s,1704.0348633101712,kWh,1.8083386599816858e-06,9.908841437209047e-07,1.2620569860757291e-05,1.541979266445988e-05,tokens/kWh,16602039.052706491,MB,1267.949568,20984.6272,0.0,20344.471552,18903.142912,s,10,28.97810498046875,2.897810498046875,0.003902678652760611,2.89655810546875,2.9020618408203127,2.9033882202148438,2.9044493237304687,"[2.9010439453125, 2.90176708984375, 2.895592041015625, 2.904714599609375, 2.89720263671875, 2.8955634765625, 2.901038330078125, 2.892088623046875, 2.89591357421875, 2.8931806640625]",tokens/s,21.740552062483733,kWh,3.416213252678865e-05,1.8722522664320418e-05,0.00013508754830564908,0.0001879722034967582,tokens/kWh,335155.9370377148,,s,629,30.282588092803962,0.04814401922544349,0.01791632267039625,0.04593664169311523,0.04626657180786133,0.04642242584228516,0.19630895141601562,"[0.04616089630126953, 0.04587724685668945, 0.045914112091064455, 0.0458260498046875, 0.04592950439453125, 0.04617212677001953, 0.04592435073852539, 0.04583628845214844, 0.04604620742797851, 0.04650598526000976, 0.04631449508666992, 0.04587417602539062, 0.04608412933349609, 0.04583427047729492, 0.04582393646240234, 0.045778942108154294, 0.04592947387695313, 0.04581171035766601, 0.045723648071289064, 0.04582092666625977, 0.04605542373657227, 0.045808639526367184, 0.04585683059692383, 0.046160831451416015, 0.04600934219360352, 0.04582092666625977, 0.04588236618041992, 0.046080001831054686, 0.04582809448242187, 0.0458158073425293, 0.04588956832885742, 0.04581372833251953, 0.04582297515869141, 0.04576051330566406, 0.04592844772338867, 0.045875297546386716, 0.045821857452392575, 0.04591820907592774, 0.04594790267944336, 0.04638412857055664, 0.04794572830200195, 0.046532608032226565, 0.04618239974975586, 0.046050304412841796, 0.04600012969970703, 0.04581171035766601, 0.046173183441162106, 0.046159870147705076, 0.046453758239746096, 0.04617830276489258, 0.046148609161376954, 0.04589977645874024, 0.04619059371948242, 0.04604828643798828, 0.04619055938720703, 0.046086143493652344, 0.04618137741088867, 0.046069759368896485, 0.04595609664916992, 0.04615884780883789, 0.046036991119384765, 0.046203903198242184, 0.19600076293945312, 0.045813793182373046, 0.045989856719970704, 0.046086143493652344, 0.0458342399597168, 0.04574105453491211, 0.04579942321777344, 0.04579328155517578, 0.04576563262939453, 0.04601958465576172, 0.046115840911865234, 0.046088191986083986, 0.04600934219360352, 
0.046023681640625, 0.04577280044555664, 0.045963264465332034, 0.04582912063598633, 0.04586598587036133, 0.04577280044555664, 0.04592127990722656, 0.04582608032226562, 0.045817825317382814, 0.04579635238647461, 0.045810688018798826, 0.04576768112182617, 0.04607078552246094, 0.045965312957763675, 0.045818878173828126, 0.045884414672851564, 0.04588137435913086, 0.045801441192626954, 0.04593971252441406, 0.045908992767333984, 0.046099456787109375, 0.0459233283996582, 0.04597862243652344, 0.046360576629638675, 0.04672614288330078, 0.046279678344726564, 0.04608204650878906, 0.045878273010253906, 0.046274559020996094, 0.04586086273193359, 0.04608512115478516, 0.046569503784179685, 0.046736351013183595, 0.04613836669921875, 0.04693401718139648, 0.046301185607910154, 0.046235649108886716, 0.046002174377441404, 0.046061569213867185, 0.04605132675170898, 0.04594278335571289, 0.04609638214111328, 0.04616191864013672, 0.046281726837158206, 0.04628070449829102, 0.04618137741088867, 0.04613631820678711, 0.04631654357910156, 0.04640460968017578, 0.04618547058105469, 0.1966991424560547, 0.04572159957885742, 0.04575948715209961, 0.04588748931884765, 0.04585779190063476, 0.04602982330322265, 0.045825023651123044, 0.04605542373657227, 0.04582912063598633, 0.045870079040527346, 0.045801536560058594, 0.04579935836791992, 0.045774848937988284, 0.045810688018798826, 0.04591820907592774, 0.045840385437011716, 0.04585881423950195, 0.045871105194091794, 0.0460840950012207, 0.04587724685668945, 0.04588345718383789, 0.045697982788085935, 0.04579020690917969, 0.04593459320068359, 0.045995105743408204, 0.0458177604675293, 0.04616806411743164, 0.04595916748046875, 0.04584454345703125, 0.04604512023925781, 0.045897727966308595, 0.046004222869873046, 0.045830142974853515, 0.04605440139770508, 0.04591923141479492, 0.04604620742797851, 0.046036991119384765, 0.04597350311279297, 0.0459048957824707, 0.04589977645874024, 0.046091262817382815, 0.045868030548095705, 0.046298110961914066, 0.04594483184814453, 0.045932544708251956, 0.04589468765258789, 0.0460513916015625, 0.046010272979736325, 0.046089214324951173, 0.045881343841552735, 0.04601241683959961, 0.046083072662353515, 0.04585369491577149, 0.04609535980224609, 0.04593766403198242, 0.046306304931640625, 0.046018558502197264, 0.046053375244140625, 0.04596633529663086, 0.046148609161376954, 0.04614553451538086, 0.046099456787109375, 0.046124031066894534, 0.19773234558105468, 0.04665446472167969, 0.04600831985473633, 0.04610662460327149, 0.04598067092895508, 0.04581171035766601, 0.04583935928344727, 0.045840385437011716, 0.04583628845214844, 0.04604313659667969, 0.04591308975219727, 0.046064640045166014, 0.04603801727294922, 0.04599603271484375, 0.04634009552001953, 0.04600627136230469, 0.04581683349609375, 0.046063617706298826, 0.045910079956054686, 0.046046142578125, 0.045778942108154294, 0.045908031463623045, 0.045967296600341795, 0.04611481475830078, 0.04583321762084961, 0.04594483184814453, 0.04599193572998047, 0.04584550476074219, 0.0458076171875, 0.045810688018798826, 0.04601651382446289, 0.04591001510620117, 0.04591001510620117, 0.04594790267944336, 0.04598681640625, 0.045932544708251956, 0.04623769760131836, 0.04611072158813476, 0.04617216110229492, 0.046209022521972655, 0.04632064056396484, 0.04634009552001953, 0.04623155212402344, 0.046252033233642575, 0.04630527877807617, 0.046241790771484374, 0.04642406463623047, 0.04644454574584961, 0.04624281692504883, 0.04640563201904297, 0.04616191864013672, 0.04613631820678711, 0.045985790252685545, 0.04613529586791992, 
0.04598988723754883, 0.046048255920410154, 0.046004222869873046, 0.046118911743164064, 0.04607385635375977, 0.047001598358154296, 0.04673126220703125, 0.046260223388671876, 0.04624281692504883, 0.1972418518066406, 0.045770751953125, 0.0457625617980957, 0.04593664169311523, 0.04602470397949219, 0.045813758850097655, 0.0458076171875, 0.045846527099609374, 0.04585884857177734, 0.045829086303710936, 0.045808639526367184, 0.04580556869506836, 0.04575027084350586, 0.045871105194091794, 0.046042110443115236, 0.045900798797607424, 0.04591206359863281, 0.04601446533203125, 0.04594073486328125, 0.04590694427490234, 0.045825088500976566, 0.045830078125, 0.045808639526367184, 0.046209022521972655, 0.04587417602539062, 0.0459233283996582, 0.046153728485107424, 0.04601036834716797, 0.046002174377441404, 0.045960193634033204, 0.04588544082641602, 0.046219264984130856, 0.0460134391784668, 0.045884414672851564, 0.04610460662841797, 0.046079967498779295, 0.04592639923095703, 0.04597043228149414, 0.04596428680419922, 0.04616089630126953, 0.046140415191650394, 0.04633599853515625, 0.046004222869873046, 0.04652134323120117, 0.04618547058105469, 0.04593459320068359, 0.045946880340576174, 0.04594790267944336, 0.04597862243652344, 0.045878273010253906, 0.046036991119384765, 0.04589363098144531, 0.046069759368896485, 0.04593561553955078, 0.046094337463378904, 0.04591513442993164, 0.045868030548095705, 0.04636876678466797, 0.046058494567871096, 0.04594585418701172, 0.046002174377441404, 0.04607590484619141, 0.04605952072143555, 0.19731149291992187, 0.046287872314453124, 0.04581785583496094, 0.04574310302734375, 0.0456734733581543, 0.04575027084350586, 0.045927486419677734, 0.04577990341186523, 0.04575743865966797, 0.0459233283996582, 0.04576768112182617, 0.04574515151977539, 0.04578201675415039, 0.04583833694458008, 0.04574105453491211, 0.045808639526367184, 0.045753345489501954, 0.04578406524658203, 0.04575948715209961, 0.046148609161376954, 0.046096446990966794, 0.04599599838256836, 0.0458004150390625, 0.04583833694458008, 0.04585369491577149, 0.04581683349609375, 0.04576665496826172, 0.04581478500366211, 0.04593971252441406, 0.045732929229736326, 0.045806526184082035, 0.04574105453491211, 0.04577177429199219, 0.04583935928344727, 0.045992961883544924, 0.04597862243652344, 0.0459315185546875, 0.04606473541259765, 0.04617718505859375, 0.04617523193359375, 0.04585779190063476, 0.04624895858764649, 0.04622438430786133, 0.04620697784423828, 0.04597555160522461, 0.04604313659667969, 0.046015487670898435, 0.046235649108886716, 0.04595097732543945, 0.04591308975219727, 0.04605440139770508, 0.04595199966430664, 0.046004222869873046, 0.04599193572998047, 0.04612505722045898, 0.04601651382446289, 0.046173183441162106, 0.046295040130615236, 0.046071807861328126, 0.0460769271850586, 0.046043167114257814, 0.04610351943969727, 0.04597760009765625, 0.1973544921875, 0.04614144134521484, 0.045917182922363284, 0.046023681640625, 0.04575033569335937, 0.04632160186767578, 0.04609638214111328, 0.04627558517456055, 0.04617728042602539, 0.04604006576538086, 0.046124031066894534, 0.04621209716796875, 0.046271488189697264, 0.04617216110229492, 0.046219264984130856, 0.046104576110839846, 0.047162368774414064, 0.04659814453125, 0.04620083236694336, 0.04656947326660156, 0.046018558502197264, 0.045843456268310545, 0.045827072143554685, 0.04584960174560547, 0.04578713607788086, 0.046086143493652344, 0.045868030548095705, 0.0459950065612793, 0.04579840087890625, 0.046048255920410154, 0.04586809539794922, 0.045886398315429684, 
0.04582195281982422, 0.04611174392700195, 0.04590694427490234, 0.04581171035766601, 0.04590591812133789, 0.0459315185546875, 0.04585580825805664, 0.04592022323608398, 0.04586288070678711, 0.04592025756835937, 0.04589567947387695, 0.045908992767333984, 0.045846527099609374, 0.04594073486328125, 0.04584755325317383, 0.04592537689208984, 0.045917182922363284, 0.04594585418701172, 0.045889537811279295, 0.04598988723754883, 0.04584447860717773, 0.046069759368896485, 0.04592127990722656, 0.04601036834716797, 0.04593971252441406, 0.046083072662353515, 0.046448638916015625, 0.04599398422241211, 0.04592844772338867, 0.04598476791381836, 0.046513153076171876, 0.1957171173095703, 0.045778942108154294, 0.0456703987121582, 0.045795326232910154, 0.04570111846923828, 0.045695999145507815, 0.045687808990478515, 0.04568371200561523, 0.04569804763793945, 0.04594483184814453, 0.04580249786376953, 0.04641996765136719, 0.04698521423339844, 0.045908992767333984, 0.04579942321777344, 0.045963264465332034, 0.04580044937133789, 0.04583731079101563, 0.0457625617980957, 0.045897727966308595, 0.04575539016723633, 0.04580556869506836, 0.04575955200195313, 0.045784000396728516, 0.04574617767333984, 0.04586703872680664, 0.045763553619384764, 0.0458342399597168, 0.04577587127685547, 0.045774848937988284, 0.04583935928344727, 0.04587724685668945, 0.045982719421386715, 0.04593766403198242, 0.045916160583496096, 0.04592230224609375, 0.04590182495117188, 0.045870079040527346, 0.045894657135009766, 0.04587417602539062, 0.04576979064941406, 0.04584134292602539, 0.045846527099609374, 0.04578303909301758, 0.04582710266113281, 0.04589667129516602, 0.045851646423339845, 0.045889537811279295, 0.045878273010253906, 0.04594489669799805, 0.04615366363525391, 0.04611072158813476, 0.045889537811279295, 0.04595199966430664, 0.04599814224243164, 0.046034881591796875, 0.04594278335571289, 0.04601139068603516, 0.045946880340576174, 0.04598681640625, 0.04595916748046875, 0.046107646942138675, 0.04624281692504883, 0.19642880249023437, 0.045797374725341795, 0.04583833694458008, 0.04583833694458008, 0.04597452926635742, 0.04625305557250976, 0.0458076171875, 0.04576768112182617, 0.04578713607788086, 0.04581478500366211, 0.045764606475830076, 0.04578924942016602, 0.04576147079467773, 0.04572467041015625, 0.04575539016723633, 0.04588137435913086, 0.045980640411376957, 0.04705279922485352, 0.04674355316162109, 0.04630323028564453, 0.04589875030517578, 0.045739009857177736, 0.04578307342529297, 0.04593660736083984, 0.04589670562744141, 0.045856769561767576, 0.0457891845703125, 0.04592230224609375, 0.04583116912841797, 0.045846527099609374, 0.045841407775878903, 0.045827072143554685, 0.045886463165283206, 0.04583833694458008, 0.04581683349609375, 0.04579328155517578, 0.046015487670898435, 0.04581990432739258, 0.04590182495117188, 0.04586393737792969, 0.046063617706298826, 0.045976577758789064, 0.045889537811279295, 0.04587519836425781, 0.04592435073852539, 0.045930496215820314, 0.04589977645874024, 0.04591308975219727, 0.0459048957824707, 0.04593561553955078, 0.04588851165771484, 0.0459417610168457, 0.04599705505371094, 0.04648550415039063, 0.04619980621337891, 0.04613836669921875, 0.04597145462036133, 0.04631449508666992, 0.046298110961914066, 0.046050304412841796, 0.04593868637084961, 0.045954078674316404, 0.04616188812255859, 0.19703602600097656, 0.04583628845214844, 0.04582092666625977, 0.04577177429199219, 0.045728767395019534, 0.04566527938842774, 0.04571136093139649, 0.04575641632080078, 0.045712383270263675, 0.045870079040527346, 
0.04600012969970703, 0.045753345489501954, 0.04571648025512695, 0.04587212753295898, 0.045722625732421876, 0.0457891845703125, 0.045712383270263675, 0.04588851165771484, 0.04592947387695313, 0.045835262298583986, 0.04598886489868164, 0.04639539337158203, 0.04599193572998047, 0.045843456268310545, 0.04579840087890625, 0.04579020690917969, 0.04589875030517578, 0.04587212753295898, 0.04578201675415039, 0.04582297515869141, 0.045878273010253906, 0.04591414260864258, 0.04586697769165039, 0.04586703872680664, 0.04582499313354492, 0.04634726333618164, 0.046339073181152345, 0.04588851165771484, 0.045985790252685545, 0.04598476791381836, 0.04588339233398438, 0.046045185089111325, 0.04593868637084961, 0.04605440139770508, 0.04590182495117188, 0.04598886489868164, 0.04585779190063476, 0.04600320053100586, 0.0459315185546875, 0.04594483184814453, 0.0459048957824707, 0.04600735855102539, 0.0458771858215332, 0.04599398422241211, 0.04591820907592774, 0.0459048957824707, 0.04596121597290039, 0.045954113006591794, 0.045866943359375, 0.045999103546142575, 0.046265342712402346, 0.04604415893554688, 0.04611174392700195]",tokens/s,20.77101197798444,,,main,False,False -float32-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,B,B,cuda,0,42,,,True,True,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float32-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,B,B,cuda,0,42,,,True,True,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -16605,7 +16605,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d45fc-435a9210060e1f3475f4ca14;e3fefb5e-dd53-4082-8665-cf9c7c178770) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe94e-6b9789bb3ff23672275c06bc;255c79db-0f06-4c04-b713-ea81d7fa18b0) Repository Not Found for url: https://huggingface.co/B/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -16636,7 +16636,7 @@ OSError: B is not a local folder and is not a valid model identifier listed on ' If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float32-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float32-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -16751,7 +16751,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e9352-4c3d9b482c95c2e112401f67;8b8a5543-5217-481a-bd9d-8d70e2fccde7) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe505-1e026c150168affb3abdd8b0;e8f9f3ed-445b-4196-b50c-181364771119) Repository Not Found for url: https://huggingface.co/s/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -17109,7 +17109,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e92e5-09cd74b03906f08813c35587;f1c3a093-eb92-405f-9971-c12a870e1e9f) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe49b-238f7aa63944ab7f1e08606d;f44300d0-b0a1-40db-9f1e-bdafe3a90215) Repository Not Found for url: https://huggingface.co/m/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -17213,7 +17213,7 @@ Cannot access gated repo for url https://huggingface.co/mistralai/Mixtral-8x22B- Access to model mistralai/Mixtral-8x22B-v0.1 is restricted and you are not in the authorized list. Visit https://huggingface.co/mistralai/Mixtral-8x22B-v0.1 to ask for access. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float32-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,1,1,cuda,0,42,,,True,True,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float32-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,1,1,cuda,0,42,,,True,True,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -17252,7 +17252,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d4704-0cbb0e6a687bf9510ac60341;f88d6706-c783-4600-8803-d1529a90910d) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fea63-182905af62f66f1623a2f224;47bb0a4d-c9a2-4ac7-a406-3ad6cda66d46) Repository Not Found for url: https://huggingface.co/1/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -17283,7 +17283,7 @@ OSError: 1 is not a local folder and is not a valid model identifier listed on ' If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float32-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,0,0,cuda,0,42,,,True,True,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float32-eager,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,0,0,cuda,0,42,,,True,True,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -17322,7 +17322,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d469b-18d915d6688904000bd897ba;b4a032eb-7532-4233-8a2f-b061f3cb504e) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe9f5-798a8f4656dcea02305ad48f;07b0baa5-7c83-495b-83a5-88875601c0a0) Repository Not Found for url: https://huggingface.co/0/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -17390,7 +17390,7 @@ ChildProcessError: Traceback (most recent call last): torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float32-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,.,.,cuda,0,42,,,True,True,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float32-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,.,.,cuda,0,42,,,True,True,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -17458,7 +17458,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e96c8-0c1c9a3465cbf6d623967935;d401a1fd-9fe2-404c-936b-e868a17df306) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe884-6331cded66c58c6238917f23;2336b4a8-b400-40fc-9d14-75230262998b) Repository Not Found for url: https://huggingface.co/8/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -17489,7 +17489,7 @@ OSError: 8 is not a local folder and is not a valid model identifier listed on ' If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float32-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,2,2,cuda,0,42,,,True,True,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float32-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,2,2,cuda,0,42,,,True,True,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -17528,7 +17528,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d45d6-7a6595323a13f73144c37a8c;88c4b55e-5aba-4a17-99f1-62f713f14348) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe92d-08f25ec03747339d536ea633;477d4b1a-2555-4cca-b8b9-fd0da433271a) Repository Not Found for url: https://huggingface.co/2/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -17729,7 +17729,7 @@ ChildProcessError: Traceback (most recent call last): TypeError: DeciCoderAttention.forward() got an unexpected keyword argument 'cache_position' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float32-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float32-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -17785,7 +17785,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664e9035-5bbd903b654f6f45029229ac;21c3bd8e-154b-4f99-8770-ab88e431b208) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664fe1d4-5991af251ffa249c745af60d;c2844c68-0d31-4b31-af3f-9e56a2e6b601) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json. @@ -17889,7 +17889,7 @@ ChildProcessError: Traceback (most recent call last): ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float32-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float32-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -17982,7 +17982,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e96fd-024e94026135e7794698fd26;5947581b-79ad-4b2c-991f-0c22af3b44c4) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe8be-420408f04cf745d04893143d;078f4a75-bc18-453d-887c-f6f7a6290990) Repository Not Found for url: https://huggingface.co/x/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -18042,7 +18042,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664e9000-766aecae51b803fb2b689ff3;5673249b-1e99-4701-91d5-7e58662442d8) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664fe19b-29ba4df74f27a83e5babaa31;7dce8bb2-d3c0-4638-a85b-4b5de2d7200e) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json. 
@@ -18540,7 +18540,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664e8df2-07cba0ca3bb68d3e2f124379;f6677af2-f17b-40f4-917b-e81371a26923) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664fdf86-27b28bc0115828d87901cfef;6e87346e-7a91-4805-b144-5b695ba2239c) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json. @@ -18625,7 +18625,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e965d-21eb9fd34e203b8f68a57eef;3179f268-3548-4eeb-8202-8d3a0a114af1) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe815-78285f074844a5d77a1723a5;2e10e605-a26b-4fc3-b1f2-37cc8dd8d5a9) Repository Not Found for url: https://huggingface.co/l/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -18693,7 +18693,7 @@ ChildProcessError: Traceback (most recent call last): torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 172.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float32-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float32-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -18807,7 +18807,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e 
-huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e9549-51cbc38b48d7a7c7292f30ca;26d1cb25-39d3-41f4-96b2-d1272fdcd8c7) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe707-255167334f0dbcaa14bca6c5;9031f205-26ba-4319-aba9-f527dec3c4c0) Repository Not Found for url: https://huggingface.co/i/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -18876,7 +18876,7 @@ torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 896.00 MiB. G ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-1.8B,,cuda,0,42,,,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,217063f5c507ed7cc255df7e1f64c4333a0b4dfe,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,MB,1175.990272,9272.03328,0.0,8631.877632,8498.684928,s,10,0.5260754814147949,0.0526075481414795,0.0002627683235581081,0.05261451148986816,0.053006309509277344,0.05303499412536621,0.05305794181823731,"[0.052999935150146484, 0.052679134368896485, 0.05233830261230469, 0.05238092803955078, 0.05229375839233399, 0.05262063980102539, 0.05260838317871094, 0.0527611198425293, 0.052329601287841795, 0.05306367874145508]",tokens/s,4866.221845419014,kWh,6.546623378241992e-07,3.5871722440627497e-07,4.525012828110586e-06,5.53839239034106e-06,tokens/kWh,46222799.31744512,MB,1175.990272,9272.03328,0.0,8631.877632,8597.292544,s,10,12.151699951171874,1.2151699951171875,0.008398459603249456,1.213122802734375,1.226181201171875,1.2296250854492188,1.2323801928710938,"[1.2254158935546875, 1.214334716796875, 1.2065869140625, 1.2061162109375, 1.206941162109375, 1.2103851318359375, 1.211910888671875, 1.217904052734375, 1.2190360107421876, 1.2330689697265624]",tokens/s,51.844598083517084,kWh,1.4507108161590668e-05,7.949670775071141e-06,5.187353229069146e-05,7.433031122735327e-05,tokens/kWh,847568.0911291038,,s,629,12.604601356506356,0.020039111854541092,0.006308930534271433,0.019177471160888672,0.01962741775512695,0.02015703048706055,0.07194259490966798,"[0.01963315200805664, 0.01924198341369629, 0.019166208267211913, 0.019269632339477538, 0.01925632095336914, 0.019409919738769533, 0.019563520431518554, 0.019211263656616212, 0.019180543899536134, 0.019219455718994142, 0.019316736221313476, 0.0191016960144043, 0.01944166374206543, 0.019489791870117186, 0.01965977668762207, 0.01962598419189453, 0.01945395278930664, 0.019330047607421876, 0.01937919998168945, 0.01926348876953125, 0.019582975387573243, 0.019396608352661132, 0.01921843147277832, 0.019194879531860352, 0.019138559341430664, 0.01924608039855957, 0.02002943992614746, 0.023557119369506836, 0.021587968826293946, 0.020083711624145507, 0.019610624313354492, 0.01926655960083008, 0.019277824401855468, 0.019773439407348634, 0.02042265510559082, 0.019486719131469727, 0.01920102310180664, 0.019134464263916014, 0.019329023361206055, 0.01919795227050781, 0.019195903778076173, 0.019137535095214844, 0.01912118339538574, 0.019141599655151366, 0.01923788833618164, 0.019132415771484376, 
0.019195968627929688, 0.019203008651733397, 0.019166208267211913, 0.01925939178466797, 0.019150848388671874, 0.019163135528564454, 0.01923481559753418, 0.01901875114440918, 0.019138559341430664, 0.019421247482299803, 0.019311552047729493, 0.019100671768188478, 0.01914681625366211, 0.019149759292602538, 0.019158016204833983, 0.01920614433288574, 0.0725032958984375, 0.019342336654663086, 0.019745792388916016, 0.019509248733520508, 0.019284992218017577, 0.01926655960083008, 0.019349504470825195, 0.01925119972229004, 0.019513343811035155, 0.019458112716674806, 0.019256256103515626, 0.01942937660217285, 0.019558399200439454, 0.019612672805786133, 0.019710975646972655, 0.019324928283691405, 0.019334144592285156, 0.019574783325195313, 0.0196177921295166, 0.019340288162231444, 0.019776512145996093, 0.019558399200439454, 0.020486143112182616, 0.019766271591186522, 0.019203071594238282, 0.019134464263916014, 0.01906892776489258, 0.019062784194946288, 0.019207168579101562, 0.01899929618835449, 0.019122175216674805, 0.019135488510131835, 0.019127296447753905, 0.019222591400146486, 0.01914361572265625, 0.01904640007019043, 0.01907302474975586, 0.019170303344726563, 0.019078144073486326, 0.01904844856262207, 0.01925324821472168, 0.019083263397216797, 0.01918156814575195, 0.01925119972229004, 0.019006464004516603, 0.018938880920410156, 0.019167232513427734, 0.019105791091918945, 0.019090431213378906, 0.019184640884399414, 0.019133504867553712, 0.019068864822387694, 0.019092479705810548, 0.019091455459594727, 0.019137535095214844, 0.019108928680419923, 0.019029951095581053, 0.019103744506835937, 0.019159040451049804, 0.019123199462890626, 0.0191345272064209, 0.0191077766418457, 0.019082239151000976, 0.07184896087646485, 0.018969600677490234, 0.019158016204833983, 0.019150848388671874, 0.019013631820678712, 0.018913280487060546, 0.019106815338134766, 0.01912937545776367, 0.01914057540893555, 0.0191016960144043, 0.019163135528564454, 0.019566591262817384, 0.019427391052246095, 0.019072959899902344, 0.019111936569213867, 0.01922355270385742, 0.019133440017700197, 0.019131391525268555, 0.019471359252929688, 0.019162111282348633, 0.019190784454345702, 0.019430400848388672, 0.019136512756347656, 0.019183616638183593, 0.019163135528564454, 0.019147775650024415, 0.019153919219970703, 0.019158016204833983, 0.01911299133300781, 0.01912931251525879, 0.01921843147277832, 0.019187711715698243, 0.019128320693969726, 0.019148799896240236, 0.01921331214904785, 0.019119104385375976, 0.019185663223266602, 0.019144704818725586, 0.019095552444458007, 0.019212287902832033, 0.019178495407104493, 0.019083263397216797, 0.019096576690673828, 0.019078144073486326, 0.019111936569213867, 0.019172351837158205, 0.019094528198242186, 0.019107839584350587, 0.019079168319702147, 0.019088384628295898, 0.019123199462890626, 0.01926246452331543, 0.01906380844116211, 0.0191016960144043, 0.019070976257324217, 0.019056640625, 0.019108863830566408, 0.01927987289428711, 0.01905561637878418, 0.019079200744628905, 0.01906377601623535, 0.01904844856262207, 0.019098623275756836, 0.07197183990478516, 0.01904844856262207, 0.01909350395202637, 0.01906790351867676, 0.019059776306152344, 0.019446720123291017, 0.019116031646728517, 0.01903411293029785, 0.019103744506835937, 0.019136512756347656, 0.01908531188964844, 0.019132415771484376, 0.019159040451049804, 0.019104799270629882, 0.0191231689453125, 0.019107839584350587, 0.019165184020996092, 0.019153919219970703, 0.019092479705810548, 0.019087360382080077, 0.019161088943481445, 0.019091455459594727, 
0.01906483268737793, 0.019130367279052735, 0.019124223709106446, 0.019110912322998046, 0.019112960815429687, 0.01905356788635254, 0.019122175216674805, 0.019147775650024415, 0.019102720260620116, 0.019144704818725586, 0.019160064697265625, 0.019113983154296875, 0.019139583587646485, 0.019104768753051758, 0.019102720260620116, 0.019187711715698243, 0.019537984848022463, 0.019148735046386717, 0.019211263656616212, 0.019165184020996092, 0.019149824142456053, 0.01909760093688965, 0.019130367279052735, 0.019118080139160155, 0.019165184020996092, 0.019105791091918945, 0.019192832946777344, 0.01923072052001953, 0.019148799896240236, 0.019110912322998046, 0.019141632080078123, 0.019150848388671874, 0.019137535095214844, 0.019149824142456053, 0.019159040451049804, 0.01908121681213379, 0.019169343948364257, 0.01922553634643555, 0.019090463638305664, 0.019134431838989257, 0.019112960815429687, 0.07186739349365234, 0.019123199462890626, 0.019142656326293944, 0.019130367279052735, 0.01920512008666992, 0.019145727157592773, 0.019124223709106446, 0.019139583587646485, 0.019075071334838867, 0.019109888076782225, 0.019147775650024415, 0.019146751403808594, 0.019162111282348633, 0.019141632080078123, 0.019756032943725587, 0.019247167587280272, 0.01894393539428711, 0.018899967193603515, 0.018892799377441406, 0.018883583068847656, 0.019104768753051758, 0.019087360382080077, 0.019140607833862306, 0.019090463638305664, 0.0191866569519043, 0.019126272201538085, 0.019182592391967773, 0.019160064697265625, 0.019211263656616212, 0.019311616897583008, 0.019565568923950196, 0.019174400329589843, 0.019152896881103516, 0.019135488510131835, 0.019156991958618166, 0.019140607833862306, 0.019183616638183593, 0.019127296447753905, 0.019143680572509765, 0.019165184020996092, 0.019126272201538085, 0.01923072052001953, 0.019164159774780275, 0.019173376083374022, 0.019123199462890626, 0.019138559341430664, 0.019148799896240236, 0.019155967712402345, 0.019216384887695313, 0.019134464263916014, 0.019139583587646485, 0.019124223709106446, 0.019116031646728517, 0.019141632080078123, 0.01919692802429199, 0.019144704818725586, 0.019158016204833983, 0.019155040740966797, 0.019119007110595703, 0.01918976020812988, 0.019145727157592773, 0.019070976257324217, 0.019120128631591796, 0.07306240081787109, 0.019148799896240236, 0.019194879531860352, 0.019175424575805664, 0.019167232513427734, 0.019169279098510742, 0.019141632080078123, 0.019115007400512696, 0.019152896881103516, 0.01924300765991211, 0.019139583587646485, 0.019135488510131835, 0.019277824401855468, 0.019130399703979492, 0.0191364803314209, 0.019147775650024415, 0.019152896881103516, 0.019124223709106446, 0.01923891258239746, 0.01926348876953125, 0.019174400329589843, 0.019100671768188478, 0.019106815338134766, 0.019130367279052735, 0.01920102310180664, 0.01910688018798828, 0.01914054489135742, 0.01922150421142578, 0.01924608039855957, 0.01920102310180664, 0.019176448822021484, 0.01918156814575195, 0.0192194881439209, 0.019194847106933595, 0.019120128631591796, 0.01964543914794922, 0.019177471160888672, 0.019170368194580078, 0.01915897560119629, 0.019125247955322267, 0.019111936569213867, 0.019175424575805664, 0.019140607833862306, 0.019167232513427734, 0.019088384628295898, 0.019171327590942384, 0.019297279357910157, 0.019110912322998046, 0.019132415771484376, 0.019182592391967773, 0.019121152877807617, 0.019203071594238282, 0.019317760467529296, 0.019169376373291015, 0.01918659210205078, 0.019180543899536134, 0.01967001533508301, 0.019216384887695313, 0.01924095916748047, 
0.019311616897583008, 0.019350528717041016, 0.019168256759643554, 0.019188735961914064, 0.07226163482666016, 0.019095552444458007, 0.0192225284576416, 0.019208192825317383, 0.019143680572509765, 0.019175424575805664, 0.019224576950073242, 0.019110912322998046, 0.019136512756347656, 0.01923583984375, 0.019131391525268555, 0.019200000762939453, 0.01920518493652344, 0.01913542366027832, 0.019129344940185547, 0.019141632080078123, 0.01923891258239746, 0.019162111282348633, 0.019208192825317383, 0.019113983154296875, 0.019120128631591796, 0.019158016204833983, 0.019170303344726563, 0.019102720260620116, 0.01926553535461426, 0.019116031646728517, 0.019123199462890626, 0.019178495407104493, 0.01911404800415039, 0.01957164764404297, 0.019147775650024415, 0.019171327590942384, 0.019244096755981446, 0.019138496398925783, 0.019166208267211913, 0.019103744506835937, 0.019173376083374022, 0.019116031646728517, 0.01932806396484375, 0.01916102409362793, 0.019163135528564454, 0.01922150421142578, 0.019159040451049804, 0.018988031387329102, 0.019722240447998047, 0.020699136734008788, 0.019927040100097656, 0.01923072052001953, 0.019185663223266602, 0.019136512756347656, 0.01926563262939453, 0.019165088653564453, 0.01908531188964844, 0.01901875114440918, 0.019212287902832033, 0.019283967971801756, 0.01902284812927246, 0.019547136306762695, 0.019326976776123047, 0.019296255111694336, 0.019298303604125978, 0.019212287902832033, 0.01923276710510254, 0.0725401611328125, 0.019340288162231444, 0.019264511108398438, 0.019264511108398438, 0.01923583984375, 0.019244096755981446, 0.019226560592651366, 0.01926044845581055, 0.01947337532043457, 0.019610687255859374, 0.019303359985351563, 0.019207168579101562, 0.019316736221313476, 0.01925836753845215, 0.019352575302124024, 0.019194879531860352, 0.019298303604125978, 0.01924608039855957, 0.01925632095336914, 0.019168256759643554, 0.019177471160888672, 0.01922047996520996, 0.019161088943481445, 0.019173376083374022, 0.019993600845336915, 0.019537919998168944, 0.01946009635925293, 0.01949286460876465, 0.019193920135498047, 0.019314624786376952, 0.019211263656616212, 0.01940787124633789, 0.019467264175415038, 0.019252288818359376, 0.019223487854003907, 0.019207168579101562, 0.01923788833618164, 0.019211263656616212, 0.01924608039855957, 0.019179519653320314, 0.019200000762939453, 0.019167232513427734, 0.01926246452331543, 0.019185663223266602, 0.01902796745300293, 0.01903001594543457, 0.019248159408569335, 0.018996192932128907, 0.019105791091918945, 0.01927993583679199, 0.019248064041137696, 0.01923788833618164, 0.0192225284576416, 0.01903411293029785, 0.019994623184204103, 0.021124095916748048, 0.01987993621826172, 0.019523647308349608, 0.01950611114501953, 0.01925529670715332, 0.01926655960083008, 0.01925324821472168, 0.01922355270385742, 0.07199846649169922, 0.019154943466186524, 0.01923481559753418, 0.01925836753845215, 0.01922355270385742, 0.019203071594238282, 0.01922047996520996, 0.019172351837158205, 0.019140607833862306, 0.01920921516418457, 0.019182592391967773, 0.01925017547607422, 0.019136512756347656, 0.019183616638183593, 0.01916111946105957, 0.01927574348449707, 0.019316736221313476, 0.019363840103149413, 0.01922355270385742, 0.019177471160888672, 0.018966527938842775, 0.01919385528564453, 0.019710975646972655, 0.020303871154785155, 0.0194703369140625, 0.019502111434936523, 0.01950204849243164, 0.019491840362548828, 0.019547136306762695, 0.020298751831054687, 0.019386367797851564, 0.01920102310180664, 0.019178495407104493, 0.019176448822021484, 
0.01922764778137207, 0.019155967712402345, 0.01918976020812988, 0.019149824142456053, 0.01904537582397461, 0.019123199462890626, 0.01919795227050781, 0.01902796745300293, 0.019187711715698243, 0.01918156814575195, 0.019308544158935546, 0.019474431991577147, 0.019483648300170898, 0.01923276710510254, 0.019320831298828126, 0.01920921516418457, 0.01942118453979492, 0.01940377616882324, 0.019475456237792968, 0.019429407119750976, 0.019400672912597658, 0.019479551315307618, 0.019552255630493166, 0.019283967971801756, 0.019216415405273437, 0.019967967987060548, 0.01963929557800293, 0.019543039321899415, 0.01993011283874512, 0.07285350036621094, 0.019086336135864256, 0.019083263397216797, 0.019111936569213867, 0.019337215423583985, 0.019299392700195313, 0.019156927108764647, 0.019108863830566408, 0.019182592391967773, 0.019107839584350587, 0.019148799896240236, 0.019185663223266602, 0.019147775650024415, 0.019150848388671874, 0.019140607833862306, 0.019156991958618166, 0.019804159164428712, 0.020758527755737305, 0.019825664520263672, 0.019070976257324217, 0.01919795227050781, 0.018947071075439453, 0.01920921516418457, 0.01922355270385742, 0.019139583587646485, 0.019160064697265625, 0.019166208267211913, 0.019126272201538085, 0.01926348876953125, 0.019743743896484374, 0.019358783721923827, 0.019277759552001953, 0.019212287902832033, 0.01923276710510254, 0.019294208526611328, 0.01908121681213379, 0.01945907211303711, 0.020368383407592772, 0.020304895401000975, 0.020364288330078126, 0.020291584014892578, 0.02040934371948242, 0.02032537651062012, 0.02021683120727539, 0.020247552871704103, 0.020221952438354493, 0.019969024658203126, 0.019164159774780275, 0.020377599716186523, 0.020360191345214843, 0.02026700782775879, 0.02031820869445801, 0.02003558349609375, 0.02001817512512207, 0.02004582405090332, 0.02020249557495117, 0.02008883285522461, 0.01991372871398926, 0.019971071243286134, 0.018980863571166993, 0.01902284812927246, 0.01902284812927246, 0.018976768493652343]",tokens/s,49.902411207579966,,,main,False,False -float32-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float32-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -18931,7 +18931,7 @@ ImportError: This modeling file requires the following packages that were not fo ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.0,,,MB,1135.742976,5172.10112,0.0,4531.945472,4478.990336,s,10,0.3975123519897461,0.03975123519897461,0.0002546493339729439,0.039652048110961916,0.03992862548828125,0.040197560501098635,0.04041270851135254,"[0.04046649551391602, 0.039794273376464843, 0.03960784149169922, 0.039637569427490235, 0.039667007446289065, 0.0395384635925293, 0.03963785552978515, 0.03962774276733398, 0.03966624069213867, 0.03986886215209961]",tokens/s,6440.051452957205,kWh,4.958956027407804e-07,2.717270876475679e-07,3.4272040434811473e-06,4.194826733869495e-06,tokens/kWh,61027550.418954775,MB,1135.742976,5172.10112,0.0,4531.945472,4492.86912,s,10,13.15693322753906,1.3156933227539063,0.012732326368063573,1.3180799560546874,1.3296379516601562,1.331017889404297,1.3321218395996095,"[1.3323978271484376, 1.3132144775390624, 1.3219439697265625, 1.31023486328125, 1.2851285400390624, 1.316025634765625, 1.329331298828125, 1.322101318359375, 1.32013427734375, 1.3064210205078124]",tokens/s,47.88349907266637,kWh,1.5338403623861138e-05,8.405267576681391e-06,4.508622365611892e-05,6.882989485666146e-05,tokens/kWh,915299.9598676965,,s,629,13.492060171127314,0.021450016170313703,0.004714803071535541,0.021008384704589843,0.021200077438354493,0.02137784385681152,0.0603588200378418,"[0.02043187141418457, 0.020913152694702147, 0.020732927322387695, 0.02106675148010254, 0.021152767181396484, 0.021196800231933592, 0.021191680908203125, 0.021094400405883788, 0.021115903854370118, 0.021109760284423826, 0.021183488845825195, 0.021596160888671875, 0.021082111358642578, 0.02109542465209961, 0.021053440093994142, 0.021124095916748048, 0.02110771179199219, 0.021151744842529296, 0.021168127059936523, 0.02110771179199219, 0.021214208602905273, 0.021329919815063478, 0.0212490234375, 0.021102655410766603, 0.021130176544189454, 0.02142207908630371, 0.021587968826293946, 0.021522432327270507, 0.021393407821655275, 0.021209087371826172, 0.02125312042236328, 0.021196800231933592, 0.02124595260620117, 0.02107596778869629, 0.021147647857666017, 0.02110771179199219, 0.021046272277832033, 0.02110771179199219, 0.021166080474853514, 0.021045248031616212, 0.020993024826049804, 0.02105855941772461, 0.021174272537231444, 0.021167104721069335, 0.02122137641906738, 0.021111808776855468, 0.021230592727661132, 0.021004287719726563, 0.021086208343505858, 0.02126950454711914, 0.021152767181396484, 0.021131263732910157, 0.020728832244873048, 0.021046272277832033, 0.02105958366394043, 0.02100223922729492, 0.021170175552368165, 0.02105958366394043, 0.02147225570678711, 0.02107084846496582, 0.02106470489501953, 0.020653055191040038, 0.06047948837280273, 0.020197376251220703, 
0.020176895141601564, 0.02024345588684082, 0.020132863998413086, 0.02026905632019043, 0.020198400497436524, 0.020126720428466797, 0.020121599197387697, 0.020187135696411132, 0.020164608001708984, 0.020231168746948244, 0.02021990394592285, 0.020208639144897463, 0.02167296028137207, 0.021166112899780272, 0.02107491111755371, 0.021005311965942384, 0.02107596778869629, 0.021152767181396484, 0.020932607650756836, 0.02092748832702637, 0.020957183837890626, 0.020913152694702147, 0.021032960891723632, 0.0208353271484375, 0.02068172836303711, 0.020720640182495118, 0.02074015998840332, 0.020693952560424805, 0.021008384704589843, 0.02110873603820801, 0.020988927841186524, 0.021021696090698243, 0.02105958366394043, 0.021012479782104493, 0.020993024826049804, 0.02104012870788574, 0.020988927841186524, 0.020956159591674805, 0.02101862335205078, 0.020943872451782225, 0.02142310333251953, 0.02090598487854004, 0.02106265640258789, 0.02105855941772461, 0.02109542465209961, 0.020950016021728517, 0.020999168395996092, 0.02102579116821289, 0.021195775985717775, 0.021112831115722656, 0.021000192642211913, 0.02104115104675293, 0.02122444725036621, 0.021137407302856445, 0.021137439727783203, 0.021055456161499023, 0.020993024826049804, 0.021014528274536134, 0.02091929626464844, 0.021000192642211913, 0.020961280822753905, 0.0604139518737793, 0.020188159942626953, 0.020197376251220703, 0.020792320251464845, 0.02129715156555176, 0.021787647247314454, 0.021170175552368165, 0.02109132766723633, 0.020982784271240236, 0.020940799713134766, 0.020962303161621093, 0.02089779281616211, 0.02066534423828125, 0.020855808258056642, 0.020972543716430665, 0.020757503509521484, 0.020767744064331056, 0.02086502456665039, 0.020926528930664063, 0.021031871795654297, 0.02103193664550781, 0.02104319953918457, 0.020995071411132812, 0.020978687286376953, 0.021174272537231444, 0.02106675148010254, 0.020946943283081054, 0.021098495483398438, 0.020993024826049804, 0.02104217529296875, 0.021120000839233398, 0.020996095657348633, 0.020942848205566408, 0.021012544631958008, 0.02105132865905762, 0.02106265640258789, 0.02101759910583496, 0.020983808517456053, 0.02099404716491699, 0.020993024826049804, 0.020925439834594727, 0.02108006477355957, 0.02100223922729492, 0.021003263473510742, 0.021005311965942384, 0.021004287719726563, 0.020978687286376953, 0.02101043128967285, 0.020972543716430665, 0.02110873603820801, 0.02109542465209961, 0.021115903854370118, 0.020942848205566408, 0.02105036735534668, 0.020945920944213867, 0.021009408950805664, 0.02104115104675293, 0.02105241584777832, 0.02106470489501953, 0.02104012870788574, 0.021008384704589843, 0.02102272033691406, 0.02104422378540039, 0.06033817672729492, 0.02025164794921875, 0.020212736129760742, 0.020356096267700196, 0.020153343200683595, 0.02023628807067871, 0.02053222465515137, 0.020296703338623046, 0.020189184188842774, 0.02021683120727539, 0.020215808868408205, 0.020184064865112306, 0.020147199630737304, 0.02023423957824707, 0.02024345588684082, 0.02023526382446289, 0.02022809600830078, 0.02024038314819336, 0.019929088592529298, 0.020762624740600585, 0.02106982421875, 0.020896768569946288, 0.021038080215454103, 0.02110361671447754, 0.02103910446166992, 0.020974592208862306, 0.020915199279785156, 0.020970495223999023, 0.020702207565307617, 0.020757503509521484, 0.02088140869140625, 0.02269491195678711, 0.02131865692138672, 0.02108518409729004, 0.02098182487487793, 0.021071807861328125, 0.020999200820922853, 0.02106262397766113, 0.02103910446166992, 0.02099404716491699, 0.020907007217407226, 
0.021007360458374022, 0.02103398323059082, 0.02089779281616211, 0.02101043128967285, 0.021015552520751952, 0.02103606414794922, 0.020960224151611327, 0.02100223922729492, 0.021001216888427734, 0.020976640701293944, 0.020923391342163086, 0.02105753517150879, 0.02090598487854004, 0.02093062400817871, 0.021104576110839844, 0.021372928619384765, 0.021001216888427734, 0.020979711532592774, 0.02089472007751465, 0.02106060791015625, 0.020970495223999023, 0.021053440093994142, 0.060386302947998044, 0.020133888244628906, 0.020171775817871093, 0.02023321533203125, 0.020175872802734376, 0.020188159942626953, 0.02018611145019531, 0.02026393508911133, 0.020199424743652345, 0.020222976684570314, 0.020264959335327147, 0.020221952438354493, 0.020205568313598633, 0.020134912490844727, 0.020239360809326173, 0.020256767272949217, 0.020212736129760742, 0.020358144760131838, 0.020221952438354493, 0.02023219108581543, 0.020176895141601564, 0.0202106876373291, 0.02025164794921875, 0.020184064865112306, 0.02022707176208496, 0.02025267219543457, 0.020304895401000975, 0.020404224395751954, 0.02028339195251465, 0.020185087203979494, 0.01999564743041992, 0.020125696182250977, 0.020174848556518556, 0.020197376251220703, 0.02023321533203125, 0.020246528625488282, 0.020230144500732423, 0.020192256927490236, 0.020249599456787108, 0.020625408172607423, 0.020185087203979494, 0.02027212715148926, 0.020214784622192384, 0.020213760375976563, 0.020321279525756835, 0.020288511276245116, 0.020180992126464844, 0.02021683120727539, 0.020166656494140626, 0.020561920166015626, 0.02112928009033203, 0.021022655487060546, 0.021016576766967773, 0.020997119903564454, 0.021011455535888672, 0.021097471237182617, 0.020982784271240236, 0.02103910446166992, 0.020960256576538085, 0.021081087112426757, 0.021004287719726563, 0.020907007217407226, 0.020695039749145508, 0.0602470703125, 0.02016764831542969, 0.020189184188842774, 0.020199424743652345, 0.020153343200683595, 0.020168703079223634, 0.020146175384521483, 0.020158464431762696, 0.020213760375976563, 0.020247552871704103, 0.020166656494140626, 0.020140031814575195, 0.020988927841186524, 0.020925439834594727, 0.02106675148010254, 0.021545984268188476, 0.021132287979125978, 0.02087833595275879, 0.020917247772216797, 0.020935680389404295, 0.020982784271240236, 0.020983808517456053, 0.020930559158325195, 0.02142207908630371, 0.021004287719726563, 0.020965375900268556, 0.020980735778808594, 0.020921344757080077, 0.02108723258972168, 0.021218303680419923, 0.022168575286865236, 0.021304319381713867, 0.021300224304199217, 0.02105446434020996, 0.021155839920043946, 0.021098495483398438, 0.02104217529296875, 0.021008384704589843, 0.020966400146484376, 0.020970495223999023, 0.020999168395996092, 0.0206059513092041, 0.020985919952392577, 0.02094483184814453, 0.02107494354248047, 0.02102579116821289, 0.020948991775512696, 0.02067865562438965, 0.02106982421875, 0.02106368064880371, 0.020971519470214844, 0.021008384704589843, 0.02123263931274414, 0.02105958366394043, 0.021045248031616212, 0.021134336471557616, 0.021150720596313476, 0.020955135345458984, 0.021021696090698243, 0.020816896438598635, 0.02068172836303711, 0.020958208084106447, 0.021007360458374022, 0.06107648086547852, 0.020995071411132812, 0.020976640701293944, 0.020980735778808594, 0.021007360458374022, 0.021505023956298826, 0.021173248291015623, 0.02103398323059082, 0.020961280822753905, 0.020959232330322267, 0.020993024826049804, 0.02129100799560547, 0.021020671844482423, 0.020995136260986327, 0.02105952072143555, 0.021021696090698243, 
0.020983808517456053, 0.021109760284423826, 0.020961280822753905, 0.021016576766967773, 0.021021696090698243, 0.02110873603820801, 0.021132287979125978, 0.020989952087402345, 0.02106368064880371, 0.02104319953918457, 0.020996095657348633, 0.02106982421875, 0.020965375900268556, 0.02103910446166992, 0.02150809669494629, 0.02129817581176758, 0.021082111358642578, 0.021120000839233398, 0.021171199798583985, 0.02104832077026367, 0.021081087112426757, 0.020967424392700194, 0.021164031982421876, 0.021169151306152344, 0.02168217658996582, 0.02204979133605957, 0.021548032760620117, 0.021113855361938477, 0.02118147277832031, 0.021089248657226563, 0.02107187271118164, 0.02100223922729492, 0.021008384704589843, 0.020983808517456053, 0.02104012870788574, 0.020989952087402345, 0.02103603172302246, 0.021381120681762695, 0.020976640701293944, 0.020907007217407226, 0.020690944671630858, 0.02091929626464844, 0.021014528274536134, 0.020990976333618162, 0.0210882568359375, 0.020990976333618162, 0.021098495483398438, 0.06039449691772461, 0.02021990394592285, 0.020229120254516602, 0.0202608642578125, 0.020198400497436524, 0.02087424087524414, 0.02103091239929199, 0.021014528274536134, 0.021106687545776368, 0.021053440093994142, 0.021008384704589843, 0.02102681541442871, 0.02100223922729492, 0.021198848724365234, 0.02107187271118164, 0.021194751739501954, 0.021172224044799806, 0.020951072692871095, 0.02091209602355957, 0.02109651184082031, 0.020949951171875, 0.02088755226135254, 0.020808704376220705, 0.020801536560058592, 0.02102579116821289, 0.021156864166259767, 0.021114879608154297, 0.02103398323059082, 0.021098495483398438, 0.021029888153076173, 0.021140480041503908, 0.02107494354248047, 0.02103500747680664, 0.02121625518798828, 0.021126144409179686, 0.021073919296264648, 0.02147532844543457, 0.021143552780151367, 0.02105548858642578, 0.021012479782104493, 0.02104012870788574, 0.021038080215454103, 0.021163007736206055, 0.021124095916748048, 0.021169151306152344, 0.021133312225341795, 0.02124595260620117, 0.021098495483398438, 0.021089279174804687, 0.021173248291015623, 0.020959232330322267, 0.020962303161621093, 0.02131865692138672, 0.0209039363861084, 0.02082611274719238, 0.020962303161621093, 0.020997119903564454, 0.021165056228637694, 0.02126848030090332, 0.02105548858642578, 0.021114879608154297, 0.02043391990661621, 0.02026700782775879, 0.06036684799194336, 0.020290592193603515, 0.020193248748779296, 0.02024448013305664, 0.02021785545349121, 0.020340736389160157, 0.02042572784423828, 0.02036735916137695, 0.02110771179199219, 0.020945920944213867, 0.021104639053344726, 0.02100223922729492, 0.02104729652404785, 0.021204992294311522, 0.020979711532592774, 0.02106572723388672, 0.02101862335205078, 0.020963327407836914, 0.021321727752685548, 0.02103603172302246, 0.021004287719726563, 0.02101862335205078, 0.02106675148010254, 0.020986879348754883, 0.020948991775512696, 0.02109337615966797, 0.020977664947509765, 0.021009408950805664, 0.02108518409729004, 0.02109951972961426, 0.021004287719726563, 0.021184511184692383, 0.021045248031616212, 0.02104934310913086, 0.021029888153076173, 0.02109337615966797, 0.02104422378540039, 0.021106687545776368, 0.021090303421020508, 0.02110771179199219, 0.020957183837890626, 0.021020671844482423, 0.02105036735534668, 0.021009408950805664, 0.021111808776855468, 0.02106368064880371, 0.021078016281127928, 0.021146623611450196, 0.02126233673095703, 0.021008384704589843, 0.02109443283081055, 0.021007328033447265, 0.021094400405883788, 0.02101862335205078, 0.020711423873901368, 
0.020786176681518553, 0.020919328689575196, 0.020999168395996092, 0.0210861759185791, 0.021045248031616212, 0.021036096572875976, 0.02097657585144043, 0.0210513916015625, 0.06038323211669922, 0.020132863998413086, 0.02003865623474121, 0.020231168746948244, 0.020247552871704103, 0.02026700782775879, 0.02028339195251465, 0.02024345588684082, 0.020335615158081053, 0.02025881576538086, 0.02020966339111328, 0.020247552871704103, 0.020304895401000975, 0.020338687896728515, 0.02026393508911133, 0.02025372886657715, 0.020227039337158203, 0.020325439453125, 0.02019014358520508, 0.020358144760131838, 0.020377599716186523, 0.020360191345214843, 0.020274175643920898, 0.020246528625488282, 0.020170751571655272, 0.020137983322143553, 0.020025344848632814, 0.02031001663208008, 0.020520959854125977, 0.021547008514404296, 0.02227302360534668, 0.021370880126953123, 0.021106687545776368, 0.021145599365234375, 0.02130534362792969, 0.021159936904907226, 0.021429248809814453, 0.0211015682220459, 0.02109132766723633, 0.020986879348754883, 0.02105753517150879, 0.020958208084106447, 0.02103603172302246, 0.020995071411132812, 0.021061632156372072, 0.02103603172302246, 0.020990976333618162, 0.02101043128967285, 0.020993024826049804, 0.020731903076171874, 0.02128486442565918, 0.020934656143188478, 0.02103193664550781, 0.020953088760375976, 0.021168127059936523, 0.02122956848144531, 0.02109542465209961, 0.02106572723388672, 0.021311487197875977, 0.02108518409729004, 0.021142559051513674, 0.021019647598266602, 0.021042144775390625]",tokens/s,46.620011475048464,,,,, -float32-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,v,v,cuda,0,42,,,True,True,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float32-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,v,v,cuda,0,42,,,True,True,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -18970,7 +18970,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-664d4678-7bc7941b74501cb74d852df7;7517daa8-7b82-4dc6-acf7-98eb8a63183b) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe9d3-28f7ff0d0431ed3800a1a6fd;d4dd4116-f784-4f94-a808-ca2d12ea91bc) Repository Not Found for url: https://huggingface.co/v/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -19067,7 +19067,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e9514-5be08225287ca4077f40fbb2;f454385e-2890-4d11-92cb-a37259202efc) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe6d2-0afa1c7940316cb57d42f7fa;a3659ed9-47ad-448d-ac2f-83c8dedaea46) Repository Not Found for url: https://huggingface.co/M/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -19137,7 +19137,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e95e9-01655f42558fe88d5aeeffdd;6ac3ab8f-e7f4-4981-8658-2ccc7922cae5) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe7ab-78c580d910fd04fe56fa53e2;96033c1d-7b63-48b4-8835-2ae470119444) Repository Not Found for url: https://huggingface.co/r/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -19205,7 +19205,7 @@ ChildProcessError: Traceback (most recent call last): torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 172.00 MiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float32-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float32-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -19269,7 +19269,7 @@ ChildProcessError: Traceback (most recent call last): torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 560.00 MiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float32-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float32-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -19298,7 +19298,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664d4874-2bc2ab3d5c11dfdc3b28ffd8;e5b17931-1e5c-4d3e-9e34-384fe8599563) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664febeb-39333fb002d1698c4c309658;aab86821-8c87-4fc0-89be-50559381b111) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json. @@ -19383,7 +19383,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e9622-07cfad1704ec621d2230176c;68bedc23-55be-4d1d-85b7-d4215bb3fa90) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe7e0-003d852f5c76eb467717261b;b22176c9-b4eb-48a0-aabb-9a7fd8d02c35) Repository Not Found for url: https://huggingface.co/a/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -19415,7 +19415,7 @@ If this is a private repository, make sure to pass a token having permission to ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.0,,,MB,1138.663424,12505.841664,0.0,11865.686016,11620.236288,s,10,0.8953843536376954,0.08953843536376953,0.0005007823305073723,0.0896009292602539,0.0900410270690918,0.09027113914489747,0.09045522880554199,"[0.09050125122070313, 0.08883433532714843, 0.08888851165771484, 0.0890060806274414, 0.0893661117553711, 0.08958678436279296, 0.08961507415771484, 0.0899898910522461, 0.08976191711425781, 0.08983439636230468]",tokens/s,2859.1073650097173,kWh,1.0913446682308792e-06,5.980059409871459e-07,7.663048943889849e-06,9.352399553107876e-06,tokens/kWh,27372654.3168196,MB,1138.663424,12505.841664,0.0,11865.686016,11816.868352,s,10,18.09325134277344,1.8093251342773438,0.0034958578847460696,1.8082963256835938,1.8097242553710937,1.814743963623047,1.8187597302246095,"[1.819763671875, 1.8083280029296875, 1.8080596923828125, 1.8074124755859375, 1.8083851318359374, 1.80781689453125, 1.8080611572265626, 1.8085509033203124, 1.8086087646484375, 1.8082646484375]",tokens/s,34.81961246570678,kWh,2.12664901080117e-05,1.1654451651398178e-05,8.844563905411209e-05,0.00012136658081352198,tokens/kWh,519088.529788926,,s,629,18.86789333915711,0.02999665077767425,0.010752799533975921,0.02872217559814453,0.028827853012084962,0.028974860382080077,0.11874865234375,"[0.02944819259643555, 0.02935603141784668, 0.029410303115844725, 0.02919628715515137, 0.02934988784790039, 0.02930892753601074, 0.02930790328979492, 0.02921062469482422, 0.029287424087524414, 0.029328384399414063, 0.02928748893737793, 0.029151168823242188, 0.02902739143371582, 0.028851167678833008, 0.029152223587036133, 0.029071359634399413, 0.02876518440246582, 0.02855833625793457, 0.02858700752258301, 0.028556287765502928, 0.028579839706420897, 0.02857676887512207, 0.028602367401123048, 0.02857676887512207, 0.028753919601440428, 0.028618751525878908, 0.02857676887512207, 0.028588064193725587, 0.02862179183959961, 0.028621824264526367, 0.028589056015014647, 0.028692480087280273, 0.028708864212036132, 0.028697599411010744, 0.028770303726196288, 0.028721151351928712, 0.02872831916809082, 0.02871500778198242, 0.028711936950683595, 0.028735488891601563, 0.02879078483581543, 0.028815359115600587, 0.028875776290893555, 0.028743711471557618, 0.028741600036621094, 0.028795904159545898, 0.0288143367767334, 0.028826623916625976, 0.02877644729614258, 0.02876825523376465, 0.028804096221923828, 0.02875187110900879, 0.028759040832519532, 0.028787712097167968, 0.02892902374267578, 0.028932096481323243, 0.028799999237060548, 0.028837888717651368, 0.028778495788574218, 0.028812288284301758, 0.028901376724243165, 0.028793855667114256, 0.11871129608154297, 0.028834815979003905, 0.028648448944091798, 0.02857779121398926, 0.028556287765502928, 0.0285665283203125, 
0.028646400451660156, 0.028598272323608398, 0.02855219268798828, 0.028589056015014647, 0.028651519775390624, 0.028669952392578125, 0.02858700752258301, 0.028612607955932616, 0.02854604721069336, 0.02854297637939453, 0.02853376007080078, 0.02856345558166504, 0.028531711578369142, 0.02855526351928711, 0.028572671890258788, 0.028579839706420897, 0.02857472038269043, 0.028614656448364258, 0.02855936050415039, 0.02857062339782715, 0.028589056015014647, 0.028606464385986328, 0.028641279220581056, 0.02865974426269531, 0.02859107208251953, 0.028593151092529297, 0.02879897689819336, 0.028870655059814454, 0.028721151351928712, 0.02873958396911621, 0.028710912704467774, 0.02872319984436035, 0.028725248336791992, 0.02878156852722168, 0.028705791473388673, 0.02873139190673828, 0.02874982452392578, 0.02889727973937988, 0.02899046325683594, 0.028831743240356447, 0.028803071975708007, 0.028750848770141602, 0.02875801658630371, 0.028794879913330077, 0.02874367904663086, 0.0287774715423584, 0.02876108741760254, 0.0287774715423584, 0.02879283142089844, 0.028753919601440428, 0.0287324161529541, 0.02875699234008789, 0.02873244857788086, 0.028798944473266603, 0.02875596809387207, 0.02877337646484375, 0.02874060821533203, 0.11872358703613281, 0.02882252883911133, 0.028638208389282226, 0.028596223831176756, 0.02859110450744629, 0.02858188819885254, 0.02855731201171875, 0.02855526351928711, 0.028556287765502928, 0.028564479827880858, 0.02856959915161133, 0.0286167049407959, 0.028693504333496093, 0.028589056015014647, 0.028590080261230468, 0.028610559463500978, 0.02854400062561035, 0.028564479827880858, 0.02857779121398926, 0.028572671890258788, 0.028553216934204102, 0.02856755256652832, 0.028564479827880858, 0.028590112686157226, 0.028569568634033204, 0.02860032081604004, 0.028625919342041017, 0.028631040573120117, 0.02856755256652832, 0.02856345558166504, 0.02857574462890625, 0.028627967834472655, 0.028795904159545898, 0.028864511489868162, 0.028886016845703126, 0.028795936584472655, 0.028867551803588867, 0.02873958396911621, 0.02871500778198242, 0.028724288940429686, 0.028708799362182617, 0.02874060821533203, 0.028759040832519532, 0.028712959289550782, 0.0287324161529541, 0.02874675178527832, 0.028753919601440428, 0.028806175231933594, 0.028761056900024413, 0.028750848770141602, 0.028753919601440428, 0.02876620864868164, 0.02876825523376465, 0.028787712097167968, 0.028763200759887694, 0.028756927490234375, 0.02875395202636719, 0.02877641677856445, 0.02879283142089844, 0.02875187110900879, 0.028759040832519532, 0.02876518440246582, 0.02876108741760254, 0.1187583999633789, 0.028806175231933594, 0.028635103225708006, 0.02860748863220215, 0.028536832809448243, 0.028542015075683595, 0.028544960021972657, 0.02854617691040039, 0.02855615997314453, 0.02855731201171875, 0.02858393669128418, 0.02855014419555664, 0.02856959915161133, 0.0285296630859375, 0.028522592544555664, 0.028571552276611328, 0.028807167053222657, 0.02877132797241211, 0.028538976669311523, 0.02853264045715332, 0.02852454376220703, 0.028550239562988283, 0.02854390335083008, 0.02857676887512207, 0.028564672470092773, 0.02853356742858887, 0.02854400062561035, 0.02855423927307129, 0.02856550407409668, 0.02859212875366211, 0.028603456497192384, 0.028627904891967773, 0.02869862365722656, 0.028708864212036132, 0.028705791473388673, 0.02874470329284668, 0.02882252883911133, 0.0287457275390625, 0.028719104766845704, 0.0287324161529541, 0.028718080520629883, 0.02875289535522461, 0.028762111663818358, 0.02873139190673828, 0.02876006317138672, 
0.028733440399169922, 0.02874982452392578, 0.02880102348327637, 0.02875289535522461, 0.02876825523376465, 0.02874880027770996, 0.0287774715423584, 0.02879795265197754, 0.028794879913330077, 0.02877644729614258, 0.028804096221923828, 0.02874777603149414, 0.028831743240356447, 0.028770303726196288, 0.02876620864868164, 0.028762111663818358, 0.028799999237060548, 0.028801055908203126, 0.11917820739746093, 0.029079551696777343, 0.028626943588256838, 0.02856755256652832, 0.0285347843170166, 0.02855219268798828, 0.02858291244506836, 0.0285665283203125, 0.02859519958496094, 0.0285614070892334, 0.02858598327636719, 0.028630016326904296, 0.02857369613647461, 0.02858291244506836, 0.02858188819885254, 0.02858700752258301, 0.02854297637939453, 0.0285665283203125, 0.02853785514831543, 0.02855219268798828, 0.02857062339782715, 0.028642303466796876, 0.028625919342041017, 0.028627967834472655, 0.0285296630859375, 0.028553216934204102, 0.02855116844177246, 0.02857881546020508, 0.028611583709716795, 0.0286167049407959, 0.02857574462890625, 0.028564479827880858, 0.02873855972290039, 0.02875596809387207, 0.028969024658203123, 0.028978111267089844, 0.02875494384765625, 0.02876620864868164, 0.028711936950683595, 0.02876108741760254, 0.02878156852722168, 0.02873958396911621, 0.02876313591003418, 0.02872422409057617, 0.02876620864868164, 0.028787712097167968, 0.028753919601440428, 0.028750848770141602, 0.028750848770141602, 0.02878463935852051, 0.028838911056518556, 0.02875494384765625, 0.028762111663818358, 0.02875596809387207, 0.02877132797241211, 0.028815359115600587, 0.02877440071105957, 0.02879283142089844, 0.02876620864868164, 0.02878156852722168, 0.028820480346679687, 0.02875494384765625, 0.02875801658630371, 0.1193359375, 0.028793855667114256, 0.028620800018310546, 0.028588031768798827, 0.02855014419555664, 0.028596223831176756, 0.02854297637939453, 0.02854604721069336, 0.028543071746826174, 0.02854185676574707, 0.02857369613647461, 0.02858598327636719, 0.02876006317138672, 0.02878361511230469, 0.0285665283203125, 0.028621824264526367, 0.02859929656982422, 0.02862387275695801, 0.028651519775390624, 0.028609535217285157, 0.02855936050415039, 0.02855833625793457, 0.02875494384765625, 0.02857881546020508, 0.02860748863220215, 0.028638208389282226, 0.028553216934204102, 0.0285849609375, 0.02857676887512207, 0.028580863952636718, 0.02858700752258301, 0.028612607955932616, 0.02872217559814453, 0.02871500778198242, 0.028701696395874023, 0.028708864212036132, 0.0287324161529541, 0.02876416015625, 0.028726303100585937, 0.028717023849487305, 0.028711936950683595, 0.028734464645385743, 0.02875699234008789, 0.02874470329284668, 0.028729343414306642, 0.02874163246154785, 0.02873855972290039, 0.02877235221862793, 0.028778495788574218, 0.02875699234008789, 0.02874060821533203, 0.02874880027770996, 0.02876736068725586, 0.02878553581237793, 0.028727296829223634, 0.02874777603149414, 0.028759040832519532, 0.02877337646484375, 0.02877542304992676, 0.02880102348327637, 0.02878976058959961, 0.02877145576477051, 0.02875276756286621, 0.1193493423461914, 0.028752799987792968, 0.028579839706420897, 0.02858393669128418, 0.02853785514831543, 0.028536832809448243, 0.02856038475036621, 0.028636159896850585, 0.02857676887512207, 0.028593151092529297, 0.02857164764404297, 0.02854707145690918, 0.028536832809448243, 0.028553216934204102, 0.0285347843170166, 0.02853273582458496, 0.02856857681274414, 0.028590080261230468, 0.02855219268798828, 0.02855833625793457, 0.02856038475036621, 0.028577823638916016, 0.028570592880249022, 
0.028627967834472655, 0.02860748863220215, 0.02857574462890625, 0.02855423927307129, 0.028593151092529297, 0.028628992080688476, 0.028646400451660156, 0.028612607955932616, 0.02861568069458008, 0.028702720642089844, 0.02872012710571289, 0.028703744888305665, 0.02872319984436035, 0.02879692840576172, 0.028725248336791992, 0.02872319984436035, 0.02872319984436035, 0.028735488891601563, 0.02877337646484375, 0.028734464645385743, 0.02876620864868164, 0.02873651123046875, 0.028729343414306642, 0.028785663604736327, 0.028838911056518556, 0.028767232894897462, 0.028770303726196288, 0.028803071975708007, 0.028835840225219726, 0.028932096481323243, 0.02875494384765625, 0.02878054428100586, 0.02878156852722168, 0.028793920516967775, 0.028791744232177733, 0.02877542304992676, 0.02876416015625, 0.02878156852722168, 0.02895564842224121, 0.02905708885192871, 0.11985504150390625, 0.0287825927734375, 0.028598272323608398, 0.02856755256652832, 0.02854911994934082, 0.028598272323608398, 0.028599327087402343, 0.028550111770629882, 0.02860032081604004, 0.02855833625793457, 0.028564479827880858, 0.02857676887512207, 0.028588031768798827, 0.02857676887512207, 0.028539903640747072, 0.02855219268798828, 0.02857369613647461, 0.028545024871826172, 0.02856857681274414, 0.02859110450744629, 0.028588031768798827, 0.02857472038269043, 0.02854707145690918, 0.02857472038269043, 0.0285665283203125, 0.02858198356628418, 0.028593055725097655, 0.028618751525878908, 0.028589056015014647, 0.02859212875366211, 0.02857676887512207, 0.028632064819335938, 0.02875596809387207, 0.02876518440246582, 0.028704767227172853, 0.028725248336791992, 0.028713983535766603, 0.02872217559814453, 0.02874675178527832, 0.028735488891601563, 0.02872217559814453, 0.02873139190673828, 0.028729343414306642, 0.02874163246154785, 0.028815359115600587, 0.028821504592895508, 0.028778560638427736, 0.02882348823547363, 0.028827648162841796, 0.02880512046813965, 0.02905606460571289, 0.029043647766113283, 0.02896895980834961, 0.028851200103759765, 0.02877235221862793, 0.028799999237060548, 0.02877440071105957, 0.028785663604736327, 0.028821504592895508, 0.02876313591003418, 0.028750848770141602, 0.02877952003479004, 0.02879897689819336, 0.11958271789550781, 0.028827648162841796, 0.0286167049407959, 0.02855219268798828, 0.02855014419555664, 0.028553216934204102, 0.02855116844177246, 0.02856755256652832, 0.02859929656982422, 0.02859110450744629, 0.02855731201171875, 0.02856755256652832, 0.02858291244506836, 0.02857062339782715, 0.0285614070892334, 0.02858598327636719, 0.02856038475036621, 0.028589056015014647, 0.02855014419555664, 0.0285614070892334, 0.02855833625793457, 0.028588064193725587, 0.02859823989868164, 0.02860032081604004, 0.02856038475036621, 0.02856755256652832, 0.028621824264526367, 0.028589056015014647, 0.028602367401123048, 0.028618751525878908, 0.02860851287841797, 0.02859929656982422, 0.028711936950683595, 0.028708864212036132, 0.02894540786743164, 0.02932326316833496, 0.02896998405456543, 0.02873651123046875, 0.02873139190673828, 0.028759040832519532, 0.028721151351928712, 0.02872831916809082, 0.028726272583007813, 0.028729343414306642, 0.02873958396911621, 0.028762111663818358, 0.02878156852722168, 0.028762111663818358, 0.02876108741760254, 0.0287825927734375, 0.028804096221923828, 0.02877132797241211, 0.0287825927734375, 0.028802047729492186, 0.028839935302734376, 0.028853248596191407, 0.02876927947998047, 0.02876006317138672, 0.028759040832519532, 0.028785663604736327, 0.02879692840576172, 0.02876108741760254, 0.02875699234008789, 
0.11962470245361329, 0.028828672409057617, 0.028604415893554686, 0.028593151092529297, 0.028589056015014647, 0.02855014419555664, 0.02855731201171875, 0.02855731201171875, 0.028606464385986328, 0.02855423927307129, 0.0285665283203125, 0.028609535217285157, 0.02855731201171875, 0.02852556800842285, 0.02853887939453125, 0.02855014419555664, 0.028575775146484374, 0.0285644474029541, 0.02856755256652832, 0.02857779121398926, 0.02855014419555664, 0.02857779121398926, 0.02857779121398926, 0.02857676887512207, 0.028635135650634767, 0.028580863952636718, 0.02857676887512207, 0.0285614070892334, 0.028598272323608398, 0.028642303466796876, 0.028596223831176756, 0.02860748863220215, 0.02874163246154785, 0.028705791473388673, 0.028701696395874023, 0.02876927947998047, 0.028721151351928712, 0.02875494384765625, 0.02876825523376465, 0.02877644729614258, 0.02873859214782715, 0.02878460884094238, 0.028818431854248046, 0.02876108741760254, 0.0287324161529541, 0.02876416015625, 0.0287457275390625, 0.02877952003479004, 0.02877132797241211, 0.02882252883911133, 0.028762111663818358, 0.028865535736083983, 0.029043712615966798, 0.028887039184570314, 0.02877337646484375, 0.02875289535522461, 0.028753919601440428, 0.028835840225219726, 0.028838911056518556, 0.02879078483581543, 0.02876416015625, 0.028767232894897462, 0.028824640274047852]",tokens/s,33.33705510697463,,,,, -float32-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float32-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -19471,7 +19471,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664e8fc9-44b2f54f59661d1230036870;e7af452d-24f8-460b-9565-bbe622bc2161) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664fe163-31e9c5ec4e7510a97c32388d;082c24b8-b5e7-4c15-b411-5d94d2b8f5b7) 403 Forbidden: 
Authorization error.. Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json. @@ -19557,7 +19557,7 @@ Traceback (most recent call last): OSError: Incorrect path_or_model_id: '-'. Please provide either the path to a local folder or the repo_id of a model on the Hub. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float32-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float32-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -19623,7 +19623,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e95b4-35ff096e007d2a8037ddbc93;2195434c-4710-45c5-acd7-f716b6b7d3e7) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe776-4c53dbf178b35ee76080930c;bf3ba195-7927-4e5c-8999-635209d28fad) Repository Not Found for url: https://huggingface.co/t/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -19757,7 +19757,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e906a-5172bd3c73b9f8762a5a238d;ea8b3ef2-fc77-42e3-b240-7a61698d0e24) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe20d-2fdf956a56b4c8631699c164;99155889-af2b-4152-abf1-4e1453b1b049) Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -19879,7 +19879,7 @@ ChildProcessError: Traceback (most recent call last): ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float32-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,B,B,cuda,0,42,,,True,True,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float32-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,B,B,cuda,0,42,,,True,True,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -19918,7 +19918,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d460f-7c1f29c51648b3f6481f83f6;494ce54f-c514-4bb4-bde1-a385d8a83fef) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe964-293f4a0c6c5424136ac7e175;44fc3406-e381-4fa3-be3c-6255b4d9859a) Repository Not Found for url: https://huggingface.co/B/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -19949,7 +19949,7 @@ OSError: B is not a local folder and is not a valid model identifier listed on ' If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float32-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float32-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -20054,7 +20054,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e9366-7e994b5e0b4aa6ba14e2bd18;d58c5e15-811d-49c5-91fe-7802a9f2feb2) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe519-30491e9051a89faa4c584713;f079fa33-21d8-4bba-acb8-0d6f39af4d2e) Repository Not Found for url: https://huggingface.co/s/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -20085,7 +20085,7 @@ OSError: s is not a local folder and is not a valid model identifier listed on ' If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float32-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float32-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -20294,7 +20294,7 @@ ChildProcessError: Traceback (most recent call last): torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 172.00 MiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float32-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float32-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -20485,7 +20485,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e92f9-6260c2dd42aeba9c4fa723fb;a7cec0d2-862f-438d-8445-e9cba318a7a1) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe4af-079ad9223639200b18286b1b;e69e702f-5c05-4a4a-9f18-e97bb92ac311) Repository Not Found for url: https://huggingface.co/m/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -20589,7 +20589,7 @@ Cannot access gated repo for url https://huggingface.co/mistralai/Mixtral-8x22B- Access to model mistralai/Mixtral-8x22B-v0.1 is restricted and you are not in the authorized list. Visit https://huggingface.co/mistralai/Mixtral-8x22B-v0.1 to ask for access. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float32-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,1,1,cuda,0,42,,,True,True,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float32-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,1,1,cuda,0,42,,,True,True,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -20628,7 +20628,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d4718-0a1d96260d5406ef322c76f0;eaed83da-f14e-4a0f-ab9b-2fd1a885580d) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fea7a-0bb2244f7a0ba3e778963583;393e7ee0-f8de-4174-a1f6-084e76784268) Repository Not Found for url: https://huggingface.co/1/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -20659,7 +20659,7 @@ OSError: 1 is not a local folder and is not a valid model identifier listed on ' If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -float32-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,0,0,cuda,0,42,,,True,True,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +float32-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,0,0,cuda,0,42,,,True,True,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -20698,7 +20698,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d46af-762948da2f31d5d729ace71c;19a59b86-a054-4d53-b25b-77cea4ec6df5) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fea09-6d61d5074f65be8a36ff272c;3633b163-9731-4f7d-b09b-0ad4edb02912) Repository Not Found for url: https://huggingface.co/0/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -20756,7 +20756,7 @@ ChildProcessError: Traceback (most recent call last): ValueError: GPTNeoXForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,.,.,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,.,.,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -20824,7 +20824,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e96d5-3b8ae2315f3363367179ebe2;99e3dd2d-9913-442f-95c4-0f27c26b732a) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe893-4a14e3fb069b40920d0649a1;a41137f2-793a-4169-91b9-ddc013f87414) Repository Not Found for url: https://huggingface.co/8/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -20855,7 +20855,7 @@ OSError: 8 is not a local folder and is not a valid model identifier listed on ' If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,2,2,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,2,2,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -20894,7 +20894,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d45e6-7ba3c2eb385283e34319c1a4;0ee0ce1a-3512-46f6-8704-caf43bc4ff0f) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe93a-5c885d736a536cc46e3cb324;4efddf5d-7a7a-43d8-b104-2d8331081653) Repository Not Found for url: https://huggingface.co/2/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -21095,7 +21095,7 @@ ChildProcessError: Traceback (most recent call last): TypeError: DeciCoderAttention.forward() got an unexpected keyword argument 'cache_position' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -21151,7 +21151,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664e9042-7191ce831c5211501fad509a;a6a69636-0317-4483-80b1-d6adbe1547dc) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664fe1e1-1da1a19d4865c8ad7041a650;2bddd13a-e42f-4c3d-967b-0deca1dcfa00) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/google/recurrentgemma-2b/resolve/main/config.json. @@ -21255,7 +21255,7 @@ ChildProcessError: Traceback (most recent call last): ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -21348,7 +21348,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e958c-21faadbc200ab2263b5598b3;f04490a4-b597-46c0-8942-873036817358) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe8cc-280e59395f74908f6d65bccd;ec833190-8623-4549-9e50-e798854a49b9) Repository Not Found for url: https://huggingface.co/x/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -21408,7 +21408,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664e900d-5321286676bdf4f025a9c23b;7aca8343-e092-4e2d-bd70-9325a9c6d538) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664fe1a9-7825bc250a227a3e6d32d647;368900ee-f689-46da-8d30-eda02a46757f) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/google/gemma-7b/resolve/main/config.json. 
@@ -21861,7 +21861,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664e8e00-19c629e878ff821b34fac1e6;8a5e1730-b680-4073-b28a-3077278277a7) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664fdf93-339196bb5b5a783d031701db;64417a2b-518d-4762-9be5-a93b076baa47) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/databricks/dbrx-base/resolve/main/config.json. @@ -21946,7 +21946,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e966c-33ffa1dc23ce3c70113dd72b;2034b3bf-6366-4aa4-a10e-0408be2a68c6) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe825-6ddf283223d9070b53fa6898;e25a6a1c-8d94-4610-8a65-f5ea885e335f) Repository Not Found for url: https://huggingface.co/l/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -21978,7 +21978,7 @@ If this is a private repository, make sure to pass a token having permission to ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,MB,1235.922944,14529.593344,0.0,13883.14624,13705.186304,s,10,0.9381064758300781,0.0938106475830078,0.0002782777167609782,0.0937690544128418,0.09394051055908202,0.09426363220214844,0.09452212951660156,"[0.09458675384521484, 0.093621826171875, 0.09378880310058593, 0.09377235412597656, 0.09377174377441407, 0.09347682952880859, 0.09386870574951171, 0.09369475555419922, 0.09376636505126953, 0.09375833892822266]",tokens/s,2728.901319794002,kWh,1.1091462254276654e-06,6.077594067647053e-07,5.774441275831742e-06,7.4913469080241126e-06,tokens/kWh,34172760.004718766,MB,1235.922944,14529.593344,0.0,13883.14624,13856.22528,s,10,22.542584716796878,2.254258471679688,0.003272167817286955,2.2532373046875,2.2557274169921877,2.259641296386719,2.262772399902344,"[2.26355517578125, 2.25456201171875, 2.253303466796875, 2.254278564453125, 2.252525146484375, 2.253171142578125, 2.25120556640625, 2.254857666015625, 2.25266845703125, 2.25245751953125]",tokens/s,27.947105796194517,kWh,2.6553851566736822e-05,1.455239421363509e-05,0.0001132887095301677,0.00015439495531053963,tokens/kWh,408044.42006078525,,s,629,23.349358585357674,0.037121396797071014,0.011210628935323555,0.03575497436523437,0.03586355209350586,0.035984383392333986,0.13010292602539061,"[0.03647084808349609, 0.03641644668579101, 0.03629056167602539, 
0.03638272094726563, 0.03651686477661133, 0.036311038970947264, 0.036367359161376955, 0.03647795104980469, 0.03638272094726563, 0.036400127410888675, 0.03618201446533203, 0.03621068954467774, 0.03609907150268555, 0.035798015594482424, 0.03578572845458984, 0.03580723190307617, 0.03576934432983398, 0.03578777694702148, 0.03570483016967774, 0.03568742370605469, 0.035789825439453124, 0.03580108642578125, 0.0357130241394043, 0.035705856323242184, 0.03575603103637695, 0.03582156753540039, 0.03575807952880859, 0.03580825424194336, 0.035705856323242184, 0.035724288940429685, 0.03573555374145508, 0.035716094970703126, 0.035724288940429685, 0.03574169540405273, 0.03575296020507813, 0.03576934432983398, 0.0357314567565918, 0.03577753448486328, 0.03576934432983398, 0.035813377380371096, 0.03587071990966797, 0.0359024658203125, 0.03580108642578125, 0.03586355209350586, 0.03577036666870117, 0.03579391860961914, 0.035866622924804685, 0.035862529754638675, 0.03592704010009766, 0.03588608169555664, 0.03587276840209961, 0.035783679962158206, 0.035794944763183595, 0.03580723190307617, 0.035860481262207033, 0.03587891387939453, 0.035773441314697264, 0.035846145629882815, 0.03582668685913086, 0.035781631469726564, 0.035813377380371096, 0.03589836883544922, 0.13003878784179687, 0.03566592025756836, 0.0356577262878418, 0.03570995330810547, 0.035710975646972655, 0.0358389778137207, 0.03573964691162109, 0.03566899108886719, 0.03567308807373047, 0.03573964691162109, 0.03577241516113281, 0.03572332763671875, 0.03569452667236328, 0.03570175933837891, 0.035947521209716796, 0.035970046997070314, 0.035806209564208984, 0.03571916961669922, 0.03567718505859375, 0.035716094970703126, 0.03569356918334961, 0.035722240447998044, 0.03570278549194336, 0.035736640930175784, 0.03576518249511719, 0.03578060913085938, 0.035757057189941405, 0.03576115036010742, 0.03575603103637695, 0.0357283821105957, 0.03575398254394531, 0.035781631469726564, 0.035757057189941405, 0.03574272155761719, 0.03570995330810547, 0.03574272155761719, 0.03575193786621094, 0.035768318176269534, 0.03573350524902344, 0.03586764907836914, 0.03602739334106445, 0.03579596710205078, 0.03579289627075195, 0.0358021125793457, 0.0357918701171875, 0.035794944763183595, 0.03586355209350586, 0.03580416107177734, 0.03577753448486328, 0.035781631469726564, 0.035775489807128906, 0.03579391860961914, 0.03581542587280274, 0.035784702301025394, 0.03577753448486328, 0.03578879928588867, 0.03580723190307617, 0.03581951904296875, 0.035811328887939455, 0.036013057708740234, 0.03587481689453125, 0.035896320343017575, 0.035860481262207033, 0.13015245056152344, 0.03561471939086914, 0.03559526443481445, 0.03567001724243164, 0.03563417434692383, 0.035643390655517575, 0.035642368316650394, 0.0356495361328125, 0.035707904815673826, 0.035655681610107424, 0.03566796875, 0.035642368316650394, 0.03563622283935547, 0.03566592025756836, 0.03565055847167969, 0.035798015594482424, 0.036050945281982424, 0.03570073699951172, 0.03568230438232422, 0.035655681610107424, 0.035746814727783204, 0.0358389778137207, 0.035789825439453124, 0.035724288940429685, 0.03571814346313477, 0.0357283821105957, 0.035776512145996094, 0.03575807952880859, 0.035789825439453124, 0.035796993255615236, 0.03573452758789063, 0.035783679962158206, 0.035953662872314454, 0.03597318267822266, 0.03580307388305664, 0.035732479095458985, 0.0357314567565918, 0.03570175933837891, 0.03574272155761719, 0.03571507263183594, 0.03573555374145508, 0.03575398254394531, 0.03574169540405273, 0.03575296020507813, 0.03571712112426758, 
0.03571916961669922, 0.035748863220214845, 0.035748863220214845, 0.03582156753540039, 0.03580928039550781, 0.03580313491821289, 0.035860481262207033, 0.035781631469726564, 0.035759105682373046, 0.035746814727783204, 0.03577753448486328, 0.035773441314697264, 0.03576627349853516, 0.035858463287353516, 0.035872737884521486, 0.036124671936035156, 0.035975166320800785, 0.03581951904296875, 0.13026611328125, 0.03571507263183594, 0.035800064086914066, 0.035724288940429685, 0.03587583923339844, 0.03587481689453125, 0.03567923355102539, 0.035694591522216795, 0.035746814727783204, 0.035707904815673826, 0.03571712112426758, 0.035745792388916016, 0.03571814346313477, 0.035683326721191407, 0.035705856323242184, 0.03571507263183594, 0.03572531127929687, 0.035683326721191407, 0.03578265762329102, 0.035705856323242184, 0.03569356918334961, 0.0357386245727539, 0.035745792388916016, 0.03572531127929687, 0.035705856323242184, 0.03573350524902344, 0.035724288940429685, 0.03572326278686523, 0.03576319885253906, 0.035737598419189456, 0.035762176513671876, 0.03577036666870117, 0.035757057189941405, 0.03575296020507813, 0.035743743896484374, 0.03577036666870117, 0.03576319885253906, 0.03577753448486328, 0.03580928039550781, 0.03579084777832031, 0.0358021125793457, 0.03583180618286133, 0.03582156753540039, 0.035787841796875, 0.035798976898193356, 0.035789825439453124, 0.03582361602783203, 0.03581235122680664, 0.035829761505126956, 0.03577753448486328, 0.035814399719238284, 0.035811328887939455, 0.03581545639038086, 0.03579388809204102, 0.03580723190307617, 0.03586867141723633, 0.03581951904296875, 0.0358287353515625, 0.035871742248535156, 0.03583078384399414, 0.03584102249145508, 0.03585331344604492, 0.03583283233642578, 0.13040640258789063, 0.03571712112426758, 0.03564646530151367, 0.03563315200805664, 0.0356577262878418, 0.03562905502319336, 0.035694591522216795, 0.03573964691162109, 0.03569356918334961, 0.035683326721191407, 0.03568435287475586, 0.03564851379394531, 0.03565875244140625, 0.03563731384277344, 0.03567814254760742, 0.0356864013671875, 0.03572326278686523, 0.035778560638427735, 0.03568025588989258, 0.03568742370605469, 0.03575091171264649, 0.03570278549194336, 0.03570483016967774, 0.035705856323242184, 0.03568537521362305, 0.035707904815673826, 0.03571712112426758, 0.03571916961669922, 0.035729408264160156, 0.0357386245727539, 0.03572326278686523, 0.0356864013671875, 0.03577753448486328, 0.03573452758789063, 0.03576115036010742, 0.035781631469726564, 0.035746849060058594, 0.03575497436523437, 0.03571916961669922, 0.03576115036010742, 0.035746814727783204, 0.035746814727783204, 0.035760128021240234, 0.03577958297729492, 0.035760128021240234, 0.03577859115600586, 0.035772384643554686, 0.03577958297729492, 0.035776512145996094, 0.035789825439453124, 0.03580416107177734, 0.035798015594482424, 0.03583692932128906, 0.03582156753540039, 0.0358205451965332, 0.03581849670410156, 0.03580416107177734, 0.0358389778137207, 0.0357918701171875, 0.035800064086914066, 0.035811328887939455, 0.03583795166015625, 0.035871742248535156, 0.12986778259277343, 0.03565158462524414, 0.03564748764038086, 0.03563212966918945, 0.03562700653076172, 0.03564748764038086, 0.035659774780273434, 0.035661823272705076, 0.035681278228759765, 0.03568435287475586, 0.035675136566162106, 0.03571507263183594, 0.035691520690917966, 0.035699710845947266, 0.03566284942626953, 0.035659774780273434, 0.0356577262878418, 0.03569868850708008, 0.035683326721191407, 0.03567718505859375, 0.03572022247314453, 0.03574166488647461, 0.03575296020507813, 
0.035692543029785154, 0.03572633743286133, 0.03575193786621094, 0.03573964691162109, 0.0357314567565918, 0.0357386245727539, 0.035730430603027344, 0.03572531127929687, 0.03599052810668945, 0.03640422439575195, 0.03587071990966797, 0.03575091171264649, 0.03575296020507813, 0.035727359771728515, 0.035746814727783204, 0.03580108642578125, 0.03573964691162109, 0.03577139282226562, 0.035760128021240234, 0.03574784088134766, 0.035745792388916016, 0.03574784088134766, 0.03583795166015625, 0.035778560638427735, 0.035768318176269534, 0.03581951904296875, 0.03581849670410156, 0.035765247344970705, 0.03578675079345703, 0.03579289627075195, 0.035871742248535156, 0.035794944763183595, 0.03578060913085938, 0.03580108642578125, 0.035790878295898436, 0.03583894348144531, 0.035776512145996094, 0.035807262420654296, 0.0358502082824707, 0.035814399719238284, 0.13022720336914062, 0.03561471939086914, 0.03560038375854492, 0.03563724899291992, 0.035620864868164064, 0.03561471939086914, 0.03560345458984375, 0.03562188720703125, 0.03568537521362305, 0.03563212966918945, 0.035729408264160156, 0.035694591522216795, 0.03567001724243164, 0.03566284942626953, 0.03563008117675781, 0.03565158462524414, 0.035659774780273434, 0.0356495361328125, 0.03568742370605469, 0.035655681610107424, 0.03567001724243164, 0.03567923355102539, 0.03569664001464844, 0.0356864013671875, 0.03563417434692383, 0.035681278228759765, 0.03568435287475586, 0.03567718505859375, 0.0357314567565918, 0.03568025588989258, 0.03571814346313477, 0.0357130241394043, 0.035724288940429685, 0.03574476623535156, 0.035781631469726564, 0.03584204864501953, 0.0357498893737793, 0.0357386245727539, 0.035767295837402346, 0.035710975646972655, 0.03573350524902344, 0.035737598419189456, 0.03573350524902344, 0.03572326278686523, 0.035716094970703126, 0.03574169540405273, 0.03575193786621094, 0.035737598419189456, 0.035915775299072264, 0.03585539245605469, 0.03604169464111328, 0.03591372680664062, 0.03579391860961914, 0.03576934432983398, 0.03583078384399414, 0.03580108642578125, 0.03580416107177734, 0.035767295837402346, 0.03578265762329102, 0.035748863220214845, 0.035784702301025394, 0.03578675079345703, 0.03578060913085938, 0.13018418884277344, 0.035768318176269534, 0.035722240447998044, 0.03574476623535156, 0.0357130241394043, 0.03566796875, 0.03569868850708008, 0.0357283821105957, 0.03570278549194336, 0.0356864013671875, 0.03573350524902344, 0.03586150360107422, 0.03599871826171875, 0.035716094970703126, 0.03570995330810547, 0.03573555374145508, 0.03574476623535156, 0.03578988647460937, 0.035861438751220706, 0.035762176513671876, 0.03576319885253906, 0.035716094970703126, 0.0357130241394043, 0.0357283821105957, 0.0357314567565918, 0.035727359771728515, 0.035732479095458985, 0.035746814727783204, 0.035773441314697264, 0.03580518341064453, 0.03574784088134766, 0.03577446365356445, 0.03582566452026367, 0.035759105682373046, 0.035768318176269534, 0.03580518341064453, 0.03578060913085938, 0.035773441314697264, 0.035745792388916016, 0.0357498893737793, 0.035768318176269534, 0.035768318176269534, 0.03577241516113281, 0.035767295837402346, 0.03578572845458984, 0.03582463836669922, 0.0358021125793457, 0.03585126495361328, 0.03594342422485351, 0.035811328887939455, 0.03584000015258789, 0.03582668685913086, 0.03584000015258789, 0.035806209564208984, 0.03582361602783203, 0.035846145629882815, 0.03583180618286133, 0.03582156753540039, 0.035813377380371096, 0.0358389778137207, 0.03583283233642578, 0.035811328887939455, 0.03585433578491211, 0.13020364379882812, 
0.03566284942626953, 0.035664894104003905, 0.03565670394897461, 0.03565158462524414, 0.03565465545654297, 0.03566899108886719, 0.03567103958129883, 0.035688449859619144, 0.035727359771728515, 0.035694591522216795, 0.03566694259643555, 0.03567001724243164, 0.03567103958129883, 0.03567001724243164, 0.03567308807373047, 0.03578675079345703, 0.036073471069335936, 0.03572326278686523, 0.03570687866210937, 0.03570995330810547, 0.035697662353515625, 0.035707904815673826, 0.035681278228759765, 0.035716094970703126, 0.035729408264160156, 0.035737598419189456, 0.03573452758789063, 0.035732479095458985, 0.035724288940429685, 0.03587481689453125, 0.035776512145996094, 0.03576115036010742, 0.035737598419189456, 0.03573964691162109, 0.0357386245727539, 0.035714046478271484, 0.035740673065185545, 0.03577241516113281, 0.035724288940429685, 0.035745792388916016, 0.03577036666870117, 0.035745792388916016, 0.03576319885253906, 0.03575807952880859, 0.03577036666870117, 0.035783679962158206, 0.03578060913085938, 0.035829761505126956, 0.035783679962158206, 0.03579596710205078, 0.035798015594482424, 0.03577958297729492, 0.03577139282226562, 0.03580825424194336, 0.03579904174804688, 0.035810302734375, 0.035773441314697264, 0.03583488082885742, 0.035776512145996094, 0.035814399719238284, 0.03582361602783203, 0.03582566452026367, 0.13012786865234374, 0.03561779022216797, 0.03565468978881836, 0.03567715072631836, 0.03570892715454101, 0.03566387176513672, 0.035659774780273434, 0.03562905502319336, 0.03569561767578125, 0.0356495361328125, 0.03569664001464844, 0.035717151641845704, 0.03570070266723633, 0.03565363311767578, 0.035642368316650394, 0.03568025588989258, 0.035716129302978517, 0.035691486358642575, 0.03568435287475586, 0.03567308807373047, 0.03567411041259766, 0.035745792388916016, 0.03569664001464844, 0.035676158905029294, 0.035675136566162106, 0.035699710845947266, 0.03580108642578125, 0.03570278549194336, 0.035765247344970705, 0.03571507263183594, 0.03572531127929687, 0.035759105682373046, 0.03572531127929687, 0.03571507263183594, 0.03572531127929687, 0.035746814727783204, 0.035746814727783204, 0.035745792388916016, 0.03577753448486328, 0.03574169540405273, 0.035745792388916016, 0.03575500869750976, 0.035746814727783204, 0.03574169540405273, 0.035727359771728515, 0.03577241516113281, 0.03578879928588867, 0.03579391860961914, 0.03588608169555664, 0.03607654571533203, 0.035915775299072264, 0.03580416107177734, 0.03579289627075195, 0.03583180618286133, 0.035757057189941405, 0.03579596710205078, 0.03581644821166992, 0.03581235122680664, 0.035846145629882815, 0.035806209564208984, 0.03584000015258789, 0.03584716796875, 0.03585638427734375]",tokens/s,26.93864149203844,,,,, -bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): 
+bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -22092,7 +22092,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e9556-2549550b4a1ec5ef4a1a6a54;820ae7e5-b1ea-45aa-8a2f-9564bc4c4d6a) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe714-26c5a9ac59f9138b553224d8;689c9ea5-eddc-4f73-b36e-5e06be19dc31) Repository Not Found for url: https://huggingface.co/i/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -22161,7 +22161,7 @@ torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 448.00 MiB. G ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-1.8B,,cuda,0,42,,,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,217063f5c507ed7cc255df7e1f64c4333a0b4dfe,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,MB,1272.225792,5222.432768,0.0,4575.985664,4408.404992,s,10,0.22872108650207515,0.02287210865020752,0.0014333987066370093,0.02240225601196289,0.02294665241241455,0.025058829784393305,0.02674857168197632,"[0.027171007156372072, 0.022398687362670897, 0.02240876770019531, 0.022405824661254882, 0.022477279663085936, 0.02237504005432129, 0.022351551055908202, 0.022351680755615236, 0.02241417694091797, 0.0223670711517334]",tokens/s,11192.671559719847,kWh,2.662624772246536e-07,1.458880744185326e-07,1.223439417189218e-06,1.6355899688324041e-06,tokens/kWh,156518445.86864904,MB,1272.225792,5222.432768,0.0,4575.985664,4408.407552,s,10,12.555456298828124,1.2555456298828125,0.0031298792264682768,1.2557788696289063,1.2598449340820312,1.2602314880371093,1.2605407312011718,"[1.2606180419921875, 1.256319091796875, 1.25747705078125, 1.252046875, 1.251937744140625, 1.2525050048828126, 1.2520966796875, 1.2552386474609376, 1.259759033203125, 
1.2574581298828125]",tokens/s,50.177387822918206,kWh,1.4821127998578297e-05,8.121728151220826e-06,3.728181360921318e-05,6.02246697590123e-05,tokens/kWh,1046082.9466079784,,s,629,12.73492682266234,0.020246306554312164,0.002720546001744483,0.019894271850585937,0.02012180404663086,0.02045071334838867,0.04268367858886719,"[0.021436416625976562, 0.020221952438354493, 0.020122623443603514, 0.0196945915222168, 0.01990656089782715, 0.019689472198486328, 0.01974176025390625, 0.019659711837768556, 0.019768320083618163, 0.01964851188659668, 0.019677183151245118, 0.01965465545654297, 0.020074495315551756, 0.019970048904418947, 0.019922943115234376, 0.01995065689086914, 0.019897279739379884, 0.020199424743652345, 0.020116479873657226, 0.019900415420532228, 0.019878911972045898, 0.019958784103393554, 0.020130815505981444, 0.019900415420532228, 0.01988096046447754, 0.01982259178161621, 0.019911680221557617, 0.020585472106933594, 0.020221952438354493, 0.02038374328613281, 0.01990553665161133, 0.019741695404052736, 0.01969152069091797, 0.01989836883544922, 0.020676607131958007, 0.019954687118530275, 0.019932159423828123, 0.019792896270751953, 0.02030284881591797, 0.01974995231628418, 0.019694528579711913, 0.019721216201782226, 0.019755008697509766, 0.01981439971923828, 0.019718143463134767, 0.019741695404052736, 0.01966796875, 0.019731456756591798, 0.01970278358459473, 0.019698688507080078, 0.02006630325317383, 0.020017215728759766, 0.02019731140136719, 0.020510719299316405, 0.020756479263305663, 0.02023219108581543, 0.019986431121826173, 0.019962879180908204, 0.019936256408691407, 0.019900415420532228, 0.019918848037719726, 0.019933183670043944, 0.042788864135742184, 0.019915775299072267, 0.01992915153503418, 0.019855295181274414, 0.019941375732421874, 0.020148223876953125, 0.019959808349609375, 0.0198922233581543, 0.019710975646972655, 0.019826688766479493, 0.019912704467773438, 0.019907583236694337, 0.01987379264831543, 0.01983897590637207, 0.019817535400390624, 0.01973446464538574, 0.01987583923339844, 0.01986764717102051, 0.019899391174316407, 0.01985843276977539, 0.019997695922851562, 0.020105215072631837, 0.02041753578186035, 0.020494335174560546, 0.020567039489746093, 0.020505599975585938, 0.020007936477661133, 0.019976192474365235, 0.019938304901123048, 0.020377599716186523, 0.019900415420532228, 0.01986457633972168, 0.019900415420532228, 0.019876863479614256, 0.019920896530151368, 0.01985536003112793, 0.01992192077636719, 0.019954687118530275, 0.019796991348266603, 0.02030284881591797, 0.019876863479614256, 0.020017152786254884, 0.019895296096801757, 0.019973119735717772, 0.019989503860473632, 0.019919872283935547, 0.01997209548950195, 0.01968230438232422, 0.01966694450378418, 0.0196823673248291, 0.019783615112304687, 0.01985536003112793, 0.019937280654907227, 0.019907583236694337, 0.019935232162475586, 0.019955711364746095, 0.020006912231445313, 0.019777536392211914, 0.019738624572753907, 0.019817472457885742, 0.019705856323242187, 0.01968339157104492, 0.01973855972290039, 0.042807296752929686, 0.01998028755187988, 0.020099071502685546, 0.019969024658203126, 0.019975168228149414, 0.02002022361755371, 0.019973119735717772, 0.0200898551940918, 0.01993011283874512, 0.01988198471069336, 0.019966976165771484, 0.019957759857177734, 0.019938304901123048, 0.019933183670043944, 0.019922943115234376, 0.020139007568359374, 0.020372480392456056, 0.021173248291015623, 0.020789247512817383, 0.02006937599182129, 0.019727359771728514, 0.019710975646972655, 0.019771392822265626, 0.02001919937133789, 
0.019931135177612306, 0.019938304901123048, 0.019664896011352538, 0.019899391174316407, 0.019941375732421874, 0.019919872283935547, 0.019935232162475586, 0.019944448471069336, 0.01987379264831543, 0.019775487899780272, 0.019768320083618163, 0.019909631729125975, 0.02000486373901367, 0.019948543548583983, 0.019927040100097656, 0.020007936477661133, 0.019909631729125975, 0.019991552352905274, 0.019895296096801757, 0.019851264953613282, 0.019885055541992186, 0.019776512145996093, 0.019742719650268553, 0.019765247344970704, 0.01987379264831543, 0.019968000411987305, 0.019973119735717772, 0.019932159423828123, 0.019903488159179687, 0.019991552352905274, 0.019900415420532228, 0.019776512145996093, 0.019786752700805665, 0.02009600067138672, 0.020002815246582033, 0.019920896530151368, 0.01972428894042969, 0.019693567276000978, 0.019941375732421874, 0.042744831085205076, 0.019855392456054687, 0.01982972717285156, 0.019851264953613282, 0.01982464027404785, 0.019796991348266603, 0.019631103515625, 0.0196495361328125, 0.01960550308227539, 0.01972428894042969, 0.01995884895324707, 0.019676095962524415, 0.019728384017944335, 0.019941375732421874, 0.01984000015258789, 0.01983590316772461, 0.0198287353515625, 0.019719167709350584, 0.01984000015258789, 0.019667999267578125, 0.019641311645507812, 0.019765247344970704, 0.01981644821166992, 0.019842048645019532, 0.019809280395507813, 0.019909631729125975, 0.01982771110534668, 0.02001919937133789, 0.019885055541992186, 0.019920896530151368, 0.019891199111938478, 0.019896320343017578, 0.019861503601074217, 0.01988812828063965, 0.0198922233581543, 0.019877887725830077, 0.020006912231445313, 0.019948543548583983, 0.019857440948486328, 0.02017791938781738, 0.020028383255004882, 0.019896320343017578, 0.019910655975341796, 0.019739648818969727, 0.019917823791503905, 0.019923967361450197, 0.01989017677307129, 0.01986867141723633, 0.019870719909667968, 0.019944448471069336, 0.02012057685852051, 0.02002124786376953, 0.02003046417236328, 0.02007040023803711, 0.01997209548950195, 0.019931135177612306, 0.0198287353515625, 0.01981439971923828, 0.019748863220214845, 0.01988915252685547, 0.01984000015258789, 0.02000486373901367, 0.019944511413574218, 0.042783679962158205, 0.01982975959777832, 0.01966182327270508, 0.019784704208374023, 0.01969049644470215, 0.01964646339416504, 0.019734527587890623, 0.01966694450378418, 0.019729408264160156, 0.019719167709350584, 0.019736576080322265, 0.01989017677307129, 0.019909631729125975, 0.019955711364746095, 0.019862527847290038, 0.020023296356201172, 0.019853311538696287, 0.019934207916259765, 0.01967411231994629, 0.01965056037902832, 0.019720191955566405, 0.01963724708557129, 0.020051967620849608, 0.01998847961425781, 0.01993011283874512, 0.019899391174316407, 0.019862527847290038, 0.019917823791503905, 0.019922943115234376, 0.020214784622192384, 0.02003558349609375, 0.02002739143371582, 0.01987379264831543, 0.019945472717285157, 0.01984409523010254, 0.019869695663452147, 0.019647487640380858, 0.019773439407348634, 0.019929088592529298, 0.020242431640625, 0.02000588798522949, 0.019851264953613282, 0.0198922233581543, 0.01985843276977539, 0.019854335784912108, 0.019993600845336915, 0.01984102439880371, 0.019907583236694337, 0.019824703216552733, 0.019876800537109374, 0.019919872283935547, 0.01983590316772461, 0.01984409523010254, 0.019882015228271484, 0.019843040466308595, 0.019714048385620117, 0.020011007308959963, 0.019883007049560548, 0.01988403129577637, 0.019932159423828123, 0.019922943115234376, 0.01988096046447754, 
0.019911680221557617, 0.042714111328125, 0.019883007049560548, 0.019887104034423828, 0.01966080093383789, 0.01969049644470215, 0.019696640014648437, 0.019870719909667968, 0.01983283233642578, 0.02002739143371582, 0.020229120254516602, 0.01985843276977539, 0.01986662483215332, 0.019963903427124022, 0.019851264953613282, 0.01983180809020996, 0.019951616287231445, 0.02002739143371582, 0.019932159423828123, 0.019957759857177734, 0.019914751052856446, 0.019899391174316407, 0.01991372871398926, 0.019903488159179687, 0.019664896011352538, 0.019706880569458008, 0.019734527587890623, 0.019813375473022463, 0.01971615982055664, 0.019678144454956054, 0.019671104431152345, 0.0196658878326416, 0.019643360137939454, 0.01981644821166992, 0.01998028755187988, 0.019983360290527344, 0.01983897590637207, 0.02006425666809082, 0.019810304641723633, 0.01989017677307129, 0.019936256408691407, 0.01990457534790039, 0.01994028854370117, 0.020025344848632814, 0.019895296096801757, 0.020462591171264647, 0.019970048904418947, 0.01987379264831543, 0.01987583923339844, 0.019700735092163087, 0.019793920516967774, 0.019942399978637695, 0.019938304901123048, 0.019968000411987305, 0.019898399353027344, 0.01987171173095703, 0.01984511947631836, 0.01986764717102051, 0.019672063827514647, 0.019599359512329103, 0.019895296096801757, 0.019887104034423828, 0.020206592559814454, 0.019900415420532228, 0.042635265350341796, 0.020101119995117187, 0.02004377555847168, 0.01980620765686035, 0.01988403129577637, 0.019935232162475586, 0.019870719909667968, 0.01985843276977539, 0.019870719909667968, 0.019680255889892577, 0.019722240447998047, 0.019719200134277345, 0.019775455474853515, 0.01980723190307617, 0.020254720687866212, 0.019887104034423828, 0.01987174415588379, 0.02005504035949707, 0.019728384017944335, 0.01997209548950195, 0.020008960723876954, 0.019974143981933593, 0.019994623184204103, 0.019934207916259765, 0.01988403129577637, 0.019851264953613282, 0.019741727828979493, 0.01969865608215332, 0.019680255889892577, 0.019801088333129883, 0.0197325439453125, 0.019651519775390626, 0.01965158462524414, 0.019834880828857423, 0.01984819221496582, 0.019753984451293945, 0.02002022361755371, 0.01988812828063965, 0.02043391990661621, 0.02008684730529785, 0.019865535736083986, 0.019862527847290038, 0.01981439971923828, 0.019825664520263672, 0.019842048645019532, 0.019924991607666014, 0.02005504035949707, 0.019878911972045898, 0.019886079788208007, 0.019962911605834962, 0.019795936584472658, 0.019719167709350584, 0.019767295837402343, 0.019915775299072267, 0.0198922233581543, 0.019825664520263672, 0.019894271850585937, 0.019869695663452147, 0.01986662483215332, 0.01982156753540039, 0.019727359771728514, 0.019598335266113282, 0.01992192077636719, 0.042665985107421874, 0.019796991348266603, 0.019671104431152345, 0.019626943588256836, 0.019834880828857423, 0.019802112579345704, 0.02008780860900879, 0.02000486373901367, 0.01986774444580078, 0.0198624324798584, 0.01988812828063965, 0.01970790481567383, 0.019796991348266603, 0.01963724708557129, 0.019760128021240234, 0.019642400741577148, 0.01968227195739746, 0.01964851188659668, 0.019864608764648437, 0.019857376098632813, 0.019926015853881835, 0.0198973445892334, 0.019885055541992186, 0.01987276840209961, 0.019908607482910155, 0.019929088592529298, 0.019773439407348634, 0.01967820739746094, 0.01965977668762207, 0.019765247344970704, 0.019628032684326172, 0.020123647689819335, 0.019994623184204103, 0.019861503601074217, 0.019883039474487305, 0.019828704833984374, 0.019762176513671875, 
0.01966592025756836, 0.01962188720703125, 0.02007142448425293, 0.019853311538696287, 0.01984511947631836, 0.019918848037719726, 0.019951616287231445, 0.01983692741394043, 0.01985638427734375, 0.020115455627441405, 0.01990553665161133, 0.020060159683227538, 0.019943424224853516, 0.019910655975341796, 0.019917823791503905, 0.01987276840209961, 0.019965951919555663, 0.022185983657836913, 0.020792320251464845, 0.02006937599182129, 0.020121599197387697, 0.02001919937133789, 0.020135936737060548, 0.020017215728759766, 0.01994745635986328, 0.019939327239990236, 0.04274995040893555, 0.019887104034423828, 0.019883007049560548, 0.019943424224853516, 0.019900415420532228, 0.01988921546936035, 0.019882944107055663, 0.01985945510864258, 0.019933183670043944, 0.019917823791503905, 0.019885055541992186, 0.01984614372253418, 0.019711999893188475, 0.019596288681030274, 0.01962700843811035, 0.01962188720703125, 0.01962700843811035, 0.019775487899780272, 0.01965772819519043, 0.01984409523010254, 0.019903488159179687, 0.01990656089782715, 0.019951616287231445, 0.019878911972045898, 0.019944448471069336, 0.02001919937133789, 0.020098047256469728, 0.019896320343017578, 0.02007244873046875, 0.019896320343017578, 0.019926015853881835, 0.020002815246582033, 0.019974143981933593, 0.019900415420532228, 0.019894271850585937, 0.01979091262817383, 0.019626943588256836, 0.019718175888061525, 0.01968124771118164, 0.019720191955566405, 0.019933183670043944, 0.01992192077636719, 0.02021887969970703, 0.020384767532348632, 0.021629951477050782, 0.02067865562438965, 0.02047283172607422, 0.02043903923034668, 0.01996905517578125, 0.019977184295654298, 0.019965951919555663, 0.019932159423828123, 0.019985408782958985, 0.020526079177856444, 0.020519935607910156, 0.02045849609375, 0.019933183670043944, 0.01993011283874512, 0.02024345588684082, 0.020413440704345705, 0.019909631729125975, 0.019993600845336915, 0.019927040100097656, 0.04269055938720703, 0.019899391174316407, 0.019826688766479493, 0.0196997127532959, 0.019809280395507813, 0.01988403129577637, 0.019918848037719726, 0.01987993621826172, 0.01985228729248047, 0.019811328887939454, 0.019748863220214845, 0.021755903244018555, 0.02129408073425293, 0.02049228858947754, 0.019876863479614256, 0.019935232162475586, 0.01982259178161621, 0.02031718444824219, 0.02123980712890625, 0.020098047256469728, 0.019899391174316407, 0.019940351486206053, 0.019998720169067383, 0.019607551574707033, 0.01965056037902832, 0.01963827133178711, 0.019721216201782226, 0.019899391174316407, 0.020147199630737304, 0.019833856582641602, 0.019966976165771484, 0.019886079788208007, 0.01991372871398926, 0.0198973445892334, 0.019903488159179687, 0.019885152816772462, 0.019882911682128905, 0.019907583236694337, 0.019917823791503905, 0.019818496704101563, 0.01987174415588379, 0.019655679702758787, 0.019612672805786133, 0.01959321594238281, 0.01969254493713379, 0.019885055541992186, 0.019826688766479493, 0.019812351226806642, 0.01990656089782715, 0.02000588798522949, 0.01987174415588379, 0.019868703842163087, 0.01990345573425293, 0.019948543548583983, 0.019848224639892578, 0.0200980167388916, 0.019912704467773438, 0.01985843276977539, 0.01984819221496582, 0.01986662483215332, 0.01984000015258789, 0.02001919937133789, 0.019944448471069336]",tokens/s,49.39172472358988,,,main,False,False 
-bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -22216,7 +22216,7 @@ ImportError: This modeling file requires the following packages that were not fo ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.0,,,MB,1258.74176,3041.394688,0.0,2394.947584,2279.417856,s,10,0.24129708862304688,0.02412970886230469,0.0009275755891401461,0.024045184135437013,0.024764550018310547,0.025596467399597166,0.026262001304626463,"[0.026428384780883787, 0.023323360443115233, 0.02338118362426758, 0.024444223403930664, 0.023216384887695313, 0.023652992248535155, 0.024437376022338867, 0.023386943817138673, 0.02444655990600586, 0.024579679489135742]",tokens/s,10609.328171378062,kWh,2.829735362897333e-07,1.550558548124651e-07,1.0543529168468965e-06,1.4923823079490949e-06,tokens/kWh,171537814.8323185,MB,1259.06944,3041.394688,0.0,2394.947584,2279.420416,s,10,14.706535156249998,1.470653515625,0.02462968819862708,1.4804291381835937,1.4855615844726564,1.4865984191894532,1.4874278869628907,"[1.440400390625, 1.478913818359375, 1.4812783203125, 1.4824281005859374, 1.4075567626953125, 1.4783734130859374, 1.4850379638671876, 1.4795799560546874, 1.48763525390625, 
1.4853311767578126]",tokens/s,42.83809839003865,kWh,1.6815152510897653e-05,9.214636577634288e-06,3.707637203215265e-05,6.310616112068459e-05,tokens/kWh,998317.7376218217,,s,629,14.8957398891449,0.023681621445381395,0.002902418586720406,0.023476224899291992,0.023700889205932617,0.023891558837890625,0.04635525131225587,"[0.02434764862060547, 0.02344550323486328, 0.02329702377319336, 0.0230830078125, 0.023096319198608398, 0.023059455871582032, 0.022979583740234375, 0.023313440322875977, 0.023432159423828126, 0.023243776321411135, 0.023096319198608398, 0.02310860824584961, 0.023026687622070312, 0.02312499237060547, 0.023179264068603517, 0.023175167083740233, 0.02310860824584961, 0.023164928436279295, 0.02305843162536621, 0.023142400741577147, 0.023002111434936523, 0.023181312561035155, 0.023236608505249022, 0.023367679595947266, 0.023235584259033205, 0.023150592803955077, 0.022930431365966796, 0.022755327224731444, 0.022408191680908202, 0.022520832061767578, 0.022561792373657227, 0.022385663986206054, 0.02243174362182617, 0.022386688232421875, 0.022478847503662108, 0.022436864852905275, 0.02246451187133789, 0.02249625587463379, 0.022394880294799805, 0.02229248046875, 0.022417407989501953, 0.022486015319824217, 0.02246451187133789, 0.02243174362182617, 0.022368255615234374, 0.022372352600097657, 0.022329343795776366, 0.022417407989501953, 0.022475807189941407, 0.02237129592895508, 0.022436864852905275, 0.022335487365722655, 0.022540288925170897, 0.022395904541015626, 0.02244095993041992, 0.02246143913269043, 0.022441984176635742, 0.022420480728149415, 0.022418432235717774, 0.02227712059020996, 0.024041471481323243, 0.02351513671875, 0.04617830276489258, 0.022366207122802736, 0.022373376846313478, 0.02330624008178711, 0.023311359405517578, 0.023335935592651368, 0.02347417640686035, 0.023459840774536132, 0.023857152938842774, 0.023444480895996093, 0.02352742385864258, 0.023427072525024413, 0.023436288833618164, 0.02342911911010742, 0.02350492858886719, 0.023503839492797853, 0.02346598434448242, 0.023595008850097656, 0.023435264587402343, 0.02350284767150879, 0.023427072525024413, 0.0234967041015625, 0.023796735763549806, 0.023385087966918947, 0.023446527481079102, 0.023417856216430662, 0.023431167602539063, 0.023413759231567383, 0.023549951553344727, 0.02350182342529297, 0.02353561592102051, 0.02350284767150879, 0.02349158477783203, 0.02343731117248535, 0.023558143615722657, 0.023428096771240234, 0.023537664413452147, 0.023553024291992186, 0.023571456909179687, 0.023558208465576172, 0.023587776184082032, 0.023434240341186522, 0.023508991241455078, 0.02345062446594238, 0.023421951293945312, 0.023483455657958983, 0.02340447998046875, 0.023491615295410155, 0.02347007942199707, 0.02358268737792969, 0.02369843292236328, 0.023563264846801758, 0.02350284767150879, 0.02349772834777832, 0.023409664154052736, 0.023464960098266603, 0.02341891288757324, 0.02349769592285156, 0.023417856216430662, 0.02345369529724121, 0.023545856475830077, 0.025411584854125976, 0.023580671310424805, 0.04688588714599609, 0.023653375625610353, 0.02348748779296875, 0.02351513671875, 0.023414783477783203, 0.02351820755004883, 0.023434240341186522, 0.02351103973388672, 0.023517183303833008, 0.02350387191772461, 0.023516159057617187, 0.023946239471435548, 0.02411315155029297, 0.02370047950744629, 0.023623680114746092, 0.023617536544799804, 0.02349567985534668, 0.023399423599243165, 0.02352025604248047, 0.023548927307128906, 0.023592960357666014, 0.02352332878112793, 0.023415807723999024, 0.02349772834777832, 0.02367283248901367, 
0.023411712646484374, 0.02349158477783203, 0.023433216094970705, 0.023545856475830077, 0.02345267105102539, 0.023777280807495117, 0.02349465560913086, 0.02346291160583496, 0.0234833927154541, 0.023593984603881835, 0.02346291160583496, 0.02348953628540039, 0.023497760772705076, 0.023456735610961912, 0.02349875259399414, 0.02350182342529297, 0.023412736892700195, 0.02332979202270508, 0.02346291160583496, 0.023366655349731445, 0.023409664154052736, 0.02349158477783203, 0.023555072784423828, 0.02343731117248535, 0.023588863372802735, 0.023418880462646483, 0.02346598434448242, 0.023441408157348635, 0.02347417640686035, 0.023414783477783203, 0.023417856216430662, 0.023431167602539063, 0.023409664154052736, 0.02345369529724121, 0.023504896163940428, 0.023446527481079102, 0.023619583129882812, 0.02346086311340332, 0.048465919494628903, 0.023447551727294923, 0.023423999786376954, 0.023398399353027344, 0.02346598434448242, 0.023441408157348635, 0.02346086311340332, 0.023636991500854493, 0.02389606475830078, 0.023550975799560548, 0.023388160705566406, 0.023459840774536132, 0.023406591415405274, 0.023541759490966797, 0.02350284767150879, 0.023588863372802735, 0.02349875259399414, 0.02368716812133789, 0.023344127655029297, 0.02326323127746582, 0.023436288833618164, 0.023627775192260742, 0.02348646354675293, 0.02347110366821289, 0.02352025604248047, 0.023488512039184572, 0.023464960098266603, 0.02353561592102051, 0.02347110366821289, 0.02345267105102539, 0.023571456909179687, 0.023532543182373047, 0.02348236846923828, 0.023518272399902344, 0.023555007934570313, 0.023534656524658203, 0.023495616912841796, 0.02352639961242676, 0.02437222480773926, 0.023982080459594726, 0.02350387191772461, 0.023568384170532225, 0.02345881652832031, 0.02347724723815918, 0.023412736892700195, 0.0276889591217041, 0.024598527908325195, 0.02370355224609375, 0.023488512039184572, 0.02346598434448242, 0.02390019226074219, 0.023435232162475585, 0.023396352767944335, 0.02349465560913086, 0.023434240341186522, 0.023488512039184572, 0.023359487533569336, 0.023178239822387696, 0.023190528869628906, 0.02229964828491211, 0.022338560104370117, 0.02234060859680176, 0.02230271911621094, 0.04605644989013672, 0.02233241653442383, 0.022225984573364256, 0.022307775497436524, 0.022171648025512695, 0.02230681610107422, 0.02225971221923828, 0.0222873592376709, 0.022227968215942383, 0.02226790428161621, 0.022228992462158204, 0.022231039047241212, 0.022312959671020507, 0.02224332809448242, 0.02230067253112793, 0.022289407730102538, 0.022239231109619142, 0.022339584350585938, 0.02225766372680664, 0.02225868797302246, 0.022335487365722655, 0.0223191032409668, 0.022361087799072265, 0.022244352340698242, 0.02232524871826172, 0.022213632583618165, 0.02226688003540039, 0.022252544403076172, 0.022346752166748047, 0.0222423038482666, 0.022611967086791994, 0.022323200225830078, 0.022370304107666016, 0.02239286422729492, 0.022396928787231447, 0.022254560470581056, 0.022387712478637696, 0.02235699272155762, 0.022386688232421875, 0.022377471923828125, 0.022388736724853517, 0.022452224731445314, 0.02230886459350586, 0.022366207122802736, 0.022338560104370117, 0.02245631980895996, 0.022508544921875, 0.02230784034729004, 0.022326303482055665, 0.022441951751708985, 0.022213632583618165, 0.022360063552856444, 0.022348831176757813, 0.022378463745117188, 0.0223191032409668, 0.022468608856201173, 0.022352895736694335, 0.02246143913269043, 0.022387712478637696, 0.0224532470703125, 0.022494207382202147, 0.022375423431396483, 0.022380544662475587, 0.04667084884643555, 
0.02244095993041992, 0.022312959671020507, 0.022444032669067384, 0.023698463439941406, 0.023480287551879885, 0.023618560791015625, 0.02349977684020996, 0.023416831970214845, 0.023427072525024413, 0.0235100154876709, 0.023860223770141603, 0.023397375106811523, 0.0234833927154541, 0.02345062446594238, 0.02348646354675293, 0.023980031967163085, 0.02409779167175293, 0.023796735763549806, 0.023441408157348635, 0.023567359924316408, 0.023451648712158202, 0.02351513671875, 0.02350182342529297, 0.023413759231567383, 0.02346291160583496, 0.02342911911010742, 0.023498783111572264, 0.023575551986694337, 0.023567327499389647, 0.02368307113647461, 0.02352742385864258, 0.02345062446594238, 0.023407615661621094, 0.0235284481048584, 0.02348543930053711, 0.02351820755004883, 0.02348543930053711, 0.023623680114746092, 0.023556095123291015, 0.023493696212768554, 0.02359187126159668, 0.023408672332763673, 0.023400415420532228, 0.02343731117248535, 0.023372800827026367, 0.023372800827026367, 0.02353459167480469, 0.02346188735961914, 0.023439359664916993, 0.02348236846923828, 0.02348134422302246, 0.023513088226318358, 0.023784511566162108, 0.02346182441711426, 0.023446527481079102, 0.02344550323486328, 0.023601152420043944, 0.02346188735961914, 0.023602176666259765, 0.023524351119995117, 0.02347417640686035, 0.02349260711669922, 0.04845363235473633, 0.02351206398010254, 0.02345779228210449, 0.023384063720703126, 0.023342079162597656, 0.023385087966918947, 0.023463935852050782, 0.023431167602539063, 0.023386112213134767, 0.023566335678100587, 0.02343734359741211, 0.02350588798522949, 0.023451648712158202, 0.023399423599243165, 0.023629823684692384, 0.0235100154876709, 0.023439359664916993, 0.02349363136291504, 0.02352332878112793, 0.023402496337890624, 0.023553024291992186, 0.02395955276489258, 0.024360960006713867, 0.023872512817382813, 0.023620607376098633, 0.023513088226318358, 0.023617536544799804, 0.023734272003173826, 0.02376192092895508, 0.023395328521728515, 0.023517183303833008, 0.023559167861938478, 0.023826431274414063, 0.02346700859069824, 0.023630847930908205, 0.02349977684020996, 0.023442432403564452, 0.02354380798339844, 0.023451648712158202, 0.023418880462646483, 0.023521280288696288, 0.02349056053161621, 0.023830528259277343, 0.023536640167236327, 0.023587839126586914, 0.023843839645385743, 0.023639072418212892, 0.02358268737792969, 0.0235284481048584, 0.023645183563232423, 0.023635967254638672, 0.023714815139770508, 0.023757823944091795, 0.023448575973510744, 0.023463935852050782, 0.02345881652832031, 0.02348134422302246, 0.02352230453491211, 0.023636991500854493, 0.023618560791015625, 0.023581695556640626, 0.023576576232910155, 0.023478271484375, 0.04642406463623047, 0.0224901123046875, 0.023410688400268553, 0.023560192108154295, 0.023390207290649414, 0.023657472610473632, 0.023405567169189453, 0.023468032836914062, 0.02347007942199707, 0.02368921661376953, 0.023809024810791016, 0.02349363136291504, 0.02349567985534668, 0.02352230453491211, 0.023610368728637695, 0.023553024291992186, 0.02350182342529297, 0.02358892822265625, 0.023443391799926758, 0.02346086311340332, 0.022725631713867187, 0.02353152084350586, 0.023367679595947266, 0.023666688919067383, 0.02392985534667969, 0.023682048797607422, 0.023432191848754884, 0.023590911865234376, 0.023605247497558594, 0.023430143356323242, 0.02353971290588379, 0.02349465560913086, 0.023373823165893554, 0.02345372772216797, 0.023540704727172852, 0.02364723205566406, 0.023456768035888673, 0.023607295989990236, 0.023524351119995117, 0.023706623077392578, 
0.023500799179077148, 0.02388479995727539, 0.023567359924316408, 0.02343731117248535, 0.02364825630187988, 0.023368703842163087, 0.02347417640686035, 0.02346188735961914, 0.02342095947265625, 0.023388128280639648, 0.023388160705566406, 0.023472127914428712, 0.023413759231567383, 0.02392166328430176, 0.02345267105102539, 0.023398399353027344, 0.023459840774536132, 0.023386112213134767, 0.023517183303833008, 0.02346598434448242, 0.02341993522644043, 0.02337174415588379, 0.023434240341186522, 0.04841471862792969, 0.023625728607177734, 0.023516159057617187, 0.023827455520629884, 0.023547903060913086, 0.023661567687988282, 0.023480319976806642, 0.023603200912475586, 0.023702527999877928, 0.023559167861938478, 0.023617536544799804, 0.02359609603881836, 0.02362668800354004, 0.023773183822631837, 0.023508991241455078, 0.023589887619018556, 0.023542783737182618, 0.023610368728637695, 0.02350796890258789, 0.023500799179077148, 0.023654399871826173, 0.023577600479125976, 0.023669759750366212, 0.023431167602539063, 0.023416831970214845, 0.023435264587402343, 0.02353561592102051, 0.023472127914428712, 0.023508991241455078, 0.023610368728637695, 0.023349248886108398, 0.023838720321655273, 0.023574527740478517, 0.023665664672851562, 0.023545856475830077, 0.02347007942199707, 0.02351923179626465, 0.02348441505432129, 0.023556095123291015, 0.023553024291992186, 0.023809024810791016, 0.023447551727294923, 0.02365132713317871, 0.02346291160583496, 0.023472127914428712, 0.023814144134521483, 0.023644159317016602, 0.02351103973388672, 0.023439359664916993, 0.02353561592102051, 0.023576576232910155, 0.02348134422302246, 0.023403520584106444, 0.02349260711669922, 0.023463935852050782, 0.024232959747314452, 0.024779775619506835, 0.0238786563873291, 0.023740415573120118, 0.02353561592102051, 0.023565311431884766, 0.023536640167236327, 0.02392780876159668, 0.048901119232177735, 0.02348134422302246, 0.02345267105102539, 0.02350284767150879, 0.023435264587402343, 0.02349567985534668, 0.02346086311340332, 0.023742528915405275, 0.023418815612792968, 0.023916608810424806, 0.02359391975402832, 0.023771135330200196, 0.023630847930908205, 0.02353049659729004, 0.023763967514038087, 0.02369638442993164, 0.023540735244750977, 0.02351206398010254, 0.02410086441040039, 0.0234967041015625, 0.023630847930908205, 0.02355200004577637, 0.02347520065307617, 0.023739456176757812, 0.02362668800354004, 0.023443456649780273, 0.02349772834777832, 0.023350271224975586, 0.02365132713317871, 0.023480319976806642, 0.02349977684020996, 0.023561216354370116, 0.023613439559936524, 0.023544832229614256, 0.02345779228210449, 0.023640064239501952, 0.02358787155151367, 0.023503839492797853, 0.023582719802856447, 0.023504896163940428, 0.023393280029296876, 0.023476224899291992, 0.02352230453491211, 0.02345267105102539, 0.023410688400268553, 0.02344550323486328, 0.023578624725341796, 0.023750656127929686, 0.02352025604248047, 0.023536640167236327, 0.023419904708862304, 0.024018943786621092, 0.02368409538269043, 0.023427072525024413, 0.023582719802856447, 0.023547935485839843, 0.02356937599182129, 0.023626752853393555, 0.023521280288696288, 0.0234967041015625, 0.02345267105102539, 0.023432191848754884, 0.02369126319885254]",tokens/s,42.22683832297425,,,,, 
-bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,v,v,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,v,v,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -22255,7 +22255,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d4685-089f97c457003aff4f27b4ec;36dc4bff-f9af-4a8d-ac9c-5119aa7f8b5c) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe9e0-79c929074c6b3aca0dbfcd99;ce48ba21-efe7-4a1d-ac09-f3d3039a040d) Repository Not Found for url: https://huggingface.co/v/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -22352,7 +22352,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e9522-345c5d385eb787b2543c0cd4;b98dc05e-b0be-41e4-a827-eddacf2221c1) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe6df-586f849d7d05dab54caa7ccc;24fa0d72-b50d-4705-9eae-a5eee0f85f5b) Repository Not Found for url: https://huggingface.co/M/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -22422,7 +22422,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e95f7-737820362fd55be048f608a3;6c3607c2-1f8f-4102-b654-4815d25f83c0) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-664fe7b8-487a39b32ae4d6893480aeaf;4aac74ec-1d2b-42b6-b094-5a03ddf690fe) Repository Not Found for url: https://huggingface.co/r/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -22454,7 +22454,7 @@ If this is a private repository, make sure to pass a token having permission to ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,Qwen/Qwen1.5-7B,,cuda,0,42,,,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.0,217063f5c507ed7cc255df7e1f64c4333a0b4dfe,4.40.2,,0.30.1,,,,1.19.2,,,,0.10.0,,,MB,1281.077248,17102.798848,0.0,16456.351744,16359.853056,s,10,0.9702524795532226,0.09702524795532226,0.00025320433899165477,0.09696044540405274,0.09724756317138672,0.09748131790161133,0.09766832168579101,"[0.09771507263183593, 0.09698252868652343, 0.09697071838378907, 0.09687910461425782, 0.096891357421875, 0.0969501724243164, 0.09686860656738282, 0.09702301025390625, 0.09677629089355469, 0.09719561767578125]",tokens/s,2638.488490314209,kWh,1.1476153758994457e-06,6.288410415630634e-07,5.71933035755311e-06,7.495786775015618e-06,tokens/kWh,34152518.966158375,MB,1281.077248,17102.798848,0.0,16456.351744,16416.242176,s,10,23.38539697265625,2.3385396972656247,0.0029707981617589597,2.3381635742187497,2.3395967529296877,2.3432687133789063,2.346206281738281,"[2.346940673828125, 2.336866943359375, 2.335611083984375, 2.336256103515625, 2.338372314453125, 2.338546875, 2.337695068359375, 2.33810107421875, 2.33878076171875, 2.33822607421875]",tokens/s,26.93988905711704,kWh,2.7537362347593073e-05,1.5091464710438814e-05,0.00011439315462125123,0.00015702198167928308,tokens/kWh,401217.7105793844,,s,629,24.219824024200456,0.03850528461717081,0.011591252179652836,0.03709337615966797,0.037211750030517576,0.03740590057373047,0.1345780224609375,"[0.037854110717773434, 0.03778051376342773, 0.0378449592590332, 0.03768739318847656, 0.037798816680908204, 0.0376995849609375, 0.037776382446289065, 0.03777433776855469, 0.037754878997802735, 0.037515262603759765, 0.0376258544921875, 0.037651454925537106, 0.03721113586425781, 0.037026817321777344, 0.03700838470458984, 0.037026817321777344, 0.03706163024902344, 0.037029888153076174, 0.037010433197021485, 0.03705036926269531, 0.03703398513793945, 0.03711795043945312, 0.03704115295410156, 0.03706777572631836, 0.0370513916015625, 0.037043201446533204, 0.037131263732910154, 0.0370882568359375, 0.03702783966064453, 0.03705344009399414, 0.03727360153198242, 0.03744460678100586, 0.037152767181396484, 0.03709747314453125, 0.0370882568359375, 0.037103614807128905, 0.037087230682373046, 0.037157886505126955, 0.03709542465209961, 0.03708006286621094, 0.03709337615966797, 0.03710771179199219, 0.03706880187988281, 0.0371599349975586, 0.03728486251831055, 0.037116928100585936, 0.03711795043945312, 0.03723571014404297, 0.03726335906982422, 0.03715584182739258, 0.03713536071777344, 0.03712819290161133, 0.03712409591674805, 0.037099582672119144, 0.037134273529052735, 0.03709337615966797, 0.03708108901977539, 0.037144577026367184, 
0.037149696350097655, 0.037130241394042966, 0.037116928100585936, 0.037166080474853515, 0.13465087890625, 0.03696230316162109, 0.03696640014648438, 0.036947967529296875, 0.03693670272827149, 0.03698995208740234, 0.03701145553588867, 0.03700735855102539, 0.03697868728637695, 0.036959232330322264, 0.03699814224243164, 0.037015552520751956, 0.037005313873291014, 0.036982784271240236, 0.03711897659301758, 0.037512191772460936, 0.03710771179199219, 0.03706675338745117, 0.03701145553588867, 0.03703302383422852, 0.037025726318359375, 0.03700838470458984, 0.03701760101318359, 0.03699814224243164, 0.03700534439086914, 0.03702985763549805, 0.03704217529296875, 0.03707392120361328, 0.03705036926269531, 0.03700735855102539, 0.03700838470458984, 0.03727155303955078, 0.037498878479003905, 0.03712716674804688, 0.03704422378540039, 0.03708927917480469, 0.037087230682373046, 0.037059585571289064, 0.037032958984375, 0.03714355087280274, 0.037294078826904296, 0.0370964469909668, 0.037177345275878904, 0.03710774230957031, 0.037061599731445315, 0.0370709114074707, 0.03721516799926758, 0.037103614807128905, 0.037108734130859376, 0.037119998931884765, 0.037116928100585936, 0.037136383056640625, 0.03711180877685547, 0.037082111358642575, 0.03712102508544922, 0.03713433456420898, 0.03710771179199219, 0.037163009643554686, 0.03712409591674805, 0.037133312225341795, 0.03714976119995117, 0.03714041519165039, 0.03712211227416992, 0.13467027282714844, 0.03700735855102539, 0.036983806610107424, 0.03702374267578125, 0.03700838470458984, 0.036944896697998046, 0.03693772888183594, 0.03698995208740234, 0.037032958984375, 0.03700940704345703, 0.03698688125610351, 0.037026817321777344, 0.03703398513793945, 0.03702169418334961, 0.037002239227294925, 0.03697459030151367, 0.03705241775512695, 0.03711283111572266, 0.037026817321777344, 0.037032958984375, 0.03701760101318359, 0.03724800109863281, 0.0370247688293457, 0.037116928100585936, 0.036994049072265625, 0.03702272033691406, 0.03704012680053711, 0.03703705596923828, 0.037028865814208986, 0.037048320770263675, 0.03709235382080078, 0.03709132766723633, 0.03703398513793945, 0.03706265640258789, 0.03708108901977539, 0.03707494354248047, 0.03706367874145508, 0.037032958984375, 0.03711180877685547, 0.03706572723388672, 0.03703398513793945, 0.03708313751220703, 0.03709542465209961, 0.03708313751220703, 0.03706265640258789, 0.03706367874145508, 0.03712409591674805, 0.037114879608154294, 0.03711897659301758, 0.03717324829101563, 0.0370964469909668, 0.037085182189941404, 0.03709036636352539, 0.0371578254699707, 0.03714048004150391, 0.03710771179199219, 0.03709030532836914, 0.03710367965698242, 0.03713222503662109, 0.037119998931884765, 0.03712307357788086, 0.03711180877685547, 0.03712307357788086, 0.13458432006835938, 0.036961280822753906, 0.03693670272827149, 0.037028865814208986, 0.03695820617675781, 0.036980735778808595, 0.03699302291870117, 0.03704422378540039, 0.03700121688842774, 0.03701356887817383, 0.037033920288085935, 0.03703603363037109, 0.03701862335205078, 0.03699715042114258, 0.037002273559570316, 0.03705440139770508, 0.037032958984375, 0.037028865814208986, 0.037048320770263675, 0.03703500747680664, 0.037013504028320314, 0.037031936645507815, 0.03700838470458984, 0.037031936645507815, 0.03704524612426758, 0.037029888153076174, 0.037010433197021485, 0.03706572723388672, 0.03712819290161133, 0.03706982421875, 0.03708931350708008, 0.037086177825927734, 0.03706572723388672, 0.03707494354248047, 0.037131263732910154, 0.03709132766723633, 0.03708006286621094, 
0.037106689453125, 0.037186622619628906, 0.03711174392700195, 0.0370882568359375, 0.03712614440917969, 0.03708927917480469, 0.03708108901977539, 0.037100543975830076, 0.03712307357788086, 0.03710771179199219, 0.03709439849853516, 0.0370964469909668, 0.037104671478271484, 0.03710563278198242, 0.037114879608154294, 0.037125118255615236, 0.03713945770263672, 0.0371517448425293, 0.03712921524047851, 0.03716505432128906, 0.0371486701965332, 0.03713433456420898, 0.03718963241577149, 0.03713129425048828, 0.037109729766845706, 0.03719782257080078, 0.1347071990966797, 0.03697155380249023, 0.03693257522583008, 0.037010433197021485, 0.03698688125610351, 0.0370063362121582, 0.03709542465209961, 0.037000190734863284, 0.03700940704345703, 0.03705855941772461, 0.036977664947509765, 0.03699612808227539, 0.03700527954101562, 0.03698688125610351, 0.037026817321777344, 0.036988929748535154, 0.037007423400878904, 0.0370656623840332, 0.0370247688293457, 0.03705241775512695, 0.03698175811767578, 0.03700428771972656, 0.037016609191894534, 0.03706876754760742, 0.037026817321777344, 0.03710976028442383, 0.0370513916015625, 0.03708927917480469, 0.037032993316650394, 0.03708208084106445, 0.03703705596923828, 0.03713740921020508, 0.037028865814208986, 0.037061695098876954, 0.03702880096435547, 0.03703705596923828, 0.037048320770263675, 0.03706880187988281, 0.0372490234375, 0.0377968635559082, 0.037154815673828126, 0.0372674560546875, 0.03717324829101563, 0.03723571014404297, 0.0370964469909668, 0.037125118255615236, 0.03711078262329102, 0.037171199798583986, 0.03709439849853516, 0.03712819290161133, 0.037119998931884765, 0.037169151306152344, 0.037177345275878904, 0.03715584182739258, 0.037130241394042966, 0.037272575378417966, 0.03749068832397461, 0.0374015998840332, 0.03717836761474609, 0.03717228698730469, 0.03718956756591797, 0.03724800109863281, 0.03721420669555664, 0.13471539306640626, 0.036983806610107424, 0.037015552520751956, 0.036972545623779295, 0.03695820617675781, 0.03699097442626953, 0.03697049713134765, 0.03702783966064453, 0.036997119903564454, 0.037000190734863284, 0.036983806610107424, 0.03704115295410156, 0.03701561737060547, 0.037039039611816406, 0.037032958984375, 0.03703705596923828, 0.037138431549072266, 0.03708108901977539, 0.03706675338745117, 0.037054462432861326, 0.037028865814208986, 0.037215232849121094, 0.03749273681640625, 0.03716198348999023, 0.03709952163696289, 0.03718963241577149, 0.037116928100585936, 0.037157886505126955, 0.03707904052734375, 0.03708006286621094, 0.037084159851074217, 0.03710771179199219, 0.03712409591674805, 0.037141502380371096, 0.037157886505126955, 0.037101566314697264, 0.03711385726928711, 0.037182464599609374, 0.03722547149658203, 0.03729100799560547, 0.037343231201171875, 0.03715584182739258, 0.03713536071777344, 0.037119998931884765, 0.0370882568359375, 0.037101566314697264, 0.03717222213745117, 0.03718348693847656, 0.03716819381713867, 0.037178302764892576, 0.03712716674804688, 0.03711590576171875, 0.03711385726928711, 0.037146625518798826, 0.037163009643554686, 0.03712102508544922, 0.037125118255615236, 0.03714767837524414, 0.03712815856933594, 0.037100543975830076, 0.03711590576171875, 0.03712102508544922, 0.037116928100585936, 0.13456182861328125, 0.03697046279907226, 0.036975616455078124, 0.037013504028320314, 0.03701657485961914, 0.03698995208740234, 0.03698483276367188, 0.03703398513793945, 0.03703603363037109, 0.03699507141113281, 0.03696640014648438, 0.037026817321777344, 0.037029888153076174, 0.036969470977783206, 0.03699100875854492, 
0.03699299240112305, 0.036999168395996096, 0.03706982421875, 0.037005313873291014, 0.03706572723388672, 0.0370247688293457, 0.0371486701965332, 0.03702579116821289, 0.03722547149658203, 0.03751321411132812, 0.037084159851074217, 0.03704217529296875, 0.03707187271118164, 0.03705855941772461, 0.03707904052734375, 0.037101566314697264, 0.03706780624389648, 0.03703907012939453, 0.03707699203491211, 0.037059585571289064, 0.03707494354248047, 0.03710771179199219, 0.03717222213745117, 0.037084159851074217, 0.037106689453125, 0.03711180877685547, 0.037147647857666014, 0.03708313751220703, 0.03714252853393555, 0.037130241394042966, 0.03716505432128906, 0.03712102508544922, 0.03716815948486328, 0.03715887832641602, 0.0371671028137207, 0.03711795043945312, 0.03719270324707031, 0.03713228988647461, 0.03715071868896484, 0.037133312225341795, 0.03719680023193359, 0.03715071868896484, 0.03715689468383789, 0.03718038558959961, 0.037294078826904296, 0.037182464599609374, 0.037223457336425785, 0.037200862884521485, 0.13478501892089845, 0.03696332931518555, 0.036969470977783206, 0.036947967529296875, 0.037378047943115236, 0.03700940704345703, 0.03700940704345703, 0.03699609756469727, 0.03700428771972656, 0.03703091049194336, 0.03701964950561523, 0.036983806610107424, 0.036985855102539066, 0.03702272033691406, 0.036982784271240236, 0.03700428771972656, 0.0370780143737793, 0.03708313751220703, 0.03701356887817383, 0.03705344009399414, 0.03708102416992187, 0.03709235382080078, 0.03712716674804688, 0.03706367874145508, 0.03708620834350586, 0.03706675338745117, 0.03703091049194336, 0.03710675048828125, 0.03711174392700195, 0.03706777572631836, 0.03705344009399414, 0.03709542465209961, 0.03713433456420898, 0.03713945770263672, 0.03719372940063476, 0.03722137451171875, 0.03713945770263672, 0.03709132766723633, 0.03712819290161133, 0.03712307357788086, 0.03711180877685547, 0.03721932983398438, 0.03712204742431641, 0.037141502380371096, 0.03725721740722656, 0.037185535430908204, 0.0372305908203125, 0.03719475173950195, 0.037131263732910154, 0.037144577026367184, 0.0370964469909668, 0.037111839294433596, 0.03718038558959961, 0.0371610221862793, 0.03713222503662109, 0.03715891265869141, 0.03721011352539062, 0.03718963241577149, 0.037116928100585936, 0.03715584182739258, 0.03715379333496094, 0.03713433456420898, 0.037174270629882815, 0.13449215698242187, 0.03707494354248047, 0.037082111358642575, 0.036980735778808595, 0.03698995208740234, 0.03716505432128906, 0.03707292938232422, 0.03710460662841797, 0.03713433456420898, 0.03703910446166992, 0.03699507141113281, 0.0370063362121582, 0.03706982421875, 0.03717222213745117, 0.0370513916015625, 0.03697663879394531, 0.03704422378540039, 0.03705753707885742, 0.03709337615966797, 0.0371486701965332, 0.037059585571289064, 0.0370513916015625, 0.03706163024902344, 0.037100543975830076, 0.03709747314453125, 0.037054462432861326, 0.03700735855102539, 0.037103614807128905, 0.037073951721191406, 0.03710051345825195, 0.03706880187988281, 0.037108734130859376, 0.03710566329956055, 0.03712102508544922, 0.03703910446166992, 0.037157886505126955, 0.03708006286621094, 0.03705548858642578, 0.03703500747680664, 0.03709952163696289, 0.03707699203491211, 0.03709030532836914, 0.0370964469909668, 0.037108734130859376, 0.03708313751220703, 0.03744870376586914, 0.03763407897949219, 0.03717532730102539, 0.03707795333862305, 0.0371517448425293, 0.037100543975830076, 0.03718656158447266, 0.03730636978149414, 0.03715071868896484, 0.03710771179199219, 0.037125118255615236, 0.03713945770263672, 
0.03716403198242187, 0.037119998931884765, 0.03720502471923828, 0.03716195297241211, 0.037160961151123044, 0.03721113586425781, 0.13518643188476562, 0.03713740921020508, 0.0370063362121582, 0.03700944137573242, 0.03694998550415039, 0.03706163024902344, 0.03701657485961914, 0.036997119903564454, 0.03697356796264648, 0.0370247688293457, 0.03704729461669922, 0.03702272033691406, 0.03705344009399414, 0.03707596969604492, 0.03704012680053711, 0.03701964950561523, 0.03702272033691406, 0.03702169418334961, 0.037000190734863284, 0.03702169418334961, 0.03701964950561523, 0.03707699203491211, 0.03705753707885742, 0.03704934310913086, 0.0370821762084961, 0.03707795333862305, 0.03707392120361328, 0.03703500747680664, 0.03727769470214844, 0.03741593551635742, 0.03714355087280274, 0.037195777893066405, 0.03704422378540039, 0.03713945770263672, 0.0371486701965332, 0.0370780143737793, 0.03707494354248047, 0.03706675338745117, 0.03709756851196289, 0.03709328079223633, 0.037082111358642575, 0.03720294570922852, 0.03712819290161133, 0.0371671028137207, 0.03721529769897461, 0.037120960235595704, 0.03711795043945312, 0.037185535430908204, 0.037292030334472655, 0.03715071868896484, 0.037108734130859376, 0.03708927917480469, 0.03709952163696289, 0.03716198348999023, 0.03712614440917969, 0.037166080474853515, 0.0371599349975586, 0.037408767700195314, 0.03712204742431641, 0.03715891265869141, 0.037146625518798826, 0.03711590576171875, 0.03715379333496094]",tokens/s,25.970461196229316,,,main,False,False -bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -22518,7 +22518,7 @@ ChildProcessError: Traceback (most recent call last): torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -22547,7 +22547,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664d4883-2db0b70b1f489e2f58456f07;b6bc3c5b-7dea-4eaf-8d3f-03eb786ba14e) +huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664febfa-34929d2c46ac31dd495fe192;2a71ff2b-6653-4983-84fe-e07867b62bed) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/tiiuae/falcon-180B/resolve/main/config.json. @@ -22632,7 +22632,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e9633-6c510f0b630eabf405c07ab0;af55d371-30a9-4f8b-b73f-7a68d75fa131) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe7ed-390ef61c549b3b6f564ea376;92c31a6c-709f-47fe-ad1f-1a484f445316) Repository Not Found for url: https://huggingface.co/a/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -22664,7 +22664,7 @@ If this is a private repository, make sure to pass a token having permission to ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.0,,,MB,1248.722944,6715.604992,0.0,6069.157888,5763.863552,s,10,0.3015369281768798,0.03015369281768799,0.00029932930486819073,0.030029088020324706,0.030538534164428713,0.03073526668548584,0.030892652702331543,"[0.03093199920654297, 0.030049375534057617, 0.030092960357666017, 0.029983840942382812, 0.02999942398071289, 0.029926080703735352, 0.030000255584716796, 0.0300183048248291, 0.030039871215820312, 0.030494815826416017]",tokens/s,8489.839090283225,kWh,3.568219259186214e-07,1.9552162898641277e-07,2.169461285752255e-06,2.7218048406572892e-06,tokens/kWh,94055237.23669273,MB,1248.722944,6736.576512,0.0,6090.129408,5863.102976,s,10,14.880395874023437,1.4880395874023438,0.012550265247513353,1.4854010620117188,1.507277880859375,1.507602197265625,1.507861650390625,"[1.507205810546875, 1.4989151611328124, 1.487388427734375, 1.49186767578125, 1.4719144287109376, 1.4717625732421875, 1.507926513671875, 1.4812845458984376, 1.478717041015625, 1.4834136962890625]",tokens/s,42.33758330984896,kWh,1.8004725348803372e-05,9.86657931815692e-06,5.9096726616047514e-05,8.696803128300781e-05,tokens/kWh,724404.1180487112,,s,629,15.125405706405617,0.02404674993069259,0.003655537751274818,0.02347315216064453,0.024345395278930662,0.024510873794555663,0.05393481719970703,"[0.02405887985229492, 0.023488512039184572, 0.023574527740478517, 0.023568416595458986, 0.02432611274719238, 0.02532966423034668, 0.02715648078918457, 0.0249036808013916, 0.024391679763793944, 0.02426470375061035, 0.024376319885253905, 0.02433433532714844, 0.024373247146606446, 0.024376319885253905, 0.024285184860229493, 0.023608320236206053, 0.02370047950744629, 0.023575584411621095, 0.023516128540039063, 0.023569408416748046, 0.023540735244750977, 0.023529504776000975, 0.024470560073852538, 0.02481350326538086, 0.02452787208557129, 0.024437759399414064, 0.024422399520874022, 0.02447871971130371, 0.024201215744018553, 0.024164352416992187, 0.023334911346435547, 0.023409664154052736, 0.023574527740478517, 0.023645183563232423, 0.02338921546936035, 0.02358268737792969, 0.02330624008178711, 0.023407615661621094, 0.02332467269897461, 0.02328166389465332, 0.02330009651184082, 0.023775232315063476, 0.023933952331542968, 0.02391142463684082, 0.023969791412353517, 0.023642112731933593, 0.02350284767150879, 0.023556095123291015, 0.02449612808227539, 0.02407731246948242, 0.023613439559936524, 0.023372800827026367, 0.02347110366821289, 0.023564287185668945, 0.02344550323486328, 0.023302143096923827, 0.023254016876220703, 0.023396415710449217, 0.023318464279174805, 0.023591936111450194, 0.023558143615722657, 0.023635967254638672, 0.05416447830200195, 0.023436288833618164, 0.023521280288696288, 0.02353049659729004, 0.023374879837036133, 
0.023309280395507812, 0.023406591415405274, 0.02332467269897461, 0.02332159996032715, 0.023822336196899413, 0.025536512374877928, 0.02478489685058594, 0.024374336242675782, 0.024349632263183593, 0.02428313636779785, 0.024457216262817383, 0.024086528778076172, 0.02452070426940918, 0.023537727355957033, 0.023306175231933592, 0.023609344482421874, 0.023345151901245118, 0.023262208938598632, 0.02332876777648926, 0.02324684715270996, 0.02331340789794922, 0.023282688140869142, 0.02333695983886719, 0.023249919891357423, 0.023860223770141603, 0.023302143096923827, 0.023397375106811523, 0.023331840515136718, 0.02328883171081543, 0.02333695983886719, 0.02325606346130371, 0.023254016876220703, 0.02368409538269043, 0.024555519104003908, 0.02434048080444336, 0.02477568054199219, 0.024549375534057616, 0.024353792190551758, 0.02437222480773926, 0.024452096939086915, 0.024352767944335937, 0.02413670349121094, 0.023988224029541014, 0.024000511169433594, 0.02392166328430176, 0.024070144653320313, 0.023979007720947267, 0.02429644775390625, 0.024370176315307617, 0.024357887268066408, 0.023441440582275392, 0.02338915252685547, 0.023348224639892577, 0.023347200393676756, 0.02332159996032715, 0.02332467269897461, 0.02327859115600586, 0.02354380798339844, 0.053994495391845705, 0.023410688400268553, 0.023232511520385742, 0.02326425552368164, 0.023249919891357423, 0.023358463287353515, 0.023248895645141602, 0.02329497528076172, 0.023446527481079102, 0.023357440948486328, 0.023419904708862304, 0.023508991241455078, 0.024246271133422852, 0.02407935905456543, 0.023417919158935548, 0.02404243278503418, 0.024442880630493165, 0.024069120407104492, 0.024037376403808593, 0.02405068778991699, 0.024060928344726562, 0.023391231536865235, 0.025050111770629883, 0.0250644474029541, 0.024574975967407226, 0.024395776748657227, 0.024415231704711913, 0.023508991241455078, 0.02395955276489258, 0.02384998321533203, 0.023625728607177734, 0.023571456909179687, 0.023545856475830077, 0.02332979202270508, 0.02330009651184082, 0.02327039909362793, 0.02327347183227539, 0.023443456649780273, 0.02330931282043457, 0.02327654457092285, 0.023388160705566406, 0.023570432662963867, 0.023330816268920897, 0.023340032577514647, 0.023376895904541017, 0.02326016044616699, 0.023248960494995117, 0.023310304641723633, 0.02332259178161621, 0.023371776580810546, 0.023323648452758788, 0.023384063720703126, 0.023301151275634764, 0.023423967361450197, 0.023412736892700195, 0.02333798408508301, 0.023400447845458985, 0.02335443115234375, 0.023664575576782226, 0.023591936111450194, 0.023573503494262696, 0.023375871658325196, 0.023406591415405274, 0.0538787841796875, 0.023500799179077148, 0.023626752853393555, 0.0232857608795166, 0.023388160705566406, 0.023395328521728515, 0.023365663528442382, 0.02331439971923828, 0.023302143096923827, 0.02332262420654297, 0.023319551467895508, 0.02367487907409668, 0.02347417640686035, 0.023394304275512694, 0.02325606346130371, 0.02355200004577637, 0.02347929573059082, 0.023596031188964844, 0.023449632644653322, 0.023644128799438478, 0.02352332878112793, 0.023817216873168946, 0.02352230453491211, 0.023580671310424805, 0.023237632751464843, 0.023428096771240234, 0.02332979202270508, 0.02335436820983887, 0.02332979202270508, 0.023585792541503905, 0.023622655868530275, 0.0233123836517334, 0.023319551467895508, 0.023241727828979493, 0.023430143356323242, 0.0235284481048584, 0.023541759490966797, 0.023319551467895508, 0.023438335418701172, 0.023584768295288085, 0.02347315216064453, 0.02328166389465332, 0.023373823165893554, 
0.024606719970703125, 0.025400320053100587, 0.024451072692871095, 0.02425753593444824, 0.024394752502441407, 0.02424831962585449, 0.024389631271362306, 0.024344575881958007, 0.02430156707763672, 0.024012800216674804, 0.023966720581054687, 0.024081407546997072, 0.024374271392822267, 0.02438860893249512, 0.02434867286682129, 0.02424015998840332, 0.02334511947631836, 0.023198720932006835, 0.023568384170532225, 0.023609344482421874, 0.054042686462402345, 0.023474111557006835, 0.023588863372802735, 0.023438335418701172, 0.023442432403564452, 0.023414783477783203, 0.02329599952697754, 0.023128063201904296, 0.023218175888061524, 0.023214080810546874, 0.023257087707519532, 0.02325196838378906, 0.023198720932006835, 0.02327654457092285, 0.0233175048828125, 0.023391231536865235, 0.023422975540161133, 0.02327142333984375, 0.023147520065307618, 0.023226367950439454, 0.023243776321411135, 0.023209983825683594, 0.02349260711669922, 0.023622655868530275, 0.023383039474487305, 0.02344960021972656, 0.02350387191772461, 0.02345471954345703, 0.02350182342529297, 0.02351103973388672, 0.023360511779785157, 0.023472127914428712, 0.02346598434448242, 0.02345471954345703, 0.02348953628540039, 0.023564287185668945, 0.023428096771240234, 0.02345881652832031, 0.02349465560913086, 0.023377920150756838, 0.02328371238708496, 0.02329190444946289, 0.02324787139892578, 0.023196672439575194, 0.02326016044616699, 0.02326323127746582, 0.023206911087036132, 0.02328678321838379, 0.02304921531677246, 0.023795711517333985, 0.02386227226257324, 0.023135263442993163, 0.023203807830810545, 0.023230464935302734, 0.02313523292541504, 0.023315456390380858, 0.02329190444946289, 0.02332057571411133, 0.023342079162597656, 0.02334726333618164, 0.02318636894226074, 0.02347315216064453, 0.023245824813842773, 0.05386240005493164, 0.02328473663330078, 0.02332876777648926, 0.023245824813842773, 0.023373823165893554, 0.023197696685791015, 0.023205888748168944, 0.023217151641845703, 0.023174144744873046, 0.023222272872924804, 0.023348224639892577, 0.023244800567626952, 0.02390630340576172, 0.02372096061706543, 0.02350387191772461, 0.023554048538208007, 0.023597055435180665, 0.023584768295288085, 0.023442432403564452, 0.023340032577514647, 0.02327347183227539, 0.02323967933654785, 0.02326118469238281, 0.02331648063659668, 0.023266304016113282, 0.023237632751464843, 0.023249919891357423, 0.023147520065307618, 0.023612415313720703, 0.02371993637084961, 0.02327654457092285, 0.02327859115600586, 0.02323967933654785, 0.023198720932006835, 0.023229440689086913, 0.02332876777648926, 0.023397439956665038, 0.023549888610839845, 0.02349260711669922, 0.02348646354675293, 0.02369945526123047, 0.023477279663085937, 0.02350588798522949, 0.02329804801940918, 0.02320899200439453, 0.023221216201782226, 0.023351295471191406, 0.02327039909362793, 0.023198720932006835, 0.02330931282043457, 0.02326118469238281, 0.023194623947143556, 0.023262208938598632, 0.02334617614746094, 0.023282688140869142, 0.02347007942199707, 0.02327039909362793, 0.023292959213256834, 0.02334102439880371, 0.023329856872558594, 0.023330751419067382, 0.023334911346435547, 0.023334911346435547, 0.054063102722167966, 0.02329599952697754, 0.027489280700683592, 0.023642112731933593, 0.02323967933654785, 0.023605247497558594, 0.02347007942199707, 0.023794687271118165, 0.023441408157348635, 0.0235100154876709, 0.02346291160583496, 0.023542783737182618, 0.023931903839111326, 0.024778751373291014, 0.02372812843322754, 0.02390630340576172, 0.024247295379638673, 0.02453196716308594, 0.02411008071899414, 
0.024204288482666016, 0.024252416610717774, 0.024220672607421875, 0.02408550453186035, 0.024116256713867187, 0.02392470359802246, 0.024040447235107423, 0.02411622428894043, 0.02416640090942383, 0.024030208587646484, 0.024122367858886717, 0.024040447235107423, 0.024396799087524415, 0.024262655258178712, 0.024193023681640623, 0.02409676742553711, 0.024022016525268555, 0.023996416091918944, 0.024031232833862305, 0.024155136108398437, 0.024204288482666016, 0.024242176055908202, 0.024185855865478514, 0.02384588813781738, 0.024396799087524415, 0.02502348709106445, 0.024414207458496092, 0.023544832229614256, 0.02348953628540039, 0.023443456649780273, 0.023587839126586914, 0.023662591934204103, 0.02323148727416992, 0.023383039474487305, 0.02344550323486328, 0.023151615142822265, 0.023538688659667968, 0.023516159057617187, 0.02351411247253418, 0.023610368728637695, 0.023657472610473632, 0.023517183303833008, 0.023595008850097656, 0.02348134422302246, 0.054089729309082034, 0.02324684715270996, 0.023299072265625, 0.023143423080444335, 0.023211008071899415, 0.023327743530273438, 0.023154687881469727, 0.023166976928710937, 0.023370752334594725, 0.02326425552368164, 0.02326937675476074, 0.02327654457092285, 0.023266304016113282, 0.023174144744873046, 0.02325196838378906, 0.02318547248840332, 0.023305152893066405, 0.02352025604248047, 0.02344960021972656, 0.023446527481079102, 0.02322329521179199, 0.023250944137573244, 0.02346598434448242, 0.023645183563232423, 0.023222272872924804, 0.023244800567626952, 0.023230464935302734, 0.023389184951782226, 0.023641088485717773, 0.02353152084350586, 0.023143423080444335, 0.023421951293945312, 0.023439359664916993, 0.023348224639892577, 0.023554048538208007, 0.023541759490966797, 0.02332057571411133, 0.023576576232910155, 0.023299072265625, 0.02330112075805664, 0.02348543930053711, 0.023617536544799804, 0.02349158477783203, 0.023600128173828124, 0.02322329521179199, 0.02327859115600586, 0.023430143356323242, 0.023555072784423828, 0.025754623413085938, 0.02454015922546387, 0.023628799438476563, 0.023800832748413086, 0.023954431533813478, 0.023345151901245118, 0.02349567985534668, 0.023776256561279296, 0.023582719802856447, 0.0236943359375, 0.023876672744750978, 0.02378848075866699, 0.02447667121887207, 0.02409062385559082, 0.023642112731933593, 0.053956607818603515, 0.02349465560913086, 0.02352742385864258, 0.023399423599243165, 0.023340032577514647, 0.023244800567626952, 0.02325299263000488, 0.023229440689086913, 0.023195648193359376, 0.023220224380493162, 0.023203840255737306, 0.02323148727416992, 0.023923776626586915, 0.02353555107116699, 0.02353152084350586, 0.02393907165527344, 0.023735296249389647, 0.0236759033203125, 0.02353971290588379, 0.023271455764770507, 0.023575519561767577, 0.02329599952697754, 0.02326323127746582, 0.02327244758605957, 0.02349158477783203, 0.02329292869567871, 0.023435264587402343, 0.023362560272216795, 0.02328473663330078, 0.023315456390380858, 0.023630847930908205, 0.023582719802856447, 0.023395328521728515, 0.023216127395629883, 0.023259136199951173, 0.023347200393676756, 0.023339008331298827, 0.02357254409790039, 0.023504831314086913, 0.023665664672851562, 0.023319551467895508, 0.02348134422302246, 0.02330419158935547, 0.023545856475830077, 0.02348646354675293, 0.023399456024169922, 0.023434207916259765, 0.02347315216064453, 0.02322329521179199, 0.023243776321411135, 0.023284767150878908, 0.023274463653564455, 0.02330931282043457, 0.023368703842163087, 0.023541759490966797, 0.023606271743774415, 0.023504896163940428, 
0.02347929573059082, 0.023593984603881835, 0.024039424896240235, 0.023569408416748046, 0.023673856735229492, 0.02495078468322754, 0.055223297119140625, 0.0233123836517334, 0.023559167861938478, 0.023833599090576172, 0.02348953628540039, 0.023440383911132814, 0.02351513671875, 0.023631872177124022, 0.023561248779296873, 0.023585760116577148, 0.02350492858886719, 0.023351264953613282, 0.023340032577514647, 0.023603200912475586, 0.023581695556640626, 0.02346905517578125, 0.023310367584228515, 0.023294944763183594, 0.02324787139892578, 0.023362560272216795, 0.02347110366821289, 0.02329497528076172, 0.023525375366210938, 0.02352230453491211, 0.02330112075805664, 0.023375871658325196, 0.023573503494262696, 0.023245824813842773, 0.023340032577514647, 0.023517215728759765, 0.023287776947021485, 0.02330828857421875, 0.02330624008178711, 0.023837696075439452, 0.02346700859069824, 0.023440383911132814, 0.02369843292236328, 0.02351820755004883, 0.02355200004577637, 0.0233175048828125, 0.0233175048828125, 0.02366464042663574, 0.02345881652832031, 0.02364419174194336, 0.023471071243286134, 0.02390425682067871, 0.0239554557800293, 0.023352319717407227, 0.02324787139892578, 0.023352319717407227, 0.023342079162597656, 0.02345267105102539, 0.023432191848754884, 0.02371993637084961, 0.023976959228515626, 0.023941120147705077, 0.02353971290588379, 0.023971839904785155, 0.023639039993286134, 0.02390323257446289, 0.0239554557800293, 0.023649280548095702, 0.02406399917602539]",tokens/s,41.58566138385414,,,,, -bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -22720,7 +22720,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 367, in hf_raise_for_status raise HfHubHTTPError(message, response=response) from e -huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664e8fd7-3947ef7008f4fc9a103ec287;9a8223ca-4bb0-4362-9efb-3ab1bc6f1fb2) 
+huggingface_hub.utils._errors.HfHubHTTPError: (Request ID: Root=1-664fe172-40f1fb9d0f5572b43cee3e8e;d0513359-a8fe-4079-b1ee-db8f46016de3) 403 Forbidden: Authorization error.. Cannot access content at: https://huggingface.co/google/gemma-2b/resolve/main/config.json. @@ -22806,7 +22806,7 @@ Traceback (most recent call last): OSError: Incorrect path_or_model_id: '-'. Please provide either the path to a local folder or the repo_id of a model on the Hub. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -22872,7 +22872,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e95c1-48e8af0b4f618cfc14729154;eec9d163-017b-4b77-80d1-dd631665c58e) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe783-5bc9a4843df9ed692c2d5be2;1caf5b8e-f430-454c-9ee1-2c9ddf73c228) Repository Not Found for url: https://huggingface.co/t/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -23006,7 +23006,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e9078-56944231312158967ace8b4d;6044b254-37d8-4397-8290-5180a0b96a36) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. 
(Request ID: Root=1-664fe21a-47f9021174c0af264a191357;7e7dc752-a546-4358-9ea7-43000c7acf47) Repository Not Found for url: https://huggingface.co/google/recurrentgemma-7b/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. @@ -23128,7 +23128,7 @@ ChildProcessError: Traceback (most recent call last): ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,B,B,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,B,B,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -23167,7 +23167,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d461d-7794032b7b67e3cf04fb1ed4;b3a01b3d-fae2-4a38-b322-9b4ae6f0c4e5) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe974-3c6858bc5d716a9f70f831a9;86b38f2a-463a-4d13-a413-dab2d20011d2) Repository Not Found for url: https://huggingface.co/B/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -23198,7 +23198,7 @@ OSError: B is not a local folder and is not a valid model identifier listed on ' If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -23265,7 +23265,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e9374-68c444910caae7d0207ae905;47c02d98-4bb0-4903-bbd3-92dfc9b0f59d) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe526-22496341613718ea6da435d9;20c569ea-5158-43c3-bd2b-1733103f3b33) Repository Not Found for url: https://huggingface.co/s/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -23296,7 +23296,7 @@ OSError: s is not a local folder and is not a valid model identifier listed on ' If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -23469,7 +23469,7 @@ torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 136.00 MiB. 
G ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.40.2,,0.30.1,,,,1.19.2,,,,0.11.1,,,MB,1227.173888,14563.147776,0.0,13916.700672,13738.740736,s,10,0.9368582153320313,0.09368582153320312,0.00029103446743837454,0.09357998657226563,0.09383599014282226,0.0941846996307373,0.09446366722106934,"[0.09453340911865235, 0.09357814025878906, 0.09357881927490234, 0.09349008178710938, 0.09358115386962891, 0.09375849914550781, 0.09352550506591797, 0.09354483032226563, 0.0936296615600586, 0.09363811492919921]",tokens/s,2732.537280566742,kWh,1.1091232609278441e-06,6.077465462512239e-07,5.8868764644298375e-06,7.603746271608905e-06,tokens/kWh,33667614.732998185,MB,1227.173888,14563.147776,0.0,13916.700672,13889.779712,s,10,22.537959716796877,2.2537959716796876,0.0008759771139935921,2.2536420898437504,2.254865625,2.254890454101562,2.2549103173828122,"[2.254915283203125, 2.254842041015625, 2.254397216796875, 2.253551513671875, 2.253302734375, 2.253732666015625, 2.252381103515625, 2.25331982421875, 2.2526572265625, 2.254860107421875]",tokens/s,27.952840803530215,kWh,2.6543364576678166e-05,1.4546689112517989e-05,0.0001127380239909712,0.00015382807768016734,tokens/kWh,409548.1198886647,,s,629,23.343781822204623,0.037112530718926215,0.011189030628809745,0.03576115036010742,0.03585863723754883,0.035902259826660156,0.12990176574707032,"[0.03586764907836914, 0.035659774780273434, 0.03563724899291992, 0.03562905502319336, 0.03574476623535156, 0.03574784088134766, 0.03612057495117187, 0.03586969757080078, 0.035714046478271484, 0.03576627349853516, 0.03570380783081055, 0.035688449859619144, 0.035714046478271484, 0.03577753448486328, 0.0360263671875, 0.03580825424194336, 0.035716094970703126, 0.03568742370605469, 0.03570687866210937, 0.0356864013671875, 0.035716094970703126, 0.03570892715454101, 0.035722240447998044, 0.03571916961669922, 0.035829761505126956, 0.035724288940429685, 0.035757057189941405, 0.035714046478271484, 0.03571814346313477, 0.03568947219848633, 0.03570073699951172, 0.03574169540405273, 0.03574476623535156, 0.035794944763183595, 0.03602534484863281, 0.03585945510864258, 0.03575807952880859, 0.03574169540405273, 0.03579289627075195, 0.035783679962158206, 0.03579084777832031, 0.03576422500610352, 0.03578675079345703, 0.035740673065185545, 0.035796993255615236, 0.03577753448486328, 0.03580723190307617, 0.03573657608032227, 0.03580928039550781, 0.03578060913085938, 0.035813377380371096, 0.0357918701171875, 0.03584511947631836, 0.03582668685913086, 0.035811328887939455, 0.035806209564208984, 0.03582361602783203, 0.03579904174804688, 0.03581644821166992, 0.035805248260498045, 0.03581228637695313, 0.0358287353515625, 0.1300193328857422, 0.0356495361328125, 0.03566387176513672, 0.03567411041259766, 0.03566796875, 0.03569049453735352, 0.035722240447998044, 0.03572531127929687, 0.03571507263183594, 0.03569356918334961, 0.03572531127929687, 0.03570278549194336, 0.03572531127929687, 
0.035681278228759765, 0.035699710845947266, 0.03571814346313477, 0.03576115036010742, 0.03570175933837891, 0.03570687866210937, 0.035745792388916016, 0.03579084777832031, 0.03584307098388672, 0.035790878295898436, 0.03576726531982422, 0.035757057189941405, 0.0358021125793457, 0.035768318176269534, 0.03575193786621094, 0.035716094970703126, 0.035781631469726564, 0.035850238800048825, 0.035789825439453124, 0.03580928039550781, 0.035811328887939455, 0.03583488082885742, 0.03575807952880859, 0.03575500869750976, 0.03578675079345703, 0.035860481262207033, 0.035775489807128906, 0.03580825424194336, 0.03580313491821289, 0.035827713012695314, 0.03583180618286133, 0.035800064086914066, 0.0358205451965332, 0.03590348815917969, 0.035827713012695314, 0.03582259368896484, 0.035796993255615236, 0.0358021125793457, 0.035844097137451174, 0.035947521209716796, 0.03580313491821289, 0.035833854675292966, 0.035850238800048825, 0.035850238800048825, 0.03583180618286133, 0.035833854675292966, 0.035850238800048825, 0.03592396926879883, 0.03582876968383789, 0.03585327911376953, 0.12993740844726562, 0.03567411041259766, 0.03566796875, 0.03569049453735352, 0.03570691299438476, 0.03572323226928711, 0.03569561767578125, 0.0356577262878418, 0.03569260787963867, 0.03573241424560547, 0.03575296020507813, 0.03571507263183594, 0.03571507263183594, 0.03571200180053711, 0.03570892715454101, 0.035707904815673826, 0.03570892715454101, 0.03570483016967774, 0.03572019195556641, 0.03577958297729492, 0.03571814346313477, 0.03570892715454101, 0.03570175933837891, 0.03577241516113281, 0.03573555374145508, 0.03571916961669922, 0.03579391860961914, 0.035833854675292966, 0.035846145629882815, 0.035745792388916016, 0.03575193786621094, 0.03575603103637695, 0.035833854675292966, 0.03601408004760742, 0.0358666877746582, 0.03574470520019531, 0.03575091171264649, 0.03576422500610352, 0.03574272155761719, 0.03577036666870117, 0.03575398254394531, 0.03576627349853516, 0.03577036666870117, 0.03578265762329102, 0.035767295837402346, 0.03578572845458984, 0.03579391860961914, 0.03589324951171875, 0.0360079345703125, 0.03583180618286133, 0.035794944763183595, 0.0358809585571289, 0.03581644821166992, 0.035862529754638675, 0.035827713012695314, 0.03580825424194336, 0.0357918701171875, 0.035800064086914066, 0.03584000015258789, 0.03583078384399414, 0.0358809585571289, 0.035831871032714846, 0.03590444946289063, 0.12986883544921876, 0.035616737365722656, 0.03567411041259766, 0.0356577262878418, 0.035714046478271484, 0.035672065734863284, 0.0357918701171875, 0.03592297744750977, 0.035671009063720706, 0.03566592025756836, 0.0356864013671875, 0.035681278228759765, 0.03568537521362305, 0.035694591522216795, 0.03563315200805664, 0.03566899108886719, 0.03564652633666992, 0.03566483306884766, 0.035688449859619144, 0.035724288940429685, 0.03568947219848633, 0.0357212142944336, 0.035748863220214845, 0.03569561767578125, 0.03575807952880859, 0.0357314567565918, 0.03579084777832031, 0.035757057189941405, 0.03576934432983398, 0.03578572845458984, 0.03570483016967774, 0.035767295837402346, 0.035746814727783204, 0.035783679962158206, 0.03574476623535156, 0.03579289627075195, 0.03572633743286133, 0.035827713012695314, 0.03578265762329102, 0.03576422500610352, 0.03580313491821289, 0.035784702301025394, 0.035773441314697264, 0.03577446365356445, 0.03581951904296875, 0.03578572845458984, 0.03582156753540039, 0.0357918701171875, 0.03588198471069336, 0.035813377380371096, 0.03579084777832031, 0.03582361602783203, 0.03583795166015625, 0.03589120101928711, 
0.03585126495361328, 0.035827713012695314, 0.03582566452026367, 0.03584000015258789, 0.03585740661621094, 0.035945472717285154, 0.0358021125793457, 0.03582668685913086, 0.03579084777832031, 0.12991897583007814, 0.03563014221191406, 0.035665855407714844, 0.03561471939086914, 0.03564137649536133, 0.0356607666015625, 0.03569868850708008, 0.0356495361328125, 0.035661823272705076, 0.03565670394897461, 0.03562496185302735, 0.03564851379394531, 0.03564851379394531, 0.03566080093383789, 0.03564031982421875, 0.03567001724243164, 0.03566080093383789, 0.03580928039550781, 0.03584000015258789, 0.03576934432983398, 0.035678207397460936, 0.03575296020507813, 0.03567103958129883, 0.03574784088134766, 0.03590041732788086, 0.03589017486572266, 0.035745792388916016, 0.035705856323242184, 0.03571916961669922, 0.035727359771728515, 0.03579391860961914, 0.03577036666870117, 0.035745792388916016, 0.0357386245727539, 0.03577756881713867, 0.03579388809204102, 0.03576319885253906, 0.03579289627075195, 0.035729408264160156, 0.03576422500610352, 0.035722240447998044, 0.03579391860961914, 0.03577446365356445, 0.03589017486572266, 0.035775489807128906, 0.03577958297729492, 0.03581644821166992, 0.035765247344970705, 0.03575091171264649, 0.035784702301025394, 0.03582566452026367, 0.03583795166015625, 0.03603046417236328, 0.035885055541992186, 0.03578572845458984, 0.035800064086914066, 0.0358021125793457, 0.03586764907836914, 0.03580928039550781, 0.035885055541992186, 0.035817470550537106, 0.03586150360107422, 0.03585638427734375, 0.13014016723632812, 0.03572326278686523, 0.035757057189941405, 0.03573964691162109, 0.03574169540405273, 0.03570380783081055, 0.03574476623535156, 0.03572326278686523, 0.035783679962158206, 0.03570380783081055, 0.035727359771728515, 0.035705856323242184, 0.0357509765625, 0.035719104766845707, 0.035765247344970705, 0.035729408264160156, 0.03580108642578125, 0.035705856323242184, 0.035762176513671876, 0.035732479095458985, 0.03574070358276367, 0.035756000518798826, 0.03571814346313477, 0.03570892715454101, 0.035705856323242184, 0.035773441314697264, 0.03571712112426758, 0.0357283821105957, 0.03573452758789063, 0.0357561264038086, 0.03573443222045898, 0.03576115036010742, 0.035730430603027344, 0.035774494171142576, 0.035808223724365235, 0.035746814727783204, 0.035757057189941405, 0.03579391860961914, 0.03576934432983398, 0.035765247344970705, 0.03576115036010742, 0.0358021125793457, 0.035762176513671876, 0.03578265762329102, 0.03574991989135742, 0.035791839599609375, 0.03585433578491211, 0.03579596710205078, 0.035776512145996094, 0.03581647872924805, 0.0357611198425293, 0.03580825424194336, 0.035798015594482424, 0.035813377380371096, 0.03578879928588867, 0.03580928039550781, 0.0358021125793457, 0.03584310531616211, 0.035839969635009766, 0.035811328887939455, 0.035817470550537106, 0.03582361602783203, 0.03579289627075195, 0.12990463256835938, 0.0356506233215332, 0.035687358856201175, 0.03567923355102539, 0.03564031982421875, 0.035659774780273434, 0.03563315200805664, 0.03566387176513672, 0.03566796875, 0.035675136566162106, 0.03567001724243164, 0.03566694259643555, 0.03574272155761719, 0.035707904815673826, 0.035768318176269534, 0.03571200180053711, 0.0356864013671875, 0.03568025588989258, 0.03568435287475586, 0.03568025588989258, 0.03568230438232422, 0.035678207397460936, 0.03569356918334961, 0.03570380783081055, 0.035705856323242184, 0.03570278549194336, 0.035730430603027344, 0.035726398468017576, 0.03575084686279297, 0.03573555374145508, 0.035743743896484374, 0.03572326278686523, 
0.0357314567565918, 0.035732479095458985, 0.03575091171264649, 0.03575296020507813, 0.035748863220214845, 0.035748863220214845, 0.03574272155761719, 0.03575500869750976, 0.03572531127929687, 0.035765247344970705, 0.03576118469238281, 0.03576623916625977, 0.035776512145996094, 0.035824703216552733, 0.035798976898193356, 0.035760128021240234, 0.03577753448486328, 0.035767295837402346, 0.03585228729248047, 0.03611545562744141, 0.03584204864501953, 0.035789825439453124, 0.03579084777832031, 0.03580825424194336, 0.035789825439453124, 0.03578572845458984, 0.035794944763183595, 0.03581644821166992, 0.03580416107177734, 0.03579596710205078, 0.03581951904296875, 0.12989439392089844, 0.03562188720703125, 0.03564748764038086, 0.03563212966918945, 0.03562393569946289, 0.03563827133178711, 0.035656734466552736, 0.035691486358642575, 0.035783679962158206, 0.03569049453735352, 0.03566080093383789, 0.03566188812255859, 0.03566995239257813, 0.035683326721191407, 0.035724288940429685, 0.035707904815673826, 0.0357386245727539, 0.03568742370605469, 0.03565670394897461, 0.035688449859619144, 0.03569356918334961, 0.035714046478271484, 0.035692543029785154, 0.03579084777832031, 0.03620249557495117, 0.03587481689453125, 0.03575398254394531, 0.03571814346313477, 0.035746814727783204, 0.035743743896484374, 0.0357283821105957, 0.0357386245727539, 0.035796993255615236, 0.035737598419189456, 0.03574272155761719, 0.035737598419189456, 0.03576422500610352, 0.03577958297729492, 0.03575398254394531, 0.03578572845458984, 0.03574784088134766, 0.035773441314697264, 0.035727359771728515, 0.03574784088134766, 0.035768318176269534, 0.0358021125793457, 0.03579084777832031, 0.035784702301025394, 0.03580928039550781, 0.03581644821166992, 0.03584511947631836, 0.03580825424194336, 0.035885055541992186, 0.03588915252685547, 0.03582463836669922, 0.035813377380371096, 0.0357918701171875, 0.03584000015258789, 0.03590659332275391, 0.03582358551025391, 0.03580928039550781, 0.035827713012695314, 0.035827713012695314, 0.12999679565429687, 0.035642368316650394, 0.03561983871459961, 0.03564134216308594, 0.035688449859619144, 0.03566899108886719, 0.03565465545654297, 0.03566592025756836, 0.03563622283935547, 0.03564134216308594, 0.035681278228759765, 0.03565465545654297, 0.03562393569946289, 0.035694591522216795, 0.03562905502319336, 0.03568947219848633, 0.03563417434692383, 0.03572326278686523, 0.035675136566162106, 0.03567001724243164, 0.03563008117675781, 0.03570483016967774, 0.03573452758789063, 0.03569356918334961, 0.03568947219848633, 0.035737598419189456, 0.03567001724243164, 0.03570175933837891, 0.035737598419189456, 0.03578265762329102, 0.03580416107177734, 0.03574784088134766, 0.03581849670410156, 0.035730430603027344, 0.03571712112426758, 0.035740673065185545, 0.03577753448486328, 0.035716094970703126, 0.03574995040893555, 0.03573241424560547, 0.03574687957763672, 0.035853248596191406, 0.035999744415283204, 0.035773441314697264, 0.035724288940429685, 0.03578060913085938, 0.03577139282226562, 0.03581235122680664, 0.035783679962158206, 0.03579596710205078, 0.035781631469726564, 0.03585843276977539, 0.03577241516113281, 0.03578060913085938, 0.035781631469726564, 0.035768318176269534, 0.035970046997070314, 0.036127742767333985, 0.035866622924804685, 0.035844097137451174, 0.035888126373291016, 0.035888126373291016, 0.035798015594482424, 0.13012582397460937, 0.035737598419189456, 0.035760128021240234, 0.03591372680664062, 0.035920894622802735, 0.03570380783081055, 0.035705856323242184, 0.035727359771728515, 0.035714046478271484, 
0.03569664001464844, 0.035681278228759765, 0.035730430603027344, 0.035714046478271484, 0.03570073699951172, 0.03569664001464844, 0.03573555374145508, 0.03568435287475586, 0.035724288940429685, 0.03569356918334961, 0.0357386245727539, 0.03566080093383789, 0.03571507263183594, 0.03567411041259766, 0.03570892715454101, 0.03569664001464844, 0.035740673065185545, 0.0358287353515625, 0.03573964691162109, 0.035716094970703126, 0.035765247344970705, 0.03575091171264649, 0.03582668685913086, 0.0357386245727539, 0.035748863220214845, 0.03573350524902344, 0.03575296020507813, 0.035806209564208984, 0.035807296752929686, 0.035908576965332034, 0.03578774261474609, 0.03573350524902344, 0.035768318176269534, 0.035748863220214845, 0.03582668685913086, 0.03581033706665039, 0.03578569412231445, 0.03577036666870117, 0.035844097137451174, 0.03585228729248047, 0.03586457443237305, 0.03585228729248047, 0.035862529754638675, 0.03586355209350586, 0.035871742248535156, 0.03582156753540039, 0.03584102249145508, 0.03602329635620117, 0.035904510498046875, 0.03587481689453125, 0.03587276840209961, 0.035814399719238284, 0.03588608169555664, 0.03582361602783203]",tokens/s,26.94507705695294,,,,, -bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -23632,7 +23632,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664e9307-267e4ea344e9ed1050985a6f;235c3ff0-7e1d-477e-a4e4-947f3c447571) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fe4bc-5051e14872fa7bef3b13674e;0e28f694-a2d9-4a15-85cf-6238d933784e) Repository Not Found for url: https://huggingface.co/m/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -23736,7 +23736,7 @@ Cannot access gated repo for url https://huggingface.co/mistralai/Mixtral-8x22B- Access to model mistralai/Mixtral-8x22B-v0.1 is restricted and you are not in the authorized list. Visit https://huggingface.co/mistralai/Mixtral-8x22B-v0.1 to ask for access. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,1,1,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,1,1,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -23775,7 +23775,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d4725-4cc4c6f562d67be2112eaa48;879ab619-a8b1-4e6f-87e8-10e1acc1dfee) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fea87-166a064e06fe8ceb34f64cff;c68d62cc-4222-4cb2-944a-9ca4473f2f28) Repository Not Found for url: https://huggingface.co/1/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`. 
@@ -23806,7 +23806,7 @@ OSError: 1 is not a local folder and is not a valid model identifier listed on ' If this is a private repository, make sure to pass a token having permission to this repo either by logging in with `huggingface-cli login` or by passing `token=` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,0,0,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.0,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): +bfloat16-sdpa,pytorch,2.3.0+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,0,0,cuda,0,42,,,True,True,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, AMD EPYC 7R32,16,66697.29792,Linux,x86_64,Linux-5.10.215-203.850.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['NVIDIA A10G'],1,24146608128,0.2.1,,4.41.1,,0.30.1,,,,1.19.2,,,,0.11.1,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -23845,7 +23845,7 @@ Traceback (most recent call last): hf_raise_for_status(response) File ""/usr/local/lib/python3.10/dist-packages/huggingface_hub/utils/_errors.py"", line 352, in hf_raise_for_status raise RepositoryNotFoundError(message, response) from e -huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664d46bc-455b5c6e363edb810eeb8b76;564c0151-9fcd-45aa-b097-06f53e330740) +huggingface_hub.utils._errors.RepositoryNotFoundError: 404 Client Error. (Request ID: Root=1-664fea17-275cf84e6a6488107cc7878a;b45fb033-979c-4279-927f-315056e66109) Repository Not Found for url: https://huggingface.co/0/resolve/main/config.json. Please make sure you specified the correct `repo_id` and `repo_type`.