config.name,config.backend.name,config.backend.version,config.backend._target_,config.backend.task,config.backend.library,config.backend.model_type,config.backend.model,config.backend.processor,config.backend.device,config.backend.device_ids,config.backend.seed,config.backend.inter_op_num_threads,config.backend.intra_op_num_threads,config.backend.model_kwargs.trust_remote_code,config.backend.no_weights,config.backend.device_map,config.backend.torch_dtype,config.backend.eval_mode,config.backend.to_bettertransformer,config.backend.low_cpu_mem_usage,config.backend.attn_implementation,config.backend.cache_implementation,config.backend.autocast_enabled,config.backend.autocast_dtype,config.backend.torch_compile,config.backend.torch_compile_target,config.backend.quantization_scheme,config.backend.quantization_config.bits,config.backend.quantization_config.use_exllama,config.backend.quantization_config.version,config.backend.quantization_config.model_seqlen,config.backend.deepspeed_inference,config.backend.peft_type,config.scenario.name,config.scenario._target_,config.scenario.iterations,config.scenario.duration,config.scenario.warmup_runs,config.scenario.input_shapes.batch_size,config.scenario.input_shapes.num_choices,config.scenario.input_shapes.sequence_length,config.scenario.new_tokens,config.scenario.memory,config.scenario.latency,config.scenario.energy,config.scenario.generate_kwargs.max_new_tokens,config.scenario.generate_kwargs.min_new_tokens,config.launcher.name,config.launcher._target_,config.launcher.device_isolation,config.launcher.device_isolation_action,config.launcher.numactl,config.launcher.start_method,config.environment.cpu,config.environment.cpu_count,config.environment.cpu_ram_mb,config.environment.system,config.environment.machine,config.environment.platform,config.environment.processor,config.environment.python_version,config.environment.gpu,config.environment.gpu_count,config.environment.gpu_vram_mb,config.environment.optimum_benchmark_version,config.environment.optimum_benchmark_commit,config.environment.transformers_version,config.environment.transformers_commit,config.environment.accelerate_version,config.environment.accelerate_commit,config.environment.diffusers_version,config.environment.diffusers_commit,config.environment.optimum_version,config.environment.optimum_commit,config.environment.timm_version,config.environment.timm_commit,config.environment.peft_version,config.environment.peft_commit,report.traceback,report.load.memory.unit,report.load.memory.max_ram,report.load.memory.max_global_vram,report.load.memory.max_process_vram,report.load.memory.max_reserved,report.load.memory.max_allocated,report.load.latency.unit,report.load.latency.count,report.load.latency.total,report.load.latency.mean,report.load.latency.stdev,report.load.latency.p50,report.load.latency.p90,report.load.latency.p95,report.load.latency.p99,report.load.latency.values,report.load.throughput,report.load.energy.unit,report.load.energy.cpu,report.load.energy.ram,report.load.energy.gpu,report.load.energy.total,report.load.efficiency,report.prefill.memory.unit,report.prefill.memory.max_ram,report.prefill.memory.max_global_vram,report.prefill.memory.max_process_vram,report.prefill.memory.max_reserved,report.prefill.memory.max_allocated,report.prefill.latency.unit,report.prefill.latency.count,report.prefill.latency.total,report.prefill.latency.mean,report.prefill.latency.stdev,report.prefill.latency.p50,report.prefill.latency.p90,report.prefill.latency.p95,report.prefill.latency.p99,report.prefill.latency.values,report.prefill.throughput.unit,report.prefill.throughput.value,report.prefill.energy.unit,report.prefill.energy.cpu,report.prefill.energy.ram,report.prefill.energy.gpu,report.prefill.energy.total,report.prefill.efficiency.unit,report.prefill.efficiency.value,report.decode.memory.unit,report.decode.memory.max_ram,report.decode.memory.max_global_vram,report.decode.memory.max_process_vram,report.decode.memory.max_reserved,report.decode.memory.max_allocated,report.decode.latency.unit,report.decode.latency.count,report.decode.latency.total,report.decode.latency.mean,report.decode.latency.stdev,report.decode.latency.p50,report.decode.latency.p90,report.decode.latency.p95,report.decode.latency.p99,report.decode.latency.values,report.decode.throughput.unit,report.decode.throughput.value,report.decode.energy.unit,report.decode.energy.cpu,report.decode.energy.ram,report.decode.energy.gpu,report.decode.energy.total,report.decode.efficiency.unit,report.decode.efficiency.value,report.per_token.memory,report.per_token.latency.unit,report.per_token.latency.count,report.per_token.latency.total,report.per_token.latency.mean,report.per_token.latency.stdev,report.per_token.latency.p50,report.per_token.latency.p90,report.per_token.latency.p95,report.per_token.latency.p99,report.per_token.latency.values,report.per_token.throughput.unit,report.per_token.throughput.value,report.per_token.energy,report.per_token.efficiency
4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights 
self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,3854.815232,4533.911552,0.0,4131.38944,4012.475392,s,1,10.4359755859375,10.4359755859375,0.0,10.4359755859375,10.4359755859375,10.4359755859375,10.4359755859375,[10.4359755859375],,kWh,9.456019279581747e-05,1.0418479603681079e-05,3.0113079646010288e-05,0.00013509175204550882,,MB,2172.612608,4733.140992,0.0,4315.938816,4233.462784,s,10,1.79479150390625,0.179479150390625,0.00012085025752500717,0.17947809600830078,0.1796345962524414,0.17968389053344727,0.17972332595825197,"[0.17951036071777343, 0.17949586486816407, 0.1793469696044922, 0.17932780456542968, 0.17943836975097657, 0.1794603271484375, 0.17973318481445313, 0.17935116577148438, 0.17950381469726562, 0.17962364196777345]",tokens/s,1426.3495199460897,kWh,5.309990883485185e-06,5.855978730670862e-07,3.5183715015636847e-06,9.413960258115956e-06,tokens/kWh,27193656.333878983,MB,2182.836224,4837.998592,0.0,4420.796416,4335.693312,s,10,28.3771533203125,2.8377153320312503,0.015283578343084556,2.8350107421875,2.8584761474609377,2.859589392089844,2.860479987792969,"[2.816822265625, 2.82559912109375, 2.827145751953125, 2.82000390625, 2.828824951171875, 2.846341552734375, 2.852287841796875, 2.86070263671875, 2.841196533203125, 
2.858228759765625]",tokens/s,22.20095838679643,kWh,8.276242480068366e-05,9.128718705613379e-06,4.7673502785236645e-05,0.0001395646462915337,tokens/kWh,451403.71629933134,,s,630,28.37420579528808,0.04503842189728268,0.0006235716879006908,0.04490500831604004,0.04555915260314941,0.04598231906890869,0.04810931335449219,"[0.04497708892822266, 0.04481363296508789, 0.044593921661376955, 0.04465862274169922, 0.0445621452331543, 0.0452303695678711, 0.044542049407958986, 0.04468112182617188, 0.04456857681274414, 0.04515542221069336, 0.044802974700927735, 0.044728321075439455, 0.04453376007080078, 0.04452880096435547, 0.0444567985534668, 0.0445296630859375, 0.044539905548095705, 0.04454969787597656, 0.04512979125976563, 0.04503385543823242, 0.044580833435058594, 0.04438224029541016, 0.04447225570678711, 0.044351551055908205, 0.04451532745361328, 0.044627967834472655, 0.044404289245605466, 0.04470191955566406, 0.04522975921630859, 0.0444699821472168, 0.04473436737060547, 0.04480092620849609, 0.044727935791015624, 0.04463004684448242, 0.04467043304443359, 0.04492566299438477, 0.04451548767089844, 0.04474265670776367, 0.0445148811340332, 0.044426944732666014, 0.044929790496826175, 0.04509868621826172, 0.045019199371337894, 0.0447421760559082, 0.04443392181396484, 0.04480400085449219, 0.04478780746459961, 0.04480022430419922, 0.04463411331176758, 0.04477951812744141, 0.045367294311523435, 0.044677120208740234, 0.044865535736083983, 0.045072383880615234, 0.04507235336303711, 0.04488399887084961, 0.04477724838256836, 0.044548320770263675, 0.04435968017578125, 0.044316287994384765, 0.044386688232421874, 0.044557823181152346, 0.04461209487915039, 0.046035457611083984, 0.04565932846069336, 0.04680992126464844, 0.04461891174316406, 0.04442607879638672, 0.04520345687866211, 0.044650142669677734, 0.04655260848999023, 0.045214527130126955, 0.04538284683227539, 0.04501001739501953, 0.04501679992675781, 0.04525260925292969, 0.045465057373046874, 0.044853759765625, 0.044576801300048825, 0.04452761459350586, 0.045059070587158204, 0.044587230682373045, 0.0449769287109375, 0.04470163345336914, 0.04449811172485352, 0.04460985565185547, 0.04455072021484375, 0.04459929656982422, 0.04450825500488281, 0.044399520874023435, 0.0444846076965332, 0.04521136093139649, 0.04439478302001953, 0.04451532745361328, 0.044636032104492185, 0.04469772720336914, 0.04466175842285156, 0.04465055847167969, 0.0450302734375, 0.04460259246826172, 0.045153118133544924, 0.0452751350402832, 0.044482494354248045, 0.045088191986083985, 0.044749439239501955, 0.045649921417236325, 0.04458607864379883, 0.044649375915527346, 0.04477951812744141, 0.04471603012084961, 0.04451532745361328, 0.04460748672485351, 0.044483840942382814, 0.04514214324951172, 0.044498783111572265, 0.044641056060791016, 0.04456982421875, 0.04467792129516602, 0.0444846076965332, 0.04443337631225586, 0.04445187377929687, 0.04483686447143555, 0.04475904083251953, 0.04449689483642578, 0.0444846076965332, 0.04449280166625977, 0.04484348678588867, 0.044832767486572264, 0.044650081634521485, 0.044599712371826174, 0.0452191047668457, 0.04574886322021485, 0.04502864074707031, 0.04496876907348633, 0.044891647338867184, 0.04478579330444336, 0.04444812774658203, 0.044660736083984375, 0.04446831893920898, 0.044510528564453124, 0.04843376159667969, 0.04485283279418945, 0.044730785369873044, 0.04474879837036133, 0.04461929702758789, 0.04465507125854492, 0.04492675018310547, 0.044626079559326175, 0.04455574417114258, 0.0445384635925293, 0.04491468811035156, 0.044693023681640624, 
0.04451561737060547, 0.04463167953491211, 0.04508134460449219, 0.04484076690673828, 0.04526489639282227, 0.04542259216308594, 0.04501446533203125, 0.045042240142822265, 0.04467302322387695, 0.044662784576416016, 0.04713631820678711, 0.04685049438476562, 0.04500275039672851, 0.04456857681274414, 0.04474451065063476, 0.044847297668457034, 0.044797279357910155, 0.044730945587158205, 0.04449689483642578, 0.04442873764038086, 0.044524192810058594, 0.044439552307128906, 0.04446822357177734, 0.044365825653076174, 0.045096736907958984, 0.044505313873291014, 0.044472320556640625, 0.04434915161132812, 0.0446929931640625, 0.04449359893798828, 0.04453180694580078, 0.044554145812988284, 0.04487686538696289, 0.04462278366088867, 0.04472371292114258, 0.04468377685546875, 0.044772575378417966, 0.04473712158203125, 0.04471635055541992, 0.04475699234008789, 0.0445002555847168, 0.04497891235351562, 0.04460134506225586, 0.04501504135131836, 0.044609535217285154, 0.0445807991027832, 0.044733665466308595, 0.04444623947143555, 0.044415294647216795, 0.04454393768310547, 0.04862508773803711, 0.045136032104492185, 0.04461820983886719, 0.044883968353271485, 0.04469091033935547, 0.044509727478027346, 0.04508224105834961, 0.0445382080078125, 0.04470991897583008, 0.04456243133544922, 0.04478566360473633, 0.044967937469482425, 0.04472598266601562, 0.04444188690185547, 0.0444846076965332, 0.04526694488525391, 0.044488479614257816, 0.04468304061889648, 0.04460793685913086, 0.044660736083984375, 0.044488704681396485, 0.04461977767944336, 0.04437740707397461, 0.04447711944580078, 0.044437503814697264, 0.04446822357177734, 0.044733631134033204, 0.04477420806884766, 0.044407936096191404, 0.04452851104736328, 0.04465459060668946, 0.04449875259399414, 0.04452985763549805, 0.044646400451660156, 0.045055999755859374, 0.04487945556640625, 0.04465910339355469, 0.04523622512817383, 0.044773216247558596, 0.04463017654418945, 0.044490623474121097, 0.04484688186645508, 0.04456687927246094, 0.04490387344360352, 0.045115966796875, 0.04502262496948242, 0.04481289672851563, 0.04452252960205078, 0.04462076950073242, 0.0448256950378418, 0.04489059066772461, 0.044639583587646484, 0.044785888671875, 0.044746814727783205, 0.0446324462890625, 0.044676513671875, 0.04523273468017578, 0.04465663909912109, 0.0445412483215332, 0.04465913772583008, 0.04433686447143555, 0.04497795104980469, 0.04459596633911133, 0.04458870315551758, 0.0442778549194336, 0.044456192016601566, 0.044358783721923825, 0.044485504150390626, 0.04452143859863281, 0.04621478271484375, 0.04448614501953125, 0.044585182189941404, 0.04457952117919922, 0.04447641754150391, 0.04711740875244141, 0.045980575561523435, 0.044627967834472655, 0.044693504333496094, 0.04475904083251953, 0.04513792037963867, 0.04520140838623047, 0.045037025451660155, 0.04512793731689453, 0.04464668655395508, 0.04474879837036133, 0.04472012710571289, 0.04456243133544922, 0.044758464813232424, 0.04479971313476563, 0.04444655990600586, 0.04479715347290039, 0.04491548919677734, 0.04754431915283203, 0.044727935791015624, 0.04470345687866211, 0.04453590393066406, 0.044640766143798825, 0.04485945510864258, 0.04493107223510742, 0.04493721771240235, 0.04513792037963867, 0.04513382339477539, 0.04503497695922851, 0.045021728515625, 0.04500630569458008, 0.04495209503173828, 0.044998561859130856, 0.04466902542114258, 0.04468940734863281, 0.04512675094604492, 0.04478841781616211, 0.045794784545898436, 0.04482534408569336, 0.04555583953857422, 0.045032032012939455, 0.0449986572265625, 0.045113121032714844, 
0.04495382308959961, 0.044957695007324217, 0.04494131088256836, 0.04483203125, 0.04492275238037109, 0.04487801742553711, 0.04464831924438477, 0.04480284881591797, 0.04516179275512695, 0.04487443161010742, 0.0460552978515625, 0.04474643325805664, 0.04468137741088867, 0.04480438232421875, 0.04465795135498047, 0.045517536163330076, 0.04521955108642578, 0.04517507171630859, 0.044781150817871096, 0.04490895843505859, 0.04479312133789062, 0.04482940673828125, 0.04519110488891601, 0.04513324737548828, 0.04509900665283203, 0.04475353622436523, 0.04478966522216797, 0.044665985107421875, 0.04475564956665039, 0.04476716613769531, 0.04482902526855469, 0.04500582504272461, 0.049463550567626954, 0.04520832061767578, 0.044799999237060545, 0.04478345489501953, 0.044841121673583985, 0.04472217559814453, 0.0449536018371582, 0.045328384399414064, 0.045700641632080076, 0.045262367248535156, 0.04500368118286133, 0.045037601470947264, 0.045217281341552736, 0.04521187210083008, 0.04507468795776367, 0.045416481018066404, 0.04563763046264648, 0.04509286499023438, 0.04564787292480469, 0.04504931259155273, 0.0450032958984375, 0.045230079650878906, 0.048365150451660156, 0.0460865592956543, 0.04511520004272461, 0.044992702484130856, 0.044974079132080076, 0.04632297515869141, 0.0450546875, 0.04460879898071289, 0.04464905548095703, 0.04457279968261719, 0.04537139129638672, 0.04503756713867187, 0.0451932144165039, 0.04528236770629883, 0.04527779388427734, 0.04512803268432617, 0.04522598266601562, 0.04538777542114258, 0.045352958679199216, 0.04525235366821289, 0.045887744903564454, 0.04546355056762695, 0.045305694580078125, 0.04491484832763672, 0.04487366485595703, 0.045004257202148436, 0.04529801559448242, 0.04553254318237305, 0.04485823822021484, 0.04456857681274414, 0.04506623840332031, 0.0446033935546875, 0.045428737640380856, 0.04464432144165039, 0.04459113693237305, 0.04451123046875, 0.04478976058959961, 0.04492287826538086, 0.04468867111206055, 0.048122592926025394, 0.045666305541992185, 0.04567561721801758, 0.04573084640502929, 0.04531801605224609, 0.04527452850341797, 0.04556246566772461, 0.04534886550903321, 0.04578508758544922, 0.04524031829833984, 0.04561715316772461, 0.045554721832275394, 0.045378528594970706, 0.045362590789794925, 0.045532958984375, 0.045280063629150394, 0.04532137680053711, 0.04518998336791992, 0.0452006721496582, 0.04560079956054688, 0.045277088165283204, 0.045257503509521485, 0.045276161193847655, 0.045265918731689454, 0.045418495178222655, 0.04512153625488281, 0.04512768173217773, 0.04533967971801758, 0.0454389762878418, 0.04614044952392578, 0.045547679901123045, 0.045530849456787106, 0.04532467269897461, 0.04523772811889649, 0.04607241439819336, 0.04518489456176758, 0.04563203048706055, 0.04552294540405273, 0.045312000274658204, 0.04542259216308594, 0.04536524963378906, 0.04544233703613281, 0.04670086288452149, 0.04528323364257812, 0.045156352996826174, 0.04562790298461914, 0.04564582443237305, 0.04523212814331055, 0.04509600067138672, 0.04530428695678711, 0.045416126251220705, 0.04544793701171875, 0.045797409057617186, 0.045681758880615236, 0.04538566589355469, 0.04516758346557617, 0.04586844635009766, 0.04527369689941406, 0.04502937698364258, 0.045499935150146484, 0.04534473419189453, 0.04543948745727539, 0.045206687927246095, 0.045240894317626956, 0.044947742462158206, 0.04468262481689453, 0.0445522575378418, 0.04484972763061523, 0.046693504333496096, 0.04521798324584961, 0.045099712371826174, 0.04518409729003906, 0.045085216522216795, 0.045402206420898435, 0.04792758560180664, 
0.045475135803222655, 0.04517110443115235, 0.045372833251953126, 0.046134143829345706, 0.04537120056152344, 0.045246654510498044, 0.045461727142333985, 0.045157184600830076, 0.04534985733032226, 0.045440383911132816, 0.04518278503417969, 0.045438945770263674, 0.04512444686889648, 0.04497612762451172, 0.04484540939331055, 0.04486681747436523, 0.04457907104492188, 0.04559299087524414, 0.04534092712402344, 0.04540415954589844, 0.04525433731079102, 0.045934913635253906, 0.04515020751953125, 0.04506787109375, 0.04531398391723633, 0.045908447265625, 0.0452751350402832, 0.045346561431884765, 0.045261054992675784, 0.0460873908996582, 0.045257503509521485, 0.045055553436279296, 0.04505644989013672, 0.04522111892700195, 0.04514041519165039, 0.04571372985839844, 0.044614688873291015, 0.0447784652709961, 0.044799999237060545, 0.04475699234008789, 0.04452761459350586, 0.04477337646484375, 0.045104576110839845, 0.04537401580810547, 0.04555878448486328, 0.04482118225097656, 0.044773696899414066, 0.04485232162475586, 0.04531670379638672, 0.04461996841430664, 0.044742782592773436, 0.04499660873413086, 0.0446484489440918, 0.04463411331176758, 0.0445665283203125, 0.04468735885620117, 0.045079967498779294, 0.0444749755859375, 0.044560382843017575, 0.04460486221313477, 0.044550209045410155, 0.04476128005981445, 0.0448474235534668, 0.04513977432250976, 0.045383102416992185, 0.045475841522216794, 0.04522854232788086, 0.04480419158935547, 0.044566238403320316, 0.0445731201171875, 0.04458700942993164, 0.04466659164428711, 0.04470198440551758, 0.04871372985839844, 0.04511043167114258, 0.04513840103149414, 0.04456390380859375, 0.04545836639404297, 0.04523622512817383, 0.04524227142333984, 0.0457391357421875, 0.04548067092895508, 0.045369342803955076, 0.0452567024230957, 0.04640972900390625, 0.04534886550903321, 0.04522393417358399, 0.04598374557495117, 0.045385726928710936, 0.04529878234863281, 0.04501187133789063, 0.04807680130004883, 0.04564787292480469, 0.04516563034057617, 0.04491360092163086, 0.044969249725341796, 0.04530863952636719, 0.04500844955444336, 0.045042110443115235, 0.044844192504882814, 0.044872543334960935, 0.04480409622192383, 0.044988414764404294, 0.04466624069213867, 0.04486822509765625, 0.045178878784179685, 0.04637900924682617, 0.04541843032836914, 0.04497177505493164, 0.04482489776611328, 0.04494540786743164, 0.04496502304077148, 0.044958560943603516, 0.04487081527709961, 0.044888927459716794, 0.044843006134033206, 0.04480963134765625, 0.04744252777099609, 0.04689100646972656, 0.04490614318847656, 0.045171039581298825, 0.04506371307373047, 0.04500323104858398, 0.0449249267578125, 0.045096958160400394, 0.04522118377685547, 0.044860095977783204, 0.04479532623291015, 0.04562771224975586, 0.04523571014404297, 0.045499137878417965, 0.0456511344909668, 0.04535110473632813, 0.04515084838867187, 0.0481976318359375, 0.04552214431762695, 0.04590409469604492, 0.04523884963989258, 0.04663679885864258, 0.044990718841552736, 0.04548387145996094, 0.04468892669677734, 0.04464704132080078]",tokens/s,22.203264632154738,,
4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) 
Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,825.131008,545.128448,0.0,159.383552,143.673856,s,1,7.73799560546875,7.73799560546875,0.0,7.73799560546875,7.73799560546875,7.73799560546875,7.73799560546875,[7.73799560546875],,kWh,1.1002869616701598e-05,1.2060808126243883e-06,2.7222243999813767e-06,1.4931174829307362e-05,,MB,1342.889984,616.431616,0.0,199.22944,186.684928,s,19,0.19152707195281982,0.010080372208043148,0.0003365190027857135,0.010015935897827148,0.010246790313720703,0.01085495672225952,0.011141666240692138,"[0.010030431747436524, 0.00982265567779541, 0.009850144386291504, 0.010815135955810547, 0.010030752182006835, 0.00987548828125, 0.009902912139892578, 0.010098560333251953, 0.010015935897827148, 0.011213343620300293, 0.009973952293395996, 0.010052191734313964, 0.009920448303222655, 0.00991385555267334, 0.010054495811462403, 0.010104703903198241, 0.00993280029296875, 0.009895584106445313, 0.010023679733276367]",tokens/s,25395.88764348772,kWh,2.955258249409259e-07,3.259125785066744e-08,1.2309529640685493e-07,4.512123791984483e-07,tokens/kWh,567360320.3324533,MB,1382.739968,618.528768,0.0,201.326592,186.687488,s,19,10.012183837890625,0.5269570440995066,0.002872720266168862,0.526202880859375,0.5314283447265625,0.5317847961425781,0.5338151135253907,"[0.5235718383789062, 0.5343226928710938, 0.5236062622070312, 0.5251556396484375, 0.5257523803710937, 0.5269797973632813, 0.5251487426757813, 0.527787353515625, 0.5288425903320313, 0.5261953125, 0.526202880859375, 0.5254588623046875, 0.524680419921875, 0.5315028076171875, 0.5314097290039063, 0.526810791015625, 0.5282140502929688, 0.522894775390625, 0.5276469116210938]",tokens/s,119.55433693396753,kWh,1.5136497833391946e-05,1.6692905834529984e-06,5.240432386225572e-06,2.2046220803070518e-05,tokens/kWh,2857632.632946577,,s,1197,10.000772610664368,0.008354864336394627,0.00020488175669350434,0.008317055702209473,0.008443858909606933,0.00855712013244629,0.008912132034301756,"[0.008239199638366699, 0.008333215713500977, 0.008282112121582032, 0.00831612777709961, 0.008248096466064453, 0.008289695739746094, 0.008296575546264649, 0.008328767776489257, 0.008323840141296387, 0.008331007957458496, 0.008292287826538085, 0.008323552131652832, 0.008284159660339355, 0.008256671905517578, 0.00825430393218994, 0.008281503677368164, 0.008247584342956542, 0.00830844783782959, 0.008462143898010253, 0.00846553611755371, 0.008406911849975586, 0.00825654411315918, 0.008388799667358398, 0.00832960033416748, 0.008289504051208495, 0.00829759979248047, 0.00827785587310791, 0.008286208152770995, 0.008256896018981934, 0.00827235221862793, 0.008529600143432617, 0.008298208236694336, 0.008300767898559571, 0.008275775909423829, 0.008272255897521973, 0.008282015800476075, 0.008272255897521973, 0.00830412769317627, 0.0083088960647583, 0.008292256355285644, 0.008294015884399415, 0.008275967597961426, 0.00832806396484375, 0.008275967597961426, 0.00826598358154297, 0.008283167839050294, 0.008278752326965332, 0.008271967887878418, 0.008245152473449707, 0.008271903991699218, 0.00825545597076416, 0.00832102394104004, 0.00827187156677246, 0.008272159576416015, 0.008331263542175293, 0.008306400299072265, 0.00829644775390625, 0.008281087875366211, 0.008287232398986816, 0.008355839729309082, 0.008272928237915039, 0.008276960372924805, 0.008272992134094239, 0.008324511528015137, 0.008297056198120116, 
0.008524127960205078, 0.008515168190002441, 0.009025440216064454, 0.008854016304016114, 0.008383935928344726, 0.008359807968139649, 0.008319071769714356, 0.008319328308105469, 0.008345600128173827, 0.008324864387512208, 0.00837388801574707, 0.008384896278381347, 0.008392767906188965, 0.00824726390838623, 0.008300479888916015, 0.008274111747741699, 0.008325119972229005, 0.008262751579284668, 0.008296511650085448, 0.00833017635345459, 0.008430496215820312, 0.00825382423400879, 0.008257856369018554, 0.0082291841506958, 0.008378368377685547, 0.00825500774383545, 0.008264512062072753, 0.008279711723327636, 0.008316927909851075, 0.00827017593383789, 0.008299903869628906, 0.008327360153198243, 0.008306303977966308, 0.008384991645812988, 0.00991436767578125, 0.010825728416442871, 0.013303423881530762, 0.008366463661193847, 0.008300607681274413, 0.00832262420654297, 0.008280096054077148, 0.008269951820373536, 0.008241375923156739, 0.00825334358215332, 0.00833459186553955, 0.008233823776245118, 0.008296768188476562, 0.008341343879699707, 0.008232416152954102, 0.008236639976501465, 0.00826483154296875, 0.008260543823242188, 0.00831772804260254, 0.008257247924804688, 0.008265952110290527, 0.008259424209594727, 0.008279359817504883, 0.008252320289611816, 0.008246975898742676, 0.008261856079101562, 0.008263903617858887, 0.008281567573547362, 0.008300992012023925, 0.00840726375579834, 0.00829964828491211, 0.008298368453979492, 0.008332032203674317, 0.00833529567718506, 0.008270432472229004, 0.00831004810333252, 0.008376864433288575, 0.008348608016967773, 0.008262623786926269, 0.008259167671203613, 0.008263327598571778, 0.008389375686645507, 0.008262975692749024, 0.00825823974609375, 0.00828451156616211, 0.008296223640441894, 0.008294272422790527, 0.008353695869445801, 0.008296544075012208, 0.008271712303161621, 0.008269984245300293, 0.008254688262939453, 0.00827023983001709, 0.008353216171264648, 0.00824345588684082, 0.008256447792053223, 0.008236800193786621, 0.00827939224243164, 0.00823532772064209, 0.008303232192993164, 0.008371968269348145, 0.00828822422027588, 0.008245247840881348, 0.008242752075195313, 0.008251839637756348, 0.00835590362548828, 0.008320351600646973, 0.008277824401855469, 0.008265600204467774, 0.00825830364227295, 0.00822640037536621, 0.00823977565765381, 0.008247200012207032, 0.0082774076461792, 0.008323679924011231, 0.008666175842285156, 0.008276415824890136, 0.008371999740600586, 0.008261471748352052, 0.008307583808898926, 0.008259136199951172, 0.008348320007324219, 0.0082674560546875, 0.008244928359985351, 0.008234911918640137, 0.008324607849121094, 0.008288576126098632, 0.008370176315307617, 0.008532095909118652, 0.00831663990020752, 0.00829644775390625, 0.008331263542175293, 0.008364224433898925, 0.008316160202026367, 0.008301088333129883, 0.008374496459960938, 0.008300352096557616, 0.00832921600341797, 0.008280351638793945, 0.008304448127746581, 0.008409088134765624, 0.008287296295166016, 0.008433823585510253, 0.008466719627380371, 0.008294943809509278, 0.008280223846435546, 0.00836787223815918, 0.008284223556518555, 0.008275679588317871, 0.008282719612121582, 0.008287839889526367, 0.008300543785095215, 0.00829856014251709, 0.00830844783782959, 0.008329119682312012, 0.008328864097595215, 0.008350367546081544, 0.008383520126342773, 0.008346207618713379, 0.008339872360229492, 0.008303647994995117, 0.008313920021057129, 0.008316255569458008, 0.008313376426696778, 0.00831488037109375, 0.008371328353881836, 0.008274687767028808, 0.008261568069458007, 0.008287455558776856, 
0.008257568359375, 0.00828003215789795, 0.00826483154296875, 0.008287232398986816, 0.008292511940002441, 0.008296480178833008, 0.00826249599456787, 0.008277888298034669, 0.008296064376831055, 0.008296863555908203, 0.008396736145019531, 0.008285599708557129, 0.008747584342956543, 0.008295552253723144, 0.00827286434173584, 0.00829430389404297, 0.008299903869628906, 0.008338047981262206, 0.008347776412963867, 0.008340576171875, 0.008327967643737793, 0.008368127822875977, 0.008371999740600586, 0.008419551849365235, 0.008461343765258788, 0.008419424057006837, 0.008343680381774903, 0.008452863693237304, 0.00829148769378662, 0.00830726432800293, 0.008312479972839355, 0.008307168006896973, 0.008302751541137696, 0.008304927825927734, 0.008324031829833984, 0.00832310390472412, 0.00845315170288086, 0.008277215957641602, 0.00827888011932373, 0.008307776451110839, 0.008282143592834473, 0.008284735679626464, 0.0083374080657959, 0.008356160163879394, 0.008400575637817383, 0.008351743698120117, 0.00832307243347168, 0.008296607971191406, 0.008365407943725586, 0.008388447761535644, 0.008298720359802246, 0.008331647872924805, 0.00828012752532959, 0.00827347183227539, 0.008265727996826172, 0.008308287620544433, 0.008301440238952637, 0.008275967597961426, 0.008321056365966797, 0.00828752040863037, 0.008282719612121582, 0.008318400382995606, 0.008297087669372559, 0.008382335662841797, 0.008328800201416015, 0.008332192420959473, 0.00832703971862793, 0.008421343803405762, 0.00837724781036377, 0.008320992469787597, 0.008336031913757324, 0.00831443214416504, 0.00829529571533203, 0.008615615844726563, 0.008449312210083007, 0.008383008003234863, 0.0082990083694458, 0.008285344123840332, 0.00827187156677246, 0.008308992385864258, 0.008370783805847168, 0.008328639984130859, 0.008303263664245605, 0.00835968017578125, 0.00836137580871582, 0.008383071899414063, 0.00829148769378662, 0.008301568031311036, 0.008301568031311036, 0.008326944351196288, 0.008300767898559571, 0.00828774356842041, 0.008570688247680664, 0.00835472011566162, 0.00833459186553955, 0.008438048362731933, 0.008358143806457519, 0.008294400215148925, 0.008384511947631837, 0.008334431648254394, 0.008313440322875976, 0.008288576126098632, 0.008273088455200195, 0.008323328018188476, 0.008319295883178712, 0.00831494426727295, 0.008421567916870118, 0.00858844757080078, 0.0083439359664917, 0.00834607982635498, 0.00832307243347168, 0.008421376228332519, 0.008294528007507324, 0.00843273639678955, 0.0083189115524292, 0.00828707218170166, 0.00829856014251709, 0.008303936004638673, 0.008286304473876953, 0.008288800239562988, 0.008297504425048828, 0.00835260772705078, 0.008301983833312989, 0.008298720359802246, 0.00828876781463623, 0.00841932773590088, 0.008343903541564942, 0.008326208114624024, 0.008294464111328125, 0.008320575714111328, 0.008281056404113769, 0.008300543785095215, 0.008320063591003417, 0.008293184280395507, 0.008388544082641601, 0.008300736427307128, 0.008331487655639649, 0.008382240295410156, 0.008302592277526855, 0.00832271957397461, 0.008974687576293945, 0.008323264122009278, 0.008275327682495117, 0.008382207870483398, 0.008331199645996094, 0.008399456024169923, 0.008910816192626953, 0.008325311660766601, 0.008304863929748536, 0.008329024314880371, 0.008241567611694336, 0.008296319961547852, 0.008423551559448243, 0.008301664352416992, 0.008360447883605958, 0.008294912338256836, 0.008281888008117676, 0.00828006362915039, 0.008318976402282715, 0.008298591613769531, 0.008325183868408203, 0.008286175727844238, 0.00829759979248047, 
0.008301471710205078, 0.008388383865356445, 0.008308799743652343, 0.008371392250061034, 0.008346752166748047, 0.008294079780578613, 0.008275039672851562, 0.008299136161804199, 0.008298784255981446, 0.008317055702209473, 0.00834934425354004, 0.008294624328613281, 0.00826972770690918, 0.008310879707336426, 0.00829974365234375, 0.008315679550170898, 0.008304032325744629, 0.008273823738098145, 0.008301440238952637, 0.00831868839263916, 0.00827184009552002, 0.008414624214172363, 0.008394623756408691, 0.008297311782836913, 0.008349184036254884, 0.008331775665283203, 0.008352864265441894, 0.008424351692199707, 0.008396160125732422, 0.008365983963012696, 0.008305567741394043, 0.008286016464233398, 0.008320992469787597, 0.008365568161010742, 0.008300383567810059, 0.008309632301330567, 0.00830025577545166, 0.008345376014709473, 0.008337087631225586, 0.008410816192626952, 0.008291263580322266, 0.00831612777709961, 0.008328160285949708, 0.008296256065368652, 0.008289952278137207, 0.008296832084655762, 0.008345024108886719, 0.008290783882141113, 0.00860371208190918, 0.008312000274658202, 0.008287263870239257, 0.00836460781097412, 0.008411552429199219, 0.008359935760498047, 0.008331263542175293, 0.008341024398803711, 0.00836451244354248, 0.008350720405578613, 0.008337920188903808, 0.008541695594787598, 0.008453472137451172, 0.00860739231109619, 0.008474464416503907, 0.008480544090270997, 0.008442239761352539, 0.00843603229522705, 0.008488960266113281, 0.00844489574432373, 0.008469216346740723, 0.008415040016174316, 0.00836457633972168, 0.008369248390197754, 0.008386464118957519, 0.008344032287597657, 0.008335583686828614, 0.00836524772644043, 0.008316864013671875, 0.00833619213104248, 0.008341631889343261, 0.008342464447021485, 0.008322015762329101, 0.008381888389587403, 0.008387136459350585, 0.008372223854064942, 0.008396544456481934, 0.008396608352661133, 0.008344256401062011, 0.008346879959106445, 0.008331423759460449, 0.008314847946166992, 0.008268159866333008, 0.008284159660339355, 0.008502752304077148, 0.00831494426727295, 0.008329440116882324, 0.008317184448242188, 0.008333151817321777, 0.008368576049804687, 0.008285920143127441, 0.008348671913146973, 0.008296799659729005, 0.00837932777404785, 0.008332096099853515, 0.008345696449279785, 0.008368736267089843, 0.008366592407226562, 0.008338144302368164, 0.008391200065612793, 0.008319231986999511, 0.008370400428771973, 0.008290176391601562, 0.008259712219238281, 0.008298272132873536, 0.008308287620544433, 0.008292736053466797, 0.00829360008239746, 0.008307807922363282, 0.008310463905334473, 0.008300543785095215, 0.00830463981628418, 0.008389760017395019, 0.008317824363708495, 0.008297568321228027, 0.008368736267089843, 0.008408736228942872, 0.00833788776397705, 0.008327712059020996, 0.008334752082824706, 0.008317248344421387, 0.008344639778137207, 0.008348480224609375, 0.008327232360839844, 0.008414815902709961, 0.00838492774963379, 0.008361439704895019, 0.008432160377502442, 0.008371583938598633, 0.009284223556518555, 0.008418368339538575, 0.008422335624694825, 0.008558591842651368, 0.008460288047790527, 0.00841113567352295, 0.008568832397460938, 0.008466431617736817, 0.008374272346496582, 0.008407039642333984, 0.008434752464294433, 0.008385087966918945, 0.008411520004272461, 0.008366080284118652, 0.008449119567871094, 0.008342432022094727, 0.0083372802734375, 0.008492287635803222, 0.008332159996032715, 0.008457920074462891, 0.008324799537658691, 0.008339360237121582, 0.008294207572937012, 0.008328096389770508, 0.008312447547912598, 
0.00830412769317627, 0.00836083221435547, 0.008284064292907714, 0.008562047958374024, 0.008340543746948242, 0.008482751846313476, 0.008301312446594239, 0.008330207824707031, 0.008484992027282715, 0.008349568367004394, 0.00832921600341797, 0.008337535858154298, 0.008292223930358887, 0.008316800117492676, 0.00860979175567627, 0.008361984252929687, 0.008769536018371582, 0.00841932773590088, 0.008404288291931152, 0.008350208282470703, 0.008290495872497559, 0.008316287994384766, 0.008292991638183594, 0.008308927536010742, 0.00832310390472412, 0.00831056022644043, 0.008439519882202148, 0.008300127983093262, 0.008301376342773437, 0.008319135665893554, 0.008311615943908692, 0.00834447956085205, 0.008363103866577149, 0.008280415534973145, 0.008311360359191895, 0.008308735847473145, 0.008376319885253907, 0.00830463981628418, 0.008296575546264649, 0.008351615905761719, 0.008267552375793456, 0.008284576416015625, 0.008273728370666504, 0.008299551963806152, 0.00830953598022461, 0.008275168418884277, 0.008346400260925292, 0.008302783966064453, 0.00832921600341797, 0.008292351722717285, 0.008296159744262696, 0.008347647666931152, 0.008354080200195313, 0.00829030418395996, 0.008316032409667969, 0.008301216125488281, 0.008300224304199218, 0.008303135871887206, 0.008320447921752929, 0.00840556812286377, 0.00833737564086914, 0.008352095603942872, 0.008459199905395507, 0.008407808303833008, 0.008366080284118652, 0.008372223854064942, 0.00834489631652832, 0.00833625602722168, 0.00834108829498291, 0.008314175605773926, 0.008359135627746582, 0.008337087631225586, 0.008386912345886231, 0.008310688018798829, 0.00834774398803711, 0.008321791648864747, 0.008329919815063477, 0.008344544410705566, 0.008311967849731445, 0.008309151649475098, 0.008315296173095703, 0.00832921600341797, 0.008318976402282715, 0.008320223808288575, 0.008417759895324707, 0.008307040214538574, 0.008335328102111817, 0.00832646369934082, 0.008344256401062011, 0.00836787223815918, 0.008404928207397461, 0.00835750389099121, 0.008331040382385253, 0.008311776161193848, 0.008308320045471192, 0.008372032165527343, 0.008357728004455566, 0.008353919982910156, 0.00834934425354004, 0.0083090238571167, 0.008288928031921387, 0.008292320251464844, 0.008292351722717285, 0.008318976402282715, 0.008341343879699707, 0.008304351806640626, 0.00831935977935791, 0.008284223556518555, 0.008349023818969726, 0.008323679924011231, 0.008363103866577149, 0.008709088325500488, 0.008364031791687012, 0.00840441608428955, 0.008333888053894043, 0.008437376022338867, 0.008345024108886719, 0.008434623718261718, 0.00834540843963623, 0.00834988784790039, 0.008548352241516113, 0.008372223854064942, 0.008314656257629395, 0.008370400428771973, 0.008320128440856934, 0.00827683162689209, 0.008278047561645508, 0.008296511650085448, 0.00827564811706543, 0.008311039924621582, 0.008335359573364258, 0.008283807754516601, 0.008279999732971192, 0.008309151649475098, 0.008292160034179687, 0.008327136039733887, 0.008395296096801758, 0.008340224266052246, 0.008334272384643555, 0.008345952033996582, 0.00833459186553955, 0.008311296463012695, 0.008361248016357422, 0.008303071975708008, 0.008320799827575684, 0.008319456100463868, 0.008363776206970215, 0.008333567619323731, 0.008455840110778809, 0.008396608352661133, 0.008394847869873047, 0.00840771198272705, 0.008402144432067871, 0.0084071683883667, 0.008368512153625489, 0.00839891242980957, 0.008356127738952637, 0.008322560310363769, 0.008315296173095703, 0.008341312408447266, 0.008288191795349122, 0.008347552299499511, 0.008308223724365234, 
0.008288928031921387, 0.0082774076461792, 0.008295007705688476, 0.008273920059204102, 0.008267744064331055, 0.008316960334777832, 0.00838361644744873, 0.008289248466491698, 0.008290207862854004, 0.00828006362915039, 0.008290592193603516, 0.008264863967895508, 0.008354496002197266, 0.008312704086303711, 0.00831283187866211, 0.008345600128173827, 0.00834886360168457, 0.008340479850769043, 0.00852563190460205, 0.008324640274047852, 0.0083374080657959, 0.008352224349975585, 0.008331423759460449, 0.008312671661376954, 0.008296256065368652, 0.00847702407836914, 0.008308575630187989, 0.008286496162414551, 0.008281472206115723, 0.008286463737487792, 0.008328736305236817, 0.008332127571105958, 0.008307807922363282, 0.008300992012023925, 0.008288288116455078, 0.008286368370056153, 0.008286399841308594, 0.008285504341125488, 0.00833132839202881, 0.008317472457885743, 0.00826905632019043, 0.008300800323486329, 0.008274432182312011, 0.008333312034606934, 0.008277088165283204, 0.008270912170410157, 0.008281855583190919, 0.008283616065979004, 0.008264320373535156, 0.008292160034179687, 0.00835807991027832, 0.008323007583618165, 0.008347935676574707, 0.008265503883361816, 0.00825712013244629, 0.00826204776763916, 0.008269280433654784, 0.008301024436950683, 0.008289728164672851, 0.008292799949645996, 0.008329407691955566, 0.008324416160583496, 0.008335136413574218, 0.008350624084472656, 0.008421567916870118, 0.008396608352661133, 0.00829206371307373, 0.008287615776062011, 0.008284704208374023, 0.008307071685791015, 0.00833897590637207, 0.008259231567382813, 0.008259936332702637, 0.008243680000305176, 0.008270048141479492, 0.008517439842224122, 0.00834982395172119, 0.008288096427917481, 0.008341504096984862, 0.008257535934448243, 0.008304703712463378, 0.008253376007080078, 0.008281536102294922, 0.008364352226257324, 0.008282048225402832, 0.008326751708984375, 0.00862281608581543, 0.008543744087219238, 0.008272735595703126, 0.008590496063232422, 0.008339296340942383, 0.008313504219055176, 0.008421183586120606, 0.008305983543395996, 0.008320159912109375, 0.00830025577545166, 0.008362336158752441, 0.008301823616027833, 0.008257599830627441, 0.008259936332702637, 0.008269472122192383, 0.0082640962600708, 0.008270048141479492, 0.008258144378662109, 0.00824351978302002, 0.0082510404586792, 0.00825551986694336, 0.00827132797241211, 0.008327712059020996, 0.008293791770935059, 0.008286815643310547, 0.008274080276489258, 0.008296287536621094, 0.008273920059204102, 0.008274208068847657, 0.008338239669799805, 0.00827280044555664, 0.008294400215148925, 0.008253503799438476, 0.008281920433044434, 0.008276224136352539, 0.008375807762145996, 0.008320863723754884, 0.008343263626098633, 0.0083689603805542, 0.008376319885253907, 0.008359935760498047, 0.00832688045501709, 0.008417568206787109, 0.008382464408874512, 0.008431072235107423, 0.008515775680541992, 0.008569120407104492, 0.008533120155334473, 0.008461248397827149, 0.00843769645690918, 0.008368191719055175, 0.008330400466918946, 0.008334176063537597, 0.008359135627746582, 0.008297311782836913, 0.008382335662841797, 0.00834771156311035, 0.008374272346496582, 0.008450079917907714, 0.008521696090698243, 0.008445952415466309, 0.00841932773590088, 0.008492223739624024, 0.008492959976196288, 0.00859008026123047, 0.008648799896240235, 0.008674688339233398, 0.0086627197265625, 0.008692607879638672, 0.008570655822753907, 0.008606047630310058, 0.008751104354858399, 0.008888319969177246, 0.00885747241973877, 0.008732799530029298, 0.008572928428649903, 0.00847276782989502, 
0.008453951835632325, 0.00840499210357666, 0.00845206356048584, 0.008423328399658203, 0.008363007545471191, 0.008363200187683105, 0.00836083221435547, 0.008343487739562989, 0.008358943939208984, 0.008425791740417481, 0.008304351806640626, 0.008293312072753906, 0.008311871528625489, 0.008297311782836913, 0.008665184020996093, 0.008632479667663574, 0.008830816268920898, 0.00894371223449707, 0.00841750431060791, 0.008435551643371581, 0.00834329605102539, 0.008357983589172363, 0.008410143852233886, 0.00834233570098877, 0.00836575984954834, 0.008336864471435546, 0.008301568031311036, 0.008335359573364258, 0.008270048141479492, 0.008668959617614746, 0.008347647666931152, 0.00832102394104004, 0.00832921600341797, 0.008340831756591796, 0.008322976112365722, 0.008469535827636718, 0.008420160293579101, 0.008437919616699218, 0.008396832466125489, 0.008362719535827637, 0.0085032958984375, 0.00833459186553955, 0.008375200271606445, 0.008415295600891113, 0.008430912017822265, 0.00839846420288086, 0.008473440170288086, 0.008554400444030762, 0.008591456413269043, 0.008433568000793456, 0.008371552467346192, 0.00833180809020996, 0.008405216217041015, 0.00832851219177246, 0.008503999710083008, 0.00872383975982666, 0.008687904357910156, 0.008556896209716797, 0.008407360076904296, 0.008355744361877441, 0.008345184326171875, 0.008392895698547363, 0.008415231704711914, 0.008353728294372559, 0.008436960220336914, 0.008411328315734863, 0.008612319946289063, 0.008689663887023925, 0.008852767944335937, 0.008771743774414063, 0.009069120407104492, 0.008558015823364257, 0.008519231796264648, 0.008448415756225586, 0.008389216423034668, 0.00831283187866211, 0.008283679962158202, 0.008278592109680176, 0.008400064468383789, 0.008329952239990234, 0.00828544044494629, 0.00825830364227295, 0.008279744148254395, 0.008285856246948243, 0.008312704086303711, 0.008291104316711425, 0.008385600090026856, 0.008313183784484863, 0.00830508804321289, 0.00828335952758789, 0.008391615867614746, 0.008309791564941407, 0.00833568000793457, 0.008294655799865722, 0.008332832336425781, 0.008285247802734375, 0.008301600456237793, 0.008303008079528808, 0.00828659152984619, 0.008354144096374512, 0.008310175895690919, 0.008290016174316406, 0.008387328147888184, 0.008277440071105957, 0.008275712013244629, 0.008258144378662109, 0.008310400009155273, 0.008294272422790527, 0.008298144340515137, 0.008307295799255371, 0.008364288330078126, 0.008273920059204102, 0.008296159744262696, 0.00827625560760498, 0.008425472259521484, 0.008260640144348144, 0.008246368408203125, 0.00824953556060791, 0.008236736297607421, 0.008295519828796387, 0.008273887634277343, 0.00824124813079834, 0.00827683162689209, 0.008252927780151367, 0.008346112251281738, 0.008250368118286134, 0.008390848159790038, 0.00824556827545166, 0.00831116771697998, 0.008275967597961426, 0.008460288047790527, 0.008644031524658204, 0.009065024375915528, 0.008480223655700683, 0.009562656402587891, 0.008839167594909669, 0.00829849624633789, 0.008408384323120117, 0.008303296089172364, 0.008303936004638673, 0.008727231979370116, 0.008295583724975585, 0.008302656173706055, 0.008300383567810059, 0.008381695747375488, 0.008601280212402343, 0.008304320335388183, 0.008284480094909667, 0.00825107192993164, 0.008370719909667968, 0.008371999740600586, 0.00833459186553955, 0.008305407524108887, 0.00822662353515625, 0.008285504341125488, 0.008362719535827637, 0.008347999572753907, 0.008341312408447266, 0.008322751998901368, 0.008509023666381836, 0.008288991928100587, 0.008392191886901856, 0.008384736061096191, 
0.00830288028717041, 0.008273951530456543, 0.008292448043823243, 0.008371647834777831, 0.008290752410888672, 0.008379839897155761, 0.00830726432800293, 0.008346943855285645, 0.008272576332092284, 0.008259231567382813, 0.008310879707336426, 0.008253696441650391, 0.00825376033782959, 0.008257216453552246, 0.008318976402282715, 0.008269824028015137, 0.008278016090393067, 0.008332384109497071, 0.008274304389953612, 0.008289055824279785, 0.008326911926269531, 0.00833244800567627, 0.008309599876403808, 0.008265727996826172, 0.008285792350769042, 0.008274335861206055, 0.00826809597015381, 0.008334527969360352, 0.008292256355285644, 0.008283807754516601, 0.008273920059204102, 0.008267775535583496, 0.00831488037109375, 0.008271615982055664, 0.00826198387145996, 0.008302271842956543, 0.008272064208984376, 0.008273183822631836, 0.008280735969543457, 0.008328991889953613, 0.008302751541137696, 0.008283616065979004, 0.008290016174316406, 0.008263903617858887, 0.008272640228271485, 0.008255488395690918, 0.00829644775390625, 0.008271200180053711, 0.008241439819335937, 0.008269472122192383, 0.00830288028717041, 0.008243647575378418, 0.00832096004486084, 0.008267840385437012, 0.008261631965637208, 0.008258655548095703, 0.00828115177154541, 0.008277695655822754, 0.008332672119140625, 0.008307071685791015, 0.008286911964416504, 0.008291872024536132, 0.00826204776763916, 0.008258784294128418, 0.00836460781097412, 0.008285823822021485, 0.00827014446258545, 0.008281408309936523, 0.008262399673461915, 0.008278016090393067, 0.008261407852172852, 0.008274144172668457, 0.00830463981628418, 0.008255488395690918, 0.008257823944091797, 0.008275168418884277, 0.008306655883789062, 0.008312543869018555, 0.008296832084655762, 0.008515711784362792, 0.008299967765808106, 0.008319264411926269, 0.008272480010986329, 0.008295424461364746, 0.00827455997467041, 0.008298879623413085, 0.008254624366760254, 0.008256352424621582, 0.008341504096984862, 0.008324416160583496, 0.008375328063964843, 0.008357631683349609, 0.008320896148681641, 0.008295807838439941, 0.008298720359802246, 0.00837491226196289, 0.008335488319396973, 0.008456064224243163, 0.008327168464660644, 0.008390496253967285, 0.008375616073608398, 0.008463199615478516, 0.00834886360168457, 0.008315711975097657, 0.008443167686462402, 0.008351648330688476, 0.008283167839050294, 0.008247072219848633, 0.008263615608215332, 0.008284223556518555, 0.008314816474914551, 0.008333375930786133, 0.008280159950256348, 0.008353695869445801, 0.008294400215148925, 0.008282112121582032, 0.008472512245178223, 0.009198847770690919, 0.00968115234375, 0.008580831527709961, 0.008777888298034667, 0.008370016098022461, 0.008364031791687012, 0.008358752250671387, 0.008459456443786621, 0.008415743827819825, 0.008293984413146972, 0.008297183990478515, 0.008302656173706055, 0.008279423713684082, 0.00830726432800293, 0.008277312278747558, 0.00826268768310547, 0.008393471717834472, 0.00831167984008789, 0.008263551712036132, 0.008414527893066407, 0.008280927658081055, 0.008244992256164551, 0.008235424041748048, 0.008295968055725097, 0.008259776115417481, 0.008245375633239746, 0.008275679588317871, 0.008237504005432128, 0.008295552253723144, 0.008288991928100587, 0.008269824028015137, 0.008387968063354493, 0.008244095802307129, 0.008305536270141602, 0.008254591941833495, 0.008248255729675294, 0.008297280311584472, 0.008281184196472167]",tokens/s,119.6907525648142,, 
4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1570.562048,1604.190208,0.0,1201.668096,1189.151232,s,1,8.401806640625,8.401806640625,0.0,8.401806640625,8.401806640625,8.401806640625,8.401806640625,[8.401806640625],,kWh,3.556609466666032e-05,3.915731534917954e-06,1.0888619821999845e-05,5.037044602357812e-05,,MB,1686.05696,1799.225344,0.0,1382.023168,1351.367168,s,10,0.4756693420410156,0.04756693420410156,0.00015275484322596928,0.047562110900878904,0.04775382080078125,0.04782882995605468,0.04788883728027344,"[0.047584224700927734, 0.04736636734008789, 0.04743743896484375, 0.047473312377929684, 0.047622112274169924, 0.04754950332641601, 0.04742067337036133, 0.047737152099609374, 0.047574718475341796, 0.047903839111328124]",tokens/s,5381.8898418289455,kWh,1.427549544024274e-06,1.5743328940958855e-07,9.501376164780055e-07,2.5351204499118683e-06,tokens/kWh,100981395.1873173,MB,1693.847552,1841.168384,0.0,1423.966208,1407.328256,s,10,13.674264770507813,1.3674264770507814,0.007045571745078874,1.3668082885742188,1.3741877197265624,1.3784882080078125,1.3819285986328125,"[1.3601513671875, 1.3654959716796875, 1.364861572265625, 1.36155078125, 1.3573333740234375, 1.368206787109375, 1.3827886962890625, 1.3732320556640625, 1.3725235595703125, 1.36812060546875]",tokens/s,46.07194687050104,kWh,3.9831488602639934e-05,4.393011124421548e-06,1.8551933540723365e-05,6.277643326778483e-05,tokens/kWh,1003561.3162548038,,s,630,13.671653858184815,0.021701037870134627,0.0004162646086460559,0.021623727798461917,0.021904031372070313,0.022045338535308837,0.023074691810607912,"[0.021573247909545897, 0.021596351623535157, 0.02155743980407715, 0.02169599914550781, 0.02179532814025879, 0.021652864456176757, 0.021800640106201172, 0.021607168197631837, 0.021448896408081054, 0.02148681640625, 0.021555999755859374, 0.021557247161865235, 0.021530624389648437, 0.021436256408691408, 0.021498016357421875, 0.02152448081970215, 0.02158758354187012, 0.021494144439697264, 0.021558719635009764, 0.021594688415527342, 0.021598207473754884, 0.021590368270874023, 0.02167363166809082, 0.02165555191040039, 0.021716224670410157, 0.02170342445373535, 0.02165692710876465, 0.021516319274902343, 0.02162886428833008, 0.02161324882507324, 0.021485567092895508, 0.021747711181640626, 0.021530399322509764, 0.021549280166625977, 0.021596160888671875, 0.02153011131286621, 0.021486080169677735, 0.021824607849121092, 0.021535648345947265, 0.021587648391723634, 0.02142639923095703, 0.021516096115112304, 0.02151203155517578, 0.02150649642944336, 0.021610496520996093, 0.021539968490600588, 0.021502847671508788, 0.021664863586425782, 0.021618976593017578, 0.02155174446105957, 0.021534208297729493, 0.021554752349853514, 0.021861312866210937, 0.02161664009094238, 0.021495359420776367, 0.021582271575927733, 0.02152003288269043, 0.02153913688659668, 0.021749792098999025, 0.021569280624389647, 
0.021401599884033205, 0.021537023544311522, 0.021733375549316408, 0.02137273597717285, 0.0215063362121582, 0.021516191482543946, 0.02147737693786621, 0.02142972755432129, 0.02173529624938965, 0.02146371269226074, 0.021727231979370116, 0.021395040512084962, 0.021424543380737304, 0.02172313690185547, 0.02175529670715332, 0.0218089599609375, 0.021727264404296873, 0.021674720764160157, 0.021595935821533203, 0.021643104553222655, 0.021516704559326173, 0.02155094337463379, 0.021388639450073244, 0.021408512115478517, 0.02146895980834961, 0.021440799713134766, 0.02141756820678711, 0.021402015686035156, 0.021708799362182618, 0.021665632247924806, 0.021448863983154296, 0.021432191848754882, 0.021473407745361328, 0.021727231979370116, 0.0216712646484375, 0.02309391975402832, 0.022691839218139647, 0.02181046485900879, 0.021666240692138673, 0.021702943801879884, 0.021743616104125976, 0.021563392639160156, 0.022386688232421875, 0.02302761650085449, 0.0216843204498291, 0.02168832015991211, 0.02162892723083496, 0.021780448913574217, 0.02179075241088867, 0.02154857635498047, 0.021559200286865234, 0.021520639419555666, 0.021498176574707033, 0.021478431701660156, 0.021408735275268556, 0.021497791290283202, 0.021482784271240233, 0.021433120727539064, 0.0215285758972168, 0.02143833541870117, 0.021534271240234375, 0.02166227149963379, 0.022075103759765624, 0.022122592926025392, 0.021855968475341797, 0.021657567977905273, 0.021494848251342773, 0.021482431411743164, 0.021604352951049805, 0.021415935516357423, 0.02145484733581543, 0.021691680908203125, 0.02246076774597168, 0.022122880935668946, 0.021630847930908203, 0.021689983367919923, 0.021680639266967772, 0.021593215942382813, 0.022244224548339842, 0.022859519958496093, 0.022876415252685547, 0.021729215621948243, 0.021610559463500975, 0.02168217658996582, 0.021503679275512694, 0.02156515121459961, 0.021587551116943358, 0.02169343948364258, 0.02166579246520996, 0.021561248779296875, 0.021985376358032226, 0.021605567932128908, 0.021521215438842774, 0.021495840072631837, 0.021516544342041016, 0.021509855270385743, 0.02150809669494629, 0.021506048202514647, 0.021523679733276367, 0.021551904678344728, 0.02155516815185547, 0.021547008514404296, 0.02170262336730957, 0.02149177551269531, 0.02154617691040039, 0.021513023376464845, 0.021540864944458008, 0.02161664009094238, 0.021548608779907226, 0.021600704193115234, 0.021602304458618164, 0.02158099174499512, 0.021553983688354494, 0.02153628730773926, 0.021499359130859375, 0.021601184844970703, 0.021504032135009767, 0.021527616500854493, 0.021691328048706056, 0.021553216934204103, 0.021610496520996093, 0.02153183937072754, 0.02156787109375, 0.02164575958251953, 0.02157145690917969, 0.02159779167175293, 0.02171104049682617, 0.021737823486328123, 0.021710559844970702, 0.021526304244995118, 0.021575904846191405, 0.021437824249267576, 0.021481407165527343, 0.02150876808166504, 0.02146512031555176, 0.021506048202514647, 0.021559295654296876, 0.02149990463256836, 0.021538816452026367, 0.02145280075073242, 0.02149580764770508, 0.021444608688354492, 0.02147942352294922, 0.02156096076965332, 0.02156559944152832, 0.021522655487060546, 0.021605823516845705, 0.021461376190185545, 0.021511423110961915, 0.021455808639526366, 0.0216760311126709, 0.021577535629272462, 0.021633216857910156, 0.021587392807006837, 0.021569536209106444, 0.02152009582519531, 0.02158883285522461, 0.021659456253051757, 0.021852352142333983, 0.021934080123901366, 0.02183782386779785, 0.021747711181640626, 0.021671680450439452, 0.021552383422851564, 
0.021633888244628908, 0.021672096252441406, 0.022633535385131836, 0.023001440048217775, 0.021693023681640625, 0.02168956756591797, 0.02185091209411621, 0.021639167785644533, 0.021741567611694337, 0.021636415481567382, 0.021527231216430662, 0.021501440048217774, 0.021471744537353517, 0.021440160751342772, 0.021426528930664063, 0.021592063903808592, 0.021893119812011717, 0.02145280075073242, 0.021518335342407227, 0.02146099281311035, 0.021501344680786134, 0.021469791412353514, 0.02145894432067871, 0.02141980743408203, 0.021374719619750977, 0.0214368953704834, 0.02159721565246582, 0.02152931213378906, 0.021459104537963868, 0.021483455657958984, 0.021645151138305663, 0.021441280364990236, 0.021437536239624022, 0.021393728256225587, 0.021486047744750977, 0.02153606414794922, 0.021450687408447265, 0.02148646354675293, 0.02137468719482422, 0.021411327362060546, 0.021322336196899414, 0.021452768325805664, 0.021595903396606445, 0.021457376480102538, 0.021454816818237306, 0.021495840072631837, 0.02146303939819336, 0.021587968826293946, 0.021489664077758788, 0.021481472015380858, 0.021587007522583006, 0.02156844711303711, 0.021555200576782226, 0.021601728439331055, 0.021657344818115234, 0.021653568267822266, 0.02149247932434082, 0.021503456115722658, 0.0214881591796875, 0.021495744705200194, 0.021555263519287108, 0.02149171257019043, 0.02146713638305664, 0.02157513618469238, 0.021934623718261718, 0.021880064010620117, 0.02168668746948242, 0.021639520645141602, 0.021546783447265624, 0.021500064849853517, 0.021484895706176756, 0.021506784439086914, 0.021605695724487305, 0.021492095947265626, 0.021620223999023438, 0.021492544174194335, 0.022302112579345702, 0.021574079513549806, 0.021527711868286132, 0.021529600143432616, 0.021501087188720704, 0.021492095947265626, 0.021519840240478514, 0.021541887283325196, 0.021488960266113282, 0.021498559951782226, 0.02149100875854492, 0.021500608444213868, 0.021534303665161132, 0.021463071823120117, 0.02163942337036133, 0.021717824935913087, 0.0215380802154541, 0.02154979133605957, 0.021487583160400392, 0.026583072662353515, 0.02278131294250488, 0.02165113639831543, 0.021584831237792968, 0.021643264770507813, 0.02165350341796875, 0.021530624389648437, 0.0216760311126709, 0.021761024475097656, 0.022021120071411132, 0.021743616104125976, 0.021551103591918946, 0.021577728271484374, 0.021716991424560548, 0.02153267288208008, 0.021604352951049805, 0.021542911529541017, 0.021610496520996093, 0.02158492851257324, 0.021947359085083006, 0.021480735778808595, 0.02155183982849121, 0.02145894432067871, 0.021561344146728514, 0.02151628875732422, 0.021607839584350585, 0.021520992279052735, 0.021587968826293946, 0.021573631286621094, 0.021600255966186522, 0.021703840255737305, 0.021590879440307617, 0.021521888732910156, 0.02146748733520508, 0.021575872421264648, 0.02153267288208008, 0.022271999359130858, 0.021564863204956056, 0.021592639923095704, 0.021506048202514647, 0.02159555244445801, 0.021563488006591795, 0.0216048641204834, 0.02153232002258301, 0.021533023834228514, 0.021590015411376954, 0.021590015411376954, 0.021495359420776367, 0.02148601531982422, 0.021576704025268553, 0.021556224822998047, 0.02157155227661133, 0.021593568801879882, 0.02165203285217285, 0.021565439224243164, 0.021639167785644533, 0.021465087890625, 0.021788671493530275, 0.02191564750671387, 0.02190991973876953, 0.021789087295532226, 0.021736576080322267, 0.02166032028198242, 0.02171516799926758, 0.021573631286621094, 0.02198031997680664, 0.022115167617797853, 0.021813087463378907, 0.023271583557128907, 
0.021850175857543945, 0.021712831497192383, 0.02164684867858887, 0.02166156768798828, 0.02166383934020996, 0.021813791275024415, 0.021770240783691407, 0.021821151733398436, 0.021717279434204102, 0.02168544006347656, 0.021780895233154296, 0.02170102310180664, 0.021738752365112305, 0.02172185516357422, 0.02169798469543457, 0.02185856056213379, 0.022387008666992187, 0.02205695915222168, 0.02195644760131836, 0.02180281639099121, 0.02230089569091797, 0.02202579116821289, 0.021723712921142578, 0.02168627166748047, 0.021704704284667968, 0.0217640323638916, 0.021684288024902344, 0.021654848098754884, 0.021700960159301758, 0.021960319519042967, 0.022147808074951172, 0.024268800735473633, 0.026814464569091798, 0.021858240127563478, 0.02192140769958496, 0.021801408767700196, 0.02166988754272461, 0.021700607299804688, 0.021843967437744142, 0.021643264770507813, 0.021909503936767577, 0.021742879867553713, 0.021850847244262697, 0.021712896347045898, 0.021843967437744142, 0.021827455520629882, 0.021962879180908203, 0.021755903244018555, 0.02168627166748047, 0.021766143798828123, 0.021710111618041993, 0.021664480209350585, 0.021636735916137694, 0.02190342330932617, 0.02181875228881836, 0.021824127197265626, 0.021857311248779297, 0.021791391372680664, 0.02162719917297363, 0.022025375366210936, 0.021867359161376953, 0.021792736053466797, 0.021866527557373047, 0.021769311904907225, 0.022133663177490236, 0.021782527923583983, 0.021675519943237305, 0.021795263290405275, 0.021728864669799806, 0.021825855255126952, 0.022008991241455077, 0.021744640350341796, 0.021884927749633788, 0.021971168518066405, 0.022019775390625, 0.021827680587768555, 0.02187468719482422, 0.0216595516204834, 0.021769535064697264, 0.02183184051513672, 0.021893312454223633, 0.02180656051635742, 0.021791263580322264, 0.021758176803588866, 0.02206742477416992, 0.02166579246520996, 0.021733375549316408, 0.02171494483947754, 0.021721088409423828, 0.021624671936035157, 0.021825696945190428, 0.021835168838500976, 0.02173174476623535, 0.021672128677368164, 0.021819391250610352, 0.021788543701171875, 0.02166387176513672, 0.021620479583740235, 0.021672191619873046, 0.021689952850341795, 0.021763776779174803, 0.02163759994506836, 0.021586143493652343, 0.021639039993286133, 0.02162499237060547, 0.02166953659057617, 0.02213923263549805, 0.021712896347045898, 0.021764095306396485, 0.021897216796875, 0.021927135467529297, 0.02203113555908203, 0.021753152847290038, 0.021676736831665037, 0.02165900802612305, 0.02174835205078125, 0.02188073539733887, 0.022018495559692382, 0.021874399185180665, 0.02194905662536621, 0.021643264770507813, 0.021568735122680663, 0.02160908889770508, 0.021786239624023436, 0.02150454330444336, 0.02167398452758789, 0.021733375549316408, 0.02166988754272461, 0.021669023513793944, 0.021550975799560546, 0.021961343765258788, 0.021948640823364257, 0.021790143966674804, 0.021964864730834963, 0.02191417694091797, 0.02188038444519043, 0.021835391998291015, 0.02191244888305664, 0.02186412811279297, 0.02165996742248535, 0.021557247161865235, 0.021659648895263672, 0.021590015411376954, 0.021563072204589844, 0.021602432250976564, 0.021584064483642577, 0.021807104110717773, 0.021596000671386718, 0.02160246467590332, 0.021608448028564452, 0.02165113639831543, 0.021678400039672852, 0.022208511352539064, 0.02406934356689453, 0.021963552474975587, 0.021952512741088868, 0.02172313690185547, 0.021656959533691407, 0.021697151184082032, 0.02183782386779785, 0.02162892723083496, 0.021736448287963867, 0.021834527969360352, 0.021780704498291014, 
0.021741567611694337, 0.021753856658935547, 0.021794496536254884, 0.02174291229248047, 0.02167091178894043, 0.021552799224853515, 0.02181769561767578, 0.02174550437927246, 0.021790016174316407, 0.021786848068237306, 0.021711488723754883, 0.021616191864013673, 0.021795167922973632, 0.021647455215454102, 0.02166579246520996, 0.021655967712402344, 0.02162131118774414, 0.021577728271484374, 0.021739519119262696, 0.02150150489807129, 0.021516511917114258, 0.02165123176574707, 0.021596063613891603, 0.021600448608398437, 0.02159244728088379, 0.021647296905517577, 0.021614240646362304, 0.021651840209960936, 0.021608448028564452, 0.021640447616577147, 0.021667903900146484, 0.02179340744018555, 0.021915136337280275, 0.02549612808227539, 0.021942272186279296, 0.02186240005493164, 0.021751359939575197, 0.021641408920288086, 0.021612800598144532, 0.021573471069335937, 0.0216080322265625, 0.021598783493041993, 0.0216944637298584, 0.02164531135559082, 0.02161155128479004, 0.021574464797973633, 0.02155945587158203, 0.02159984016418457, 0.021707168579101564, 0.021583871841430666, 0.02164531135559082, 0.021606399536132814, 0.021602304458618164, 0.021583871841430666, 0.021737472534179687, 0.021659456253051757, 0.021627071380615235, 0.021577280044555665, 0.021658048629760743, 0.02182963180541992, 0.02173676872253418, 0.02160047912597656, 0.021588447570800782, 0.021626880645751953, 0.021630239486694337, 0.021544927597045897, 0.021539583206176757, 0.021622783660888673, 0.021563392639160156, 0.021538816452026367, 0.021540864944458008, 0.021622783660888673, 0.021518335342407227, 0.02229248046875, 0.02163711929321289, 0.02171811294555664, 0.021703584671020508, 0.02168009567260742]",tokens/s,46.08074535348462,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,813.760512,561.905664,0.0,159.383552,143.673856,s,1,7.3676845703125,7.3676845703125,0.0,7.3676845703125,7.3676845703125,7.3676845703125,7.3676845703125,[7.3676845703125],,kWh,1.0916867704190736e-05,1.196920151901788e-06,2.723335512017089e-06,1.4837123368109613e-05,,MB,1306.140672,616.431616,0.0,199.22944,186.684928,s,24,0.19232851219177247,0.00801368800799052,6.165399156196195e-05,0.008013535976409912,0.008090870380401611,0.008098009777069093,0.008146730442047119,"[0.008099200248718262, 0.007954815864562988, 0.007960127830505372, 0.007942751884460449, 0.007936384201049805, 0.008160927772521972, 0.007939871788024902, 0.008080512046813966, 0.007945375919342041, 0.007949920177459716, 0.007964767932891846, 0.00809126377105713, 0.007992832183837891, 0.008029184341430663, 0.008009984016418456, 0.007948351860046386, 0.008051456451416016, 0.008043423652648925, 0.008026975631713867, 0.007994656085968017, 0.008017087936401367, 0.008020544052124023, 0.008078144073486329, 0.00808995246887207]",tokens/s,31945.34148880517,kWh,2.411672569765749e-07,2.65963053540374e-08,9.840756265619628e-08,3.661711249868086e-07,tokens/kWh,699126672.014956,MB,1319.596032,620.62592,0.0,203.423744,186.687488,s,24,10.198731811523437,0.42494715881347656,0.002993508830446943,0.42421670532226563,0.42788203125,0.43008334045410157,0.43438586303710935,"[0.4269555053710937, 0.42416644287109373, 0.4237364196777344, 0.4224688415527344, 0.4228974914550781, 0.4250695495605469, 0.42160183715820315, 0.4242669677734375, 0.42280044555664065, 0.42234713745117186, 0.4238018188476563, 0.42356057739257813, 0.42827911376953126, 0.4249194030761719, 0.4304017333984375, 0.4227448425292969, 0.4239934387207031, 0.422290771484375, 0.4267169189453125, 0.4264006042480469, 0.435575927734375, 0.42447433471679685, 0.42450375366210935, 0.4247579345703125]",tokens/s,148.25372683019347,kWh,1.2420049390939685e-05,1.3697175134143692e-06,4.332186259594147e-06,1.81219531639482e-05,tokens/kWh,3476446.464133466,,s,1512,10.184698696136467,0.0067359118360690974,0.00020807272034875375,0.006702847957611084,0.006794848155975342,0.006853195142745972,0.007366307768821726,"[0.0068031039237976074, 0.006912352085113526, 0.006958335876464844, 0.006751232147216797, 0.006762239933013916, 0.006750207901000976, 0.006713280200958252, 0.006719327926635742, 0.006747583866119385, 0.006662015914916992, 0.006730432033538818, 0.006948863983154297, 0.006755936145782471, 0.006722176074981689, 0.006770688056945801, 0.006781184196472168, 0.0066967358589172365, 0.006879327774047851, 0.006752064228057861, 0.0069012799263000486, 0.006753151893615723, 0.006775487899780273, 0.00684335994720459, 0.006823935985565186, 0.006713344097137451, 0.00671884822845459, 0.006736192226409912, 0.006696576118469238, 
0.00671398401260376, 0.006711135864257813, 0.006691328048706055, 0.006713056087493897, 0.00671721601486206, 0.00681932783126831, 0.006746848106384277, 0.006976672172546387, 0.00677564811706543, 0.006715487957000732, 0.006815552234649658, 0.006703199863433838, 0.006797056198120117, 0.006723648071289063, 0.006696288108825684, 0.0067387838363647465, 0.006692863941192627, 0.00667852783203125, 0.006750207901000976, 0.006686719894409179, 0.006846528053283691, 0.006821824073791504, 0.006743807792663575, 0.0067873277664184575, 0.0069918718338012695, 0.006721536159515381, 0.006719488143920899, 0.0069489598274230955, 0.00680460786819458, 0.006727807998657227, 0.006721280097961426, 0.006715744018554687, 0.006695487976074219, 0.00667852783203125, 0.006814943790435791, 0.006721983909606934, 0.006704864025115967, 0.006998208045959473, 0.006762495994567871, 0.006735648155212402, 0.0067402877807617185, 0.006809599876403808, 0.006711135864257813, 0.0066706881523132325, 0.006695744037628174, 0.006718463897705078, 0.006673408031463623, 0.006699903964996338, 0.006727807998657227, 0.006739039897918701, 0.006671264171600342, 0.006719583988189697, 0.006682528018951416, 0.006709248065948486, 0.006726719856262207, 0.0066835517883300785, 0.006776768207550049, 0.006711296081542969, 0.0067114558219909665, 0.006756576061248779, 0.00668342399597168, 0.006692863941192627, 0.006734208106994629, 0.00681660795211792, 0.006756063938140869, 0.006696800231933594, 0.006751584053039551, 0.0066826558113098145, 0.00667523193359375, 0.006704864025115967, 0.006703392028808593, 0.0068056960105895995, 0.006694943904876709, 0.006722879886627197, 0.006707680225372314, 0.006680575847625733, 0.006744063854217529, 0.006715392112731934, 0.00674403190612793, 0.006860832214355469, 0.006692287921905518, 0.006686880111694336, 0.006714975833892823, 0.0067571840286254885, 0.006684671878814697, 0.0067010560035705566, 0.006709248065948486, 0.006673632144927978, 0.006701759815216065, 0.006692959785461426, 0.006653791904449463, 0.006729887962341309, 0.006671711921691894, 0.006703775882720947, 0.006782048225402832, 0.006717376232147217, 0.006673151969909668, 0.006821248054504394, 0.0067274560928344725, 0.006688704013824463, 0.006694943904876709, 0.006739935874938965, 0.00667852783203125, 0.0066960639953613284, 0.006817759990692139, 0.006703392028808593, 0.00670579195022583, 0.006704448223114013, 0.006707647800445557, 0.006709887981414795, 0.006708767890930176, 0.0067248959541320805, 0.006742752075195312, 0.006765888214111328, 0.0067383041381835936, 0.006690720081329346, 0.006690336227416992, 0.006687424182891846, 0.006750720024108887, 0.0066761598587036135, 0.006700992107391357, 0.006705247879028321, 0.00667142391204834, 0.006799647808074951, 0.006707071781158447, 0.006671487808227539, 0.006759871959686279, 0.006709536075592041, 0.006719583988189697, 0.00679036808013916, 0.006911808013916015, 0.006684864044189453, 0.006832831859588623, 0.006712800025939942, 0.006696896076202392, 0.006691487789154052, 0.006729343891143799, 0.006758592128753662, 0.006690112113952637, 0.006711584091186523, 0.0066771841049194335, 0.0067786240577697755, 0.006721119880676269, 0.006723264217376709, 0.0066936960220336916, 0.006665823936462402, 0.006696447849273681, 0.006677055835723877, 0.006693215847015381, 0.006694816112518311, 0.006686048030853271, 0.0067058238983154295, 0.006686143875122071, 0.006678400039672852, 0.006705952167510986, 0.006673696041107177, 0.00677129602432251, 0.006707263946533203, 0.006680064201354981, 0.006742623805999756, 0.006698912143707276, 
0.006696959972381592, 0.0066592001914978025, 0.006687615871429443, 0.006696896076202392, 0.006751584053039551, 0.006762847900390625, 0.006666624069213868, 0.006651264190673828, 0.0066538238525390625, 0.006680960178375244, 0.006662528038024903, 0.006721471786499024, 0.006721183776855468, 0.006711711883544922, 0.006656000137329102, 0.00669593620300293, 0.0066713600158691405, 0.006673439979553223, 0.00670307207107544, 0.006688960075378418, 0.006658976078033447, 0.006754208087921143, 0.006670464038848877, 0.006676511764526367, 0.006696800231933594, 0.0067151041030883785, 0.006664480209350586, 0.00667852783203125, 0.006667327880859375, 0.006719840049743652, 0.006695136070251465, 0.006683263778686523, 0.006643455982208252, 0.0067125439643859865, 0.006670783996582031, 0.006671807765960693, 0.006681407928466797, 0.006719840049743652, 0.006670015811920166, 0.0066724481582641606, 0.006684127807617187, 0.006720064163208007, 0.006702400207519531, 0.006677343845367432, 0.006663008213043213, 0.006689760208129883, 0.006737919807434082, 0.0066416640281677245, 0.006699359893798828, 0.0067084159851074215, 0.0067506561279296875, 0.006681695938110351, 0.006683584213256836, 0.006840415954589844, 0.006750112056732178, 0.006729728221893311, 0.006823935985565186, 0.006715392112731934, 0.006681856155395508, 0.006703904151916504, 0.0066468482017517086, 0.006695839881896972, 0.006715648174285889, 0.00662169599533081, 0.006658527851104736, 0.00668236780166626, 0.006643008232116699, 0.00666860818862915, 0.006717343807220459, 0.006673120021820069, 0.006674528121948242, 0.006682528018951416, 0.0066145601272583005, 0.006670815944671631, 0.006692736148834228, 0.006641727924346924, 0.006682943820953369, 0.006759456157684326, 0.006667232036590576, 0.006731296062469482, 0.006697184085845948, 0.00668387222290039, 0.006662559986114502, 0.006684160232543946, 0.006666463851928711, 0.006711967945098877, 0.006688992023468018, 0.006821152210235596, 0.006668799877166748, 0.006689055919647217, 0.006670207977294922, 0.006661151885986328, 0.006685823917388916, 0.006641568183898926, 0.006713119983673096, 0.006674367904663086, 0.006674655914306641, 0.006664031982421875, 0.0067003841400146485, 0.006677311897277832, 0.006658912181854248, 0.006681600093841553, 0.006813632011413574, 0.0066761279106140135, 0.0066724162101745605, 0.006646143913269043, 0.006640704154968262, 0.006667200088500976, 0.006657408237457275, 0.006648608207702637, 0.006774271965026855, 0.006699359893798828, 0.0066641921997070315, 0.0066993279457092285, 0.006688447952270507, 0.006651904106140137, 0.006703104019165039, 0.006675680160522461, 0.00670307207107544, 0.006722239971160888, 0.0074787201881408695, 0.006890175819396972, 0.00671068811416626, 0.006842016220092773, 0.006725887775421143, 0.006681280136108398, 0.006646527767181397, 0.006701183795928955, 0.0066795840263366695, 0.006654079914093018, 0.0066772160530090335, 0.006662144184112549, 0.006658048152923584, 0.006739264011383057, 0.006666944026947021, 0.0066538558006286625, 0.006689919948577881, 0.00667683219909668, 0.006803455829620361, 0.00667852783203125, 0.006668928146362304, 0.006701183795928955, 0.00671343994140625, 0.00667849588394165, 0.00667145586013794, 0.006693183898925781, 0.006674848079681397, 0.006663680076599121, 0.0066993279457092285, 0.00674015998840332, 0.006852416038513184, 0.006745567798614502, 0.006766655921936035, 0.00664847993850708, 0.006746016025543213, 0.006670432090759277, 0.006621183872222901, 0.006797311782836914, 0.0067870721817016606, 0.006739999771118164, 0.006684800148010254, 
0.0067285442352294925, 0.007914495944976807, 0.0073768959045410155, 0.0072724480628967286, 0.00667683219909668, 0.006761760234832764, 0.006713727951049805, 0.006672607898712158, 0.006907519817352295, 0.006678688049316406, 0.006658400058746338, 0.006712800025939942, 0.006686944007873535, 0.0067134079933166505, 0.006685664176940918, 0.006699615955352783, 0.0066562237739562985, 0.006673888206481933, 0.006647679805755615, 0.006629951953887939, 0.0066641921997070315, 0.006688992023468018, 0.006639488220214844, 0.006698847770690918, 0.006656288146972656, 0.006645535945892334, 0.006678368091583252, 0.006788832187652588, 0.006688672065734863, 0.006875167846679688, 0.006791967868804932, 0.006686272144317627, 0.006653664112091065, 0.006666975975036621, 0.006645760059356689, 0.006656000137329102, 0.006709248065948486, 0.006651968002319336, 0.006643487930297852, 0.006668288230895996, 0.006709407806396484, 0.006647168159484863, 0.006754496097564697, 0.006696864128112793, 0.006770527839660644, 0.006688767910003662, 0.006673024177551269, 0.0066273918151855465, 0.006674752235412597, 0.006656832218170166, 0.006650752067565918, 0.006668288230895996, 0.006706751823425293, 0.006664544105529785, 0.006674528121948242, 0.0066490240097045895, 0.006631360054016113, 0.006695168018341064, 0.0066566400527954105, 0.006617087841033936, 0.006652991771697998, 0.006695551872253418, 0.006624639987945557, 0.00667519998550415, 0.006708447933197022, 0.006777215957641602, 0.006711904048919678, 0.006719488143920899, 0.006657599925994873, 0.006740032196044922, 0.006658432006835938, 0.006637279987335205, 0.006664480209350586, 0.006660352230072022, 0.0066516480445861816, 0.006692863941192627, 0.006664351940155029, 0.006680416107177734, 0.00667628812789917, 0.006635519981384277, 0.006643904209136963, 0.006694560050964356, 0.006675007820129394, 0.006812479972839356, 0.006673376083374024, 0.006708703994750977, 0.006640160083770752, 0.0066888961791992186, 0.006689727783203125, 0.0066789441108703615, 0.00668726396560669, 0.006690783977508545, 0.0067309122085571285, 0.006649856090545654, 0.006700160026550293, 0.006690559864044189, 0.006668032169342041, 0.006716991901397705, 0.006699103832244873, 0.006654047966003418, 0.006791264057159424, 0.00674019193649292, 0.006701344013214111, 0.006698912143707276, 0.00663705587387085, 0.006653535842895508, 0.0066776638031005855, 0.00688044786453247, 0.00675491189956665, 0.006913951873779297, 0.006678592205047607, 0.006740064144134522, 0.006633535861968994, 0.006680031776428222, 0.0066891517639160155, 0.006652031898498535, 0.006694975852966308, 0.00671724796295166, 0.006654240131378174, 0.006680287837982178, 0.006673759937286377, 0.006647552013397217, 0.006723872184753418, 0.00668236780166626, 0.006658944129943847, 0.006959104061126709, 0.006758399963378906, 0.006676479816436768, 0.006703199863433838, 0.006708320140838623, 0.006673151969909668, 0.00678268814086914, 0.0067341761589050295, 0.006736127853393554, 0.006878719806671142, 0.006686272144317627, 0.006634175777435303, 0.007214975833892822, 0.0067339520454406734, 0.007028128147125244, 0.00670576000213623, 0.0067276802062988285, 0.006681695938110351, 0.006812255859375, 0.006715712070465088, 0.006735648155212402, 0.0066724162101745605, 0.006695295810699463, 0.006779967784881592, 0.006719456195831299, 0.006642176151275635, 0.00673196792602539, 0.00666761589050293, 0.006648575782775879, 0.00666326379776001, 0.006647552013397217, 0.006673696041107177, 0.0066752958297729495, 0.006656288146972656, 0.0066579518318176266, 0.00666153621673584, 
0.0066457920074462894, 0.006753759860992432, 0.006678847789764404, 0.006642271995544433, 0.006719488143920899, 0.006684832096099854, 0.006666079998016357, 0.006651679992675782, 0.006695136070251465, 0.006696959972381592, 0.007280640125274658, 0.006961152076721191, 0.0068037757873535155, 0.006710976123809814, 0.006671872138977051, 0.006703296184539795, 0.006681151866912842, 0.006653471946716309, 0.006677919864654541, 0.0066752638816833494, 0.006703104019165039, 0.006671807765960693, 0.006650176048278809, 0.0067363839149475096, 0.006694655895233154, 0.006656000137329102, 0.006696959972381592, 0.006690815925598144, 0.006715487957000732, 0.006698912143707276, 0.0066826238632202144, 0.006688543796539307, 0.0066696639060974125, 0.0067136001586914065, 0.006699647903442383, 0.006677599906921386, 0.006787519931793213, 0.006695680141448975, 0.006716127872467041, 0.006783999919891357, 0.006672383785247803, 0.006660096168518067, 0.006645472049713135, 0.006700672149658203, 0.006698912143707276, 0.006683135986328125, 0.006687200069427491, 0.0067233600616455075, 0.006647808074951172, 0.006670335769653321, 0.006672383785247803, 0.006640768051147461, 0.006742080211639404, 0.00671827220916748, 0.00665718412399292, 0.006688992023468018, 0.006652800083160401, 0.006638976097106933, 0.006689472198486328, 0.0066819839477539065, 0.006701024055480957, 0.006676896095275879, 0.006647808074951172, 0.006638720035552979, 0.006677472114562989, 0.006661312103271484, 0.006648543834686279, 0.0067338237762451176, 0.006801407814025879, 0.006686719894409179, 0.006696959972381592, 0.006698016166687012, 0.006656000137329102, 0.006698080062866211, 0.006762527942657471, 0.006644991874694825, 0.006717343807220459, 0.0066928000450134275, 0.0067873277664184575, 0.006671040058135986, 0.006711103916168213, 0.00669593620300293, 0.006667263984680176, 0.006676447868347168, 0.006719808101654053, 0.006661280155181885, 0.006693855762481689, 0.006682208061218262, 0.006793407917022705, 0.006686719894409179, 0.006678336143493653, 0.006651487827301026, 0.006748576164245606, 0.006666240215301514, 0.006688767910003662, 0.0066776638031005855, 0.006656511783599854, 0.006736095905303955, 0.006738048076629638, 0.006710559844970703, 0.0066847681999206545, 0.006716032028198243, 0.006647808074951172, 0.006626848220825195, 0.006660575866699219, 0.006666240215301514, 0.00663702392578125, 0.006674272060394287, 0.006650559902191162, 0.0070266880989074704, 0.006718656063079834, 0.006699584007263184, 0.006684927940368652, 0.006703296184539795, 0.0066909117698669435, 0.00664137601852417, 0.00668617582321167, 0.006756896018981933, 0.006672383785247803, 0.006754303932189941, 0.006671711921691894, 0.006649631977081299, 0.006701951980590821, 0.006675456047058105, 0.006773759841918945, 0.006657599925994873, 0.006695231914520264, 0.006799456119537354, 0.006678112030029297, 0.006721343994140625, 0.006695551872253418, 0.006665631771087647, 0.006758560180664062, 0.006678976058959961, 0.0067233600616455075, 0.006697184085845948, 0.00667852783203125, 0.006722911834716797, 0.0067283520698547365, 0.0072226881980896, 0.006748159885406494, 0.006668255805969238, 0.006702847957611084, 0.0067060480117797855, 0.006651135921478271, 0.006708191871643066, 0.006724607944488525, 0.006649951934814453, 0.0066928000450134275, 0.006714111804962159, 0.006657343864440918, 0.006685279846191407, 0.006670048236846924, 0.006647679805755615, 0.0066769919395446775, 0.006657216072082519, 0.006650559902191162, 0.00673305606842041, 0.006943615913391113, 0.0066744318008422855, 0.0067116479873657224, 
0.006688608169555664, 0.006612800121307373, 0.006776832103729248, 0.006708703994750977, 0.006662112236022949, 0.0066893439292907714, 0.006659904003143311, 0.006615231990814209, 0.007152895927429199, 0.006743135929107666, 0.00676639986038208, 0.006750336170196533, 0.006757919788360596, 0.006660287857055664, 0.006660096168518067, 0.006704768180847168, 0.0066891517639160155, 0.006684031963348389, 0.006710912227630615, 0.006693759918212891, 0.006784351825714111, 0.006862815856933594, 0.006754303932189941, 0.006785024166107178, 0.006707071781158447, 0.006786496162414551, 0.006695615768432617, 0.006751584053039551, 0.006719488143920899, 0.006734208106994629, 0.006673984050750733, 0.006755040168762207, 0.006727231979370117, 0.006693183898925781, 0.006729856014251709, 0.006795263767242431, 0.006747680187225342, 0.006662623882293701, 0.006709184169769287, 0.006680575847625733, 0.006684735774993896, 0.006698400020599365, 0.006675039768218994, 0.006720640182495117, 0.006687776088714599, 0.00668620777130127, 0.006656352043151855, 0.006692863941192627, 0.0066638078689575195, 0.006662528038024903, 0.006674655914306641, 0.006754079818725586, 0.006740992069244385, 0.0067573761940002445, 0.006686079978942871, 0.006632063865661621, 0.006729887962341309, 0.006739808082580566, 0.00666864013671875, 0.006706079959869385, 0.006759168148040772, 0.006666048049926758, 0.0067216320037841795, 0.0067073597908020016, 0.006652063846588134, 0.0067003521919250485, 0.006695199966430664, 0.006660128116607666, 0.006746272087097168, 0.006708799839019775, 0.006647776126861573, 0.006711743831634522, 0.00670905590057373, 0.006832352161407471, 0.006653952121734619, 0.006791168212890625, 0.0067667198181152344, 0.0066455998420715335, 0.006756383895874024, 0.006731840133666992, 0.00675600004196167, 0.0066908798217773435, 0.006733856201171875, 0.006655295848846435, 0.006651936054229736, 0.006674176216125489, 0.00667852783203125, 0.006660096168518067, 0.0070321598052978515, 0.006977215766906738, 0.008258527755737304, 0.00793552017211914, 0.006771168231964111, 0.006719103813171387, 0.006760575771331787, 0.0067547521591186524, 0.006724800109863281, 0.00674454402923584, 0.006715839862823487, 0.006741727828979492, 0.006731776237487793, 0.006780416011810303, 0.006682752132415771, 0.006700831890106201, 0.006666719913482666, 0.006750336170196533, 0.006686143875122071, 0.006765120029449463, 0.006682144165039062, 0.00674838399887085, 0.006694464206695556, 0.006773056030273438, 0.006963776111602783, 0.00678278398513794, 0.006713344097137451, 0.006819392204284668, 0.00682585620880127, 0.0067402877807617185, 0.00674560022354126, 0.006761216163635254, 0.006709248065948486, 0.006747647762298584, 0.006713856220245362, 0.006862592220306396, 0.006695168018341064, 0.006714816093444824, 0.006697535991668701, 0.006704671859741211, 0.006707680225372314, 0.00669923210144043, 0.006682144165039062, 0.006737855911254883, 0.006773151874542236, 0.006679967880249023, 0.006725855827331543, 0.00678326416015625, 0.006702239990234375, 0.006693183898925781, 0.006715456008911133, 0.0067097277641296384, 0.006737631797790527, 0.006711584091186523, 0.006699007987976074, 0.006881279945373535, 0.0067350401878356935, 0.006789792060852051, 0.006844031810760498, 0.006736608028411866, 0.006719520092010498, 0.0071077442169189455, 0.006693727970123291, 0.006741407871246338, 0.0067276802062988285, 0.006672544002532959, 0.006715839862823487, 0.006739967823028564, 0.006686336040496827, 0.006721151828765869, 0.006720160007476807, 0.006678815841674805, 0.00671827220916748, 
0.006749152183532715, 0.006710432052612305, 0.006921088218688965, 0.007185855865478516, 0.006701888084411621, 0.0066921601295471195, 0.006721375942230224, 0.006770815849304199, 0.006679008007049561, 0.006735424041748047, 0.006801695823669434, 0.0067933759689331056, 0.006710879802703858, 0.006723423957824707, 0.006746687889099121, 0.0067341761589050295, 0.006702144145965577, 0.006795008182525635, 0.006807936191558838, 0.006686495780944824, 0.006787680149078369, 0.006767871856689453, 0.006705215930938721, 0.006703904151916504, 0.006676032066345215, 0.006713535785675049, 0.006746463775634766, 0.006833600044250488, 0.006713088035583496, 0.006707935810089111, 0.006708384037017822, 0.006666656017303467, 0.006708703994750977, 0.006693855762481689, 0.006801472187042236, 0.00671123218536377, 0.0066806402206420895, 0.006702208042144776, 0.006697440147399902, 0.00666812801361084, 0.0066743998527526854, 0.006690464019775391, 0.006685152053833008, 0.0067199039459228515, 0.006696959972381592, 0.006690368175506591, 0.006640255928039551, 0.0067084159851074215, 0.006679296016693115, 0.006634880065917969, 0.006734464168548584, 0.0067010560035705566, 0.006672383785247803, 0.006680543899536133, 0.0067092800140380856, 0.006682047843933106, 0.006660672187805175, 0.006741631984710694, 0.006743680000305176, 0.006759103775024414, 0.007036064147949219, 0.007029664039611816, 0.006661856174468994, 0.0068488001823425295, 0.006713344097137451, 0.006735136032104492, 0.006720479965209961, 0.006702847957611084, 0.00667852783203125, 0.006666240215301514, 0.006681920051574707, 0.00668339204788208, 0.007280128002166748, 0.0074115839004516605, 0.009368160247802734, 0.007675392150878906, 0.006846528053283691, 0.0070143680572509765, 0.007102911949157715, 0.0067785921096801755, 0.006760128021240234, 0.006802015781402588, 0.006673727989196777, 0.006820543766021729, 0.006723040103912353, 0.006709375858306885, 0.006676896095275879, 0.006721536159515381, 0.006689856052398682, 0.006721888065338135, 0.006728288173675537, 0.006725632190704346, 0.006711296081542969, 0.006774784088134766, 0.006676608085632324, 0.006672255992889405, 0.006840320110321045, 0.0067218561172485355, 0.0067396478652954105, 0.006707200050354004, 0.006679840087890625, 0.006672800064086914, 0.0067259521484375, 0.006809375762939453, 0.006669792175292968, 0.006698880195617676, 0.006755104064941406, 0.006660192012786865, 0.007040671825408936, 0.006723936080932617, 0.006750048160552979, 0.006690624237060547, 0.00668387222290039, 0.006748576164245606, 0.00664192008972168, 0.006688767910003662, 0.006706975936889649, 0.0066390719413757324, 0.006693151950836182, 0.006693280220031738, 0.006647840023040772, 0.006760479927062989, 0.006690176010131836, 0.006652607917785645, 0.006673503875732422, 0.0066752958297729495, 0.006713247776031494, 0.006705247879028321, 0.006688960075378418, 0.006721343994140625, 0.0066641921997070315, 0.00670739221572876, 0.006716544151306152, 0.006682464122772217, 0.006732416152954101, 0.006791359901428222, 0.006688864231109619, 0.006770624160766602, 0.006687744140625, 0.006648896217346191, 0.006692480087280273, 0.00666860818862915, 0.006659327983856201, 0.006703680038452149, 0.006672639846801758, 0.006698400020599365, 0.006674335956573486, 0.006672544002532959, 0.006652639865875244, 0.006689599990844727, 0.006689792156219483, 0.0068566398620605465, 0.006811391830444336, 0.006766560077667236, 0.006723584175109864, 0.006695199966430664, 0.006688767910003662, 0.006713344097137451, 0.006698751926422119, 0.006712704181671142, 0.006673279762268066, 
0.0066641921997070315, 0.006772736072540283, 0.006713344097137451, 0.006725279808044433, 0.006723711967468262, 0.006778880119323731, 0.006716800212860107, 0.0066845440864562984, 0.006648799896240234, 0.006696959972381592, 0.006686719894409179, 0.00666758394241333, 0.006654655933380127, 0.006704895973205566, 0.006688704013824463, 0.006643040180206299, 0.006745759963989258, 0.00669593620300293, 0.006633471965789795, 0.006719647884368896, 0.006788095951080322, 0.006695775985717773, 0.006699071884155273, 0.006750400066375732, 0.006762080192565918, 0.0066992959976196285, 0.006721407890319824, 0.006694176197052002, 0.006676640033721924, 0.006699584007263184, 0.006684000015258789, 0.0066650562286376955, 0.006751743793487549, 0.006678624153137207, 0.006668799877166748, 0.0066986241340637204, 0.0066600642204284665, 0.006671679973602295, 0.006685503959655762, 0.00670959997177124, 0.006760096073150635, 0.00674345588684082, 0.006700863838195801, 0.006673183917999268, 0.006719488143920899, 0.00676694393157959, 0.006694464206695556, 0.006702720165252685, 0.006837887763977051, 0.00677564811706543, 0.006751776218414307, 0.006724160194396972, 0.0067190399169921875, 0.006654208183288574, 0.006713664054870606, 0.006665664196014404, 0.0066687679290771485, 0.0067337918281555175, 0.006690720081329346, 0.006668288230895996, 0.006686719894409179, 0.006666240215301514, 0.007080160140991211, 0.006788479804992676, 0.0067116799354553225, 0.006717472076416015, 0.006702847957611084, 0.006666368007659912, 0.00661516809463501, 0.006688064098358155, 0.006697728157043457, 0.0066293120384216305, 0.006715392112731934, 0.007185664176940918, 0.006853248119354248, 0.006674111843109131, 0.006791264057159424, 0.006768735885620117, 0.006651904106140137, 0.0066705279350280765, 0.006807263851165772, 0.0066622400283813474, 0.006645760059356689, 0.0067125439643859865, 0.006673183917999268, 0.006664031982421875, 0.006688928127288819, 0.006661952018737793, 0.006696800231933594, 0.006691167831420899, 0.006657536029815674, 0.006653600215911866, 0.006687583923339843, 0.006717440128326416, 0.006699007987976074, 0.006686143875122071, 0.0067136001586914065, 0.006666272163391113, 0.0066921601295471195, 0.006669280052185059, 0.006659552097320557, 0.006701600074768066, 0.006645279884338379, 0.0066646718978881836, 0.006670432090759277, 0.006731616020202637, 0.006657120227813721, 0.006707647800445557, 0.006685279846191407, 0.0066696319580078125, 0.006683487892150879, 0.006674208164215088, 0.006897664070129395, 0.006747456073760987, 0.006764992237091065, 0.006666495800018311, 0.006672383785247803, 0.006660096168518067, 0.006649856090545654, 0.006676479816436768, 0.006653088092803955, 0.006680799961090088, 0.006686431884765625, 0.006664351940155029, 0.0066629118919372555, 0.006709248065948486, 0.006702655792236328, 0.006664639949798584, 0.00671292781829834, 0.00675878381729126, 0.006669919967651367, 0.006742303848266602, 0.006721119880676269, 0.006701632022857666, 0.006705023765563965, 0.006723711967468262, 0.006673855781555176, 0.006709792137145996, 0.006772799968719483, 0.006676447868347168, 0.006650559902191162, 0.006687039852142334, 0.006684480190277099, 0.006649472236633301, 0.006725632190704346, 0.006668799877166748, 0.006670207977294922, 0.00667852783203125, 0.0067686400413513184, 0.006651679992675782, 0.006729951858520508, 0.0067131838798522945, 0.006739999771118164, 0.006780576229095459, 0.006707808017730713, 0.006688831806182861, 0.0066806721687316896, 0.0067069120407104495, 0.006707488059997558, 0.006679679870605469, 0.00671395206451416, 
0.006750207901000976, 0.006656000137329102, 0.006702176094055176, 0.0066854081153869625, 0.006787263870239258, 0.0067686400413513184, 0.006694623947143555, 0.006684671878814697, 0.006756800174713135, 0.006682464122772217, 0.006656000137329102, 0.006702335834503173, 0.006671103954315185, 0.006677728176116943, 0.006705952167510986, 0.0066826238632202144, 0.00673363208770752, 0.006721727848052979, 0.006720672130584717, 0.006668831825256348, 0.006719808101654053, 0.006721759796142578, 0.00666761589050293, 0.006713119983673096, 0.006830527782440186, 0.006707424163818359, 0.006727424144744873, 0.006684927940368652, 0.006694911956787109, 0.00665340805053711, 0.006754464149475098, 0.0066991357803344725, 0.006684927940368652, 0.006766592025756836, 0.007132544040679932, 0.006908512115478516, 0.008523296356201172, 0.007635456085205078, 0.006809855937957764, 0.006751200199127197, 0.006785920143127441, 0.006774687767028808, 0.006727583885192871, 0.006707200050354004, 0.006660096168518067, 0.007714303970336914, 0.006731711864471436, 0.006752511978149414, 0.006746431827545166, 0.006719488143920899, 0.008240639686584473, 0.006736288070678711, 0.006680960178375244, 0.006689824104309082, 0.006707231998443603, 0.006750879764556884, 0.0068438401222229005, 0.006705728054046631, 0.006701280117034912, 0.006737696170806885, 0.0067226881980895995, 0.006705376148223877, 0.006740640163421631, 0.006772799968719483, 0.006694464206695556, 0.006756864070892334, 0.006694784164428711, 0.006686336040496827, 0.006672736167907715, 0.006684703826904297, 0.006696864128112793, 0.0067290239334106446, 0.006699808120727539, 0.006729728221893311, 0.006662144184112549, 0.006690559864044189, 0.006911295890808106, 0.006710207939147949, 0.0066744318008422855, 0.00680787181854248, 0.006715072154998779, 0.0066641921997070315, 0.006699007987976074, 0.006688191890716553, 0.006681280136108398, 0.006698880195617676, 0.006672544002532959, 0.006745952129364014, 0.006762495994567871, 0.006668288230895996, 0.006733151912689209, 0.006691487789154052, 0.006661344051361084, 0.006742303848266602, 0.006717631816864014, 0.006666560173034668, 0.006711296081542969, 0.006945824146270752, 0.006703743934631348, 0.006682975769042969, 0.006715392112731934, 0.006720608234405518, 0.006680960178375244, 0.006683296203613282, 0.006772607803344727, 0.006686079978942871, 0.006690336227416992, 0.006738111972808838, 0.006701759815216065, 0.0067313919067382814, 0.006719840049743652, 0.00678879976272583, 0.006661920070648193, 0.006715648174285889, 0.006720064163208007, 0.00668342399597168, 0.006699967861175537, 0.0067003521919250485, 0.00666815996170044, 0.0067571840286254885, 0.006694496154785156, 0.006730144023895264, 0.006651519775390625, 0.0067272958755493165, 0.0066977920532226565, 0.006671840190887451, 0.006709343910217285, 0.0068058881759643555, 0.006666240215301514, 0.006708799839019775, 0.0066603198051452634, 0.006657824039459228, 0.006690464019775391, 0.00672870397567749, 0.006696959972381592, 0.007129024028778077, 0.006710207939147949, 0.006672671794891357, 0.006745823860168457, 0.006738848209381103, 0.006668288230895996, 0.0067586879730224605, 0.007464672088623047, 0.00832102394104004, 0.010098688125610352, 0.011272512435913085, 0.007030464172363281, 0.00683622407913208, 0.006776415824890137, 0.0067792959213256836, 0.006823935985565186, 0.007241600036621094, 0.00672166395187378, 0.006715392112731934, 0.006744063854217529, 0.006739967823028564, 0.006696959972381592, 0.006730879783630371, 0.006779935836791992, 0.0066947522163391116, 0.006731520175933838, 
0.006733280181884765, 0.006791744232177734, 0.0067853121757507325, 0.006700992107391357, 0.006702655792236328, 0.006768320083618164, 0.006728447914123535, 0.006770559787750244, 0.006737919807434082, 0.00671721601486206, 0.006745952129364014, 0.006713727951049805, 0.006746111869812011, 0.006692863941192627, 0.006782976150512696, 0.006702911853790283, 0.006721727848052979, 0.006686719894409179, 0.006737919807434082, 0.006804736137390137, 0.006734720230102539, 0.0066679039001464845, 0.00671292781829834, 0.0066703038215637205, 0.006675136089324951, 0.00670531177520752, 0.006725471973419189, 0.006684671878814697, 0.006789120197296142, 0.006769023895263672, 0.006688608169555664, 0.006733535766601562, 0.006690176010131836, 0.006741888046264648, 0.006693183898925781, 0.006660384178161621, 0.0067422399520874025, 0.006738976001739502, 0.006677472114562989, 0.006647264003753662, 0.00667251205444336, 0.006775487899780273, 0.00668233585357666, 0.006737919807434082, 0.006750207901000976, 0.006748159885406494, 0.006712416172027588, 0.006690783977508545, 0.006672832012176513, 0.006806015968322754, 0.006727839946746826, 0.006722943782806396, 0.006668320178985596, 0.006758848190307617, 0.006686880111694336, 0.006796768188476562, 0.006833600044250488, 0.006785920143127441, 0.006662208080291748, 0.006839360237121582, 0.0066856322288513185, 0.0067309122085571285, 0.006706367969512939, 0.007151264190673828, 0.0066778559684753415, 0.006662816047668457, 0.006699007987976074, 0.006651423931121826, 0.006667871952056884, 0.006822783946990967, 0.006651936054229736, 0.0066295042037963865, 0.006690815925598144, 0.006692863941192627, 0.006645760059356689, 0.006661664009094239, 0.006734399795532226, 0.006645664215087891, 0.006708992004394531, 0.006711552143096924, 0.006866047859191895, 0.006688735961914063, 0.006689695835113525, 0.006676191806793213, 0.006729472160339356, 0.00672540807723999, 0.006687424182891846, 0.006645823955535889, 0.0067686400413513184, 0.0066724481582641606, 0.006647744178771973, 0.006674272060394287, 0.006694655895233154, 0.006704800128936768, 0.006734784126281738, 0.006714399814605713, 0.006708000183105469, 0.006709248065948486, 0.006698527812957764, 0.006694431781768799, 0.0067123198509216305, 0.006754240036010742, 0.006883327960968018, 0.006759647846221924, 0.006740128040313721, 0.006726272106170654, 0.006809599876403808, 0.0067105917930603024, 0.006689472198486328, 0.0067420158386230465, 0.006868351936340332, 0.006853151798248291, 0.006891615867614746, 0.0067333121299743654, 0.006685184001922607, 0.006659615993499756, 0.006756832122802734, 0.006778880119323731, 0.006811615943908692, 0.0067420477867126466, 0.0067073597908020016, 0.006652768135070801, 0.006711743831634522, 0.0068031039237976074, 0.0067432961463928225, 0.006749855995178223, 0.00672156810760498, 0.006754528045654297, 0.006718944072723389, 0.006750239849090576, 0.006743519783020019, 0.006728415966033936, 0.006845888137817383, 0.0067870721817016606, 0.006725632190704346, 0.006764895915985107, 0.006798912048339844, 0.0067420158386230465, 0.006725696086883545, 0.006743743896484375, 0.006795680046081543, 0.006707456111907959, 0.006741663932800293, 0.006760575771331787, 0.006719391822814941, 0.006744063854217529, 0.006751711845397949, 0.00674451208114624, 0.006850783824920654, 0.006743807792663575, 0.006709407806396484, 0.0067086400985717776, 0.006751840114593506, 0.006744480133056641, 0.006822463989257813, 0.006748223781585693, 0.006815616130828857, 0.006715456008911133, 0.0067686400413513184, 0.006801663875579834, 0.00689251184463501, 
0.006763360023498535, 0.006813632011413574, 0.006720704078674316, 0.00671827220916748, 0.006770688056945801, 0.006709248065948486, 0.006665215969085693, 0.006703135967254639, 0.006675007820129394, 0.006670752048492432, 0.006719456195831299, 0.00667471981048584, 0.006704864025115967, 0.0066826558113098145, 0.006846464157104492, 0.00667683219909668, 0.006697728157043457, 0.006826591968536377, 0.006677919864654541, 0.0066774082183837895, 0.0067411518096923825, 0.006701151847839356, 0.006672768115997314, 0.0067077441215515135, 0.006688127994537354, 0.006655807971954346, 0.0066847681999206545, 0.006677248001098633, 0.00678278398513794, 0.006703104019165039, 0.006692863941192627, 0.0066776638031005855, 0.006683487892150879, 0.006703104019165039, 0.006686719894409179]",tokens/s,148.45800009513988,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,813.34272,545.128448,0.0,159.383552,143.673856,s,1,7.43645556640625,7.43645556640625,0.0,7.43645556640625,7.43645556640625,7.43645556640625,7.43645556640625,[7.43645556640625],,kWh,1.1159478924984492e-05,1.2237030355358137e-06,2.711946613997984e-06,1.509512857451829e-05,,MB,1345.06496,616.431616,0.0,199.22944,186.684928,s,25,0.19603059291839603,0.00784122371673584,6.264749272878208e-05,0.007823296070098877,0.00793838710784912,0.007949881744384765,0.007957512989044188,"[0.007959455966949462, 0.00776358413696289, 0.007892320156097412, 0.007823552131652832, 0.007804063796997071, 0.0078123841285705565, 0.007784383773803711, 0.007930016040802002, 0.007783071994781494, 0.007763072013854981, 0.007790463924407959, 0.007779007911682129, 0.007831840038299561, 0.007813087940216065, 0.00792470407485962, 0.007838528156280517, 0.007943967819213867, 0.007823296070098877, 0.00790940809249878, 0.00787820816040039, 0.007788000106811524, 0.00795136022567749, 0.007866112232208252, 0.007788352012634277, 0.007788352012634277]",tokens/s,32647.965323780885,kWh,2.2858353291895563e-07,2.5208711314408596e-08,1.0111663009710281e-07,3.5490887433046703e-07,tokens/kWh,721311915.5809844,MB,1384.38656,618.528768,0.0,201.326592,186.687488,s,25,9.894076324462889,0.39576305297851555,0.002099559086157807,0.3956947937011719,0.3981936584472656,0.3995603088378906,0.4015686962890625,"[0.4021055908203125, 0.3958044738769531, 0.3957227783203125, 0.3926264343261719, 0.39688644409179685, 0.3934579772949219, 0.3939168395996094, 0.39432684326171874, 0.397101318359375, 0.39589395141601563, 0.39432000732421874, 0.3956947937011719, 0.39451702880859374, 0.394517578125, 0.3958472900390625, 0.394456298828125, 0.39799301147460936, 0.39528375244140623, 0.39662890625, 0.39298492431640625, 0.39456201171875, 0.3962434387207031, 0.3949886779785156, 0.3998685302734375, 
0.3983274230957031]",tokens/s,159.18615829815732,kWh,1.1350921557947191e-05,1.2516806070438836e-06,4.04845933913384e-06,1.665106150412491e-05,tokens/kWh,3783542.5678052553,,s,1575,9.882510946273829,0.006274610124618289,0.00013488673599689358,0.006253056049346924,0.00632426233291626,0.006379203271865845,0.0068573912525176995,"[0.006295584201812744, 0.006387680053710937, 0.006223872184753418, 0.006385216236114502, 0.006293536186218262, 0.006332672119140625, 0.0062791361808776855, 0.006305344104766845, 0.006375648021697998, 0.007053728103637695, 0.008384511947631837, 0.008130559921264649, 0.007784383773803711, 0.006311135768890381, 0.006349664211273193, 0.0062722558975219726, 0.006347519874572754, 0.006278816223144531, 0.006261343955993652, 0.0062481918334960935, 0.006240032196044922, 0.006287807941436767, 0.0063417601585388185, 0.006331039905548096, 0.006258304119110107, 0.006295904159545899, 0.0062501440048217775, 0.006332863807678222, 0.0062483839988708495, 0.006263967990875244, 0.006290304183959961, 0.006295519828796387, 0.006219967842102051, 0.00626259183883667, 0.006245728015899658, 0.006271711826324463, 0.00623356819152832, 0.006275551795959473, 0.006244351863861084, 0.006362368106842041, 0.006266975879669189, 0.006216351985931397, 0.006260735988616943, 0.006292928218841553, 0.006224448204040527, 0.0062197761535644535, 0.00626262378692627, 0.006252607822418213, 0.006230144023895263, 0.00626259183883667, 0.006244512081146241, 0.0062724161148071285, 0.0062325439453125, 0.006260863780975342, 0.006260640144348144, 0.006225120067596435, 0.006241151809692383, 0.006285312175750732, 0.006246399879455566, 0.006278240203857422, 0.0062473278045654295, 0.006230016231536865, 0.006266751766204834, 0.006162943840026855, 0.006215551853179932, 0.006272352218627929, 0.006205664157867431, 0.006248672008514405, 0.006224095821380615, 0.006261983871459961, 0.006304543972015381, 0.006236159801483154, 0.006240255832672119, 0.006299263954162597, 0.006380032062530518, 0.006208799839019776, 0.006284927845001221, 0.0063350720405578615, 0.006316415786743164, 0.0062975997924804685, 0.006305791854858398, 0.006275328159332275, 0.006272096157073975, 0.006281888008117676, 0.006248640060424805, 0.006225632190704346, 0.006246848106384277, 0.0062904319763183595, 0.006238304138183594, 0.006212160110473632, 0.00638972806930542, 0.006225984096527099, 0.0062382397651672365, 0.006225855827331543, 0.006229663848876953, 0.006762847900390625, 0.006277120113372803, 0.006270976066589356, 0.006279007911682129, 0.0062278399467468265, 0.006279039859771729, 0.006223936080932618, 0.006246751785278321, 0.006252543926239014, 0.006254144191741943, 0.006228032112121582, 0.006223872184753418, 0.006218272209167481, 0.006260799884796143, 0.0062379841804504396, 0.006418591976165772, 0.006344543933868408, 0.006264832019805908, 0.0062341117858886715, 0.006267072200775147, 0.006234208106994629, 0.006266304016113281, 0.006205728054046631, 0.00637500810623169, 0.006273632049560547, 0.006258656024932861, 0.006353824138641357, 0.006296512126922608, 0.0063240318298339844, 0.006269120216369629, 0.006301152229309082, 0.006184959888458252, 0.006639616012573242, 0.006280255794525146, 0.006267680168151856, 0.006301536083221436, 0.0062549118995666505, 0.006426911830902099, 0.006258399963378907, 0.006338560104370118, 0.006311359882354736, 0.006218175888061523, 0.0062557439804077145, 0.006237184047698975, 0.006262784004211426, 0.006239327907562256, 0.006225984096527099, 0.006249504089355469, 0.006233376026153564, 0.00624505615234375, 0.006223487854003906, 
0.006281023979187012, 0.006222239971160889, 0.0062111358642578125, 0.006239871978759765, 0.006213632106781006, 0.0062484159469604495, 0.006175871849060059, 0.006241504192352295, 0.006224448204040527, 0.0062564477920532225, 0.006234240055084228, 0.0062137598991394045, 0.006239520072937012, 0.006263743877410888, 0.006245535850524902, 0.006201536178588867, 0.006256959915161133, 0.006210752010345459, 0.006228096008300781, 0.0062863359451293946, 0.006252255916595459, 0.006232096195220947, 0.006433951854705811, 0.007024576187133789, 0.006275936126708985, 0.006285312175750732, 0.006270976066589356, 0.00619539213180542, 0.006286367893218994, 0.006228799819946289, 0.006219007968902588, 0.006254559993743897, 0.006215583801269531, 0.006316736221313477, 0.006203360080718994, 0.006301919937133789, 0.00623635196685791, 0.006276127815246582, 0.00622054386138916, 0.006219679832458496, 0.006218080043792725, 0.006499519824981689, 0.0062548799514770504, 0.0061337599754333495, 0.00621673583984375, 0.006239200115203857, 0.006215904235839844, 0.0062379841804504396, 0.006228127956390381, 0.006340159893035889, 0.00619539213180542, 0.006254079818725586, 0.006215519905090332, 0.006222591876983642, 0.0062382397651672365, 0.006258656024932861, 0.006250495910644531, 0.006184864044189453, 0.006234079837799072, 0.006178016185760498, 0.006239039897918701, 0.006249983787536621, 0.006220096111297607, 0.0062548799514770504, 0.006217728137969971, 0.006270016193389892, 0.006230720043182373, 0.0062016000747680666, 0.006166528224945069, 0.0062065601348876955, 0.006185887813568115, 0.006182911872863769, 0.0062336320877075194, 0.006196703910827637, 0.00619820785522461, 0.006219840049743653, 0.006228127956390381, 0.006180831909179688, 0.006238080024719238, 0.006199295997619629, 0.006225728034973145, 0.006257887840270996, 0.006198592185974121, 0.0062146239280700686, 0.0061877121925354005, 0.00622819185256958, 0.0061968002319335936, 0.00624457597732544, 0.006254591941833496, 0.006300864219665528, 0.006265664100646972, 0.006232063770294189, 0.006256991863250732, 0.006213280200958252, 0.006254591941833496, 0.006191103935241699, 0.00626470422744751, 0.0062271361351013185, 0.006258783817291259, 0.0062165441513061526, 0.006225247859954834, 0.006238880157470703, 0.006187007904052734, 0.006243455886840821, 0.006218880176544189, 0.006207520008087158, 0.006174752235412598, 0.006197120189666748, 0.006207615852355957, 0.0062262721061706544, 0.006516384124755859, 0.006227968215942382, 0.006309887886047363, 0.006294911861419678, 0.006234752178192139, 0.006238143920898437, 0.006221216201782226, 0.006369760036468506, 0.006223680019378662, 0.006230720043182373, 0.006213312149047852, 0.0064412479400634765, 0.006252480030059814, 0.006245984077453613, 0.006322400093078613, 0.006208608150482178, 0.006605696201324463, 0.0062873601913452145, 0.006282815933227539, 0.006265408039093018, 0.00621343994140625, 0.006451007843017578, 0.006283520221710205, 0.006374879837036133, 0.006322112083435059, 0.006230912208557129, 0.006306975841522217, 0.006250400066375733, 0.006277791976928711, 0.006248447895050049, 0.006286752223968506, 0.006294144153594971, 0.0062873921394348145, 0.006681888103485107, 0.0065790719985961916, 0.006266655921936035, 0.006273151874542237, 0.006250368118286133, 0.0062782721519470215, 0.006255360126495362, 0.006326399803161621, 0.006242303848266601, 0.006289120197296143, 0.00634665584564209, 0.006226336002349854, 0.006262752056121826, 0.006400000095367431, 0.00629094409942627, 0.006297952175140381, 0.006244031906127929, 0.0062614078521728515, 
0.0062351679801940915, 0.006314047813415527, 0.006220511913299561, 0.006232096195220947, 0.006303840160369873, 0.006272895812988281, 0.006226143836975098, 0.006227744102478027, 0.006205152034759522, 0.0061831998825073245, 0.0062507839202880855, 0.0061990079879760745, 0.006242303848266601, 0.006242303848266601, 0.006217728137969971, 0.006211840152740479, 0.006239999771118164, 0.0062763838768005375, 0.0061970558166503905, 0.006276000022888184, 0.006268928050994873, 0.00622537612915039, 0.006237919807434082, 0.006316256046295166, 0.00624022388458252, 0.006206079959869385, 0.006269983768463135, 0.006203487873077392, 0.006239359855651856, 0.006250239849090576, 0.0062873601913452145, 0.006252031803131103, 0.006218239784240722, 0.006280831813812256, 0.006187392234802246, 0.0064349122047424314, 0.006282559871673584, 0.006214240074157715, 0.00621776008605957, 0.006309823989868164, 0.006240287780761719, 0.006191103935241699, 0.006233280181884766, 0.0061898880004882815, 0.00624128007888794, 0.006267072200775147, 0.0062259202003479, 0.00621446418762207, 0.0062065281867980954, 0.006252511978149414, 0.006257887840270996, 0.006405951976776123, 0.0062483839988708495, 0.006201344013214111, 0.006281216144561768, 0.006210976123809815, 0.006206240177154541, 0.006184768199920654, 0.006231776237487793, 0.0061855998039245606, 0.006231071949005127, 0.006192800045013428, 0.0062679038047790524, 0.006201312065124511, 0.0062111358642578125, 0.0062111358642578125, 0.006171520233154297, 0.0062975997924804685, 0.006205088138580323, 0.006238783836364746, 0.006208415985107422, 0.006142240047454834, 0.006202655792236328, 0.006185887813568115, 0.006217535972595215, 0.006184959888458252, 0.006268223762512207, 0.006168384075164795, 0.006212800025939941, 0.006231904029846191, 0.00620524787902832, 0.006211616039276123, 0.00622815990447998, 0.006243904113769531, 0.006215487957000733, 0.0062837119102478025, 0.006284704208374023, 0.0062082881927490235, 0.00621120023727417, 0.006199584007263183, 0.006214719772338867, 0.006226816177368164, 0.006227712154388428, 0.0062585601806640625, 0.006243872165679932, 0.006187839984893799, 0.0062687678337097165, 0.0062486081123352055, 0.006211008071899414, 0.00621830415725708, 0.006176767826080322, 0.006204895973205567, 0.00627184009552002, 0.006252223968505859, 0.006201344013214111, 0.006250495910644531, 0.00624348783493042, 0.006232927799224854, 0.006288383960723877, 0.0062882242202758785, 0.006297952175140381, 0.006227392196655274, 0.006317823886871338, 0.006251423835754394, 0.006350560188293457, 0.006324448108673096, 0.006196735858917236, 0.006515200138092041, 0.006235936164855957, 0.006268928050994873, 0.006229856014251709, 0.0062503361701965335, 0.006238527774810791, 0.0062074241638183595, 0.006334527969360352, 0.00621292781829834, 0.006265535831451416, 0.006334752082824707, 0.006272736072540283, 0.006270976066589356, 0.006248000144958496, 0.006324672222137452, 0.006182911872863769, 0.006284927845001221, 0.006201344013214111, 0.006254591941833496, 0.006262176036834717, 0.006316639900207519, 0.006340191841125488, 0.006398079872131347, 0.006283199787139893, 0.006246079921722412, 0.0062317438125610355, 0.0062672638893127445, 0.006221888065338135, 0.006258240222930908, 0.0061842560768127445, 0.006230016231536865, 0.00627785587310791, 0.006273888111114502, 0.006231647968292236, 0.006212096214294433, 0.006244351863861084, 0.0061931519508361815, 0.006363135814666748, 0.006229472160339355, 0.006237887859344483, 0.006211552143096924, 0.006214528083801269, 0.006217120170593261, 0.0062259202003479, 
0.007049856185913086, 0.006219744205474853, 0.006189055919647217, 0.006238207817077636, 0.0062828798294067385, 0.006246111869812012, 0.006183519840240479, 0.006256224155426026, 0.006369311809539795, 0.00624019193649292, 0.006253056049346924, 0.0062197761535644535, 0.0062197761535644535, 0.006235936164855957, 0.006278656005859375, 0.006183775901794433, 0.006288959980010987, 0.006232319831848144, 0.006196864128112793, 0.006209983825683594, 0.006207551956176758, 0.006208799839019776, 0.006209472179412842, 0.006216224193572998, 0.006187039852142334, 0.006228127956390381, 0.006182720184326172, 0.006254784107208252, 0.0061972479820251464, 0.0061931519508361815, 0.006244448184967041, 0.006252448081970215, 0.006223872184753418, 0.006211232185363769, 0.006227519989013672, 0.006201216220855713, 0.006175039768218994, 0.006229279994964599, 0.006208255767822266, 0.006239999771118164, 0.006213600158691406, 0.006251808166503906, 0.006226912021636963, 0.0062197761535644535, 0.006232063770294189, 0.006219871997833252, 0.006225823879241943, 0.006262784004211426, 0.006444672107696533, 0.006586976051330566, 0.006671199798583985, 0.0064676799774169924, 0.0062226881980896, 0.006342400074005127, 0.006260447978973389, 0.006268479824066162, 0.006302688121795654, 0.006303296089172363, 0.006332064151763916, 0.006191840171813965, 0.006346816062927246, 0.006256127834320068, 0.0062552962303161624, 0.006335840225219726, 0.006215968132019043, 0.006262559890747071, 0.006197696208953858, 0.006268064022064209, 0.006209983825683594, 0.0062540478706359865, 0.006314176082611084, 0.006214079856872559, 0.0063060798645019536, 0.00621779203414917, 0.006285312175750732, 0.006221759796142578, 0.006264832019805908, 0.006250495910644531, 0.006248640060424805, 0.006369088172912598, 0.006643295764923096, 0.007281055927276611, 0.006293504238128662, 0.006233888149261475, 0.006262176036834717, 0.0061981120109558105, 0.006280831813812256, 0.006257343769073487, 0.006270624160766601, 0.006342656135559082, 0.006301023960113525, 0.006265312194824219, 0.00620959997177124, 0.006236288070678711, 0.006230271816253662, 0.006258048057556152, 0.006225312232971191, 0.00621065616607666, 0.00624012804031372, 0.006112063884735107, 0.00624128007888794, 0.006516736030578613, 0.006285376071929931, 0.006320064067840576, 0.006272799968719482, 0.0062464637756347655, 0.0062035517692565914, 0.0062873601913452145, 0.006184800148010254, 0.00631328010559082, 0.0062165441513061526, 0.006337855815887451, 0.006267551898956299, 0.006256095886230469, 0.006306560039520264, 0.006195007801055908, 0.006297535896301269, 0.0062239041328430176, 0.006321760177612304, 0.006238272190093994, 0.0062631678581237795, 0.006271200180053711, 0.006387584209442139, 0.0063235840797424316, 0.006219295978546142, 0.006341631889343262, 0.006230368137359619, 0.006256576061248779, 0.006315231800079346, 0.006217728137969971, 0.0062839360237121585, 0.006255712032318115, 0.006296319961547852, 0.006227968215942382, 0.0065742721557617186, 0.006549312114715576, 0.006317376136779785, 0.006296256065368652, 0.006268928050994873, 0.006283008098602295, 0.006260000228881836, 0.006310880184173584, 0.006289408206939697, 0.006262080192565918, 0.006265535831451416, 0.0062197761535644535, 0.006258207798004151, 0.00624073600769043, 0.006245696067810059, 0.006342688083648682, 0.00622057580947876, 0.006285183906555176, 0.006217887878417969, 0.006240096092224121, 0.006198912143707275, 0.006229375839233399, 0.006237184047698975, 0.006281216144561768, 0.006242303848266601, 0.0062239041328430176, 0.006266975879669189, 
0.006266751766204834, 0.0061594557762146, 0.006261888027191162, 0.006231135845184326, 0.0062512001991271975, 0.006252543926239014, 0.0062156801223754886, 0.006240255832672119, 0.006232063770294189, 0.006242656230926514, 0.006260064125061035, 0.006308032035827637, 0.006297728061676025, 0.006285312175750732, 0.006330719947814941, 0.0063025918006896975, 0.006251039981842041, 0.0062360639572143555, 0.006242047786712646, 0.006257247924804687, 0.006254079818725586, 0.006334943771362305, 0.006239712238311768, 0.006289984226226807, 0.006259712219238281, 0.006247424125671387, 0.006319680213928223, 0.006270847797393799, 0.006247104167938233, 0.0061989760398864745, 0.006281407833099365, 0.006253952026367187, 0.006264575958251953, 0.006238111972808838, 0.006238719940185547, 0.006219647884368897, 0.006297855854034424, 0.0062503361701965335, 0.006205023765563965, 0.006255519866943359, 0.006207488059997559, 0.006239391803741455, 0.0062226881980896, 0.006296671867370605, 0.006271168231964112, 0.00619817590713501, 0.006284351825714111, 0.006238048076629639, 0.0062432317733764646, 0.006229087829589843, 0.006246496200561524, 0.006217728137969971, 0.006232448101043701, 0.006330111980438233, 0.006218048095703125, 0.006232448101043701, 0.006239327907562256, 0.006232704162597656, 0.006218016147613525, 0.006262176036834717, 0.0062715840339660645, 0.006195199966430664, 0.006253952026367187, 0.00621017599105835, 0.006184991836547852, 0.006190815925598145, 0.006301119804382325, 0.006215744018554688, 0.006240928173065185, 0.006228064060211182, 0.006221792221069336, 0.0062624640464782715, 0.0062016000747680666, 0.0062856640815734865, 0.006237376213073731, 0.006283040046691894, 0.0062659521102905276, 0.006249599933624268, 0.006311552047729492, 0.006245408058166504, 0.006266848087310791, 0.006188127994537353, 0.006282080173492432, 0.006249567985534668, 0.006327199935913086, 0.006277088165283203, 0.006240287780761719, 0.0062993597984313966, 0.0062039680480957034, 0.006315487861633301, 0.00694707202911377, 0.00624835205078125, 0.006280447959899902, 0.006316895961761475, 0.006299647808074951, 0.006197375774383545, 0.006272480010986328, 0.00622431993484497, 0.006229599952697754, 0.006685056209564209, 0.006256192207336426, 0.0063161921501159665, 0.006281023979187012, 0.0062219839096069336, 0.00627558422088623, 0.006186431884765625, 0.00635532808303833, 0.006178751945495605, 0.0063788480758666995, 0.006185696125030517, 0.0062507839202880855, 0.00626038408279419, 0.006211008071899414, 0.0062665920257568355, 0.006302624225616455, 0.006312352180480957, 0.00617142391204834, 0.006257472038269043, 0.0063569917678833006, 0.006220032215118408, 0.006274816036224365, 0.006238304138183594, 0.0063056960105896, 0.006195199966430664, 0.006290976047515869, 0.0061874880790710445, 0.006227071762084961, 0.0061372160911560055, 0.006255231857299805, 0.006262784004211426, 0.006311935901641846, 0.006296800136566162, 0.006264736175537109, 0.006260831832885742, 0.006214687824249268, 0.0062481918334960935, 0.006277056217193604, 0.006257919788360595, 0.006259520053863526, 0.006303840160369873, 0.006285215854644775, 0.006237664222717285, 0.006244895935058593, 0.006252352237701416, 0.006254784107208252, 0.006243360042572021, 0.0062286720275878905, 0.0062849922180175785, 0.006285920143127442, 0.006331744194030761, 0.0062486081123352055, 0.00628275203704834, 0.006335487842559814, 0.0062663998603820804, 0.006272768020629883, 0.006318816184997559, 0.0062466559410095214, 0.006220895767211914, 0.006251167774200439, 0.006245791912078857, 0.006238944053649902, 
0.0063034558296203615, 0.006258848190307617, 0.006232192039489746, 0.006227776050567627, 0.00632428789138794, 0.00621504020690918, 0.0062197761535644535, 0.006238143920898437, 0.006206143856048584, 0.0062215042114257815, 0.006238527774810791, 0.006207520008087158, 0.006219615936279297, 0.006241983890533447, 0.0062074241638183595, 0.006232863903045655, 0.006291168212890625, 0.006305823802947998, 0.006244319915771485, 0.006244351863861084, 0.006258687973022461, 0.006207295894622803, 0.006239520072937012, 0.00631715202331543, 0.0062605438232421875, 0.006203392028808594, 0.00627126407623291, 0.006253983974456787, 0.006226240158081054, 0.0061337599754333495, 0.006356256008148193, 0.00624886417388916, 0.006281407833099365, 0.006295680046081543, 0.0062444801330566405, 0.006324384212493896, 0.006216447830200195, 0.006291679859161377, 0.006265600204467773, 0.006225728034973145, 0.006324416160583496, 0.006477888107299805, 0.006321536064147949, 0.006222400188446045, 0.006309887886047363, 0.006254591941833496, 0.006257952213287354, 0.00634278392791748, 0.006300096035003662, 0.006295135974884033, 0.006230559825897217, 0.00628329610824585, 0.006221824169158936, 0.006285312175750732, 0.006273024082183838, 0.006211584091186524, 0.006238304138183594, 0.0062169919013977054, 0.006236639976501465, 0.006223264217376709, 0.0062287039756774906, 0.006184127807617188, 0.006246528148651123, 0.006240575790405273, 0.006205376148223877, 0.006228608131408691, 0.006239424228668213, 0.006263679981231689, 0.006200191974639893, 0.006252768039703369, 0.00625324821472168, 0.006246399879455566, 0.006307839870452881, 0.006217728137969971, 0.006230016231536865, 0.006326015949249268, 0.006287903785705567, 0.006201056003570557, 0.006236159801483154, 0.006240255832672119, 0.006245888233184814, 0.006211904048919678, 0.006242496013641357, 0.0062341117858886715, 0.00620908784866333, 0.00626035213470459, 0.006236991882324219, 0.006246399879455566, 0.006240384101867676, 0.006205408096313476, 0.006223455905914306, 0.006232384204864502, 0.006200223922729492, 0.006189055919647217, 0.0062932162284851075, 0.006240543842315674, 0.006213791847229004, 0.006298943996429444, 0.006244895935058593, 0.006275072097778321, 0.006196671962738037, 0.006320352077484131, 0.006207007884979248, 0.006246560096740723, 0.006222655773162842, 0.006223360061645508, 0.006310239791870117, 0.006194464206695557, 0.00628553581237793, 0.00622819185256958, 0.006308127880096435, 0.006232063770294189, 0.006273119926452637, 0.006352799892425537, 0.006231840133666992, 0.006308320045471191, 0.006238207817077636, 0.006353888034820556, 0.00622054386138916, 0.006302944183349609, 0.006267712116241455, 0.00625161600112915, 0.006341536045074463, 0.006244351863861084, 0.0063055682182312014, 0.006590047836303711, 0.006522655963897705, 0.0063517122268676755, 0.006318143844604492, 0.006350944042205811, 0.006260223865509033, 0.0062708802223205564, 0.006244416236877441, 0.0062979841232299804, 0.006290976047515869, 0.006239776134490967, 0.0063190398216247555, 0.006260735988616943, 0.006298783779144287, 0.006240960121154785, 0.006277152061462402, 0.006264639854431152, 0.006236480236053467, 0.006262784004211426, 0.006250815868377686, 0.006230847835540771, 0.006341504096984863, 0.00628326416015625, 0.0062341117858886715, 0.006251999855041504, 0.006264512062072754, 0.006308576107025146, 0.0062444801330566405, 0.006209536075592041, 0.006248447895050049, 0.006177216053009033, 0.006241919994354248, 0.006218656063079834, 0.006262976169586182, 0.0061910400390625, 0.006268671989440918, 
0.0062073922157287595, 0.006289504051208496, 0.006272352218627929, 0.006289696216583252, 0.006250879764556885, 0.006246399879455566, 0.006268928050994873, 0.006215616226196289, 0.006291520118713379, 0.006252543926239014, 0.006316031932830811, 0.006272160053253174, 0.006363359928131104, 0.006283135890960694, 0.006254335880279541, 0.006278431892395019, 0.006252575874328614, 0.006301375865936279, 0.006259039878845215, 0.006338431835174561, 0.006327424049377441, 0.00622431993484497, 0.006283487796783447, 0.006227456092834473, 0.006306303977966309, 0.00622815990447998, 0.006323103904724121, 0.006263711929321289, 0.006212704181671143, 0.006267807960510254, 0.0062156801223754886, 0.006275040149688721, 0.006288991928100586, 0.006270656108856201, 0.00622054386138916, 0.006201344013214111, 0.00626259183883667, 0.006176032066345215, 0.006259359836578369, 0.006193439960479736, 0.006262815952301025, 0.006210527896881104, 0.006249407768249512, 0.006211616039276123, 0.0062724161148071285, 0.006268511772155762, 0.006227039813995362, 0.006258592128753662, 0.006199520111083984, 0.006274208068847657, 0.006232704162597656, 0.006247424125671387, 0.006273087978363037, 0.006206399917602539, 0.006238207817077636, 0.006184959888458252, 0.006289408206939697, 0.006123519897460937, 0.006310080051422119, 0.006251359939575195, 0.0062494401931762695, 0.006227200031280518, 0.00619596815109253, 0.006299136161804199, 0.006175456047058106, 0.006464799880981445, 0.00622652816772461, 0.006260640144348144, 0.006235936164855957, 0.006209440231323242, 0.0062848000526428225, 0.006200128078460694, 0.006252543926239014, 0.006180863857269287, 0.0062665920257568355, 0.006199584007263183, 0.006237311840057373, 0.006222720146179199, 0.006243775844573975, 0.006276864051818848, 0.0061898880004882815, 0.006299647808074951, 0.006557695865631104, 0.006760735988616943, 0.007631648063659668, 0.006883423805236816, 0.006850751876831054, 0.006264959812164307, 0.006226592063903809, 0.006221216201782226, 0.00624073600769043, 0.006232319831848144, 0.006306975841522217, 0.006351424217224121, 0.006262911796569824, 0.006219615936279297, 0.006253791809082031, 0.006240992069244384, 0.00625267219543457, 0.006285120010375977, 0.0061989760398864745, 0.006292096138000488, 0.0062053117752075195, 0.006229599952697754, 0.006228064060211182, 0.006250527858734131, 0.006267168045043945, 0.006221824169158936, 0.006432320117950439, 0.006230336189270019, 0.006290847778320312, 0.006303552150726318, 0.00633516788482666, 0.00625593614578247, 0.006255648136138916, 0.006340415954589844, 0.006348576068878174, 0.006253983974456787, 0.006212480068206787, 0.0062566399574279785, 0.006204576015472412, 0.006408671855926514, 0.0062672638893127445, 0.006313183784484863, 0.006250751972198486, 0.006274784088134765, 0.006257472038269043, 0.0062975997924804685, 0.0063201279640197755, 0.00625603199005127, 0.0062849922180175785, 0.006228896141052246, 0.0062997121810913085, 0.006274687767028809, 0.0062503361701965335, 0.006266816139221192, 0.006258783817291259, 0.006293951988220215, 0.006256063938140869, 0.006390336036682129, 0.006231488227844239, 0.006285696029663086, 0.006274879932403565, 0.006311456203460694, 0.006280288219451904, 0.006235904216766357, 0.006279327869415283, 0.006214719772338867, 0.006267839908599853, 0.006225759983062744, 0.006227776050567627, 0.006283455848693847, 0.006293504238128662, 0.006293504238128662, 0.006229983806610107, 0.006470719814300537, 0.006271967887878418, 0.006260447978973389, 0.0063134078979492185, 0.006255807876586914, 0.00625219202041626, 
0.006231552124023438, 0.006265344142913819, 0.006219871997833252, 0.006259903907775879, 0.006236959934234619, 0.006211008071899414, 0.006252128124237061, 0.0061981120109558105, 0.00636352014541626, 0.006217535972595215, 0.006323423862457276, 0.006207136154174805, 0.006248799800872803, 0.0062911038398742675, 0.006234591960906983, 0.006252863883972168, 0.006200640201568603, 0.006273983955383301, 0.006193120002746582, 0.006276415824890137, 0.006244671821594238, 0.00621343994140625, 0.0061394238471984865, 0.00624073600769043, 0.00627945613861084, 0.006201056003570557, 0.00628326416015625, 0.0061972479820251464, 0.006344704151153564, 0.00623638391494751, 0.006303328037261963, 0.006303936004638672, 0.006214687824249268, 0.006291520118713379, 0.006285823822021484, 0.006279583930969238, 0.006236159801483154, 0.006268896102905273, 0.00623414421081543, 0.006231296062469483, 0.006300127983093261, 0.00619379186630249, 0.0062911357879638675, 0.006242015838623047, 0.006260992050170899, 0.006258687973022461, 0.0062566399574279785, 0.006292799949645996, 0.006185664176940918, 0.006294879913330078, 0.006196928024291992, 0.006273407936096192, 0.006214335918426513, 0.006219679832458496, 0.006223519802093506, 0.0062221760749816895, 0.006268479824066162, 0.006168896198272705, 0.00625267219543457, 0.006205535888671875, 0.006264927864074707, 0.006271967887878418, 0.006201727867126465, 0.006279232025146485, 0.006287775993347168, 0.006323775768280029, 0.006185408115386963, 0.006274496078491211, 0.006213344097137451, 0.006212448120117188, 0.006250751972198486, 0.006192160129547119, 0.006249504089355469, 0.006174079895019531, 0.006246304035186768, 0.006200831890106201, 0.006273952007293701, 0.0062873601913452145, 0.006524928092956543, 0.006326176166534424, 0.006659167766571045, 0.006558015823364258, 0.006300352096557617, 0.007456319808959961, 0.006561279773712158, 0.006206783771514892, 0.0062053117752075195, 0.006277311801910401, 0.006251391887664795, 0.006201087951660156, 0.0062259202003479, 0.006240255832672119, 0.0062156801223754886, 0.006187007904052734, 0.006268415927886963, 0.006189184188842773, 0.006242496013641357, 0.0062007360458374025, 0.00621401596069336, 0.006202079772949219, 0.00618668794631958, 0.006198624134063721, 0.0062039680480957034, 0.006318431854248047, 0.006190847873687744, 0.006258687973022461, 0.006299647808074951, 0.006268928050994873, 0.006218976020812989, 0.006207263946533203, 0.00622489595413208, 0.006233119964599609, 0.0062494401931762695, 0.006172224044799805, 0.0062202239036560055, 0.006426623821258545, 0.0062197761535644535, 0.006205664157867431, 0.006196703910827637, 0.006254623889923096, 0.0061855678558349605, 0.006229695796966553, 0.006191103935241699, 0.006248799800872803, 0.0061907520294189455, 0.006216896057128906, 0.0062267518043518065, 0.006207263946533203, 0.006279488086700439, 0.006197184085845947, 0.006225728034973145, 0.006180511951446533, 0.0062510080337524416, 0.006199359893798828, 0.006193088054656982, 0.006236159801483154, 0.006205440044403076, 0.006226143836975098, 0.006187839984893799, 0.006300672054290772, 0.006227935791015625, 0.006286975860595703, 0.006287744045257568, 0.006262688159942627, 0.006234015941619873, 0.006283584117889404, 0.006252416133880615, 0.006205056190490723, 0.006145311832427979, 0.006261023998260498, 0.006226719856262207, 0.006248095989227295, 0.006233983993530273, 0.006221951961517334, 0.006262911796569824, 0.006235680103302002, 0.006273375988006592, 0.006216800212860108, 0.006263711929321289, 0.006235648155212403, 0.006253056049346924, 
0.006266623973846436, 0.006270815849304199, 0.006266751766204834, 0.006271520137786865, 0.006260735988616943, 0.006267231941223144, 0.006246399879455566, 0.006264063835144043, 0.006230432033538819, 0.00626796817779541, 0.006231200218200683, 0.006288832187652588, 0.006208127975463868, 0.006250207901000977, 0.006232063770294189, 0.006227968215942382, 0.0065168957710266115, 0.006284863948822022, 0.00629372787475586, 0.006252607822418213, 0.006306816101074219, 0.0062494721412658695, 0.0062873601913452145, 0.006248447895050049, 0.0062624959945678715, 0.006291168212890625, 0.006225791931152344, 0.006259391784667969, 0.006215712070465088, 0.006285280227661133, 0.0062559680938720705, 0.006241087913513184, 0.006233952045440674, 0.00624835205078125, 0.006262879848480224, 0.006213632106781006, 0.006289216041564941, 0.006209727764129639, 0.006245920181274414, 0.006241824150085449, 0.00625055980682373, 0.006265279769897461, 0.006218175888061523, 0.006262720108032227, 0.006251904010772705, 0.006279871940612793, 0.006254687786102295, 0.0062581758499145506, 0.006268640041351318, 0.006242015838623047, 0.006191999912261963, 0.006258815765380859, 0.006320000171661377, 0.006227519989013672, 0.006304192066192627, 0.006200479984283447, 0.006288576126098633, 0.0062462081909179686, 0.006268735885620117, 0.006296832084655762, 0.006222623825073242, 0.006316256046295166, 0.0062226881980896, 0.00636624002456665, 0.006258592128753662, 0.0062811517715454105, 0.006520927906036377, 0.0062679038047790524, 0.006281375885009766, 0.0062146239280700686, 0.006234047889709472, 0.0062501120567321775, 0.006283423900604248, 0.006289087772369385, 0.006267360210418701, 0.006315040111541748, 0.006236608028411865, 0.0063281598091125485, 0.006236767768859864, 0.006270944118499756, 0.006289504051208496, 0.006242303848266601, 0.006278336048126221, 0.006209407806396484, 0.006314176082611084, 0.006266975879669189, 0.006287583827972412, 0.006232672214508057, 0.006285151958465576, 0.0063012480735778805, 0.006265279769897461, 0.006643680095672608, 0.006264927864074707, 0.006272319793701172, 0.006314911842346191, 0.006213344097137451, 0.006313663959503174, 0.006199615955352783, 0.006388832092285157, 0.006405024051666259, 0.006311935901641846, 0.0063053760528564455, 0.006226336002349854, 0.006307839870452881, 0.006258207798004151, 0.006260799884796143, 0.006274496078491211, 0.006239200115203857, 0.006311264038085938, 0.0062221760749816895, 0.006295872211456299, 0.0061972479820251464, 0.00632147216796875, 0.006138815879821778, 0.0062791681289672855, 0.0062259202003479, 0.006258656024932861, 0.006244383811950684, 0.006254208087921142, 0.006287744045257568, 0.006238207817077636, 0.006280928134918213, 0.0062691841125488285, 0.006258975982666015, 0.00625600004196167, 0.006262176036834717, 0.006252607822418213, 0.006232863903045655, 0.0062708802223205564, 0.006269152164459228, 0.006272863864898681, 0.006237728118896484, 0.006223519802093506, 0.0062822079658508305, 0.006262784004211426, 0.00628326416015625, 0.0062975997924804685, 0.00625216007232666, 0.006258975982666015, 0.006265215873718262, 0.00625216007232666, 0.006284543991088867, 0.006294623851776123, 0.006876287937164306, 0.006237823963165284, 0.00630844783782959, 0.0062980160713195805, 0.0062641921043395995, 0.006259488105773926, 0.006238175868988037, 0.006223584175109863, 0.006219935894012451, 0.0062626562118530274, 0.00618016004562378, 0.006275904178619385, 0.00619868803024292, 0.0062347202301025395, 0.006231103897094727, 0.0062475199699401854, 0.0062912960052490235, 0.0062137598991394045, 
0.006277056217193604, 0.0061931519508361815, 0.0062709121704101565, 0.006208608150482178, 0.006226848125457764, 0.0062334399223327635, 0.00629807996749878, 0.006291679859161377, 0.0061849279403686525, 0.006273024082183838, 0.006219136238098145, 0.006229887962341309, 0.006202367782592774, 0.006323808193206787, 0.00629478406906128, 0.006195199966430664, 0.006233983993530273, 0.006237855911254883, 0.006264736175537109, 0.006226784229278565, 0.006261983871459961, 0.006281504154205322, 0.006263008117675781, 0.006285312175750732, 0.006257919788360595, 0.0062978239059448245, 0.006249311923980713, 0.0062808961868286136, 0.006235775947570801, 0.0062631678581237795, 0.006266880035400391, 0.0062392959594726564, 0.0062700481414794925, 0.006279007911682129, 0.006295551776885986, 0.006225664138793946, 0.006282623767852783, 0.0062740478515625, 0.006247903823852539, 0.0063021121025085445, 0.00628326416015625, 0.006280320167541504, 0.006241312026977539, 0.006274879932403565, 0.006277152061462402, 0.006276639938354492, 0.00628988790512085, 0.006313983917236328, 0.006254432201385498, 0.006271008014678955, 0.006271359920501709, 0.006271999835968017, 0.006287263870239258, 0.006320159912109375, 0.006303904056549073, 0.006240928173065185, 0.0062585282325744625, 0.0062854719161987305, 0.006268352031707764, 0.006716159820556641, 0.006614751815795898, 0.006714879989624023, 0.007045055866241455, 0.0072341117858886715, 0.006334559917449951, 0.006286367893218994, 0.006259615898132324, 0.006269343852996826, 0.006282911777496338, 0.006317376136779785, 0.006298111915588379, 0.006301887989044189, 0.006373023986816407, 0.007264607906341552, 0.006338560104370118, 0.006321728229522705, 0.00629804801940918, 0.006333568096160889, 0.006127039909362793, 0.006261312007904053, 0.006191264152526856, 0.006408031940460205, 0.006227968215942382, 0.006340608119964599, 0.006267136096954346, 0.006393919944763184, 0.006315711975097656, 0.0062156801223754886, 0.006359007835388184, 0.006297632217407227, 0.0065064959526062015, 0.006313983917236328, 0.006317376136779785, 0.006312640190124512, 0.00632422399520874, 0.0063056640625, 0.006370495796203613, 0.006256927967071533, 0.00629417610168457, 0.006256703853607178, 0.006318016052246094, 0.007324800014495849, 0.006293536186218262, 0.006380383968353272, 0.006317408084869385, 0.0065064640045166015, 0.006308544158935547, 0.006522111892700196, 0.006299776077270508, 0.0062798080444335935, 0.0062259521484375, 0.006242144107818604, 0.0062399678230285645, 0.006264575958251953, 0.006219423770904541, 0.0062800321578979496, 0.006228096008300781, 0.006275392055511474, 0.006280543804168701, 0.006236576080322266, 0.006260255813598633, 0.0063385281562805176, 0.006257120132446289, 0.006246399879455566, 0.006272704124450684, 0.006238143920898437, 0.006242015838623047, 0.006263455867767334, 0.006275263786315918, 0.006337440013885498, 0.006255871772766113, 0.006391456127166748, 0.00628227186203003, 0.006322688102722168, 0.006252543926239014, 0.0062379841804504396, 0.006339263916015625, 0.006330368041992188, 0.0062988801002502445, 0.006294271945953369, 0.0064268479347229]",tokens/s,159.37245185585684,, 
4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,7436.812288,8048.738304,0.0,7646.216192,7627.584,s,1,13.2579482421875,13.2579482421875,0.0,13.2579482421875,13.2579482421875,13.2579482421875,13.2579482421875,[13.2579482421875],,kWh,0.00017457606932500008,1.924887821256329e-05,5.65525452420057e-05,0.0002503774927795691,,MB,1781.61664,8694.66112,0.0,8277.458944,8199.8592,s,10,3.5925811462402346,0.35925811462402335,0.0013695581169296252,0.359052993774414,0.3603923065185547,0.36132835540771485,0.36207719451904297,"[0.35684686279296873, 0.3584761962890625, 0.3596515197753906, 0.3601842956542969, 0.3585326538085937, 0.35834906005859374, 0.35913156127929685, 0.35897442626953124, 0.362264404296875, 0.360170166015625]",tokens/s,712.5795899360917,kWh,1.0491878434375697e-05,1.1570262446000938e-06,6.972098831643196e-06,1.8621003510618987e-05,tokens/kWh,13747916.42426849,MB,1791.56992,9009.23392,0.0,8592.031744,8476.849152,s,10,29.637718261718746,2.9637718261718744,0.005008545861069562,2.963507568359375,2.9687735107421878,2.970996374511719,2.9727746655273437,"[2.954398681640625, 2.96602294921875, 2.962838134765625, 2.96061376953125, 2.97321923828125, 2.968279541015625, 2.96679345703125, 2.96285791015625, 2.958537353515625, 2.9641572265625]",tokens/s,21.256697105921717,kWh,8.645050377478658e-05,9.534735657917464e-06,5.748192495375674e-05,0.0001534671643864608,tokens/kWh,410511.26637978066,,s,630,29.632593711853026,0.04703586303468734,0.000377765763243554,0.04702848052978516,0.04748235206604004,0.04760046043395996,0.04799247398376465,"[0.04702825546264648, 0.04676544189453125, 0.046362369537353516, 0.046412670135498046, 0.04620230484008789, 0.04617875289916992, 0.046383232116699216, 0.04630963134765625, 0.04650163269042969, 0.04633379364013672, 0.04637900924682617, 0.047856033325195314, 0.046771968841552734, 0.04642531204223633, 0.04626054382324219, 0.04628659057617188, 0.046454689025878904, 0.046467742919921874, 0.04639350509643555, 0.0464793586730957, 0.04679916763305664, 0.046929534912109376, 0.046825183868408206, 0.046942558288574215, 0.04685622406005859, 0.04671619033813477, 0.04649193572998047, 0.046481056213378905, 0.04662764739990234, 0.04671891021728516, 0.04653788757324219, 0.04684649658203125, 0.046674240112304685, 0.04673311996459961, 0.04690348815917969, 0.046870529174804686, 0.046923583984375, 0.04729481506347656, 0.04702377700805664, 0.04685023880004883, 0.04710326385498047, 0.04706921768188477, 0.0469920654296875, 0.04702412796020508, 0.04711529541015625, 0.04745929718017578, 0.04739187240600586, 0.0471325454711914, 0.04700467300415039, 0.04715219116210938, 0.047385601043701174, 0.047196094512939456, 0.04727315139770508, 0.047239105224609376, 0.04736016082763672, 0.047522048950195316, 0.047397342681884766, 0.047411231994628905, 0.04759318542480469, 0.047368000030517575, 0.04749356842041016, 0.04749484634399414, 
0.047530303955078124, 0.04773516845703125, 0.04715155029296875, 0.046481407165527344, 0.04638521575927734, 0.04663283157348633, 0.046465087890625, 0.04648550415039063, 0.046581760406494144, 0.04673273468017578, 0.046623294830322265, 0.04661862564086914, 0.04654463958740234, 0.04642220687866211, 0.046549121856689454, 0.046621631622314454, 0.04666470336914062, 0.0470118408203125, 0.04695654296875, 0.04680499267578125, 0.04688083267211914, 0.04684288024902344, 0.04674041748046875, 0.0479185905456543, 0.04707129669189453, 0.04719577789306641, 0.046980960845947266, 0.047074272155761716, 0.04661043167114258, 0.04688022232055664, 0.046835617065429686, 0.04680563354492188, 0.04707123184204102, 0.046779903411865234, 0.04674387359619141, 0.046788352966308594, 0.047335872650146486, 0.047168895721435546, 0.047284862518310544, 0.0471954231262207, 0.04703078460693359, 0.04696806335449219, 0.047166431427001956, 0.047073184967041014, 0.0474686393737793, 0.04732108688354492, 0.047462398529052735, 0.04739686584472656, 0.04734566497802734, 0.04700364685058594, 0.04724127960205078, 0.047306591033935544, 0.04742278289794922, 0.04821481704711914, 0.048010848999023435, 0.04727836990356445, 0.047144191741943356, 0.04716838455200195, 0.04758528137207031, 0.04789254379272461, 0.047611167907714844, 0.04756444931030274, 0.047782913208007816, 0.04755023956298828, 0.04707360076904297, 0.04665568161010742, 0.04657097625732422, 0.0467215690612793, 0.04676153564453125, 0.04656902313232422, 0.04641164779663086, 0.04646841430664062, 0.04687020874023438, 0.046739456176757815, 0.04661043167114258, 0.04661248016357422, 0.046837760925292966, 0.04693734359741211, 0.046878849029541016, 0.04667046356201172, 0.046647296905517575, 0.04681318283081055, 0.04699750518798828, 0.047089344024658204, 0.04688723373413086, 0.04690124893188476, 0.04704691314697266, 0.04699929428100586, 0.04697907257080078, 0.047068576812744144, 0.04695657730102539, 0.04679244613647461, 0.04678329467773437, 0.04698316955566406, 0.046882015228271484, 0.04693686294555664, 0.04683059310913086, 0.04726595306396485, 0.047118209838867185, 0.04723795318603516, 0.04698502349853516, 0.047052639007568356, 0.04677478408813476, 0.04721049499511719, 0.04706224060058594, 0.04722972869873047, 0.04700774383544922, 0.04727807998657227, 0.047222782135009765, 0.04717929458618164, 0.04705676651000976, 0.04685689544677735, 0.047048160552978516, 0.04702048110961914, 0.04743600082397461, 0.04741507339477539, 0.047306751251220705, 0.04719820785522461, 0.04772249603271484, 0.04759961700439453, 0.04729241561889649, 0.04727603149414063, 0.04754227066040039, 0.047421440124511716, 0.04734137725830078, 0.04761385726928711, 0.04736979293823242, 0.0469730224609375, 0.04648803329467773, 0.04632918548583984, 0.046480224609375, 0.046505184173583985, 0.046527263641357425, 0.04657955169677734, 0.04654095840454102, 0.04694144058227539, 0.04658457565307617, 0.046388671875, 0.0466229133605957, 0.046493888854980465, 0.046723262786865234, 0.04677142333984375, 0.04661942291259766, 0.04685609436035156, 0.0466457290649414, 0.04674636840820313, 0.046795936584472654, 0.0467729606628418, 0.0467589111328125, 0.04664524841308594, 0.04658992004394531, 0.0468480339050293, 0.046929534912109376, 0.04657395172119141, 0.04678656005859375, 0.04685004806518555, 0.04696268844604492, 0.04684902572631836, 0.04712099075317383, 0.046768543243408206, 0.04717363357543945, 0.04705484771728516, 0.04708726501464844, 0.047100257873535153, 0.04734975814819336, 0.047180801391601565, 0.04730265426635742, 0.04738150405883789, 
0.04743737411499024, 0.047309249877929685, 0.047405055999755856, 0.04727603149414063, 0.04725964736938477, 0.04715929412841797, 0.0472138557434082, 0.047270622253417965, 0.04721622467041016, 0.04723548889160156, 0.04721855926513672, 0.04723110580444336, 0.047421440124511716, 0.047266014099121095, 0.04739459228515625, 0.04725507354736328, 0.04736867141723633, 0.04731084823608398, 0.04748204803466797, 0.0474796142578125, 0.04747465515136719, 0.04784483337402344, 0.04735622406005859, 0.04733552169799805, 0.04661248016357422, 0.04635136032104492, 0.04661897659301758, 0.046483585357666016, 0.04679324722290039, 0.04676732635498047, 0.046641952514648435, 0.04672447967529297, 0.04673190307617187, 0.046751201629638674, 0.04669289779663086, 0.046702625274658204, 0.046577632904052736, 0.04680470275878906, 0.04696640014648437, 0.04692201614379883, 0.04693955230712891, 0.04692803192138672, 0.046946399688720705, 0.047185760498046875, 0.048407424926757814, 0.04731843185424805, 0.04664508819580078, 0.04668492889404297, 0.046836894989013673, 0.0472625617980957, 0.04695859146118164, 0.046917057037353514, 0.04712886428833008, 0.047155487060546876, 0.04694630432128906, 0.047080928802490235, 0.047110687255859374, 0.04704051208496094, 0.04701353454589844, 0.04709020614624024, 0.047417152404785154, 0.047513118743896486, 0.047430110931396485, 0.047485088348388674, 0.04740694427490234, 0.0472627182006836, 0.04712243270874023, 0.04740182495117187, 0.047254753112792966, 0.0476743049621582, 0.047898494720458984, 0.047230655670166016, 0.04744646453857422, 0.0475546875, 0.04754009628295899, 0.047446014404296875, 0.04922163009643555, 0.047378593444824216, 0.047527137756347655, 0.04735654449462891, 0.047740478515625, 0.04751814270019531, 0.04765919876098633, 0.047876224517822266, 0.04768735885620117, 0.04722073745727539, 0.04664031982421875, 0.04659814453125, 0.04670956802368164, 0.046653438568115234, 0.04674560165405273, 0.04680499267578125, 0.04662227249145508, 0.046578079223632815, 0.04678659057617188, 0.046798080444335935, 0.047104766845703125, 0.047035808563232424, 0.04699135971069336, 0.04683353424072265, 0.04667660903930664, 0.04662895965576172, 0.0467127685546875, 0.04678649520874024, 0.04683116912841797, 0.0472110710144043, 0.047241214752197266, 0.04704051208496094, 0.04678041458129883, 0.046859455108642575, 0.04715193557739258, 0.047046016693115235, 0.047147647857666015, 0.0471674575805664, 0.04706307220458984, 0.04689625549316406, 0.04702870559692383, 0.04706467056274414, 0.04732704162597656, 0.04734374237060547, 0.047334175109863284, 0.0473612174987793, 0.047382881164550784, 0.04712505722045898, 0.047097759246826174, 0.04728022384643555, 0.047510848999023435, 0.047559680938720705, 0.047107776641845706, 0.04704051208496094, 0.047331329345703124, 0.047263744354248044, 0.047124481201171874, 0.04731289672851562, 0.04712243270874023, 0.04725356674194336, 0.04729439926147461, 0.047280384063720704, 0.047252769470214846, 0.04749935913085938, 0.04768601608276367, 0.04764057540893555, 0.04737638473510742, 0.04732723236083984, 0.04746444702148438, 0.04748896026611328, 0.04764678573608398, 0.0476440315246582, 0.047236927032470705, 0.046792896270751956, 0.04662886428833008, 0.04661638259887695, 0.046510272979736325, 0.0465428466796875, 0.04660361480712891, 0.04675753784179688, 0.04657664108276367, 0.04663241577148437, 0.046649887084960935, 0.04683161544799805, 0.046774112701416015, 0.04686044692993164, 0.04661625671386719, 0.04677580642700195, 0.0466624641418457, 0.04659609603881836, 0.046845951080322266, 
0.04676607894897461, 0.0467836799621582, 0.04706150436401367, 0.04698963165283203, 0.04675484848022461, 0.04671148681640625, 0.04696707153320313, 0.046908863067626955, 0.047043296813964845, 0.04707027053833008, 0.04702288055419922, 0.04694217681884766, 0.04707126235961914, 0.047183265686035154, 0.04723295974731445, 0.04722140884399414, 0.04740483093261719, 0.04708713531494141, 0.0470731201171875, 0.0470880012512207, 0.04739904022216797, 0.0473803825378418, 0.04739487838745117, 0.047495552062988285, 0.047416576385498045, 0.04710067367553711, 0.047439617156982423, 0.04741145706176758, 0.04720435333251953, 0.04698316955566406, 0.04704460906982422, 0.04732505416870117, 0.04801548767089844, 0.04737638473510742, 0.04741510391235351, 0.04745644760131836, 0.047540321350097656, 0.047841182708740236, 0.04760313415527344, 0.04733599853515625, 0.047418975830078126, 0.04752988815307617, 0.04746905517578125, 0.04753952026367188, 0.047279678344726565, 0.04682387161254883, 0.046849502563476565, 0.04656387329101563, 0.0466833610534668, 0.0468691520690918, 0.046988510131835935, 0.0467239990234375, 0.0466464958190918, 0.04660508728027344, 0.04643804931640625, 0.046606464385986326, 0.046450912475585936, 0.046747871398925785, 0.04654467010498047, 0.04658790588378906, 0.04667801666259765, 0.046827518463134765, 0.04665244674682617, 0.04670479965209961, 0.04658998489379883, 0.04677465438842773, 0.046674110412597655, 0.04869142532348633, 0.04688281631469727, 0.04708534240722656, 0.046903518676757815, 0.04679884719848633, 0.047013343811035155, 0.046870399475097656, 0.046776992797851566, 0.04692172622680664, 0.04679065704345703, 0.04688838577270508, 0.046985790252685546, 0.04686643218994141, 0.04701980972290039, 0.04706335830688477, 0.046983070373535156, 0.046927871704101565, 0.04697907257080078, 0.04720982360839844, 0.047322784423828125, 0.047206878662109375, 0.0470984001159668, 0.04717567825317383, 0.04719001770019531, 0.04742550277709961, 0.047390335083007815, 0.047255233764648436, 0.04713516616821289, 0.047178016662597654, 0.04716134262084961, 0.047328960418701174, 0.047249664306640626, 0.04806588745117187, 0.04738326263427734, 0.04762419128417969, 0.047446014404296875, 0.04724118423461914, 0.04754947280883789, 0.04752870559692383, 0.047537952423095706, 0.047273983001708986, 0.046772224426269535, 0.046309280395507815, 0.04635862350463867, 0.04644464111328125, 0.046430110931396484, 0.04656528091430664, 0.04670883178710938, 0.046626495361328124, 0.04674553680419922, 0.04662217712402344, 0.046620960235595706, 0.04647180938720703, 0.046685985565185543, 0.04655324935913086, 0.04687200164794922, 0.046785152435302735, 0.04675980758666992, 0.04673651123046875, 0.04707030487060547, 0.04712847900390625, 0.04702819061279297, 0.04694019317626953, 0.046860286712646484, 0.04667391967773438, 0.04674560165405273, 0.046736961364746095, 0.04670300674438477, 0.046754878997802736, 0.04677321624755859, 0.04683366394042969, 0.046852096557617184, 0.0466033935546875, 0.04670876693725586, 0.047033184051513674, 0.04693376159667969, 0.047255809783935544, 0.047123680114746096, 0.046955295562744144, 0.04715625762939453, 0.04720943832397461, 0.04732463836669922, 0.04741926574707031, 0.047290721893310544, 0.04705904006958008, 0.04688284683227539, 0.04706732940673828, 0.04701388931274414, 0.04690124893188476, 0.04693376159667969, 0.04735145568847656, 0.04716134262084961, 0.04721110534667969, 0.047239166259765625, 0.047101951599121096, 0.04760115051269531, 0.04739328002929687, 0.047361248016357424, 0.04736665725708008, 0.04730704116821289, 
0.04756003189086914, 0.04741392135620117, 0.04744521713256836, 0.04731017684936523, 0.04680156707763672, 0.04643123245239258, 0.046487934112548826, 0.04632022476196289, 0.04631964874267578, 0.04635647964477539, 0.046366111755371094, 0.046508544921875, 0.04657571029663086, 0.046753311157226564, 0.0467993278503418, 0.0467119369506836, 0.04680780792236328, 0.04686640167236328, 0.04783919906616211, 0.04794748687744141, 0.04702387237548828, 0.046924510955810544, 0.04677017593383789, 0.046811134338378906, 0.04712985610961914, 0.046795520782470706, 0.046900894165039064, 0.04700307083129883, 0.047079967498779296, 0.04665996932983398, 0.04680438232421875, 0.04674006271362305, 0.04709318542480469, 0.04693459320068359, 0.04722073745727539, 0.046924991607666014, 0.04721468734741211, 0.046875358581542965, 0.046882495880126954, 0.046905662536621096, 0.04715484619140625, 0.0472599983215332, 0.04720822525024414, 0.04737251281738281, 0.047260990142822264, 0.04716726303100586, 0.04706806564331055, 0.04721049499511719, 0.04753347015380859, 0.04739952087402344, 0.047288318634033204, 0.04702412796020508, 0.04721664047241211, 0.047325183868408206, 0.04718592071533203, 0.04742083358764648, 0.04736470413208008, 0.04719001770019531, 0.047338912963867184, 0.047486751556396485, 0.047285057067871096, 0.04744134521484375, 0.047415168762207034, 0.0474422721862793, 0.04749347305297852, 0.047344993591308594]",tokens/s,21.260373159572605,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,11170.459648,12553.4208,0.0,12150.898688,12116.742656,s,1,16.541673828125,16.541673828125,0.0,16.541673828125,16.541673828125,16.541673828125,16.541673828125,[16.541673828125],,kWh,0.0002728465994917011,3.0088333942573003e-05,8.776951465998273e-05,0.00039070444809425687,,MB,2167.066624,13505.527808,0.0,13088.325632,12892.834304,s,10,5.962787658691407,0.5962787658691406,0.0014361496247861764,0.5957967224121093,0.59900234375,0.5990214050292969,0.5990366540527344,"[0.5955895385742187, 0.5951190185546875, 0.5961220092773437, 0.59634423828125, 0.5949058837890625, 0.59600390625, 0.5952977294921875, 0.5989981079101563, 0.5953667602539062, 0.5990404663085938]",tokens/s,429.32939197801613,kWh,1.7416458374754362e-05,1.920704908292925e-06,1.156759095341193e-05,3.090475423645922e-05,tokens/kWh,8283515.1524353335,MB,2167.066624,13883.015168,0.0,13465.812992,13237.636096,s,10,40.78575366210938,4.078575366210937,0.00884052601947971,4.080454345703125,4.089705932617187,4.089837219238281,4.089942248535156,"[4.06139697265625, 4.068272216796875, 4.073059814453125, 4.0738955078125, 4.0814794921875, 4.07942919921875, 4.08378369140625, 4.08479150390625, 4.089968505859375, 
4.0896767578125]",tokens/s,15.44657002587843,kWh,0.00011932692462524684,1.3162372901363742e-05,7.933331510058727e-05,0.00021182261262719785,tokens/kWh,297418.6713053073,,s,630,40.76741284942632,0.06471017912607345,0.0005584043339581829,0.06473465728759766,0.06544433975219727,0.0656420753479004,0.06585083534240722,"[0.06350163269042969, 0.06362796783447265, 0.06347161483764649, 0.06366764831542969, 0.06385107040405273, 0.06454886627197266, 0.0645241928100586, 0.06402057647705078, 0.06351414489746093, 0.06379292678833008, 0.06467206573486328, 0.0641415023803711, 0.06392031860351563, 0.06357196807861328, 0.06331391906738282, 0.06368051147460937, 0.06405913543701172, 0.06409446716308594, 0.06416793823242188, 0.06390371322631835, 0.06416591644287109, 0.06439254760742187, 0.0643897933959961, 0.06455910491943359, 0.0643399658203125, 0.06413516998291016, 0.06456025695800781, 0.06454156494140625, 0.06441165161132813, 0.06425126647949218, 0.06426438140869141, 0.06430486297607421, 0.06469705963134766, 0.06464044952392578, 0.06442185974121094, 0.06415216064453125, 0.06465293121337891, 0.06464979553222656, 0.06471456146240234, 0.06483491516113281, 0.06488745880126953, 0.06489087677001953, 0.06491340637207031, 0.06469542694091797, 0.06475251007080078, 0.06448258972167968, 0.06475440216064453, 0.06488432312011719, 0.06503810882568359, 0.06461830139160156, 0.06442198181152343, 0.06489161682128906, 0.06521222686767578, 0.06500761413574219, 0.06524272155761719, 0.06561033630371094, 0.06527970886230469, 0.0650627212524414, 0.0647493438720703, 0.06484038543701172, 0.0652390365600586, 0.06519602966308594, 0.06510134124755859, 0.06371299362182617, 0.06431334686279297, 0.06397689437866211, 0.06378515243530274, 0.0639614715576172, 0.06455206298828126, 0.06421798706054688, 0.06377660751342773, 0.06335001754760743, 0.06331881713867188, 0.06457907104492187, 0.06413712310791016, 0.06395337677001953, 0.06366790390014648, 0.06365651321411132, 0.06400819396972657, 0.06420480346679687, 0.06456729888916016, 0.06418013000488282, 0.06432777404785156, 0.06478848266601563, 0.06475981140136719, 0.06460211181640625, 0.06417203521728515, 0.06400819396972657, 0.06396454238891601, 0.06439564514160157, 0.06435804748535157, 0.06434467315673828, 0.06425804901123047, 0.06408995056152343, 0.06456050872802735, 0.06502185821533203, 0.06469107055664063, 0.0646344985961914, 0.06426866912841797, 0.06509516906738282, 0.0649969253540039, 0.06479055786132812, 0.0646968994140625, 0.06472473907470704, 0.06485052490234375, 0.06477005004882813, 0.0650588150024414, 0.06481613159179687, 0.0647504653930664, 0.06554227447509765, 0.06529840087890625, 0.06483309173583984, 0.06473334503173828, 0.06568787384033203, 0.06565052795410156, 0.06520764923095704, 0.0648650894165039, 0.0652918701171875, 0.06506742095947265, 0.06477619171142578, 0.06462393951416015, 0.06527830505371093, 0.06560179138183594, 0.06522796630859375, 0.065227294921875, 0.06523737335205078, 0.06363945770263672, 0.06371635055541992, 0.0643880615234375, 0.06439730834960937, 0.06400204467773438, 0.06390784072875977, 0.06395423889160157, 0.06367097473144531, 0.06402047729492187, 0.06387712097167969, 0.06454271697998047, 0.06414950561523437, 0.06384214401245117, 0.064012451171875, 0.06385798263549805, 0.06428300476074218, 0.06383571243286133, 0.0640843505859375, 0.06407337951660157, 0.0648252182006836, 0.06473596954345703, 0.06423155212402344, 0.0643604507446289, 0.06390979385375976, 0.06475532531738282, 0.06478076934814453, 0.06490230560302734, 0.06465827178955078, 0.06412226867675781, 
0.06452489471435546, 0.06453862762451172, 0.06450701141357422, 0.06477686309814454, 0.06445008087158204, 0.06462083435058594, 0.0648298568725586, 0.06515916442871093, 0.06493772888183594, 0.0649826889038086, 0.06542601776123047, 0.0648638687133789, 0.06504281616210937, 0.06486006164550781, 0.0646488037109375, 0.0651431655883789, 0.06492787170410157, 0.06479462432861328, 0.06546371459960937, 0.06523750305175781, 0.06493782043457032, 0.06498102569580078, 0.06503404998779297, 0.06524098968505859, 0.06568396759033203, 0.0651727066040039, 0.06487324523925782, 0.06526927947998047, 0.06537165069580078, 0.06534857940673829, 0.06509260559082031, 0.06512742614746093, 0.06576127624511718, 0.06548627471923828, 0.06375628662109376, 0.0642498550415039, 0.06378496170043946, 0.06367027282714843, 0.06455814361572265, 0.06456159973144532, 0.0641049575805664, 0.06370076751708985, 0.06370860671997071, 0.06356047821044922, 0.06396518325805664, 0.06433382415771484, 0.06418966674804688, 0.06398406219482422, 0.06365014266967774, 0.06401023864746094, 0.0646197738647461, 0.06422351837158204, 0.06408035278320312, 0.06412429046630859, 0.06408665466308594, 0.06423129272460938, 0.06440144348144532, 0.06416188812255859, 0.06435804748535157, 0.06439139556884765, 0.06484185791015624, 0.06476927947998047, 0.06492825317382812, 0.06433984375, 0.06408560180664062, 0.06478313446044921, 0.06486179351806641, 0.06463120269775391, 0.06457478332519531, 0.06435501098632812, 0.06453008270263672, 0.06481279754638672, 0.06463549041748047, 0.06472086334228516, 0.06438435363769532, 0.06474822235107422, 0.06540697479248046, 0.06546431732177735, 0.06505267333984376, 0.06480076599121094, 0.06497484588623047, 0.06519602966308594, 0.06515049743652344, 0.06507158660888672, 0.06531276702880859, 0.06528614044189453, 0.06520832061767579, 0.0650255355834961, 0.06510972595214844, 0.06507590484619141, 0.06515516662597656, 0.06512579345703125, 0.06546902465820313, 0.06548889923095703, 0.06542540740966797, 0.06545999908447266, 0.0660134048461914, 0.06483411407470703, 0.06452019500732421, 0.06460211181640625, 0.06415888214111329, 0.0643239974975586, 0.0642933120727539, 0.06419395446777344, 0.0637724494934082, 0.06363407897949219, 0.0638568000793457, 0.06403276824951172, 0.06416912078857422, 0.06399881744384765, 0.06388531112670899, 0.06362643051147461, 0.06413394927978516, 0.06435020446777344, 0.06448121643066407, 0.06499129486083985, 0.06515711975097656, 0.06471177673339844, 0.06423235321044922, 0.06430105590820312, 0.0641269760131836, 0.06412831878662109, 0.06507081604003906, 0.06445769500732422, 0.0644336929321289, 0.06474800109863281, 0.06455228424072265, 0.06436460876464843, 0.06448188781738282, 0.06469149017333985, 0.065297119140625, 0.06497484588623047, 0.06514208221435547, 0.06501651000976562, 0.06541311645507812, 0.0648622055053711, 0.06474543762207031, 0.064499267578125, 0.06490774536132812, 0.06480214691162109, 0.06506153869628906, 0.06487964630126954, 0.06465372467041015, 0.0651269760131836, 0.06546431732177735, 0.06531021118164063, 0.06534758758544922, 0.0655631332397461, 0.06561980438232422, 0.06544400024414063, 0.06506495666503906, 0.06505439758300781, 0.06507961273193359, 0.06501376342773438, 0.06546534729003907, 0.065993408203125, 0.06524345397949219, 0.0650465316772461, 0.06507033538818359, 0.06580668640136719, 0.06421084594726563, 0.06457724761962891, 0.06433411407470703, 0.06394265747070313, 0.0639447021484375, 0.06401158142089844, 0.06402470397949218, 0.06384214401245117, 0.0641434555053711, 0.06454131317138671, 
0.06416793823242188, 0.06444371032714843, 0.0642526092529297, 0.06418335723876953, 0.06394976043701171, 0.06367961502075195, 0.064050048828125, 0.0648089599609375, 0.06457138824462891, 0.06437273406982422, 0.0643185577392578, 0.0642364501953125, 0.06446284484863281, 0.06462054443359375, 0.06450086212158203, 0.06449199676513671, 0.06453699493408203, 0.06464102172851563, 0.06446854400634766, 0.06453702545166015, 0.06461824035644531, 0.0644938201904297, 0.06476595306396485, 0.06509945678710938, 0.06513488006591797, 0.06468816375732422, 0.06477823638916015, 0.06494822692871094, 0.064876220703125, 0.06496902465820313, 0.06514435577392579, 0.06494461059570313, 0.06478591918945313, 0.06470912170410156, 0.06492364501953125, 0.0651851806640625, 0.06481161499023437, 0.06482339477539062, 0.06501570892333984, 0.06531890869140625, 0.0656662368774414, 0.0651006088256836, 0.06502604675292968, 0.06498025512695313, 0.0652991714477539, 0.06577526092529297, 0.06524912261962891, 0.06506499481201172, 0.06526947021484375, 0.06549785614013671, 0.06585689544677735, 0.06532160186767579, 0.06524256134033203, 0.06434591674804688, 0.06442105865478516, 0.06436147308349609, 0.06449971008300781, 0.06428057861328125, 0.06407577514648438, 0.06405888366699218, 0.06394700622558594, 0.06402470397949218, 0.06401651000976563, 0.06462873840332031, 0.06457724761962891, 0.06459983825683593, 0.0641602554321289, 0.06393584060668946, 0.0639840965270996, 0.06460025787353516, 0.06445875549316406, 0.06490726470947265, 0.0647741470336914, 0.06425167846679687, 0.06439344024658203, 0.06481919860839844, 0.06445670318603515, 0.06431942749023438, 0.06435027313232422, 0.06490646362304688, 0.06459404754638672, 0.06449014282226563, 0.06430060577392578, 0.06406393432617187, 0.06490681457519532, 0.06464966583251953, 0.06502143859863281, 0.06516172790527344, 0.06499270629882813, 0.06518950653076172, 0.06489939117431641, 0.06477053070068359, 0.0653353271484375, 0.06571794891357421, 0.06544761657714844, 0.06496537780761719, 0.06478438568115234, 0.06470861053466796, 0.06543564605712891, 0.06519193267822265, 0.0652390365600586, 0.06517295837402344, 0.06516899108886719, 0.06566393280029297, 0.06524313354492188, 0.06515711975097656, 0.06489215850830078, 0.06567123413085937, 0.06567906951904297, 0.06583599853515625, 0.06537625885009765, 0.0652042236328125, 0.06534963226318359, 0.06543721771240234, 0.0658641586303711, 0.06560720062255859, 0.06392681503295898, 0.06446284484863281, 0.06430912017822266, 0.06369702529907227, 0.0647188491821289, 0.06503218841552734, 0.06453619384765626, 0.06398771286010742, 0.06356774520874023, 0.06379487991333008, 0.06424601745605468, 0.06407635498046875, 0.06419455718994141, 0.06424781036376953, 0.06421238708496094, 0.06429961395263672, 0.06457138824462891, 0.06421257781982422, 0.06437110137939453, 0.06428057861328125, 0.06482851409912109, 0.06497763061523437, 0.06489107513427735, 0.06424361419677735, 0.0639010238647461, 0.06441603088378907, 0.06454105377197265, 0.0644170913696289, 0.06446086120605468, 0.06446153259277344, 0.06496051025390626, 0.0646527328491211, 0.06485820770263671, 0.06485040283203125, 0.06506700897216797, 0.06517759704589844, 0.0652779541015625, 0.06520355224609375, 0.06483580780029297, 0.06520877075195312, 0.06487241363525391, 0.06483561706542969, 0.06471238708496094, 0.06532128143310546, 0.06499244689941407, 0.06493225860595703, 0.06496092987060546, 0.06489027404785157, 0.0649543685913086, 0.06527446746826172, 0.06568141174316407, 0.06566092681884765, 0.06562815856933593, 0.06550521850585937, 
0.06563174438476563, 0.06560157012939453, 0.0654280014038086, 0.06529228973388672, 0.06562608337402344, 0.06533660888671874, 0.06522739410400391, 0.06582489776611328, 0.06539244842529297, 0.06409625244140625, 0.06451382446289063, 0.063842529296875, 0.06386278533935547, 0.06454681396484375, 0.06451760101318359, 0.06408451080322265, 0.06379225540161133, 0.06389017486572265, 0.06414553833007812, 0.06421708679199219, 0.06454489898681641, 0.06412895965576172, 0.0640099868774414, 0.06404934692382812, 0.06459782409667969, 0.06457977294921875, 0.0648305892944336, 0.06441283416748046, 0.06490083312988282, 0.0650416030883789, 0.0647892837524414, 0.0644544677734375, 0.06429676818847656, 0.06409801483154297, 0.06499603271484375, 0.06490726470947265, 0.06477117156982422, 0.06476659393310547, 0.06489730834960937, 0.06485606384277344, 0.06509977722167969, 0.06487401580810546, 0.06518422698974609, 0.0652410888671875, 0.06512191772460937, 0.06512665557861329, 0.06488896179199219, 0.06556172943115235, 0.06483647918701171, 0.06512435150146484, 0.06488473510742188, 0.06530982208251954, 0.06507305908203125, 0.06498397064208984, 0.0653404769897461, 0.06561894226074219, 0.06537779235839844, 0.06582851409912109, 0.06574495697021485, 0.0657823715209961, 0.06565404510498046, 0.06581686401367187, 0.06518592071533204, 0.0655220184326172, 0.06551785278320313, 0.06539865875244141, 0.06516754913330078, 0.06550099182128906, 0.06559273529052734, 0.06542915344238281, 0.06570285034179688, 0.06655795288085938, 0.06402518463134765, 0.06429430389404298, 0.06437741088867187, 0.0643515853881836, 0.0642721939086914, 0.06497574615478516, 0.06477180480957032, 0.06409871673583985, 0.06408914947509765, 0.06431212615966797, 0.06412493133544922, 0.06411603546142577, 0.06416659545898437, 0.064321533203125, 0.06430636596679687, 0.06407244873046875, 0.06413078308105469, 0.06469814300537109, 0.06459040069580078, 0.06492774200439454, 0.06486160278320313, 0.06478505706787109, 0.06492793273925782, 0.06462640380859375, 0.06453606414794921, 0.06441004943847656, 0.06448342132568359, 0.0643420181274414, 0.06455910491943359, 0.06456092834472656, 0.06449945831298828, 0.06468390655517578, 0.0650798110961914, 0.06512239837646484, 0.06490860748291015, 0.06511891174316406, 0.06547865295410156, 0.06542960357666015, 0.06523190307617187, 0.06497305297851562, 0.06523763275146484, 0.06468915557861328, 0.06469324493408203, 0.06464717102050781, 0.06493753814697266, 0.06520057678222656, 0.06556428527832031, 0.06516774749755859, 0.06542745971679688, 0.06572441864013671, 0.06544739532470703, 0.06570652770996094, 0.06538809967041016, 0.06588886260986328, 0.06535740661621094, 0.065697021484375, 0.06525234985351562, 0.06511798095703125, 0.06523926544189453, 0.06508354949951171, 0.06548258972167968, 0.06582208251953126, 0.06590118408203124]",tokens/s,15.453519268610298,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1586.962432,1604.190208,0.0,1201.668096,1189.151232,s,1,8.4352861328125,8.4352861328125,0.0,8.4352861328125,8.4352861328125,8.4352861328125,8.4352861328125,[8.4352861328125],,kWh,3.766827811249793e-05,4.147741742169834e-06,1.1847787255978015e-05,5.366380711064578e-05,,MB,1578.131456,1799.225344,0.0,1382.023168,1351.367168,s,10,0.484584831237793,0.048458483123779296,0.0007073508666428545,0.048841983795166015,0.04925450820922852,0.04929992637634277,0.04933626091003418,"[0.04924441528320313, 0.04882291030883789, 0.04934534454345703, 0.04773798370361328, 0.047417888641357424, 0.04770182418823242, 0.04761743927001953, 0.04892895889282227, 0.04890700912475586, 0.04886105728149414]",tokens/s,5282.872749980426,kWh,1.468573784589633e-06,1.6195723306047074e-07,9.74802566552759e-07,2.605333584202863e-06,tokens/kWh,98259970.06764363,MB,1581.7728,1841.168384,0.0,1423.966208,1407.328256,s,10,16.40045654296875,1.640045654296875,0.19219965471939635,1.7453414916992187,1.8165412719726564,1.8180832580566406,1.8193168469238283,"[1.819625244140625, 1.8146661376953126, 1.6843673095703124, 1.391161865234375, 1.378186279296875, 1.3861680908203124, 1.4925438232421875, 1.806315673828125, 1.8112235107421875, 1.8161986083984376]",tokens/s,38.41356479006649,kWh,4.459828528499658e-05,4.918199388182922e-06,2.007710316384735e-05,6.959358783702687e-05,tokens/kWh,905255.8139053325,,s,630,16.39684947013855,0.02602674519069611,0.0033714348664986384,0.02846175956726074,0.02908328266143799,0.02930663547515869,0.029913509006500246,"[0.02851430320739746, 0.028676095962524413, 0.02850764846801758, 0.028457216262817383, 0.02844268798828125, 0.028350656509399413, 0.02843631935119629, 0.02845916748046875, 0.028399616241455077, 0.028438528060913085, 0.0285347843170166, 0.02854015922546387, 0.0285100154876709, 0.02863171195983887, 0.028784032821655273, 0.02858006477355957, 0.02876483154296875, 0.02873535919189453, 0.02896294403076172, 0.029552255630493164, 0.02987660789489746, 0.02991440010070801, 0.02921481513977051, 0.02921126365661621, 0.029212671279907225, 0.029101600646972658, 0.02912495994567871, 0.02886992073059082, 0.02923129653930664, 0.029118911743164062, 0.028883167266845703, 0.029646015167236327, 0.02931180763244629, 0.029108224868774416, 0.02897830390930176, 0.028896127700805664, 0.028915712356567383, 0.02876620864868164, 0.028690431594848635, 0.028477439880371092, 0.02851020812988281, 0.028725248336791992, 0.028849824905395508, 0.028686080932617188, 0.028725215911865234, 0.029381248474121095, 0.02930009651184082, 0.029202112197875975, 0.028844991683959962, 0.029081247329711915, 0.028782943725585937, 0.028882368087768555, 0.028613183975219728, 0.02872480010986328, 0.029204160690307616, 0.029153215408325196, 0.02913158416748047, 0.029101951599121094, 0.028733568191528322, 0.028618240356445314, 0.028535295486450195, 0.028993535995483398, 0.02863920021057129, 0.028362079620361327, 0.029117088317871093, 0.028620800018310546, 0.028516351699829103, 0.028687936782836914, 0.02853318405151367, 0.02852659225463867, 0.028431840896606445, 0.02845244789123535, 0.028396480560302733, 0.028423488616943358, 0.028439231872558594, 0.02840575981140137, 0.02841708755493164, 0.028433151245117187, 0.028721343994140624, 0.02851430320739746, 0.028518016815185548, 0.028543359756469728, 0.02871500778198242, 0.028546367645263672, 0.02869728088378906, 0.028956672668457032, 0.02903232002258301, 0.029069440841674805, 0.029314207077026366, 0.029584224700927735, 0.029351936340332032, 
0.02926905632019043, 0.02910857582092285, 0.028950592041015626, 0.02879267120361328, 0.028641984939575194, 0.028704383850097655, 0.028608896255493163, 0.028693920135498048, 0.02935663986206055, 0.02935807991027832, 0.029429759979248047, 0.029306880950927733, 0.02920649528503418, 0.02865564727783203, 0.02854265594482422, 0.028512575149536132, 0.02843615913391113, 0.028498239517211914, 0.028497919082641602, 0.02850201606750488, 0.02855731201171875, 0.028594175338745118, 0.028544351577758788, 0.02845884895324707, 0.028820287704467772, 0.03154944038391113, 0.028868608474731446, 0.028688383102416993, 0.028680192947387696, 0.028649471282958985, 0.02879283142089844, 0.028587711334228515, 0.028539199829101563, 0.02910553550720215, 0.029275903701782225, 0.02895462417602539, 0.0291757755279541, 0.02910825538635254, 0.028762111663818358, 0.028503583908081054, 0.028377248764038087, 0.028671615600585936, 0.028766496658325195, 0.02861471939086914, 0.02871331214904785, 0.02857779121398926, 0.028545024871826172, 0.028495872497558594, 0.028460575103759767, 0.0284881591796875, 0.028411872863769533, 0.02842166328430176, 0.028484256744384765, 0.02842198371887207, 0.028467199325561524, 0.028428287506103517, 0.028646400451660156, 0.028705791473388673, 0.03151667213439941, 0.02973695945739746, 0.02898863983154297, 0.028859167098999022, 0.02863046455383301, 0.028703296661376953, 0.028931135177612304, 0.02869875144958496, 0.02892678451538086, 0.028579423904418946, 0.028848543167114257, 0.028596223831176756, 0.028567359924316405, 0.028764352798461915, 0.028686336517333984, 0.028570688247680665, 0.028613088607788086, 0.028469728469848632, 0.028624223709106444, 0.02861532783508301, 0.028647167205810547, 0.02284124755859375, 0.021819744110107422, 0.021859807968139647, 0.021819936752319337, 0.02229862403869629, 0.021942272186279296, 0.02196601676940918, 0.021926719665527342, 0.021968095779418946, 0.02189958381652832, 0.022464544296264648, 0.02196518325805664, 0.021863967895507812, 0.022253376007080078, 0.022190176010131835, 0.022039167404174803, 0.02206924819946289, 0.021954559326171876, 0.022196224212646484, 0.022183168411254884, 0.02233011245727539, 0.022028064727783202, 0.02334230422973633, 0.022115327835083007, 0.021983232498168945, 0.021966848373413086, 0.02207529640197754, 0.022214591979980467, 0.021956768035888672, 0.022125696182250975, 0.022022911071777344, 0.0218789119720459, 0.021964799880981444, 0.02196201515197754, 0.02203926467895508, 0.021997568130493163, 0.02247475242614746, 0.022071296691894532, 0.021968832015991212, 0.02178873634338379, 0.02178006362915039, 0.02191916847229004, 0.021950592041015626, 0.022483999252319337, 0.022017183303833007, 0.02188051223754883, 0.021832672119140625, 0.021894399642944335, 0.021807872772216796, 0.021921791076660157, 0.022060224533081055, 0.02218067169189453, 0.022108160018920898, 0.0218603515625, 0.021830944061279296, 0.021928672790527345, 0.022018016815185545, 0.022220767974853516, 0.024346559524536134, 0.022990976333618164, 0.022018047332763673, 0.022171648025512695, 0.02257254409790039, 0.022130752563476564, 0.02179475212097168, 0.02195199966430664, 0.0218175048828125, 0.02176838493347168, 0.02192860794067383, 0.021950176239013672, 0.021839296340942383, 0.022208831787109376, 0.022055456161499024, 0.021899007797241212, 0.021753376007080077, 0.02196544075012207, 0.021884191513061525, 0.0220263671875, 0.021752511978149414, 0.02183782386779785, 0.021985279083251954, 0.02204649543762207, 0.021952512741088868, 0.02188444709777832, 0.021967327117919922, 
0.021845279693603517, 0.022004447937011718, 0.021773664474487305, 0.02212931251525879, 0.021977088928222657, 0.021766143798828123, 0.02183510398864746, 0.021795488357543944, 0.02189107131958008, 0.021710432052612305, 0.023670240402221678, 0.02318636894226074, 0.021940223693847655, 0.021942272186279296, 0.02183782386779785, 0.021835775375366212, 0.021724159240722657, 0.021703680038452147, 0.021790719985961913, 0.021815135955810548, 0.02185759925842285, 0.021793632507324218, 0.021716096878051757, 0.02189606475830078, 0.021892255783081054, 0.021859167098999023, 0.02214240074157715, 0.0220513916015625, 0.02192793655395508, 0.021917695999145507, 0.021855487823486328, 0.021822208404541014, 0.021765312194824218, 0.02184684753417969, 0.021694208145141603, 0.021762304306030274, 0.021754880905151368, 0.021799936294555664, 0.02175369644165039, 0.02174172782897949, 0.021746816635131835, 0.02192473602294922, 0.021743616104125976, 0.02182943916320801, 0.021811391830444334, 0.021721088409423828, 0.021803007125854493, 0.02183782386779785, 0.021751808166503905, 0.022024192810058595, 0.021772192001342772, 0.021745119094848633, 0.021652095794677733, 0.021630815505981445, 0.021672096252441406, 0.021702655792236326, 0.021651391983032228, 0.021648704528808595, 0.021630912780761718, 0.021807935714721678, 0.0221429443359375, 0.022206464767456056, 0.021887008666992187, 0.022458368301391602, 0.022222208023071288, 0.021869184494018555, 0.0221265926361084, 0.02206947135925293, 0.022552352905273437, 0.021975040435791016, 0.02207289505004883, 0.021929952621459962, 0.022172128677368164, 0.022466560363769532, 0.022450111389160157, 0.022222911834716797, 0.02206515121459961, 0.022149120330810547, 0.021979135513305666, 0.022149120330810547, 0.022073183059692383, 0.02211609649658203, 0.022054752349853515, 0.022007423400878905, 0.021827520370483397, 0.02185113525390625, 0.021847135543823244, 0.021858911514282226, 0.02179462432861328, 0.021899200439453124, 0.02199545669555664, 0.022067840576171876, 0.02240620803833008, 0.022089759826660157, 0.022131616592407227, 0.022153120040893554, 0.021938079833984374, 0.022034496307373048, 0.022054975509643554, 0.022097984313964845, 0.022188032150268554, 0.022027488708496093, 0.021977088928222657, 0.02201228713989258, 0.022299039840698243, 0.02172297668457031, 0.02164908790588379, 0.021690847396850586, 0.02241663932800293, 0.021859071731567384, 0.02163836860656738, 0.02163996887207031, 0.02169856071472168, 0.021694303512573242, 0.021661855697631835, 0.021929824829101562, 0.021712608337402343, 0.02156287956237793, 0.021707712173461916, 0.021751808166503905, 0.021658655166625976, 0.021904352188110352, 0.02191708755493164, 0.02193427276611328, 0.02191001510620117, 0.021810943603515626, 0.021868608474731446, 0.021803199768066408, 0.021544960021972655, 0.022976512908935546, 0.02176345634460449, 0.021689983367919923, 0.021674367904663087, 0.02174627113342285, 0.02174569511413574, 0.021583391189575196, 0.0217969913482666, 0.021690719604492186, 0.021591968536376953, 0.02159452819824219, 0.021663423538208007, 0.021546848297119142, 0.021667167663574217, 0.02161337661743164, 0.021594112396240234, 0.02154457664489746, 0.02164143943786621, 0.021639328002929687, 0.022417407989501953, 0.021764095306396485, 0.02171673583984375, 0.021637247085571288, 0.021792896270751955, 0.022599679946899414, 0.021663200378417968, 0.02160892868041992, 0.021872480392456053, 0.021745344161987305, 0.021594240188598634, 0.021707168579101564, 0.021583871841430666, 0.021626399993896483, 0.024621023178100585, 
0.02185990333557129, 0.021732160568237305, 0.021637247085571288, 0.021542911529541017, 0.021805055618286134, 0.021644351959228515, 0.028655616760253907, 0.02848863983154297, 0.02852022361755371, 0.02881065559387207, 0.028957504272460938, 0.028632352828979492, 0.0286561279296875, 0.02896303939819336, 0.029663232803344725, 0.029642751693725586, 0.028719104766845704, 0.028546783447265626, 0.02853264045715332, 0.02844495964050293, 0.028465248107910155, 0.028366847991943358, 0.028338176727294922, 0.028598623275756838, 0.028585535049438476, 0.028515199661254882, 0.02845030403137207, 0.028590591430664062, 0.028374208450317382, 0.028746559143066407, 0.02856550407409668, 0.028489728927612305, 0.028429599761962892, 0.028404191970825197, 0.028436735153198243, 0.028436479568481447, 0.028495168685913085, 0.028479455947875976, 0.02843084716796875, 0.028714879989624024, 0.02873788833618164, 0.028694528579711914, 0.028837888717651368, 0.02865705680847168, 0.02867056083679199, 0.028884992599487305, 0.028618623733520508, 0.028852352142333983, 0.029249536514282228, 0.02930633544921875, 0.02854966354370117, 0.028434431076049805, 0.028448768615722656, 0.028333887100219727, 0.028546688079833984, 0.028400192260742186, 0.02866147232055664, 0.028373279571533204, 0.02878220748901367, 0.028643104553222658, 0.028559263229370118, 0.028793056488037108, 0.02851478385925293, 0.028475391387939454, 0.02856857681274414, 0.028582847595214844, 0.028581951141357424, 0.02839344024658203, 0.028442655563354492, 0.02879897689819336, 0.02870649528503418, 0.028541248321533205, 0.028493183135986328, 0.028481216430664064, 0.028427200317382814, 0.02854707145690918, 0.028440576553344726, 0.028631040573120117, 0.028739456176757813, 0.028954559326171875, 0.029225120544433592, 0.029487136840820313, 0.029221920013427733, 0.029573951721191406, 0.029519231796264648, 0.028740383148193358, 0.028398143768310548, 0.028571104049682616, 0.028653247833251953, 0.028687200546264648, 0.028592319488525392, 0.028735008239746094, 0.028563743591308595, 0.028685983657836915, 0.032928001403808596, 0.028752288818359374, 0.028755136489868164, 0.028766719818115235, 0.028706016540527343, 0.028598207473754883, 0.02846726417541504, 0.02833692741394043, 0.028397216796875, 0.028636863708496094, 0.02862761688232422, 0.028450496673583986, 0.028444992065429688, 0.028418079376220703, 0.028487648010253906, 0.028339359283447267, 0.028527103424072265, 0.02863564872741699, 0.028921695709228517, 0.02861846351623535, 0.028767711639404298, 0.029160032272338866, 0.02914518356323242, 0.02962777519226074, 0.028701568603515627, 0.02857263946533203, 0.0285533447265625, 0.02843846321105957, 0.028469280242919923, 0.028449920654296874, 0.028475072860717772, 0.028446495056152345, 0.028659936904907226, 0.028528863906860352, 0.029849311828613282, 0.03144684791564942, 0.028708383560180663, 0.028773088455200196, 0.028602367401123048, 0.028498079299926756, 0.02846294403076172, 0.028399616241455077, 0.028393247604370116, 0.028797119140625, 0.028454944610595702, 0.02853887939453125, 0.028515968322753906, 0.028426624298095702, 0.028711231231689453, 0.028573375701904297, 0.028591680526733398, 0.02856697654724121, 0.028788831710815428, 0.02851728057861328, 0.02855936050415039, 0.02888470458984375, 0.029911327362060546, 0.028657087326049803, 0.028666368484497072, 0.028465215682983398, 0.02851126480102539, 0.028445663452148436, 0.028424095153808594, 0.02836284828186035, 0.02853068733215332, 0.028644832611083984, 0.0286909122467041, 0.028439872741699217, 0.02961401557922363, 0.02869487953186035, 
0.02853321647644043, 0.029487104415893556, 0.028852224349975586, 0.029242944717407227, 0.028559808731079103, 0.028473184585571288, 0.02846476745605469, 0.028500511169433595, 0.028622848510742187, 0.028647424697875977, 0.028651103973388672, 0.028602783203125, 0.02874367904663086, 0.02854297637939453, 0.028637184143066406, 0.02877235221862793, 0.028469247817993162, 0.028488864898681642, 0.028562271118164062, 0.02854911994934082, 0.028726688385009767, 0.029516384124755858, 0.028747488021850585, 0.029053216934204103, 0.029009920120239258, 0.02914508819580078, 0.028837215423583983, 0.02860915184020996, 0.02886249542236328, 0.028978208541870117, 0.031196128845214843, 0.03057254409790039, 0.029245439529418944, 0.029483007431030273, 0.028788320541381834, 0.0285447998046875, 0.028705408096313476, 0.02855695915222168, 0.028792383193969727, 0.02848953628540039, 0.02895929527282715, 0.028620927810668946, 0.028474720001220703, 0.028607423782348634, 0.02855116844177246, 0.028610559463500978, 0.02899705505371094, 0.028781120300292968]",tokens/s,38.42201522599431,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,6767.423488,7769.817088,0.0,7367.294976,7351.94368,s,1,12.741939453125,12.741939453125,0.0,12.741939453125,12.741939453125,12.741939453125,12.741939453125,[12.741939453125],,kWh,0.0001642591552000037,1.811164823456318e-05,5.286865340600744e-05,0.0002352394568405743,,MB,1646.440448,8411.5456,0.0,7994.343424,7863.794176,s,10,3.0284338073730472,0.3028433807373047,0.0009454751657447772,0.3034013824462891,0.3036218139648437,0.3036933074951172,0.3037505023193359,"[0.30256051635742187, 0.30360592651367185, 0.301056640625, 0.3034053344726563, 0.3012239990234375, 0.30244915771484376, 0.30356362915039065, 0.3037648010253906, 0.3034063720703125, 0.3033974304199219]",tokens/s,845.3214310867238,kWh,8.846569199999517e-06,9.756156900101896e-07,5.855130946727264e-06,1.567731583673697e-05,tokens/kWh,16329325.929640967,MB,1655.382016,8684.17536,0.0,8266.973184,8120.408064,s,10,24.751498046875003,2.4751498046875,0.003349627317508383,2.47534521484375,2.478800634765625,2.479545947265625,2.480142197265625,"[2.4690849609375, 2.473358642578125, 2.471820068359375, 2.472419189453125, 2.476474853515625, 2.474215576171875, 2.477121337890625, 2.480291259765625, 2.478635009765625, 2.4780771484375]",tokens/s,25.453004856792525,kWh,7.241697299958683e-05,7.987631412326827e-06,4.823388454667258e-05,0.00012863848895858622,tokens/kWh,489744.5586466907,,s,630,24.74590123367307,0.03927920830741761,0.0003618952930649532,0.03927067184448242,0.03975482406616211,0.03983846321105957,0.04010098854064941,"[0.03852483367919922, 0.03845539093017578, 0.03868057632446289, 0.038620670318603514, 0.03852889633178711, 0.038536865234375, 0.03869795227050781, 0.038744064331054685, 0.038669727325439454, 0.03862764739990234, 0.038844287872314455, 0.03922294235229492, 0.039054080963134764, 
0.03888742446899414, 0.0390549430847168, 0.039069889068603515, 0.03892444610595703, 0.038862911224365235, 0.03888947296142578, 0.0391693115234375, 0.03911692810058594, 0.03905596923828125, 0.03973529434204102, 0.03945471954345703, 0.03914044952392578, 0.03899484634399414, 0.03894844818115235, 0.03888579177856445, 0.03901401519775391, 0.03895846557617187, 0.03883049774169922, 0.03883683013916016, 0.03913523101806641, 0.03904716873168945, 0.039089248657226565, 0.03928102493286133, 0.03933622360229492, 0.039534847259521486, 0.03954998397827148, 0.03953071975708008, 0.03928271865844726, 0.03950665664672852, 0.039616512298583983, 0.039824928283691406, 0.03976646423339844, 0.03950985717773438, 0.03933407974243164, 0.03940556716918946, 0.039444480895996094, 0.03963046264648438, 0.03953702545166016, 0.03941923141479492, 0.039397472381591796, 0.03932831954956055, 0.03925193786621094, 0.03926019287109375, 0.03937484741210937, 0.039392959594726565, 0.039790687561035154, 0.03978406524658203, 0.03970723342895508, 0.03981107330322266, 0.039867904663085936, 0.039446529388427735, 0.039049217224121094, 0.03892019271850586, 0.038954017639160156, 0.039029727935791014, 0.03878297424316406, 0.03861280059814453, 0.03853535842895508, 0.038569183349609376, 0.03851753616333008, 0.038623233795166016, 0.038610366821289065, 0.038563713073730466, 0.03878895950317383, 0.039015262603759766, 0.039032833099365234, 0.03901030349731445, 0.03912835311889649, 0.03916259384155273, 0.03892019271850586, 0.03910192108154297, 0.03912348937988281, 0.039012065887451174, 0.03910172653198242, 0.039449600219726565, 0.03951142501831055, 0.03930515289306641, 0.03920966339111328, 0.039600128173828124, 0.039403518676757815, 0.039337982177734376, 0.03924918365478516, 0.03915439987182617, 0.03915161514282227, 0.0393438720703125, 0.039295230865478516, 0.03922534561157227, 0.03924694442749024, 0.03908403015136719, 0.03896207809448242, 0.03911372756958008, 0.03932672119140625, 0.0393809928894043, 0.039757598876953126, 0.039508190155029294, 0.039397377014160156, 0.03937203216552734, 0.0392628173828125, 0.03984147262573242, 0.03986048126220703, 0.03971481704711914, 0.039673408508300784, 0.039555744171142576, 0.03999097442626953, 0.039823169708251956, 0.03959561538696289, 0.03954937744140625, 0.039592414855957034, 0.039775390625, 0.03970729446411133, 0.039693695068359375, 0.03974636840820313, 0.03965465545654297, 0.03911340713500976, 0.038901504516601564, 0.03872358322143555, 0.03868902587890625, 0.03887923049926758, 0.03880099105834961, 0.038906272888183595, 0.03889705657958984, 0.03879792022705078, 0.03872079849243164, 0.03944051361083984, 0.039102752685546874, 0.03897171020507813, 0.038851646423339846, 0.03884553527832031, 0.03872137451171875, 0.038735870361328126, 0.03876172637939453, 0.03890047836303711, 0.03891814422607422, 0.03884422302246094, 0.03882412719726563, 0.03912499237060547, 0.039001857757568356, 0.03920409774780274, 0.03926278305053711, 0.03914387130737305, 0.03924956893920899, 0.039299007415771484, 0.03927008056640625, 0.039277217864990235, 0.039143489837646483, 0.03901440048217773, 0.03904431915283203, 0.03922758483886719, 0.03944713592529297, 0.03950387191772461, 0.039444480895996094, 0.03944857788085938, 0.0393256950378418, 0.03979257583618164, 0.039616287231445314, 0.0393485107421875, 0.03936460876464844, 0.03930931091308594, 0.03938508987426758, 0.03941580963134766, 0.03931071853637695, 0.0395428466796875, 0.039825984954833984, 0.039600128173828124, 0.03938710403442383, 0.03942607879638672, 0.039517982482910156, 
0.03951433563232422, 0.039413726806640625, 0.039561054229736325, 0.03953206253051758, 0.03952025604248047, 0.03979740905761719, 0.03982950210571289, 0.03991686248779297, 0.03983225631713867, 0.03903120040893555, 0.038894977569580075, 0.03875526428222656, 0.03866419219970703, 0.03870105743408203, 0.03904307174682617, 0.03873756790161133, 0.038790592193603514, 0.03876752090454102, 0.03886489486694336, 0.03885446548461914, 0.038805694580078126, 0.03882732772827149, 0.03889836883544922, 0.039118209838867185, 0.039203456878662106, 0.03895040130615234, 0.038888095855712894, 0.039133022308349606, 0.03913523101806641, 0.03903833770751953, 0.0393131217956543, 0.039314334869384765, 0.03910995101928711, 0.03887295913696289, 0.03880428695678711, 0.03906969451904297, 0.03914080047607422, 0.03910863876342773, 0.039010848999023434, 0.03913113784790039, 0.03930108642578125, 0.03907497787475586, 0.0391910400390625, 0.03938137435913086, 0.03948339080810547, 0.03946700668334961, 0.03933980941772461, 0.03938489532470703, 0.039459232330322266, 0.03976704025268555, 0.0397256965637207, 0.03935884857177734, 0.039223297119140625, 0.03916185760498047, 0.039288833618164064, 0.03940556716918946, 0.039395328521728515, 0.03937484741210937, 0.0394444465637207, 0.039659393310546874, 0.03966329574584961, 0.03954940795898437, 0.03949363327026367, 0.039708671569824217, 0.03970041656494141, 0.03966128158569336, 0.03956316757202148, 0.039686592102050784, 0.03982745742797852, 0.039875873565673826, 0.039791328430175785, 0.0397209587097168, 0.03958911895751953, 0.039094207763671875, 0.03892643356323242, 0.038709888458251955, 0.03867452621459961, 0.03861503982543945, 0.038760448455810545, 0.038655265808105466, 0.03873190307617187, 0.03877948760986328, 0.0388587532043457, 0.03931340789794922, 0.03919027328491211, 0.03886105728149414, 0.03883808135986328, 0.03909212875366211, 0.03894095993041992, 0.03905945587158203, 0.03897958374023437, 0.03906880187988281, 0.03916479873657226, 0.039054561614990234, 0.038953758239746096, 0.039000064849853515, 0.039182334899902346, 0.03933900833129883, 0.039482368469238284, 0.039374431610107424, 0.03905980682373047, 0.03904431915283203, 0.039271263122558596, 0.03930112075805664, 0.03912268829345703, 0.03916211318969726, 0.03924911880493164, 0.03940576171875, 0.03946140670776367, 0.03919571304321289, 0.03930156707763672, 0.0394983024597168, 0.03957145690917969, 0.039362560272216796, 0.03933184051513672, 0.039430015563964846, 0.03946268844604492, 0.039721057891845706, 0.03956492614746094, 0.039524158477783206, 0.0394719352722168, 0.03968166351318359, 0.039667232513427735, 0.03956617736816406, 0.040021759033203125, 0.039966209411621094, 0.0398485107421875, 0.03966934585571289, 0.039516319274902345, 0.039544704437255856, 0.03973932647705078, 0.040029983520507816, 0.04001414489746094, 0.03975632095336914, 0.03974063873291016, 0.03870719909667969, 0.03872713470458984, 0.03862099075317383, 0.038709983825683594, 0.03871705627441406, 0.038908287048339846, 0.03892019271850586, 0.03880755233764648, 0.03906355285644531, 0.03894681549072266, 0.03896115112304688, 0.03896905517578125, 0.03914691162109375, 0.03901676940917969, 0.03885113525390625, 0.03891404724121094, 0.03899596786499023, 0.039000064849853515, 0.038978721618652346, 0.038896064758300784, 0.038784896850585934, 0.03924758529663086, 0.03943097686767578, 0.039290271759033206, 0.039238239288330076, 0.03911814498901367, 0.03903763198852539, 0.03910553741455078, 0.039010913848876956, 0.039354591369628905, 0.03952044677734375, 0.039374431610107424, 
0.03922771072387695, 0.03937900924682617, 0.03921913528442383, 0.03934163284301758, 0.03957609558105469, 0.03950918579101562, 0.03929740905761719, 0.039293121337890625, 0.03935232162475586, 0.03931366348266602, 0.03931340789794922, 0.03918211364746094, 0.03934230422973633, 0.039583168029785155, 0.03956383895874024, 0.03951615905761719, 0.03983564758300781, 0.039790592193603515, 0.03959379196166992, 0.039440574645996096, 0.039288833618164064, 0.039444480895996094, 0.03955507278442383, 0.03958784103393555, 0.03967574310302734, 0.03987472152709961, 0.03962879943847656, 0.03958076858520508, 0.03975465774536133, 0.03990323257446289, 0.03993804931640625, 0.03883001708984375, 0.03869292831420899, 0.038916095733642575, 0.038874336242675785, 0.03859331130981445, 0.038696319580078124, 0.03876233673095703, 0.03866396713256836, 0.03898659133911133, 0.03912515258789063, 0.03893619155883789, 0.03903657531738281, 0.038890209197998044, 0.03898559951782227, 0.03894432067871094, 0.03904774475097656, 0.039064640045166014, 0.03897439956665039, 0.03888947296142578, 0.03915913772583008, 0.03914140701293945, 0.03938777542114258, 0.03940147018432617, 0.03929190444946289, 0.03919564819335938, 0.03920220947265625, 0.03916041564941406, 0.03909632110595703, 0.03901232147216797, 0.03907939147949219, 0.039156097412109375, 0.038999679565429685, 0.03924435043334961, 0.03936460876464844, 0.03918438339233398, 0.039311359405517575, 0.03948691177368164, 0.03978707122802734, 0.03965542221069336, 0.03947315216064453, 0.039486526489257816, 0.0392817268371582, 0.03978841781616211, 0.03970767974853515, 0.0396605110168457, 0.03984076690673828, 0.039752128601074216, 0.039627326965332034, 0.03963302230834961, 0.03943564987182617, 0.03934819030761719, 0.04101583862304688, 0.039239681243896485, 0.03951123046875, 0.03956409454345703, 0.03969833755493164, 0.039566688537597657, 0.039471199035644534, 0.03964380645751953, 0.039917312622070315, 0.03982556915283203, 0.039820545196533205, 0.03960425567626953, 0.039229438781738284, 0.039120094299316406, 0.03898857498168945, 0.03887308883666992, 0.03890367889404297, 0.03880972671508789, 0.03899110412597656, 0.03902694320678711, 0.038954944610595704, 0.03875644683837891, 0.03914937591552734, 0.03908646392822265, 0.03892047882080078, 0.03885615921020508, 0.038963230133056644, 0.03888374328613281, 0.03895305633544922, 0.03892995071411133, 0.03902409744262696, 0.03906377410888672, 0.0392322883605957, 0.03926988983154297, 0.039171871185302735, 0.03976265716552734, 0.03967795181274414, 0.03951001739501953, 0.0392765121459961, 0.039068927764892576, 0.038978336334228515, 0.03914035034179687, 0.03916697692871094, 0.03920444869995117, 0.03923503875732422, 0.03926931381225586, 0.039367774963378906, 0.039381439208984376, 0.03951254272460938, 0.03969023895263672, 0.03968771362304688, 0.03949615859985352, 0.039444480895996094, 0.039329792022705076, 0.03933388900756836, 0.03944758224487305, 0.0395882568359375, 0.03969187164306641, 0.03963779067993164, 0.039575489044189456, 0.03954012680053711, 0.03966038513183594, 0.04035174560546875, 0.040212318420410155, 0.039974143981933594, 0.03979337692260742, 0.039561344146728517, 0.03952032089233398, 0.03951142501831055, 0.03949820709228516, 0.03987216186523437, 0.03988243103027344, 0.03982009506225586, 0.03991756820678711, 0.040204288482666016, 0.03914934539794922, 0.038746944427490236, 0.03911231994628906, 0.03910899353027344, 0.039007774353027345, 0.0392193603515625, 0.039064895629882815, 0.038986751556396484, 0.03915980911254883, 0.03896710586547852, 
0.03913689422607422, 0.038885025024414065, 0.03878940963745117, 0.03886336135864258, 0.038893310546875, 0.038883712768554686, 0.03906710433959961, 0.0389901123046875, 0.03906755065917969, 0.038984031677246095, 0.03907993698120117, 0.0392309455871582, 0.03932524871826172, 0.03917718505859375, 0.03917004776000976, 0.03919462585449219, 0.03924991989135742, 0.039122943878173826, 0.03915158462524414, 0.039273887634277346, 0.039244415283203125, 0.039288833618164064, 0.03991708755493164, 0.03964473724365234, 0.03962972640991211, 0.03949913787841797, 0.03930518341064453, 0.03917276763916016, 0.03926425552368164, 0.03922937774658203, 0.03918188858032227, 0.039608833312988284, 0.039626750946044925, 0.03955712127685547, 0.03981721496582031, 0.03970048141479492, 0.03967327880859375, 0.039744064331054686, 0.03967795181274414, 0.039489376068115235, 0.039377056121826175, 0.03938304138183594, 0.03937279891967774, 0.0395489273071289, 0.03971648025512695, 0.03967833709716797, 0.03963264083862305, 0.04019225692749023, 0.03993804931640625, 0.03982745742797852, 0.03977388763427735, 0.039645503997802735, 0.039933345794677735, 0.03886342239379883, 0.03893017578125, 0.038854881286621096, 0.03860201644897461, 0.03874278259277344, 0.03868672180175781, 0.03888508987426758, 0.038919937133789065, 0.03886134338378906, 0.03887308883666992, 0.0392355842590332, 0.03904307174682617, 0.03889113616943359, 0.03890419387817383, 0.039074848175048825, 0.03915187072753906, 0.039026527404785155, 0.03893519973754883, 0.03913750457763672, 0.03912704086303711, 0.039040382385253904, 0.03935430526733399, 0.039414337158203125, 0.03940121459960937, 0.03940595245361328, 0.03924787139892578, 0.03905945587158203, 0.038932479858398435, 0.038968704223632813, 0.038992446899414064, 0.039054817199707034, 0.039208671569824216, 0.03935110473632813, 0.03934799957275391, 0.039508129119873045, 0.03973542404174805, 0.039723007202148435, 0.03960969543457031, 0.03949619293212891, 0.03967388916015625, 0.03946713638305664, 0.03940556716918946, 0.039308929443359376, 0.03933427047729492, 0.03952230453491211, 0.039616512298583983, 0.03979673767089844, 0.0398131217956543, 0.03972710418701172, 0.03959750366210937, 0.03945529556274414, 0.03956099319458008, 0.0395634880065918, 0.039395328521728515, 0.039512065887451174, 0.03951001739501953, 0.03954278564453125, 0.039663200378417966, 0.03972716903686523, 0.04003385543823242, 0.040102176666259766, 0.04011030578613281, 0.04009807968139648]",tokens/s,25.458761596555828,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,11047.215104,12553.4208,0.0,12150.898688,12116.742656,s,1,17.374958984375,17.374958984375,0.0,17.374958984375,17.374958984375,17.374958984375,17.374958984375,[17.374958984375],,kWh,0.00026638753347497945,2.9377291599195945e-05,8.28420107179817e-05,0.0003786068357921571,,MB,2021.761024,13513.916416,0.0,13096.71424,12892.965376,s,10,6.35274005126953,0.635274005126953,0.0006303388391127866,0.6350317687988281,0.6361684387207032,0.6364086822509766,0.6366008770751953,"[0.634775390625, 0.6361150512695313, 0.6343910522460937, 0.63664892578125, 0.6349720458984375, 0.6353724975585937, 0.6354431762695313, 0.6349583740234375, 0.6350518188476563, 0.63501171875]",tokens/s,402.97572060868606,kWh,1.8560247330988964e-05,2.04686881637087e-06,1.2336190424500765e-05,3.29433065718606e-05,tokens/kWh,7770926.073907505,MB,2025.84064,13870.432256,0.0,13453.23008,13237.236736,s,10,43.37938134765625,4.337938134765625,0.013299954204283156,4.338426025390625,4.344648925781249,4.358195068359374,4.369031982421874,"[4.323328125, 4.32533935546875, 4.32271630859375, 4.3717412109375, 4.3379326171875, 4.338703125, 4.33959814453125, 4.33814892578125, 4.341638671875, 4.34023486328125]",tokens/s,14.523028693078363,kWh,0.00012886448709859374,1.4214455129869945e-05,8.558577680189763e-05,0.00022866471903036133,tokens/kWh,275512.550721194,,s,630,43.35546378326413,0.06881819648137168,0.0006119319921971192,0.0687756462097168,0.06942293167114258,0.06967524948120117,0.07112882720947265,"[0.06810841369628906, 0.06771737670898438, 0.06787359619140625, 0.06817683410644532, 0.06833561706542969, 0.06857878112792969, 0.06809648132324218, 0.06813702392578125, 0.06773145294189453, 0.06795263671875, 0.06788236999511718, 0.06833216094970704, 0.06849699401855469, 0.06811481475830078, 0.0684851531982422, 0.06822911834716797, 0.0680079345703125, 0.06861424255371094, 0.06842733001708984, 0.06820233917236328, 0.0681006088256836, 0.0679436798095703, 0.06795545959472657, 0.06861209869384766, 0.06821862030029296, 0.06829606628417968, 0.06848941040039062, 0.06891542053222656, 0.068751708984375, 0.06822515106201171, 0.06828166198730469, 0.06892620849609375, 0.06882093048095703, 0.06856195068359375, 0.06854959869384766, 0.06851583862304687, 0.06898470306396484, 0.06894166564941406, 0.06886224365234375, 0.06856918334960938, 0.0689552001953125, 0.06899798583984375, 0.06905350494384765, 0.0688050537109375, 0.06874905395507812, 0.06895894622802734, 0.06914784240722656, 0.06913497924804687, 0.06874543762207032, 0.06877104187011719, 0.06922115325927734, 0.06917257690429687, 0.06917919921875, 0.0694276123046875, 0.06919622039794922, 0.06917120361328125, 0.06926131439208984, 0.06913164520263672, 0.06903404998779297, 0.06900179290771484, 0.06925107574462891, 0.06930022430419921, 0.06925667572021485, 0.0686297607421875, 0.06816185760498047, 0.06789369964599609, 0.06798745727539063, 0.0681465301513672, 0.06794111633300781, 0.06812662506103516, 0.06795378875732422, 0.06789328002929687, 0.068176513671875, 0.06808940887451172, 0.06807516479492187, 0.06847369384765625, 0.06816767883300781, 0.06833993530273437, 0.06827744293212891, 0.06842649841308594, 0.06838050842285157, 0.06842556762695312, 0.06822124481201172, 0.06813699340820313, 0.06825510406494141, 0.06849187469482422, 0.06836137390136719, 0.06871485137939454, 0.06830719757080078, 0.06847103881835938, 0.06864691162109375, 0.06867558288574219, 0.06848822021484376, 0.06874620819091797, 0.06859081268310546, 0.06892022705078125, 0.06856486511230468, 
0.06848649597167969, 0.068666015625, 0.06874451446533203, 0.06854099273681641, 0.0687883529663086, 0.06948454284667968, 0.06872847747802735, 0.0689176025390625, 0.06869766235351563, 0.06909792327880859, 0.06889881896972656, 0.06888992309570313, 0.06872489929199219, 0.06887888336181641, 0.06871449279785156, 0.0687831039428711, 0.06902012634277344, 0.06879695892333984, 0.06914252471923828, 0.06911542510986328, 0.06904265594482421, 0.06885785675048828, 0.06908489227294921, 0.06899935913085938, 0.06923273468017578, 0.06966607666015626, 0.0690711669921875, 0.06909552001953125, 0.06927187347412109, 0.06883468627929687, 0.06821459197998046, 0.06803129577636718, 0.06818355560302734, 0.06810265350341797, 0.06827145385742188, 0.06814582061767578, 0.06777811431884766, 0.06846099090576171, 0.06792790222167969, 0.06794802856445313, 0.06825392150878906, 0.06799609375, 0.0678023681640625, 0.06850841522216797, 0.06897408294677734, 0.06843443298339844, 0.06839318084716797, 0.06813426971435547, 0.06846505737304688, 0.06848223876953125, 0.0683485107421875, 0.06825945281982422, 0.0680754852294922, 0.06814988708496093, 0.06833971405029297, 0.0689560317993164, 0.06856102752685547, 0.06841548919677734, 0.06859923553466797, 0.06863100433349609, 0.06880879974365234, 0.06878358459472657, 0.06824969482421875, 0.06896275329589843, 0.06848512268066406, 0.06842272186279297, 0.0685777587890625, 0.06846691131591796, 0.0689359359741211, 0.06859366607666016, 0.06878610992431641, 0.0688476791381836, 0.06894786834716797, 0.06905999755859375, 0.06900192260742187, 0.06928153228759766, 0.06876531219482422, 0.06915340423583985, 0.06881689453125, 0.06902169799804687, 0.06885990142822265, 0.06884969329833984, 0.06888803100585937, 0.0690301742553711, 0.06893590545654296, 0.06907698822021484, 0.06908060455322265, 0.06921263885498047, 0.0691568603515625, 0.06912818908691407, 0.0692490234375, 0.06922444915771485, 0.06857270050048828, 0.06829539489746093, 0.06824960327148437, 0.06826780700683593, 0.06827369689941407, 0.06905107116699219, 0.06831104278564454, 0.06827388763427734, 0.06797926330566406, 0.06777680206298828, 0.06814924621582032, 0.067989501953125, 0.06801612854003906, 0.06815948486328124, 0.06840739440917969, 0.068312255859375, 0.06816226959228516, 0.06840457916259765, 0.06860047912597657, 0.06854860687255859, 0.06811840057373048, 0.06828659057617187, 0.06838646697998046, 0.06830438232421875, 0.06867440032958984, 0.068384765625, 0.06832089233398438, 0.0681844482421875, 0.06878556823730468, 0.0687069091796875, 0.06883328247070312, 0.0688721923828125, 0.06859139251708984, 0.06892156982421875, 0.06868991851806641, 0.06850137329101562, 0.06859174346923828, 0.06874111938476563, 0.06889676666259766, 0.06878141021728515, 0.07103350067138672, 0.07105126190185547, 0.07078899383544922, 0.0708629150390625, 0.07108016204833985, 0.0712927703857422, 0.07104243469238282, 0.07091577911376953, 0.07085062408447265, 0.07084735870361328, 0.07085874938964844, 0.07078502655029296, 0.07072345733642578, 0.07116748809814454, 0.07127049255371094, 0.07088336181640625, 0.0709043197631836, 0.07114870452880859, 0.07122758483886718, 0.0726817626953125, 0.0712542724609375, 0.06915888214111328, 0.0697364501953125, 0.06862473297119141, 0.06848502349853515, 0.06831836700439453, 0.06841356658935546, 0.06851872253417969, 0.0682592010498047, 0.06795961761474609, 0.06805484771728515, 0.06812979125976562, 0.0678670425415039, 0.0682767333984375, 0.06824150085449218, 0.06829388427734374, 0.06823737335205078, 0.06824784088134765, 0.06854889678955078, 
0.06881513977050781, 0.06871842956542969, 0.06859353637695312, 0.06859379577636719, 0.06847647857666016, 0.06826627349853516, 0.06813097381591797, 0.06850559997558593, 0.06837580871582032, 0.06849817657470703, 0.06859366607666016, 0.06900847625732422, 0.06902003479003906, 0.06860345458984375, 0.06857382202148438, 0.06887254333496094, 0.06907215881347656, 0.06893830108642578, 0.06897475433349609, 0.06877587127685547, 0.06859744262695312, 0.06918592071533203, 0.06890422058105469, 0.0688544921875, 0.06895763397216798, 0.06921014404296875, 0.06906454467773437, 0.06897065734863281, 0.06905696105957031, 0.06904431915283203, 0.0695640640258789, 0.06946031951904297, 0.06931804656982422, 0.06944009399414063, 0.06907644653320312, 0.06913897705078124, 0.06899302673339844, 0.06940486145019531, 0.06923040008544921, 0.06918521881103516, 0.06945414733886719, 0.06914457702636718, 0.0692628173828125, 0.06926099395751953, 0.06973321533203125, 0.06976707458496094, 0.07001891326904297, 0.06824018859863282, 0.06840525054931641, 0.06816153717041015, 0.06825708770751954, 0.06869789123535157, 0.06857753753662109, 0.06816015625, 0.06787686157226562, 0.06799468994140626, 0.06810284423828125, 0.06835625457763672, 0.06845164489746093, 0.06836099243164062, 0.06873292541503906, 0.06841753387451172, 0.0688617935180664, 0.06850576019287109, 0.06848512268066406, 0.06870015716552734, 0.06886809539794922, 0.06890828704833984, 0.06848323059082032, 0.0683853759765625, 0.06871449279785156, 0.06846873474121094, 0.06844825744628906, 0.06845439910888672, 0.0687083511352539, 0.06861766052246093, 0.06850339508056641, 0.06897942352294922, 0.06888652801513671, 0.06878380584716796, 0.06890553283691406, 0.06891648101806641, 0.06892924499511718, 0.0688523178100586, 0.06868582153320313, 0.06907901000976563, 0.06900128173828125, 0.06902569580078124, 0.07017810821533203, 0.06928995513916016, 0.06890185546875, 0.069212158203125, 0.06893772888183594, 0.06936166381835937, 0.06927750396728516, 0.06916934204101563, 0.06914867401123047, 0.06910771179199218, 0.06887833404541016, 0.06909040069580077, 0.06896896362304687, 0.0693375015258789, 0.06913228607177735, 0.06934528350830078, 0.06920601654052734, 0.06922425842285156, 0.06963629150390625, 0.06924492645263672, 0.06984294128417969, 0.06931798553466798, 0.06906089782714844, 0.06837232208251953, 0.06793555450439454, 0.06819900512695312, 0.06847081756591797, 0.06833763122558593, 0.06874918365478516, 0.0681448974609375, 0.06810854339599609, 0.06830863952636719, 0.06831996917724609, 0.06853427124023438, 0.06835609436035156, 0.06823321533203125, 0.06855884552001953, 0.06827196502685547, 0.06907078552246093, 0.0685723876953125, 0.06838374328613281, 0.06872268676757813, 0.06863053131103515, 0.06860390472412109, 0.06836838531494141, 0.06880255889892578, 0.06849696350097656, 0.06868422698974609, 0.06895001220703124, 0.06853632354736328, 0.06859696197509765, 0.06886614227294922, 0.06879634857177734, 0.06882572937011719, 0.06883261108398438, 0.06890889739990234, 0.06874822235107422, 0.06881670379638671, 0.06888784027099609, 0.06873900604248047, 0.07004975891113281, 0.06932787322998046, 0.06879436492919921, 0.06880870056152344, 0.06903513336181641, 0.06901235198974609, 0.06896640014648438, 0.06912393951416015, 0.06925737762451172, 0.06903158569335938, 0.069542236328125, 0.06917478179931641, 0.06895871734619141, 0.068927490234375, 0.06954998779296875, 0.06950249481201172, 0.06907939147949219, 0.06904985809326172, 0.06911603546142578, 0.06943360137939453, 0.06925532531738281, 0.06969744110107422, 
0.06944777679443359, 0.06922780609130859, 0.06955014038085937, 0.06851686096191406, 0.06808758544921875, 0.06797042846679688, 0.06842169952392578, 0.06872144317626953, 0.06833939361572265, 0.0679466552734375, 0.06798486328125, 0.06796550750732422, 0.06813299560546875, 0.06833971405029297, 0.06832284545898437, 0.0685060806274414, 0.06847283172607421, 0.06854243469238282, 0.06876573181152344, 0.06831718444824218, 0.0688345947265625, 0.06885596466064453, 0.06844882965087891, 0.06833766174316407, 0.06845407867431641, 0.0686226577758789, 0.06840255737304687, 0.06876998138427734, 0.06863113403320313, 0.06862989044189453, 0.068669921875, 0.06854844665527343, 0.06872284698486328, 0.06869116973876953, 0.06868851470947265, 0.06922051239013671, 0.06893158721923828, 0.06880358123779297, 0.06873395538330078, 0.0689450912475586, 0.06902003479003906, 0.06903443145751953, 0.06901471710205079, 0.06903014373779297, 0.06895878601074219, 0.06909935760498047, 0.0693638687133789, 0.06910546875, 0.06913043212890625, 0.06949292755126953, 0.06942291259765625, 0.0689208984375, 0.06976156616210938, 0.06892124938964844, 0.0690951690673828, 0.06929366302490235, 0.06896502685546875, 0.06910505676269531, 0.06912470245361328, 0.06936780548095703, 0.069410400390625, 0.06915267181396484, 0.06943590545654296, 0.0695889892578125, 0.06979788970947266, 0.06928572845458984, 0.06915116882324218, 0.0682760009765625, 0.06850975799560546, 0.06827228546142577, 0.06812057495117188, 0.06845388793945313, 0.0683873291015625, 0.06816767883300781, 0.06847596740722656, 0.06835295867919922, 0.06836019134521484, 0.06831715393066407, 0.06836380767822266, 0.06857344055175782, 0.06853673553466796, 0.0682936019897461, 0.06892173004150391, 0.06909184265136718, 0.0686592025756836, 0.06868544006347656, 0.06864934539794922, 0.06862595367431641, 0.06855625915527344, 0.06832844543457031, 0.06851500701904296, 0.06897337341308593, 0.06873414611816406, 0.06849110412597656, 0.06892179107666016, 0.06887235260009765, 0.06915277099609375, 0.06889612579345702, 0.06863155364990234, 0.06878412628173829, 0.0686919708251953, 0.06912726593017578, 0.06893628692626953, 0.06852243041992187, 0.06891072082519531, 0.06899890899658204, 0.06893990325927735, 0.06915478515625, 0.06918585968017578, 0.06917334747314453, 0.06920396423339843, 0.06933708953857422, 0.0692490234375, 0.0692257308959961, 0.06916786956787109, 0.06955622100830078, 0.06926060485839844, 0.06903206634521485, 0.06898473358154297, 0.06917705535888671, 0.06912300872802735, 0.06942310333251953, 0.06938575744628907, 0.0695253448486328, 0.06954662322998047, 0.06950297546386719, 0.06946326446533203, 0.06956060791015625, 0.06922489929199219, 0.06860205078125, 0.06819840240478516, 0.06784204864501953, 0.06839705657958985, 0.06860787200927734, 0.06802854156494141, 0.06795030212402343, 0.06832259368896484, 0.06815984344482422, 0.06839955139160156, 0.06802864074707031, 0.06819805145263672, 0.06835340881347657, 0.06885065460205078, 0.06863667297363281, 0.06864482879638673, 0.06879824066162109, 0.06855705261230469, 0.06877542114257812, 0.0686056671142578, 0.06887468719482422, 0.06864860534667969, 0.06821139526367187, 0.06890290832519531, 0.0686592025756836, 0.06849081420898437, 0.0685588150024414, 0.06893801879882812, 0.06884575653076172, 0.06901760101318359, 0.06864281463623047, 0.06890105438232422, 0.06896825408935547, 0.06891929626464843, 0.06881279754638672, 0.06873056030273438, 0.0686448974609375, 0.06858576202392579, 0.06884502410888672, 0.06902156829833984, 0.06896073913574219, 0.06937619018554687, 
0.06918125152587891, 0.06912150573730469, 0.06946233367919921, 0.0691081314086914, 0.06935247802734375, 0.06932713317871093, 0.06927839660644532, 0.06898847961425782, 0.06941535949707031, 0.06894096374511718, 0.06910243225097656, 0.0688695068359375, 0.06955270385742188, 0.069476318359375, 0.06936380767822266, 0.06938214111328125, 0.06947392272949218, 0.06945420837402344, 0.06947203063964844, 0.06955622100830078, 0.06968275451660157]",tokens/s,14.531040496980895,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl 
return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1584.631808,1570.635776,0.0,1201.668096,1189.151232,s,1,8.403615234375,8.403615234375,0.0,8.403615234375,8.403615234375,8.403615234375,8.403615234375,[8.403615234375],,kWh,3.511641749587397e-05,3.8660726060268e-06,1.1006119915987767e-05,4.998861001788854e-05,,MB,1708.802048,1799.225344,0.0,1382.023168,1351.367168,s,10,0.4759820137023926,0.047598201370239256,0.0001626858909872711,0.047567728042602536,0.04776416168212891,0.047842943954467775,0.047905969772338866,"[0.04792172622680664, 0.04764956665039063, 0.0474736328125, 0.047566303253173826, 0.04730271911621094, 0.04756915283203125, 0.047480705261230466, 0.04771945571899414, 0.04755209732055664, 0.047746654510498046]",tokens/s,5378.35448883293,kWh,1.4359977920954476e-06,1.5830856897339233e-07,9.491061514412116e-07,2.543412512510052e-06,tokens/kWh,100652174.48637846,MB,1717.399552,1841.168384,0.0,1423.966208,1407.328256,s,10,13.668268920898438,1.3668268920898439,0.0032423993119370872,1.3658818359375,1.3711612548828125,1.3717023315429686,1.3721351928710936,"[1.3639447021484374, 1.3659415283203125, 1.371041015625, 1.3658221435546876, 1.368722412109375, 1.372243408203125, 1.364673828125, 1.3626820068359375, 1.363250732421875, 1.3699471435546875]",tokens/s,46.092157218003365,kWh,3.999733017831921e-05,4.4113654140000186e-06,1.8718270693559428e-05,6.312696628587866e-05,tokens/kWh,997988.7155466386,,s,630,13.665357463836683,0.02169104359339154,0.00028029880422812236,0.021626880645751953,0.021889948654174804,0.022095055866241456,0.02298744827270508,"[0.021407743453979493, 0.021522432327270507, 0.021439775466918946, 0.021703392028808593, 0.021549055099487305, 0.02185625648498535, 0.021590015411376954, 0.021600255966186522, 0.02162483215332031, 0.021727231979370116, 0.02166912078857422, 0.021635839462280274, 0.02167398452758789, 0.021646976470947266, 0.02236240005493164, 0.021833824157714843, 0.021702655792236326, 0.02186854362487793, 0.021833375930786134, 0.021815647125244142, 0.021706560134887695, 0.021688512802124024, 0.021515487670898437, 0.02188982391357422, 0.021624000549316406, 0.02158880043029785, 0.02165555191040039, 0.021567039489746094, 0.02147577667236328, 0.021526527404785157, 0.021589056015014648, 0.021528671264648438, 0.02152534484863281, 0.02147123146057129, 0.021591392517089844, 0.021532703399658203, 0.0215599365234375, 0.021570623397827147, 0.02158870315551758, 0.021737184524536133, 0.02177027130126953, 0.021690847396850586, 0.0216944637298584, 0.021557247161865235, 0.021565439224243164, 0.021632640838623048, 0.021634687423706056, 0.02166655921936035, 0.021575679779052736, 0.021523584365844728, 0.021496543884277342, 0.021530752182006837, 0.021753055572509766, 0.021544767379760743, 0.021530656814575194, 0.021478048324584963, 0.02167740821838379, 
0.021571968078613283, 0.021611103057861326, 0.021835712432861327, 0.021811264038085938, 0.021872608184814454, 0.021693599700927733, 0.021999807357788087, 0.02181990432739258, 0.021552352905273436, 0.021525279998779297, 0.021456895828247072, 0.021493759155273438, 0.021562688827514647, 0.021539520263671875, 0.02152038383483887, 0.02153267288208008, 0.021643264770507813, 0.02166783905029297, 0.02153628730773926, 0.02160073661804199, 0.021590015411376954, 0.02163302421569824, 0.021534719467163087, 0.021570655822753908, 0.021603231430053712, 0.021563392639160156, 0.02157151985168457, 0.021856319427490233, 0.02168832015991211, 0.021608448028564452, 0.02149990463256836, 0.02166374397277832, 0.021703680038452147, 0.021566463470458985, 0.02152038383483887, 0.021561344146728514, 0.021669599533081056, 0.0215546875, 0.021560096740722658, 0.02163302421569824, 0.021690048217773438, 0.021641536712646483, 0.021770240783691407, 0.022022144317626953, 0.02185420799255371, 0.021656831741333007, 0.021590688705444335, 0.021557344436645507, 0.021583871841430666, 0.021622783660888673, 0.021712799072265625, 0.021645408630371094, 0.02167913627624512, 0.022604768753051757, 0.022424703598022462, 0.02210700798034668, 0.022042015075683593, 0.02178268814086914, 0.021679935455322267, 0.021688959121704102, 0.02163865661621094, 0.021727680206298828, 0.02155939292907715, 0.02163705635070801, 0.021684032440185547, 0.02164963150024414, 0.02164531135559082, 0.021630495071411134, 0.02158639907836914, 0.02192793655395508, 0.02202009582519531, 0.02224742317199707, 0.02212777519226074, 0.021777248382568358, 0.021572959899902343, 0.021572256088256837, 0.02168988800048828, 0.021528863906860353, 0.02152262306213379, 0.021581663131713866, 0.021614751815795898, 0.021598207473754884, 0.021765888214111326, 0.02163849639892578, 0.021660575866699217, 0.02147327995300293, 0.02150809669494629, 0.021421695709228517, 0.021657312393188476, 0.022006431579589845, 0.023037952423095705, 0.021774335861206053, 0.021835615158081054, 0.021729440689086915, 0.021743776321411133, 0.021601791381835937, 0.02163337516784668, 0.02370969581604004, 0.023240703582763672, 0.021843967437744142, 0.0216494083404541, 0.021733375549316408, 0.021901311874389647, 0.02161408042907715, 0.02161305618286133, 0.02162892723083496, 0.021594112396240234, 0.02161664009094238, 0.021579776763916016, 0.021549055099487305, 0.021624383926391603, 0.021580095291137694, 0.021544960021972655, 0.02166592025756836, 0.021753856658935547, 0.0216944637298584, 0.02162073516845703, 0.021739456176757814, 0.021725248336791993, 0.02157695960998535, 0.021594879150390624, 0.0216760311126709, 0.021632511138916014, 0.021637088775634767, 0.02190185546875, 0.021567487716674806, 0.02166579246520996, 0.021757055282592773, 0.021522432327270507, 0.02155404853820801, 0.021626880645751953, 0.021610496520996093, 0.021438207626342774, 0.022194080352783203, 0.0225994873046875, 0.022131488800048827, 0.021884927749633788, 0.021753856658935547, 0.021843967437744142, 0.0216964168548584, 0.021827648162841797, 0.02159823989868164, 0.02159401512145996, 0.021620832443237304, 0.022241247177124022, 0.021644351959228515, 0.021642208099365234, 0.021639167785644533, 0.021691808700561522, 0.021686880111694336, 0.021682111740112305, 0.021628992080688476, 0.021716991424560548, 0.021624383926391603, 0.021641664505004883, 0.021626880645751953, 0.021551103591918946, 0.02168422317504883, 0.021581823348999024, 0.021604352951049805, 0.02173936080932617, 0.021776287078857422, 0.02184217643737793, 0.02165555191040039, 
0.02170204734802246, 0.02157423973083496, 0.021626207351684572, 0.02154870414733887, 0.021568511962890623, 0.021659423828125, 0.021610719680786133, 0.021611839294433593, 0.02171139144897461, 0.021501312255859373, 0.021564191818237304, 0.0215285758972168, 0.021692256927490234, 0.02150726318359375, 0.021469152450561524, 0.02146406364440918, 0.021501951217651367, 0.02143436813354492, 0.021581375122070312, 0.02148192024230957, 0.021612543106079102, 0.021710847854614256, 0.02150739288330078, 0.02161734390258789, 0.02158527946472168, 0.021713535308837892, 0.021749759674072267, 0.02162483215332031, 0.021700607299804688, 0.021559072494506837, 0.021582048416137697, 0.021544832229614258, 0.021548959732055666, 0.021557952880859373, 0.021485567092895508, 0.02146713638305664, 0.021436416625976562, 0.021534719467163087, 0.021704095840454102, 0.02158857536315918, 0.02154607963562012, 0.021686336517333985, 0.021541248321533202, 0.021566944122314455, 0.021633056640625, 0.021607391357421873, 0.021712896347045898, 0.021696512222290038, 0.021909503936767577, 0.021944320678710938, 0.021784576416015625, 0.021575679779052736, 0.021741216659545898, 0.021770143508911134, 0.021823936462402344, 0.021765439987182618, 0.021975744247436525, 0.02289459228515625, 0.021933792114257812, 0.02174390411376953, 0.021725183486938478, 0.02164691162109375, 0.021549280166625977, 0.021602527618408203, 0.021700607299804688, 0.02163711929321289, 0.0216428165435791, 0.021627328872680665, 0.02149171257019043, 0.02168614387512207, 0.022005887985229493, 0.021809152603149414, 0.021706592559814452, 0.02205411148071289, 0.021908000946044923, 0.021878528594970702, 0.02222127914428711, 0.02176223945617676, 0.02162892723083496, 0.02231091117858887, 0.021575679779052736, 0.02165350341796875, 0.021585727691650392, 0.021620832443237304, 0.02174332809448242, 0.021675775527954102, 0.021635295867919922, 0.021615007400512695, 0.021583871841430666, 0.02165555191040039, 0.021710048675537108, 0.021596960067749024, 0.02169241523742676, 0.02161664009094238, 0.021493152618408205, 0.021549087524414062, 0.021717567443847657, 0.021718687057495117, 0.02164361572265625, 0.021573631286621094, 0.02162073516845703, 0.021811199188232423, 0.021648639678955077, 0.021577535629272462, 0.021549375534057617, 0.02212518310546875, 0.02353561592102051, 0.02466409683227539, 0.02191049575805664, 0.021789695739746092, 0.02172313690185547, 0.02169241523742676, 0.02162249565124512, 0.021635360717773437, 0.021610496520996093, 0.021612255096435547, 0.021633312225341796, 0.02167919921875, 0.021742496490478515, 0.02196272087097168, 0.021810239791870117, 0.021774335861206053, 0.02166032028198242, 0.021704095840454102, 0.022538751602172852, 0.021623199462890624, 0.021626207351684572, 0.02168899154663086, 0.021655296325683592, 0.02159846305847168, 0.02170675277709961, 0.021796415328979492, 0.02170719909667969, 0.021782112121582032, 0.021720607757568358, 0.021772960662841796, 0.02163929557800293, 0.02169660758972168, 0.021683807373046874, 0.021601760864257812, 0.02158892822265625, 0.02179452705383301, 0.02163711929321289, 0.021769760131835937, 0.021815935134887696, 0.02166489601135254, 0.02163609504699707, 0.021585920333862304, 0.02158585548400879, 0.02150579261779785, 0.022261280059814453, 0.02171539115905762, 0.02164975929260254, 0.021595199584960936, 0.02153772735595703, 0.021550687789916992, 0.021492128372192384, 0.021577375411987305, 0.02166204833984375, 0.021601343154907228, 0.021550016403198244, 0.02151628875732422, 0.021421920776367186, 0.021463199615478514, 
0.021876735687255858, 0.021763999938964843, 0.021590015411376954, 0.02153071975708008, 0.021499807357788087, 0.02141391944885254, 0.021624895095825197, 0.021518335342407227, 0.021489664077758788, 0.02151206398010254, 0.021620223999023438, 0.021645952224731445, 0.021587968826293946, 0.02150339126586914, 0.021504608154296875, 0.021523744583129882, 0.021641952514648437, 0.021432319641113282, 0.021526527404785157, 0.021501951217651367, 0.021583871841430666, 0.021398591995239257, 0.021679040908813476, 0.021553152084350585, 0.021639167785644533, 0.021557279586791992, 0.021595680236816406, 0.021580223083496095, 0.021537952423095703, 0.02156220817565918, 0.021514240264892577, 0.021553152084350585, 0.021557247161865235, 0.022237056732177733, 0.021731456756591796, 0.021622783660888673, 0.021608448028564452, 0.0216711368560791, 0.021541568756103517, 0.021592159271240235, 0.021511520385742187, 0.021653823852539063, 0.021780511856079102, 0.022018112182617188, 0.021967103958129883, 0.021822879791259766, 0.021795103073120117, 0.021954879760742188, 0.022211904525756835, 0.02209452819824219, 0.02209548759460449, 0.0217706241607666, 0.02166783905029297, 0.022517759323120116, 0.021635072708129883, 0.02153379249572754, 0.021573631286621094, 0.021536767959594725, 0.021499519348144532, 0.021560800552368163, 0.021463968276977538, 0.021384416580200197, 0.02148428726196289, 0.021468832015991212, 0.021451135635375977, 0.02143846321105957, 0.02172313690185547, 0.021476608276367187, 0.021424831390380858, 0.021547071456909178, 0.021549055099487305, 0.02149718475341797, 0.021445280075073243, 0.021526527404785157, 0.02189107131958008, 0.021626880645751953, 0.021762048721313477, 0.021783647537231447, 0.02165225601196289, 0.02162601661682129, 0.021560287475585936, 0.0216494083404541, 0.021489664077758788, 0.021536767959594725, 0.021962751388549806, 0.024057855606079103, 0.0224768009185791, 0.021733375549316408, 0.021598207473754884, 0.021571584701538086, 0.02152390480041504, 0.021578304290771483, 0.02162220764160156, 0.02165203285217285, 0.02184806442260742, 0.021630592346191406, 0.021665567398071288, 0.02158038330078125, 0.02150399971008301, 0.02147871971130371, 0.02156819152832031, 0.02155513572692871, 0.02154911994934082, 0.021522432327270507, 0.02149760055541992, 0.02145510482788086, 0.021598207473754884, 0.021534719467163087, 0.02151603126525879, 0.021567487716674806, 0.021542303085327147, 0.02155561637878418, 0.02153900718688965, 0.021512447357177736, 0.021571584701538086, 0.021591360092163087, 0.021441215515136718, 0.02150921630859375, 0.021498783111572266, 0.021929632186889647, 0.02171939277648926, 0.021763328552246095, 0.0217259521484375, 0.021953792572021485, 0.02147929573059082, 0.021541759490966796, 0.021551103591918946, 0.021526527404785157, 0.021374975204467773, 0.02146099281311035, 0.021374975204467773, 0.021419776916503906, 0.021548448562622072, 0.021556032180786132, 0.021683584213256835, 0.02166032028198242, 0.02181331253051758, 0.02183366394042969, 0.02169980812072754, 0.021728031158447264, 0.021747200012207032, 0.021705055236816408, 0.021678239822387695, 0.02154431915283203, 0.02160908889770508, 0.02173689651489258, 0.02161827278137207, 0.02154582405090332, 0.02155353546142578, 0.021599136352539062, 0.02171126365661621, 0.021615039825439452, 0.021596160888671875, 0.0216180477142334, 0.021617279052734376, 0.021743392944335936, 0.021747392654418947, 0.021584415435791017, 0.02158380889892578, 0.02158188819885254, 0.02155104064941406, 0.021654815673828126, 0.02160892868041992, 0.021550912857055664, 
0.02182102394104004, 0.021582752227783202, 0.021591039657592775, 0.02165043258666992, 0.02167398452758789, 0.021679231643676758, 0.021611391067504884, 0.021606399536132814, 0.021494943618774413, 0.02192860794067383, 0.021926080703735352, 0.021569536209106444, 0.021567487716674806, 0.021599615097045898, 0.021512832641601563, 0.02157904052734375, 0.02157209587097168, 0.02161257553100586, 0.021881088256835938, 0.021916959762573244, 0.021592256546020507, 0.022134880065917968, 0.022038976669311525, 0.022003551483154297, 0.022106271743774414, 0.0221265926361084, 0.021839872360229492, 0.021589279174804688, 0.021525215148925782, 0.021585119247436522, 0.021750015258789064, 0.02154960060119629, 0.021606399536132814, 0.021555200576782226, 0.021518335342407227, 0.021599584579467774, 0.021777055740356446, 0.02166169548034668, 0.021575679779052736, 0.021611520767211914, 0.02167830467224121, 0.021715744018554688, 0.021514240264892577, 0.023025375366210937, 0.022531967163085937, 0.021700927734375, 0.02165894317626953, 0.022059423446655273, 0.02187241554260254, 0.021807712554931642, 0.021718080520629884, 0.021750207901000976, 0.021633535385131835, 0.02161664009094238, 0.021710687637329102, 0.021766271591186524, 0.021634368896484374, 0.02167190361022949, 0.02152524757385254, 0.021559295654296876, 0.021496864318847658, 0.021611488342285157, 0.02167398452758789, 0.021618112564086914, 0.02188857650756836, 0.021840576171875, 0.021827903747558594, 0.021725183486938478, 0.021708480834960936, 0.02167430305480957, 0.021792064666748046, 0.02170719909667969, 0.021671199798583986, 0.021584863662719726, 0.021700607299804688, 0.021579776763916016, 0.021616575241088867, 0.021536512374877928, 0.021558719635009764, 0.02156563186645508, 0.021635776519775392]",tokens/s,46.101977329696716,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,813.334528,561.905664,0.0,159.383552,143.673856,s,1,7.50515478515625,7.50515478515625,0.0,7.50515478515625,7.50515478515625,7.50515478515625,7.50515478515625,[7.50515478515625],,kWh,1.1158478420854105e-05,1.2228986377304952e-06,3.6277806799989154e-06,1.6009157738583516e-05,,MB,1304.133632,616.431616,0.0,199.22944,186.684928,s,31,0.20093119907379153,0.006481651583025533,0.00010389777823592195,0.006445312023162842,0.006552927970886231,0.006615776062011719,0.006871209526062011,"[0.006973631858825683, 0.006423295974731446, 0.006409503936767578, 0.006398623943328858, 0.006476160049438477, 0.006443071842193604, 0.006445312023162842, 0.0064759359359741215, 0.006441184043884277, 0.006486112117767334, 0.006502399921417237, 0.0064174079895019534, 0.006491487979888916, 0.0064382081031799316, 0.006444416046142578, 0.006467167854309082, 0.006540671825408935, 0.006471487998962402, 0.006430975914001465, 0.006425663948059082, 0.006440703868865967, 0.006429535865783691, 0.0064618239402770995, 0.006448416233062744, 0.006552927970886231, 0.006467072010040284, 
0.006432608127593994, 0.006438111782073975, 0.00659932804107666, 0.006632224082946777, 0.006425727844238281]",tokens/s,39496.106311919844,kWh,1.905631963776542e-07,2.10157273710509e-08,8.305072323708438e-08,2.9462964698578946e-07,tokens/kWh,868887440.9585379,MB,1317.588992,620.62592,0.0,203.423744,186.687488,s,31,10.048262298583984,0.3241374935027092,0.0024631135803904456,0.32332138061523436,0.32610025024414063,0.32665461730957035,0.33285871582031246,"[0.33548345947265623, 0.3229282531738281, 0.3227763671875, 0.32332138061523436, 0.3248694152832031, 0.3230039367675781, 0.32282720947265625, 0.32302767944335936, 0.32409942626953125, 0.3231819763183594, 0.3229313659667969, 0.3230043029785156, 0.3240220031738281, 0.3265749206542969, 0.3229845581054688, 0.3231822509765625, 0.32673431396484376, 0.3237119140625, 0.32159268188476564, 0.32234414672851563, 0.3259073486328125, 0.32258786010742185, 0.32466278076171873, 0.32340029907226564, 0.323005126953125, 0.32610025024414063, 0.3218153076171875, 0.323631103515625, 0.3234344787597656, 0.325078369140625, 0.3260378112792969]",tokens/s,194.3619644836719,kWh,9.273638420557685e-06,1.0227241197393212e-06,3.365724437214853e-06,1.3662086977511862e-05,tokens/kWh,4611301.340981036,,s,1953,10.033810359954812,0.005137639713238522,0.00013653811097587233,0.005112095832824707,0.005185254287719727,0.005239487934112549,0.005743982048034668,"[0.005298208236694336, 0.005466527938842773, 0.005439487934112549, 0.005302048206329345, 0.0052820158004760745, 0.00522649621963501, 0.005246848106384277, 0.0053208317756652836, 0.005187583923339844, 0.0051998720169067385, 0.005222239971160889, 0.005154975891113281, 0.005158912181854248, 0.00514899206161499, 0.005467840194702148, 0.0051383042335510255, 0.005202047824859619, 0.005142528057098389, 0.005130591869354248, 0.005101215839385986, 0.005177120208740235, 0.005129792213439941, 0.005271903991699219, 0.00520630407333374, 0.005162432193756104, 0.005194272041320801, 0.005191743850708008, 0.005169151782989502, 0.005134335994720459, 0.005189631938934326, 0.005213247776031494, 0.006446176052093506, 0.006696864128112793, 0.006758336067199707, 0.0067338237762451176, 0.006879327774047851, 0.005666719913482666, 0.005160192012786865, 0.00514739179611206, 0.0051439681053161625, 0.00518614387512207, 0.005152768135070801, 0.005144576072692871, 0.005179711818695068, 0.005193120002746582, 0.00514025592803955, 0.00513043212890625, 0.005146944046020508, 0.005152512073516846, 0.0051274561882019044, 0.005151711940765381, 0.005197824001312256, 0.005197824001312256, 0.005183487892150879, 0.005152768135070801, 0.005137792110443115, 0.005122687816619873, 0.005136032104492188, 0.005122399806976318, 0.0051036162376403805, 0.005215231895446777, 0.005118144035339355, 0.005124927997589112, 0.005079040050506592, 0.0050954241752624516, 0.005138336181640625, 0.005122144222259522, 0.0050954241752624516, 0.0051151041984558105, 0.005133088111877441, 0.005122047901153564, 0.005095615863800049, 0.005111616134643554, 0.005122047901153564, 0.005109151840209961, 0.005108320236206055, 0.005130239963531494, 0.005134335994720459, 0.005107711791992187, 0.005121056079864502, 0.005158175945281982, 0.005120704174041748, 0.005129216194152832, 0.005119999885559082, 0.005107711791992187, 0.005114175796508789, 0.005107391834259034, 0.005164447784423828, 0.005108320236206055, 0.005119999885559082, 0.005144576072692871, 0.005125408172607422, 0.005122719764709472, 0.0051131839752197265, 0.005133024215698242, 0.00511081600189209, 0.005100512027740479, 0.005117951869964599, 
0.0051027522087097165, 0.005116767883300781, 0.0050924158096313475, 0.005141600131988526, 0.005129983901977539, 0.005095488071441651, 0.005095456123352051, 0.005122047901153564, 0.005134335994720459, 0.005085279941558838, 0.005124000072479248, 0.005091328144073487, 0.005107711791992187, 0.005253215789794922, 0.005144480228424072, 0.005115647792816162, 0.005107711791992187, 0.005175295829772949, 0.005103968143463135, 0.00509494400024414, 0.0050804481506347655, 0.005114880084991455, 0.005089280128479004, 0.005164383888244629, 0.005124000072479248, 0.005121088027954102, 0.005098336219787598, 0.005092192173004151, 0.005026336193084717, 0.0051294717788696285, 0.005112383842468262, 0.005079071998596191, 0.005079008102416992, 0.005101568222045898, 0.005106847763061523, 0.005110591888427734, 0.005128223896026612, 0.005141727924346924, 0.005100575923919678, 0.005088128089904785, 0.0051289920806884765, 0.0051327037811279294, 0.005096831798553467, 0.005125631809234619, 0.005138527870178222, 0.005092063903808594, 0.005085279941558838, 0.0051233282089233395, 0.005126368045806884, 0.005079040050506592, 0.005095871925354004, 0.005134335994720459, 0.005123616218566895, 0.005085728168487549, 0.005136672019958496, 0.005095071792602539, 0.0050769920349121095, 0.005089280128479004, 0.005138527870178222, 0.005103583812713623, 0.005154848098754883, 0.005154719829559326, 0.0050974078178405765, 0.005074687957763672, 0.005077407836914062, 0.005173344135284424, 0.0051253437995910645, 0.0051775679588317875, 0.005109119892120362, 0.005126751899719238, 0.00512656021118164, 0.005113823890686035, 0.005117536067962647, 0.005122560024261475, 0.005107679843902588, 0.00530838394165039, 0.005145792007446289, 0.005094431877136231, 0.005100607872009277, 0.0051145920753479, 0.005111104011535645, 0.005113952159881592, 0.005075808048248291, 0.005115231990814209, 0.005101280212402344, 0.005095647811889648, 0.0051331200599670414, 0.0051216320991516115, 0.005091392040252686, 0.005104928016662597, 0.005157536029815674, 0.005041759967803955, 0.005088831901550293, 0.005219168186187744, 0.005490143775939941, 0.005116064071655274, 0.005228928089141846, 0.005111999988555908, 0.005135744094848633, 0.005120448112487793, 0.00510969591140747, 0.005156544208526612, 0.0051101441383361815, 0.005117951869964599, 0.005180672168731689, 0.005133056163787842, 0.005125887870788574, 0.0051073598861694335, 0.005116511821746826, 0.005145919799804687, 0.0051101441383361815, 0.005085504055023194, 0.0051036162376403805, 0.0051199040412902835, 0.0052139201164245605, 0.0051569280624389644, 0.00512556791305542, 0.005122047901153564, 0.0050819840431213376, 0.005112959861755371, 0.00507539176940918, 0.0050631041526794434, 0.005096896171569824, 0.005108287811279297, 0.005183328151702881, 0.0050850238800048825, 0.005117792129516602, 0.005079520225524902, 0.0050871682167053225, 0.005095488071441651, 0.0051066880226135255, 0.0050835199356079105, 0.005073535919189453, 0.005148191928863525, 0.005109983921051026, 0.005100927829742432, 0.005073791980743409, 0.005107264041900635, 0.005132544040679932, 0.005089471817016602, 0.005093152046203613, 0.005101568222045898, 0.005107840061187744, 0.00518287992477417, 0.0051422080993652345, 0.00512662410736084, 0.0051099519729614256, 0.0051019201278686525, 0.005150720119476319, 0.005123551845550537, 0.005083775997161865, 0.005136288166046142, 0.0051363840103149415, 0.005162720203399658, 0.005079648017883301, 0.006504447937011719, 0.005146624088287354, 0.005119999885559082, 0.005119872093200683, 0.005099936008453369, 
0.005150400161743164, 0.005197023868560791, 0.005120639801025391, 0.005089183807373047, 0.005074272155761719, 0.005141791820526123, 0.005123744010925293, 0.005082623958587646, 0.005071104049682617, 0.005135839939117432, 0.005101439952850342, 0.005081600189208985, 0.0050689601898193356, 0.005128575801849365, 0.005105535984039306, 0.005077311992645264, 0.005133376121520996, 0.005767327785491943, 0.005350944042205811, 0.005120031833648682, 0.005129119873046875, 0.005196832180023194, 0.0050843839645385745, 0.005115647792816162, 0.005066751956939697, 0.005086239814758301, 0.0051027522087097165, 0.005096864223480224, 0.005080959796905518, 0.0050795841217041015, 0.005150815963745117, 0.0051158080101013186, 0.00506060791015625, 0.005077280044555664, 0.005078495979309082, 0.00506496000289917, 0.005068863868713379, 0.005107200145721436, 0.005087456226348877, 0.00512559986114502, 0.005077151775360107, 0.00513651180267334, 0.005124576091766357, 0.005111807823181152, 0.005292031764984131, 0.00513647985458374, 0.0051158080101013186, 0.005070400238037109, 0.005151167869567871, 0.005118207931518555, 0.005132031917572021, 0.005230591773986816, 0.005138432025909424, 0.005067999839782715, 0.00506060791015625, 0.005185535907745361, 0.0050982718467712405, 0.005499743938446045, 0.005097248077392578, 0.0051550078392028804, 0.005134367942810058, 0.005105696201324463, 0.005118015766143799, 0.005134528160095215, 0.005129055976867676, 0.005097472190856934, 0.005116799831390381, 0.005092991828918457, 0.005148767948150635, 0.005109119892120362, 0.005102496147155761, 0.005095104217529297, 0.005095232009887696, 0.0051140799522399905, 0.005124063968658448, 0.005089727878570557, 0.005111199855804443, 0.0051179838180541995, 0.005117663860321045, 0.005094399929046631, 0.005107423782348633, 0.0051017279624938966, 0.005124063968658448, 0.005088448047637939, 0.005134528160095215, 0.005095456123352051, 0.005070784091949463, 0.005087776184082031, 0.005146463871002197, 0.005108160018920898, 0.005131840229034424, 0.005114016056060791, 0.005102687835693359, 0.005080031871795654, 0.0050802559852600095, 0.005103936195373535, 0.0050830078125, 0.0050850558280944825, 0.00507155179977417, 0.005124095916748047, 0.005110112190246582, 0.005090976238250732, 0.005099520206451416, 0.005096672058105468, 0.005092127799987793, 0.005130496025085449, 0.00514467191696167, 0.005317503929138183, 0.005100319862365723, 0.005125408172607422, 0.005186272144317627, 0.005075104236602783, 0.005084832191467285, 0.005103936195373535, 0.005100927829742432, 0.005091392040252686, 0.005097919940948486, 0.0051463360786437986, 0.005196063995361328, 0.005113344192504882, 0.005025919914245606, 0.005154816150665284, 0.005101280212402344, 0.005091008186340332, 0.005076863765716553, 0.0051270718574523925, 0.005098495960235596, 0.005370304107666015, 0.0051448321342468266, 0.005249311923980713, 0.00515283203125, 0.005118048191070557, 0.005162047863006592, 0.0051114559173583984, 0.005110432147979736, 0.005128640174865723, 0.005201791763305664, 0.005095776081085205, 0.005115551948547363, 0.005125855922698975, 0.0051039037704467775, 0.005090559959411621, 0.005115839958190918, 0.005100351810455322, 0.005076255798339844, 0.005173151969909668, 0.005114687919616699, 0.005103295803070069, 0.005075263977050781, 0.005173247814178467, 0.005101568222045898, 0.005078112125396729, 0.005077343940734863, 0.0050869441032409665, 0.005079840183258057, 0.005173247814178467, 0.005078623771667481, 0.005113759994506836, 0.00509830379486084, 0.005099264144897461, 0.005131775856018066, 
0.0051082239151000975, 0.005088831901550293, 0.005091775894165039, 0.005126368045806884, 0.005103392124176025, 0.005086656093597412, 0.0050748162269592285, 0.005108479976654053, 0.005095071792602539, 0.005093440055847168, 0.005115551948547363, 0.00510646390914917, 0.005142399787902832, 0.005070752143859864, 0.005222400188446045, 0.005103007793426513, 0.005067359924316406, 0.005117663860321045, 0.005112095832824707, 0.005130239963531494, 0.005089280128479004, 0.005107967853546142, 0.005048319816589355, 0.0051016960144042965, 0.005142399787902832, 0.005087232112884522, 0.005127552032470703, 0.0051329278945922855, 0.005074944019317627, 0.005119999885559082, 0.005107679843902588, 0.0050854401588439945, 0.005097248077392578, 0.0051138877868652345, 0.005092512130737305, 0.005102303981781006, 0.005111167907714844, 0.0051975998878479, 0.00510262393951416, 0.005115488052368164, 0.005115744113922119, 0.005112063884735107, 0.005074975967407226, 0.005052608013153076, 0.005132287979125977, 0.005097504138946533, 0.005078239917755127, 0.005107999801635742, 0.005115520000457763, 0.005087296009063721, 0.005250944137573242, 0.005297311782836914, 0.005138175964355469, 0.005158912181854248, 0.005148191928863525, 0.005136864185333252, 0.005099520206451416, 0.005105792045593261, 0.005121920108795166, 0.005105663776397705, 0.005121791839599609, 0.005197984218597412, 0.005119967937469483, 0.005099904060363769, 0.005129536151885986, 0.005121664047241211, 0.005092160224914551, 0.005095232009887696, 0.005103807926177978, 0.005105535984039306, 0.005083263874053955, 0.005079040050506592, 0.005133535861968994, 0.00513097620010376, 0.0052163200378417965, 0.00510975980758667, 0.005214079856872559, 0.0051212477684020995, 0.005106592178344726, 0.005156864166259765, 0.0051363840103149415, 0.005111807823181152, 0.005074944019317627, 0.00510975980758667, 0.005099520206451416, 0.0050032639503479, 0.005066976070404052, 0.00511568021774292, 0.005107711791992187, 0.005082367897033691, 0.005175871849060059, 0.0051055998802185054, 0.005117343902587891, 0.005129055976867676, 0.005126143932342529, 0.005162847995758057, 0.005115488052368164, 0.005097536087036133, 0.005125919818878174, 0.00513478422164917, 0.0051123518943786625, 0.005109504222869873, 0.005154751777648926, 0.005107776165008545, 0.005099071979522705, 0.005132991790771485, 0.005133920192718506, 0.005111968040466309, 0.0051283202171325684, 0.00523360013961792, 0.005116543769836426, 0.005132160186767578, 0.005132319927215576, 0.0051158080101013186, 0.005155327796936035, 0.005128191947937012, 0.005148672103881836, 0.005152768135070801, 0.0051773438453674315, 0.005168863773345947, 0.0051567678451538085, 0.005206399917602539, 0.0056031041145324706, 0.005130464076995849, 0.005111711978912354, 0.0050953922271728515, 0.005140607833862305, 0.005203968048095703, 0.005089280128479004, 0.0051363840103149415, 0.005093376159667969, 0.00513372802734375, 0.005063263893127441, 0.005099520206451416, 0.005187007904052734, 0.005077600002288818, 0.005127999782562256, 0.0054265279769897465, 0.005102399826049805, 0.005109119892120362, 0.005112031936645508, 0.005130464076995849, 0.005093152046203613, 0.005130847930908203, 0.005119135856628418, 0.005098144054412842, 0.005105504035949707, 0.005127808094024658, 0.0050243520736694336, 0.0050802559852600095, 0.005086016178131103, 0.00507913589477539, 0.0052202558517456055, 0.005080863952636718, 0.005087456226348877, 0.005109600067138672, 0.00518943977355957, 0.005564767837524414, 0.005141791820526123, 0.005397215843200683, 0.0051877121925354005, 
0.005285759925842285, 0.00512934398651123, 0.005108287811279297, 0.005095615863800049, 0.005163296222686768, 0.005123712062835693, 0.005105887889862061, 0.005120160102844239, 0.0051138558387756345, 0.005092927932739258, 0.005106143951416016, 0.005176191806793213, 0.005101535797119141, 0.005089856147766113, 0.0051838397979736325, 0.005105728149414062, 0.0050969281196594236, 0.005083680152893066, 0.005150144100189209, 0.005075263977050781, 0.005067008018493652, 0.005056320190429687, 0.005085696220397949, 0.0050657281875610355, 0.005075967788696289, 0.0051010560989379885, 0.005102816104888916, 0.005094367980957032, 0.005081088066101074, 0.005107903957366943, 0.005205632209777832, 0.005072415828704834, 0.005058656215667725, 0.0051099519729614256, 0.005120448112487793, 0.005076576232910157, 0.005089632034301758, 0.005129312038421631, 0.0050926079750061035, 0.005098400115966797, 0.005121151924133301, 0.005101151943206787, 0.005091231822967529, 0.005117536067962647, 0.0051164479255676265, 0.005124095916748047, 0.0050852479934692385, 0.005107647895812988, 0.005115903854370117, 0.0050991039276123045, 0.005021376132965088, 0.00507747220993042, 0.0051279358863830565, 0.005106080055236817, 0.005067776203155518, 0.005075488090515137, 0.005102047920227051, 0.0051435518264770505, 0.005090303897857666, 0.005191679954528809, 0.0050910720825195314, 0.005089695930480957, 0.0050657281875610355, 0.005136672019958496, 0.005084928035736084, 0.0051495041847229, 0.005103551864624024, 0.005113696098327637, 0.005175776004791259, 0.005080832004547119, 0.005117343902587891, 0.005097983837127685, 0.005128287792205811, 0.005079071998596191, 0.005131423950195313, 0.005118080139160156, 0.005077119827270508, 0.005136960029602051, 0.00547430419921875, 0.005111807823181152, 0.0051240320205688476, 0.005138495922088623, 0.005111807823181152, 0.005128191947937012, 0.005119999885559082, 0.0051039037704467775, 0.005085951805114746, 0.005079264163970947, 0.00521292781829834, 0.0051561279296875, 0.005114687919616699, 0.005134496212005615, 0.005082848072052002, 0.005058591842651367, 0.005089600086212159, 0.005115231990814209, 0.005089344024658203, 0.005091616153717041, 0.00509878396987915, 0.005135072231292725, 0.005089024066925049, 0.005074687957763672, 0.00507750415802002, 0.005205408096313476, 0.005179808139801025, 0.005091519832611084, 0.00524396800994873, 0.005092095851898193, 0.005116096019744873, 0.005150335788726807, 0.0050917119979858394, 0.005130303859710693, 0.005068128108978272, 0.004984831809997559, 0.005107711791992187, 0.005092959880828857, 0.005065152168273926, 0.005131936073303223, 0.005527872085571289, 0.005101568222045898, 0.0050728960037231445, 0.005119808197021484, 0.005102848052978516, 0.005084095954895019, 0.005140128135681152, 0.00509168004989624, 0.005110976219177246, 0.005094207763671875, 0.005124095916748047, 0.005117951869964599, 0.005107999801635742, 0.005257152080535889, 0.005101408004760742, 0.005074880123138428, 0.0050954241752624516, 0.0051242241859436035, 0.005100512027740479, 0.005087423801422119, 0.005097887992858887, 0.005151040077209472, 0.0051138558387756345, 0.005093696117401123, 0.005111680030822754, 0.005190624237060547, 0.0050793919563293455, 0.0050915517807006835, 0.005112095832824707, 0.005105792045593261, 0.005111680030822754, 0.005122047901153564, 0.005119552135467529, 0.00517574405670166, 0.00509984016418457, 0.0051155838966369625, 0.005111807823181152, 0.00508028793334961, 0.005082047939300537, 0.005125984191894532, 0.005167103767395019, 0.005096799850463867, 0.0050919361114501955, 
0.005084959983825683, 0.005081151962280274, 0.005089471817016602, 0.005106847763061523, 0.005104512214660645, 0.005076511859893799, 0.005125664234161377, 0.005326879978179932, 0.005095776081085205, 0.005095839977264404, 0.0051262078285217285, 0.005107808113098145, 0.005146624088287354, 0.005154816150665284, 0.0051528000831604, 0.005125696182250976, 0.005145023822784424, 0.005160863876342773, 0.005212255954742432, 0.005118015766143799, 0.005105728149414062, 0.005134016036987305, 0.005140672206878662, 0.005150815963745117, 0.005112927913665771, 0.005192512035369873, 0.0051356801986694334, 0.0051084160804748535, 0.005146016120910644, 0.005126495838165283, 0.005248288154602051, 0.005264063835144043, 0.005300704002380371, 0.005127168178558349, 0.005124000072479248, 0.0051476478576660155, 0.005141856193542481, 0.00521235179901123, 0.005175680160522461, 0.005146624088287354, 0.005098720073699951, 0.005083936214447022, 0.005144576072692871, 0.005127327919006348, 0.005086239814758301, 0.005234496116638183, 0.005119999885559082, 0.005093023777008056, 0.005105472087860107, 0.005231135845184326, 0.005099711894989013, 0.005080671787261963, 0.005069056034088135, 0.005103456020355225, 0.00511353588104248, 0.005077184200286865, 0.005150752067565918, 0.005183712005615234, 0.005083136081695557, 0.005092480182647705, 0.005098368167877197, 0.005095776081085205, 0.0051075201034545895, 0.00509935998916626, 0.0051263999938964844, 0.00514025592803955, 0.005101535797119141, 0.005128191947937012, 0.005103104114532471, 0.005091584205627441, 0.0050936322212219234, 0.0051233601570129395, 0.00512278413772583, 0.005104832172393799, 0.005118783950805664, 0.005115903854370117, 0.005263040065765381, 0.005119328022003174, 0.005014336109161377, 0.0050991039276123045, 0.005201759815216064, 0.00514243221282959, 0.0051452798843383786, 0.005130655765533447, 0.005125728130340576, 0.005099679946899414, 0.00513369607925415, 0.005184127807617188, 0.005105792045593261, 0.005153823852539063, 0.0051040959358215334, 0.005091328144073487, 0.005108128070831299, 0.005127871990203857, 0.005105535984039306, 0.005190207958221436, 0.005115039825439453, 0.005110176086425782, 0.005081567764282226, 0.005111616134643554, 0.005238976001739502, 0.005117760181427002, 0.006131040096282959, 0.005272575855255127, 0.00517903995513916, 0.005822463989257813, 0.005722432136535644, 0.005141856193542481, 0.005157472133636475, 0.005136127948760986, 0.005117663860321045, 0.00510595178604126, 0.005134367942810058, 0.00512556791305542, 0.005132832050323486, 0.0051437759399414066, 0.005141280174255371, 0.005142528057098389, 0.005087232112884522, 0.005183487892150879, 0.0051036162376403805, 0.005285984039306641, 0.005138336181640625, 0.005130047798156738, 0.005093567848205567, 0.005089087963104248, 0.005206240177154541, 0.0051056318283081055, 0.00505676794052124, 0.005093120098114014, 0.005127871990203857, 0.005087456226348877, 0.005100895881652832, 0.005386047840118409, 0.005132959842681885, 0.005140768051147461, 0.005148672103881836, 0.005186975955963135, 0.005200831890106201, 0.005135615825653076, 0.005177248001098633, 0.005109439849853516, 0.005128479957580566, 0.0051082239151000975, 0.005124415874481201, 0.005109439849853516, 0.005098976135253906, 0.005080992221832275, 0.005124735832214356, 0.0051138558387756345, 0.005113088130950928, 0.00513699197769165, 0.005158239841461181, 0.005155263900756836, 0.005140768051147461, 0.0051448001861572265, 0.005132031917572021, 0.005116096019744873, 0.005128384113311768, 0.005134143829345703, 0.005122335910797119, 
0.005115007877349854, 0.005130784034729004, 0.005115903854370117, 0.005078815937042237, 0.005112224102020264, 0.005124256134033203, 0.00510643196105957, 0.005080992221832275, 0.005108736038208008, 0.005134335994720459, 0.0051036162376403805, 0.00509935998916626, 0.005120160102844239, 0.005111743927001953, 0.005105728149414062, 0.005129407882690429, 0.005111775875091553, 0.00512886381149292, 0.005087584018707275, 0.005193920135498047, 0.005107647895812988, 0.00510537576675415, 0.005100575923919678, 0.005116928100585938, 0.005103583812713623, 0.005087007999420166, 0.005193920135498047, 0.005141952037811279, 0.005100255966186524, 0.005107583999633789, 0.005096799850463867, 0.00508790397644043, 0.005083104133605957, 0.005107840061187744, 0.005105120182037353, 0.005092959880828857, 0.005337952136993408, 0.0051530561447143556, 0.005102367877960205, 0.005098720073699951, 0.005125984191894532, 0.0051066560745239254, 0.00508406400680542, 0.005005311965942383, 0.00506879997253418, 0.0051298561096191405, 0.005095967769622803, 0.005092544078826904, 0.0050878400802612305, 0.005107647895812988, 0.005088543891906739, 0.005090112209320068, 0.005146912097930908, 0.0052221441268920895, 0.005119455814361572, 0.005144576072692871, 0.0051290240287780765, 0.005140192031860351, 0.005111199855804443, 0.005132895946502685, 0.00510975980758667, 0.005090559959411621, 0.005366144180297852, 0.005126719951629639, 0.005271359920501709, 0.0051133761405944824, 0.005145055770874023, 0.00511356782913208, 0.005124383926391601, 0.005087232112884522, 0.005107711791992187, 0.005091328144073487, 0.005093728065490723, 0.00524563217163086, 0.0051988158226013185, 0.005085375785827637, 0.005095232009887696, 0.005093376159667969, 0.005173247814178467, 0.0051131839752197265, 0.005118624210357666, 0.005105663776397705, 0.0050821762084960935, 0.0050694079399108884, 0.005091455936431885, 0.005081471920013428, 0.0050768318176269535, 0.005087520122528076, 0.005080800056457519, 0.005085184097290039, 0.0050702719688415524, 0.00509555196762085, 0.005198272228240966, 0.0050720000267028805, 0.005093567848205567, 0.005101984024047852, 0.005087520122528076, 0.005081088066101074, 0.005107135772705078, 0.005108160018920898, 0.005089536190032959, 0.005555488109588623, 0.005112192153930664, 0.005097856044769287, 0.005096479892730713, 0.005090367794036865, 0.0050618557929992675, 0.005105728149414062, 0.005079904079437256, 0.005074399948120117, 0.0051164479255676265, 0.005128255844116211, 0.005116896152496338, 0.005106751918792725, 0.005123936176300049, 0.005130303859710693, 0.0050908799171447755, 0.005153120040893555, 0.00510697603225708, 0.005100128173828125, 0.005087456226348877, 0.005115551948547363, 0.005110079765319824, 0.005134592056274414, 0.005123263835906983, 0.005169760227203369, 0.005090943813323975, 0.005095808029174804, 0.005106944084167481, 0.005112576007843018, 0.005227744102478027, 0.005525599956512451, 0.0056072001457214355, 0.005442080020904541, 0.005136767864227295, 0.0051363840103149415, 0.0051171197891235354, 0.006521664142608643, 0.005437439918518067, 0.0052013759613037105, 0.0051454720497131345, 0.005136159896850586, 0.005119872093200683, 0.005175295829772949, 0.005130239963531494, 0.005108767986297607, 0.0051578559875488286, 0.005144927978515625, 0.005141248226165771, 0.005169216156005859, 0.005196640014648438, 0.005189631938934326, 0.005165056228637695, 0.005133823871612549, 0.005165567874908447, 0.005082848072052002, 0.005126016139984131, 0.005124671936035156, 0.005117792129516602, 0.0050728960037231445, 0.005059904098510743, 
0.0051975998878479, 0.005143392086029053, 0.005325088024139404, 0.005142303943634033, 0.00520195198059082, 0.005097439765930176, 0.005214303970336914, 0.005097184181213379, 0.006296576023101807, 0.0051271038055419925, 0.00511187219619751, 0.005150527954101563, 0.005099711894989013, 0.005146111965179443, 0.005127808094024658, 0.0053146882057189945, 0.0050965437889099125, 0.0051380801200866695, 0.005117023944854736, 0.005158944129943848, 0.005147359848022461, 0.005090816020965576, 0.005120160102844239, 0.005117919921875, 0.0051205439567565915, 0.005093599796295166, 0.005091104030609131, 0.005130239963531494, 0.005087584018707275, 0.005082784175872802, 0.005074944019317627, 0.0050926079750061035, 0.005095808029174804, 0.005079679965972901, 0.00510646390914917, 0.005114848136901855, 0.005117951869964599, 0.005081151962280274, 0.00510969591140747, 0.005127776145935059, 0.005112224102020264, 0.005105088233947754, 0.0051262078285217285, 0.005085504055023194, 0.0050804481506347655, 0.0050797438621521, 0.005088768005371094, 0.005087967872619629, 0.005215839862823486, 0.005122399806976318, 0.005113823890686035, 0.005075200080871582, 0.0050720000267028805, 0.005113952159881592, 0.005074687957763672, 0.005071648120880127, 0.00508348798751831, 0.005112864017486572, 0.005186495780944825, 0.005091008186340332, 0.005119999885559082, 0.005099167823791504, 0.005085599899291992, 0.005081024169921875, 0.0051212158203125, 0.005112703800201416, 0.005160704135894776, 0.005126272201538086, 0.005124576091766357, 0.00508681583404541, 0.005074304103851318, 0.0049909758567810054, 0.005093376159667969, 0.005091584205627441, 0.005115647792816162, 0.0050954241752624516, 0.005105120182037353, 0.005087776184082031, 0.0050852160453796385, 0.005107679843902588, 0.005107711791992187, 0.005066783905029297, 0.0051056318283081055, 0.005170976161956787, 0.005117152214050293, 0.005078015804290771, 0.005087391853332519, 0.005123136043548584, 0.005102367877960205, 0.005101280212402344, 0.005152031898498535, 0.005108736038208008, 0.005093440055847168, 0.00506873607635498, 0.005119999885559082, 0.005123680114746094, 0.005097887992858887, 0.005106719970703125, 0.005102911949157715, 0.00508073616027832, 0.0050746240615844726, 0.0051010241508483885, 0.005098336219787598, 0.0050728960037231445, 0.0050787520408630375, 0.005123904228210449, 0.005075712203979493, 0.005117663860321045, 0.005101568222045898, 0.005177631855010987, 0.005095136165618896, 0.00508512020111084, 0.005094560146331787, 0.005099552154541016, 0.005077439785003662, 0.0050668478012084964, 0.005088640213012695, 0.0050797438621521, 0.005072415828704834, 0.005095359802246094, 0.005100607872009277, 0.0050722241401672365, 0.005077311992645264, 0.005101664066314697, 0.005102911949157715, 0.0050858879089355465, 0.005082304000854492, 0.005126976013183594, 0.005110015869140625, 0.005080832004547119, 0.005132287979125977, 0.005092768192291259, 0.005110367774963379, 0.005076064109802246, 0.00499507188796997, 0.005081088066101074, 0.005096960067749024, 0.00506873607635498, 0.005058495998382568, 0.005093760013580322, 0.0050936322212219234, 0.005074463844299317, 0.005099008083343506, 0.005094367980957032, 0.005087232112884522, 0.0050841598510742185, 0.005135072231292725, 0.0051140480041503905, 0.005081056118011474, 0.005077119827270508, 0.005361408233642578, 0.005109824180603027, 0.0050787520408630375, 0.005105728149414062, 0.0051448321342468266, 0.005109024047851562, 0.005215136051177979, 0.005156479835510254, 0.0051019201278686525, 0.0051075520515441895, 0.0051140480041503905, 
0.005221888065338135, 0.005132351875305176, 0.005110335826873779, 0.005134175777435303, 0.005112895965576172, 0.005125184059143067, 0.005137504100799561, 0.0051147518157958986, 0.005081056118011474, 0.005084703922271729, 0.005117599964141846, 0.005108511924743652, 0.00508681583404541, 0.005132607936859131, 0.0051018881797790525, 0.005117311954498291, 0.005087615966796875, 0.005086688041687012, 0.0051017279624938966, 0.005066880226135254, 0.0051857919692993165, 0.005097184181213379, 0.005148416042327881, 0.005073440074920654, 0.005099520206451416, 0.005089280128479004, 0.0050637760162353515, 0.005065631866455078, 0.005126143932342529, 0.005077055931091309, 0.005074079990386963, 0.005100319862365723, 0.005116064071655274, 0.005094687938690185, 0.005177919864654541, 0.00509939193725586, 0.005114655971527099, 0.0050728960037231445, 0.0050862717628479, 0.005089407920837403, 0.005089503765106201, 0.005072351932525635, 0.00506774377822876, 0.005060544013977051, 0.005119999885559082, 0.005074079990386963, 0.0050973758697509764, 0.005087584018707275, 0.00508896017074585, 0.005150847911834717, 0.005108736038208008, 0.005109312057495118, 0.0056360640525817875, 0.006101119995117188, 0.006256768226623535, 0.005269504070281982, 0.0051380801200866695, 0.005118303775787354, 0.005106847763061523, 0.0051199040412902835, 0.005143487930297851, 0.005081088066101074, 0.005083136081695557, 0.005130496025085449, 0.0051179838180541995, 0.0050869441032409665, 0.005416959762573242, 0.005142528057098389, 0.005117152214050293, 0.005114655971527099, 0.0051773438453674315, 0.005091328144073487, 0.005126143932342529, 0.00513753604888916, 0.005124991893768311, 0.00510697603225708, 0.005106592178344726, 0.0051133761405944824, 0.005122367858886719, 0.005227871894836426, 0.0051019201278686525, 0.005100959777832031, 0.0051147518157958986, 0.005113503932952881, 0.005116576194763183, 0.005100607872009277, 0.005108128070831299, 0.00512175989151001, 0.005097663879394531, 0.005240255832672119, 0.005102591991424561, 0.005123295783996582, 0.005118752002716064, 0.005091519832611084, 0.005136064052581787, 0.005127871990203857, 0.005259359836578369, 0.005120223999023438, 0.0051363840103149415, 0.005028128147125244, 0.005117184162139893, 0.005091263771057129, 0.005074048042297363, 0.005119103908538819, 0.0050878081321716305, 0.005091584205627441, 0.005111551761627197, 0.005097472190856934, 0.005068384170532227, 0.005073023796081543, 0.005115392208099365, 0.005094175815582276, 0.0051068801879882815, 0.005101823806762695, 0.005112576007843018, 0.005091135978698731, 0.00509555196762085, 0.005111680030822754, 0.0051036162376403805, 0.005087232112884522, 0.00506879997253418, 0.005128191947937012, 0.005105311870574951, 0.005076416015625, 0.00511683177947998, 0.005107776165008545, 0.0051075520515441895, 0.005105088233947754, 0.005151391983032227, 0.005173471927642823, 0.005103392124176025, 0.005097311973571777, 0.005120160102844239, 0.0050769920349121095, 0.005101247787475586, 0.005126463890075684, 0.005097536087036133, 0.005111072063446045, 0.005083775997161865, 0.005101600170135498, 0.005095071792602539, 0.0051981439590454105, 0.00516099214553833, 0.005105088233947754, 0.005084735870361328, 0.005098495960235596, 0.005691135883331299, 0.005228799819946289, 0.005082943916320801, 0.005101183891296387, 0.005090943813323975, 0.005086143970489502, 0.005079040050506592, 0.005093183994293213, 0.005089471817016602, 0.005079040050506592, 0.00510975980758667, 0.00508460807800293, 0.005105663776397705, 0.005084864139556885, 0.005140927791595459, 
0.005104063987731933, 0.0050274238586425785, 0.005077216148376465, 0.005101664066314697, 0.005126336097717285, 0.00507689619064331, 0.005054463863372802, 0.005104928016662597, 0.005076960086822509, 0.0050731520652771, 0.00506112003326416, 0.005107711791992187, 0.005101568222045898, 0.00511084794998169, 0.005214303970336914, 0.0051147198677062985, 0.00509552001953125, 0.005101503849029541, 0.00511740779876709, 0.005888512134552002, 0.005181439876556396, 0.005384191989898681, 0.005389920234680176, 0.00558128023147583, 0.005196000099182129, 0.005133056163787842, 0.005120255947113037, 0.005143263816833496, 0.0051448001861572265, 0.005121823787689209, 0.005128223896026612, 0.005152736186981201, 0.005201920032501221, 0.005119999885559082, 0.005129824161529541, 0.0051183681488037105, 0.005167103767395019, 0.005113344192504882, 0.005099936008453369, 0.0051200962066650394, 0.005089056015014648, 0.005119391918182373, 0.00509830379486084, 0.005099743843078613, 0.005090144157409668, 0.005096384048461914, 0.005090847969055175, 0.005084799766540527, 0.0051018881797790525, 0.005139008045196534, 0.005113152027130127, 0.005075456142425537, 0.005114016056060791, 0.00517900800704956, 0.005119840145111084, 0.005100063800811768, 0.005160607814788818, 0.0051528639793396, 0.005093152046203613, 0.005136960029602051, 0.005136000156402588, 0.00510595178604126, 0.005087232112884522, 0.005224448204040527, 0.005092959880828857, 0.0052239041328430175, 0.0051311998367309574, 0.005145919799804687, 0.005176000118255615, 0.0051036162376403805, 0.005107295989990234, 0.005107679843902588, 0.0051224961280822755, 0.0050769920349121095, 0.005105663776397705, 0.005189727783203125, 0.005103487968444825, 0.005107232093811035, 0.005124000072479248, 0.005149280071258545, 0.00510745620727539, 0.005166336059570313, 0.005129216194152832, 0.005139904022216797, 0.005126719951629639, 0.005169151782989502, 0.0051404800415039064, 0.005122047901153564, 0.0051775360107421875, 0.005152575969696045, 0.005119647979736328, 0.005085536003112793, 0.005132127761840821, 0.005098944187164307, 0.005106400012969971, 0.005138656139373779, 0.005121312141418457, 0.005077824115753173, 0.00508896017074585, 0.005127615928649902, 0.005093535900115967, 0.00511843204498291, 0.005094848155975342, 0.005138207912445068, 0.0051140480041503905, 0.005248799800872803, 0.005122975826263428, 0.00511078405380249, 0.0050794239044189456, 0.005195648193359375, 0.005083744049072266, 0.005098944187164307, 0.005087423801422119, 0.005117311954498291, 0.005107872009277344, 0.005092192173004151, 0.005097472190856934, 0.005091328144073487, 0.005117023944854736, 0.005238880157470703, 0.005253695964813233, 0.005110015869140625, 0.00510745620727539, 0.0050854401588439945, 0.005107872009277344, 0.005128096103668213, 0.005095359802246094, 0.005056672096252442, 0.005111072063446045, 0.005110559940338134, 0.005111423969268799, 0.005071296215057373, 0.005119872093200683, 0.005090976238250732, 0.005129568099975586, 0.0050917119979858394, 0.005126527786254883, 0.005093376159667969, 0.005123648166656494, 0.005112512111663818, 0.005123551845550537, 0.005073440074920654, 0.005086656093597412, 0.005126719951629639, 0.005086400032043457, 0.005103744029998779, 0.00507155179977417, 0.005121088027954102, 0.005103775978088379, 0.005086336135864258, 0.0051623997688293455, 0.005113952159881592, 0.005068480014801025, 0.00506928014755249, 0.005117951869964599, 0.005105247974395752, 0.0051036162376403805, 0.005127871990203857, 0.0051758079528808594, 0.005095776081085205, 0.005115424156188965, 
0.0051244478225708, 0.005130464076995849, 0.005119775772094727, 0.005137887954711914, 0.005181983947753906, 0.005119999885559082, 0.005156864166259765, 0.005158912181854248, 0.005144576072692871, 0.005111423969268799, 0.005184000015258789, 0.005129983901977539, 0.005275712013244629, 0.005142816066741943, 0.005132031917572021, 0.005152512073516846, 0.005124095916748047, 0.005120639801025391, 0.00513593578338623, 0.005086463928222656, 0.005110367774963379, 0.005126336097717285, 0.005124351978302002, 0.00510041618347168, 0.005114816188812256, 0.00517142391204834, 0.005095263957977295, 0.005114975929260254, 0.005129216194152832, 0.005048351764678955, 0.005126368045806884, 0.00509503984451294, 0.005111936092376709, 0.0051281599998474125, 0.005253471851348877, 0.005101471900939941, 0.005117184162139893, 0.005095359802246094, 0.005067615985870361, 0.005091040134429931, 0.0050928001403808595, 0.005094240188598633, 0.005054399967193603, 0.005119135856628418, 0.00511030387878418, 0.005125760078430176, 0.005100224018096924, 0.005119584083557129, 0.006115263938903809, 0.006099423885345459, 0.006352447986602784, 0.005190080165863037, 0.00514409589767456, 0.005121952056884766, 0.005101664066314697, 0.00516483211517334, 0.005126336097717285, 0.005249375820159912, 0.005130239963531494, 0.00512172794342041, 0.00510748815536499, 0.0051157760620117185, 0.0051495680809020995, 0.005199808120727539, 0.005123551845550537, 0.005134592056274414, 0.005115647792816162, 0.0050795841217041015, 0.005101568222045898, 0.005128191947937012, 0.0051138558387756345, 0.005087071895599365, 0.0050791997909545895, 0.005117663860321045, 0.005089056015014648, 0.005081471920013428, 0.005089503765106201, 0.005139967918395996, 0.005097887992858887, 0.005111807823181152, 0.0051036162376403805, 0.0051274561882019044, 0.0050850238800048825, 0.0051056318283081055, 0.00511683177947998, 0.0050969281196594236, 0.005114175796508789, 0.005148575782775879, 0.0051121277809143065, 0.0051036162376403805, 0.005179391860961914, 0.00510697603225708, 0.0050360321998596195, 0.005111807823181152, 0.005095232009887696, 0.0052135357856750485, 0.005086048126220703, 0.005083136081695557, 0.0050954241752624516, 0.005101119995117188, 0.005093823909759521, 0.005076191902160645, 0.005090047836303711, 0.005106783866882324, 0.0050731520652771, 0.005075808048248291, 0.005113728046417236, 0.005080895900726318, 0.005099679946899414, 0.005089280128479004, 0.0051138558387756345, 0.005103519916534424, 0.00509552001953125, 0.005099520206451416, 0.005084479808807373, 0.005143231868743896, 0.005087232112884522, 0.005098847866058349, 0.005110176086425782, 0.005090976238250732, 0.005083744049072266, 0.005101344108581543, 0.005093599796295166, 0.005090591907501221, 0.005106143951416016, 0.005089183807373047, 0.00508348798751831, 0.005079040050506592, 0.005105663776397705, 0.005080895900726318, 0.005075232028961181, 0.005080543994903565, 0.005114304065704346, 0.005107935905456543, 0.0051320638656616215, 0.005119935989379883, 0.005104800224304199, 0.005090496063232422, 0.005134079933166504, 0.005120128154754639, 0.005096735954284668, 0.005079872131347656, 0.005087135791778564, 0.005098336219787598, 0.005094048023223877, 0.005099264144897461, 0.005122623920440674, 0.005099552154541016, 0.005099520206451416, 0.0051008639335632325, 0.005118656158447266, 0.005137951850891114, 0.0051051521301269534, 0.005144544124603271, 0.005129439830780029, 0.005080607891082764, 0.005140223979949951, 0.005108448028564453, 0.005123551845550537, 0.005104383945465088, 0.0050952000617980955, 
0.005195775985717774, 0.005115071773529053, 0.005102399826049805, 0.005120255947113037, 0.0051418237686157225, 0.0051224961280822755, 0.0051056318283081055, 0.0050869760513305665, 0.005125440120697021, 0.005109024047851562, 0.005095104217529297, 0.0051315841674804685, 0.0051075520515441895, 0.005102528095245361, 0.005138336181640625, 0.005128191947937012, 0.005117280006408691, 0.005112256050109863, 0.005189856052398681, 0.00511081600189209, 0.005094367980957032, 0.005092735767364502, 0.005110367774963379, 0.005083168029785156, 0.005080607891082764, 0.005085152149200439, 0.005118624210357666, 0.0051066880226135255, 0.005078207969665527, 0.005321824073791504, 0.005118527889251709, 0.0052674560546875, 0.005146687984466553, 0.005140416145324707, 0.005155839920043945, 0.005109920024871826, 0.005159039974212646, 0.005139167785644531, 0.005099647998809814, 0.005133952140808105, 0.005232895851135254, 0.005158912181854248, 0.0051279358863830565, 0.0051857919692993165, 0.005151840209960937, 0.00513321590423584, 0.005148672103881836, 0.0051404800415039064, 0.005117695808410644, 0.005079296112060547, 0.005154047966003418, 0.005116223812103271, 0.005117599964141846, 0.0051372480392456055, 0.00516870403289795, 0.005125919818878174, 0.005126175880432129, 0.005041791915893555, 0.0051429119110107425, 0.005123680114746094, 0.00511030387878418, 0.00509503984451294, 0.005150976181030273, 0.005105663776397705, 0.00510975980758667, 0.005148608207702637, 0.005103487968444825, 0.005097695827484131, 0.005103839874267578, 0.005128255844116211, 0.005132192134857177, 0.005134399890899658, 0.005168288230895996, 0.005173823833465576, 0.005226111888885498, 0.005134719848632813, 0.005131519794464111, 0.005126016139984131, 0.005090176105499267, 0.005144768238067627, 0.005130047798156738, 0.005110911846160889, 0.005095488071441651, 0.005102015972137451, 0.005109216213226318, 0.005098112106323242, 0.005140607833862305, 0.005114016056060791, 0.005109375953674317, 0.0051138558387756345, 0.005147295951843262, 0.0052715840339660645, 0.005118720054626465, 0.0051352958679199215, 0.005118271827697754, 0.005248479843139649, 0.005257440090179443, 0.005137728214263916, 0.005109920024871826, 0.005107840061187744, 0.005137951850891114, 0.00513318395614624, 0.0050824317932128904, 0.005096127986907959, 0.0051140480041503905, 0.00509830379486084, 0.0050800638198852536, 0.005088863849639892, 0.005125631809234619, 0.005106592178344726, 0.005101535797119141, 0.005105663776397705, 0.0051138877868652345, 0.005085184097290039, 0.005107711791992187, 0.005182688236236572, 0.005120800018310547, 0.005105663776397705, 0.005157087802886963, 0.005130015850067139, 0.005236512184143067, 0.005116032123565674, 0.005146687984466553, 0.005126944065093994, 0.005099455833435059, 0.005127423763275146, 0.005112639904022217, 0.005089216232299805, 0.005087615966796875, 0.005797599792480469, 0.005090496063232422, 0.005103936195373535, 0.005131999969482422, 0.006148608207702637, 0.005228799819946289, 0.005406303882598877, 0.005136896133422852, 0.005128096103668213, 0.005154335975646972, 0.005132991790771485, 0.005121823787689209, 0.005111807823181152, 0.005131680011749268, 0.00512608003616333, 0.005094048023223877, 0.005107711791992187, 0.00515392017364502, 0.005116479873657227, 0.005205567836761475, 0.00513100814819336, 0.005251071929931641, 0.0051133761405944824, 0.005095967769622803, 0.005120063781738281, 0.005111680030822754, 0.005107711791992187, 0.005122047901153564, 0.005099520206451416, 0.005103263854980469, 0.00509065580368042, 0.0051066880226135255, 
0.005111328125, 0.0051036481857299806, 0.00510211181640625, 0.005107615947723389, 0.0050954241752624516, 0.005099040031433105, 0.00512662410736084, 0.005182943820953369, 0.0051164479255676265, 0.005079040050506592, 0.00521776008605957, 0.005083680152893066, 0.0050833277702331545, 0.005103392124176025, 0.0050926079750061035, 0.005071487903594971, 0.005085343837738037, 0.005093152046203613, 0.005166431903839111, 0.005071807861328125, 0.005111743927001953, 0.005089280128479004, 0.005070464134216308, 0.005075551986694336, 0.0052899842262268066, 0.0051888318061828614, 0.005092288017272949, 0.005104832172393799, 0.00508134412765503, 0.005116288185119629, 0.0050811200141906735, 0.005069920063018799, 0.005160927772521973, 0.005145535945892334, 0.005087232112884522, 0.005068287849426269, 0.005109888076782226, 0.005104000091552734, 0.005096896171569824, 0.005104191780090332, 0.005133344173431397, 0.005170303821563721, 0.005096447944641113, 0.005138495922088623, 0.0050982718467712405, 0.005074944019317627, 0.005099616050720215, 0.0051199040412902835, 0.005105023860931397, 0.005108352184295654, 0.005119008064270019, 0.005104608058929443, 0.005099520206451416, 0.005105567932128906, 0.0051110081672668456, 0.005104512214660645, 0.0050769920349121095, 0.005117919921875, 0.005118015766143799, 0.005103040218353272, 0.00511030387878418, 0.005416959762573242, 0.005672959804534912, 0.0052715520858764645, 0.005197728157043457, 0.0052939200401306155, 0.005238751888275147, 0.005159200191497803, 0.0051773438453674315, 0.005125152111053467, 0.005235487937927246, 0.005138175964355469, 0.005140384197235107, 0.005148255825042724, 0.005159264087677002, 0.006121695995330811, 0.005196159839630127, 0.005160448074340821, 0.005102335929870605, 0.00515231990814209, 0.005105855941772461, 0.005163008213043213, 0.0050969281196594236, 0.0056243519783020016, 0.005117951869964599]",tokens/s,194.64190870045417,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,4294.889472,4886.233088,0.0,4483.710976,4465.672704,s,1,10.8904892578125,10.8904892578125,0.0,10.8904892578125,10.8904892578125,10.8904892578125,10.8904892578125,[10.8904892578125],,kWh,0.00010626884295412917,1.171483841686056e-05,3.3577804640000375e-05,0.00015156148601099011,,MB,2155.393024,5309.857792,0.0,4892.655616,4841.339904,s,10,1.9586477813720704,0.19586477813720704,0.00029519232091201945,0.19595532989501951,0.1961327163696289,0.1961373649597168,0.1961410838317871,"[0.19562754821777345, 0.19594793701171875, 0.19599331665039063, 0.19606419372558595, 0.19614201354980468, 0.19510330200195314, 0.19591778564453124, 0.1957572784423828, 0.19613168334960937, 
0.1959627227783203]",tokens/s,1307.024174712347,kWh,5.786576667483756e-06,6.381546521312642e-07,3.825013953254458e-06,1.0249745272869478e-05,tokens/kWh,24976230.451074544,MB,2160.193536,5477.629952,0.0,5060.427776,5012.931584,s,10,24.409556640625002,2.4409556640625003,0.006774720228408151,2.4418928222656247,2.4479796142578123,2.449918151855469,2.4514689819335937,"[2.437376953125, 2.43127685546875, 2.4377001953125, 2.444419921875, 2.451856689453125, 2.441253173828125, 2.447548828125, 2.4463115234375, 2.429280029296875, 2.442532470703125]",tokens/s,25.809563413023504,kWh,7.101976219001333e-05,7.83331610585867e-06,4.600652591194827e-05,0.00012485960420782028,tokens/kWh,504566.71234629897,,s,630,24.40638483810425,0.038740293393816266,0.000532612278388482,0.038604207992553714,0.03927266998291016,0.0396168155670166,0.04087017627716064,"[0.03919190216064453, 0.03850102233886719, 0.03834019088745117, 0.04040745544433594, 0.03880550384521484, 0.03836476898193359, 0.03818848037719726, 0.038164512634277344, 0.038982017517089844, 0.038604446411132816, 0.038161312103271484, 0.03849216079711914, 0.03831808090209961, 0.03826483154296875, 0.038454849243164065, 0.03845151901245117, 0.03840777587890625, 0.03834889602661133, 0.038341182708740235, 0.03821145629882813, 0.03846144104003906, 0.03827097702026367, 0.038292991638183595, 0.03860736083984375, 0.03854950332641602, 0.041444385528564456, 0.039016544342041014, 0.039217121124267576, 0.038745121002197264, 0.03862307357788086, 0.03847542572021485, 0.03824031829833984, 0.03833478546142578, 0.038225921630859375, 0.03899004745483398, 0.03862300872802735, 0.03853311920166016, 0.03822499084472656, 0.03840415954589844, 0.03832204818725586, 0.038381919860839844, 0.038412929534912106, 0.039311359405517575, 0.0384155502319336, 0.03836099243164062, 0.038310817718505856, 0.03831727981567383, 0.0394596176147461, 0.04017488098144531, 0.03949641418457031, 0.03901433563232422, 0.0390840950012207, 0.03873996734619141, 0.039941761016845705, 0.038594944000244144, 0.03883827209472656, 0.03852492904663086, 0.03833625411987305, 0.03843123245239258, 0.038475006103515626, 0.03880585479736328, 0.03865756988525391, 0.038419071197509765, 0.0385830078125, 0.03869481658935547, 0.03942822265625, 0.03875801467895508, 0.03841206359863281, 0.038505279541015625, 0.03818905639648437, 0.03818086242675781, 0.038182910919189454, 0.03849216079711914, 0.038547454833984376, 0.038370689392089846, 0.03825423812866211, 0.038369438171386716, 0.03841215896606445, 0.03825964736938477, 0.03819475173950195, 0.038617118835449216, 0.03871318435668945, 0.038902336120605466, 0.03851216125488281, 0.03878499221801758, 0.038785537719726565, 0.038365184783935545, 0.038290496826171874, 0.03836332702636719, 0.03824089431762695, 0.03847372817993164, 0.03844518280029297, 0.038648094177246094, 0.03889273452758789, 0.03878348922729492, 0.038551582336425784, 0.038932289123535156, 0.038612350463867184, 0.03851299285888672, 0.038743934631347655, 0.03889030456542969, 0.03856915283203125, 0.038803295135498045, 0.03838956832885742, 0.03902256011962891, 0.03847267150878906, 0.03892019271850586, 0.03832627105712891, 0.038866943359375, 0.03879731369018555, 0.0388939208984375, 0.03851433563232422, 0.0384134407043457, 0.03850739288330078, 0.03848191833496094, 0.03828326416015625, 0.038469791412353516, 0.03856108856201172, 0.03914166259765625, 0.038543392181396484, 0.03853657531738281, 0.038435489654541015, 0.03851676940917969, 0.03901046371459961, 0.03855769729614258, 0.03890176010131836, 0.03933993530273437, 
0.03992153549194336, 0.04027391815185547, 0.03927702331542969, 0.0391657600402832, 0.03904022216796875, 0.03885564804077148, 0.03861503982543945, 0.038529022216796875, 0.03852288055419922, 0.0384279670715332, 0.038365886688232424, 0.03961452865600586, 0.03874604797363281, 0.03858748626708984, 0.038540191650390625, 0.038569984436035154, 0.03865766525268555, 0.03844140625, 0.038510238647460934, 0.038570369720458984, 0.03853916931152344, 0.03876454544067383, 0.038408191680908206, 0.03850239944458008, 0.038469566345214846, 0.0391448974609375, 0.03937753677368164, 0.038787071228027346, 0.038542430877685545, 0.038423454284667966, 0.03835279846191406, 0.03843075180053711, 0.03846547317504883, 0.038512767791748045, 0.03851603317260742, 0.038491039276123046, 0.03832524871826172, 0.03828611373901367, 0.03845939254760742, 0.03955644989013672, 0.03846416091918945, 0.03859024047851563, 0.038940448760986325, 0.038701473236083986, 0.03856387329101563, 0.038608257293701174, 0.0385665283203125, 0.038718784332275394, 0.03833516693115235, 0.03836220932006836, 0.03838822555541992, 0.038555904388427736, 0.039274654388427734, 0.039215103149414066, 0.03860396957397461, 0.03836409759521484, 0.03831795120239258, 0.03828736114501953, 0.038434814453125, 0.038434814453125, 0.038365184783935545, 0.03841558456420899, 0.03898809432983399, 0.038809951782226564, 0.03840528106689453, 0.03855174255371094, 0.03867679977416992, 0.03864361572265625, 0.03889980697631836, 0.03844841766357422, 0.038141857147216796, 0.03826361465454101, 0.038266880035400394, 0.03921644973754883, 0.038763198852539066, 0.038806976318359374, 0.03898751831054687, 0.03892425537109375, 0.038851425170898436, 0.03864166259765625, 0.038669471740722654, 0.03850758361816406, 0.038286945343017575, 0.03827881622314453, 0.038166465759277346, 0.03826748657226563, 0.03829145431518555, 0.03879731369018555, 0.040812545776367185, 0.03909632110595703, 0.03915980911254883, 0.03882345581054687, 0.03907788848876953, 0.03996716690063477, 0.039022624969482424, 0.03877024078369141, 0.038506942749023436, 0.03853513717651367, 0.03845737457275391, 0.0390060806274414, 0.03830387115478515, 0.038567745208740234, 0.038478015899658206, 0.038522144317626954, 0.038419166564941404, 0.0388317756652832, 0.03841059112548828, 0.03831577682495117, 0.0383135986328125, 0.038255233764648434, 0.0382558708190918, 0.03831270217895508, 0.03816998291015625, 0.03864873504638672, 0.04138979339599609, 0.03914473724365235, 0.038982368469238284, 0.03902873611450195, 0.03915676879882812, 0.040471519470214844, 0.03906719970703125, 0.03900665664672852, 0.0391756477355957, 0.03895555114746094, 0.03896473693847656, 0.03899667358398438, 0.03908425521850586, 0.038981632232666014, 0.038908958435058594, 0.038964191436767576, 0.03876588821411133, 0.03875279998779297, 0.038989185333251956, 0.03882918548583984, 0.03928438568115234, 0.03915590286254883, 0.03928044891357422, 0.04157030487060547, 0.039200702667236326, 0.03922051239013672, 0.038887744903564454, 0.04150934219360351, 0.0393359375, 0.039190528869628906, 0.03926835250854492, 0.040474494934082034, 0.03961868667602539, 0.039157024383544924, 0.039266334533691404, 0.03910425567626953, 0.039067966461181644, 0.03927664184570313, 0.039268638610839846, 0.038995552062988284, 0.03846416091918945, 0.03842201614379883, 0.03850060653686523, 0.03837734222412109, 0.04087196731567383, 0.038812000274658205, 0.03888873672485352, 0.03864620971679687, 0.03865129470825195, 0.04004131317138672, 0.03882175827026367, 0.03921526336669922, 0.038645633697509764, 
0.03824796676635742, 0.0383144302368164, 0.03823190307617187, 0.03825212860107422, 0.03910921478271484, 0.03838886260986328, 0.03827609634399414, 0.038292736053466794, 0.03825126266479492, 0.038199295043945314, 0.0382457275390625, 0.03847808074951172, 0.03824886322021484, 0.038158336639404294, 0.038309471130371094, 0.03823001480102539, 0.03820710372924805, 0.038433567047119144, 0.03829350280761719, 0.03832121658325195, 0.038333023071289066, 0.03891263961791992, 0.038505535125732425, 0.038343456268310545, 0.03883638381958008, 0.03931340789794922, 0.038547454833984376, 0.0388361587524414, 0.03824851226806641, 0.03817881774902344, 0.03826851272583008, 0.03831439971923828, 0.038153953552246093, 0.03832374572753906, 0.03818678283691406, 0.03821363067626953, 0.03840304183959961, 0.038704544067382815, 0.03859040069580078, 0.03832284927368164, 0.03846758270263672, 0.03826835250854492, 0.038218303680419924, 0.038858528137207034, 0.038613216400146484, 0.0390546875, 0.038730400085449215, 0.038430721282958984, 0.038555648803710936, 0.03870924758911133, 0.038564895629882814, 0.03929328155517578, 0.038793952941894534, 0.038413631439208985, 0.038974048614501954, 0.03838332748413086, 0.03855388641357422, 0.03921065521240234, 0.038871711730957034, 0.03888300704956055, 0.03873382568359375, 0.03875148773193359, 0.03852521514892578, 0.03862575912475586, 0.03846108627319336, 0.03847766494750977, 0.038551231384277344, 0.03863020706176758, 0.03862278366088867, 0.03899027252197266, 0.03895004653930664, 0.04044038391113281, 0.038955295562744144, 0.03875180816650391, 0.03913363265991211, 0.03975167846679688, 0.0391363525390625, 0.03993894577026367, 0.039530078887939454, 0.0393460807800293, 0.03897398376464844, 0.03902409744262696, 0.038817790985107424, 0.03878761672973633, 0.039434177398681644, 0.03920640182495117, 0.03987760162353516, 0.038946304321289066, 0.03875481414794922, 0.03864371109008789, 0.03879731369018555, 0.0387562255859375, 0.03929334259033203, 0.03915683364868164, 0.03894028854370117, 0.03887936019897461, 0.03882675170898438, 0.03947065734863281, 0.03895558547973633, 0.03879116821289062, 0.038531295776367186, 0.03845916748046875, 0.03828736114501953, 0.038555648803710936, 0.03841024017333984, 0.038432510375976565, 0.038496513366699216, 0.03845119857788086, 0.038456737518310545, 0.03866070556640625, 0.03875020980834961, 0.03885228729248047, 0.039272449493408204, 0.03957177734375, 0.038779071807861325, 0.03890499114990234, 0.03883411026000977, 0.038767326354980466, 0.03909222412109375, 0.03883996963500977, 0.03859081649780274, 0.03876831817626953, 0.03881811141967773, 0.039783935546875, 0.0389832649230957, 0.03894073486328125, 0.03865180969238281, 0.03874297714233398, 0.038825984954833984, 0.038967296600341796, 0.03885670471191406, 0.03865599822998047, 0.03849785614013672, 0.038403518676757814, 0.03849065780639648, 0.038281696319580075, 0.03851571273803711, 0.038838623046875, 0.039032608032226565, 0.03884940719604492, 0.03869664001464844, 0.038869087219238284, 0.038883808135986325, 0.03905305480957031, 0.039346176147460936, 0.038981632232666014, 0.038795455932617184, 0.039261184692382815, 0.03896745681762695, 0.039128929138183596, 0.03904512023925781, 0.03904492950439453, 0.039088382720947265, 0.039196609497070316, 0.03895865631103516, 0.03914591979980469, 0.03876812744140625, 0.03873023986816406, 0.04092911911010742, 0.0394090576171875, 0.03879193496704102, 0.038956222534179685, 0.039230270385742186, 0.0389119987487793, 0.04018758392333984, 0.039290206909179684, 0.03941996765136719, 
0.03926681518554687, 0.038717857360839845, 0.038346752166748044, 0.03908403015136719, 0.03875430297851563, 0.03930316925048828, 0.038768577575683597, 0.03859465789794922, 0.03844464111328125, 0.038342048645019534, 0.03823715209960937, 0.03831571197509766, 0.03830201721191406, 0.03822143936157227, 0.03843929672241211, 0.03845529556274414, 0.03860889434814453, 0.04086579132080078, 0.03908198547363281, 0.038705471038818356, 0.03855699157714844, 0.03860892868041992, 0.038354721069335934, 0.040185630798339846, 0.03891654586791992, 0.03889555358886719, 0.038731616973876955, 0.038545982360839844, 0.0390366096496582, 0.039212478637695315, 0.03839884948730469, 0.03827711868286133, 0.03827273559570313, 0.038295841217041014, 0.03821964645385742, 0.03817609786987305, 0.038239009857177736, 0.0381952018737793, 0.03824435043334961, 0.0382033920288086, 0.038198593139648435, 0.038720191955566405, 0.0382454719543457, 0.03923353576660156, 0.03910860824584961, 0.03868409729003906, 0.038558273315429686, 0.038485824584960936, 0.038598846435546875, 0.039090175628662106, 0.03892598342895508, 0.03864432144165039, 0.038323040008544924, 0.03842755126953125, 0.03844025421142578, 0.03836179351806641, 0.03846553421020508, 0.03863552093505859, 0.03834624099731445, 0.03836537551879883, 0.03854163360595703, 0.03837164688110352, 0.0402940788269043, 0.03870051193237305, 0.03862348937988281, 0.038306079864501956, 0.03836108779907227, 0.03844467163085938, 0.03834918212890625, 0.03887104034423828, 0.03869900894165039, 0.03873993682861328, 0.03865398406982422, 0.0384345588684082, 0.038306049346923825, 0.038595905303955076, 0.03892089462280274, 0.03834470367431641, 0.03840409469604492, 0.03823446273803711, 0.03818588638305664, 0.038312255859375, 0.03829913711547851, 0.03822844696044922, 0.03831241607666016, 0.038301055908203124, 0.038295520782470706, 0.038574752807617185, 0.03840409469604492, 0.03838771057128906, 0.03845119857788086, 0.03836659240722656, 0.03832896041870117, 0.038406143188476564, 0.038389759063720705, 0.03824639892578125, 0.03890995025634766, 0.03847372817993164, 0.03871334457397461, 0.038505664825439455, 0.03856438446044922, 0.03848255920410156, 0.03947020721435547, 0.038621406555175784, 0.03844697570800781, 0.03843638229370117, 0.03837209701538086, 0.03882419204711914, 0.03859379196166992, 0.038591232299804684, 0.03846316909790039, 0.03864198303222656, 0.03826892852783203, 0.03846963119506836, 0.038594558715820314, 0.038327423095703125, 0.038634368896484375, 0.03845523071289063, 0.038449214935302733, 0.038645889282226564, 0.03849612808227539, 0.038372638702392575, 0.038373119354248045, 0.038865886688232425, 0.03830774307250977, 0.038228065490722656, 0.038166526794433595, 0.03813343811035156, 0.038185279846191404, 0.03812172698974609, 0.038144798278808595, 0.03811836624145508, 0.03821363067626953, 0.038121055603027344, 0.03817308807373047, 0.03807027053833008, 0.03808051300048828, 0.038056961059570314, 0.0381921272277832, 0.038112415313720706, 0.03815439987182617, 0.03823481750488281, 0.038215679168701173, 0.03831193542480469, 0.038171871185302735, 0.040063774108886716, 0.039079486846923826, 0.03916054534912109, 0.039300830841064456, 0.04043145751953125, 0.039639198303222656, 0.03977830505371094, 0.03921075057983398, 0.03913580703735352, 0.03911443328857422, 0.039221248626708984, 0.03924284744262695, 0.039142303466796875, 0.04066611099243164, 0.03919974517822265, 0.03934003067016602, 0.03927878570556641, 0.03922438430786133, 0.038921215057373046, 0.039096000671386716, 0.0388733139038086, 0.039230751037597655, 
0.03884089660644531, 0.042057727813720705]",tokens/s,25.812917569685215,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1061.236736,912.130048,0.0,509.607936,491.434496,s,1,7.93610986328125,7.93610986328125,0.0,7.93610986328125,7.93610986328125,7.93610986328125,7.93610986328125,[7.93610986328125],,kWh,2.4285686454148463e-05,2.6702019786394506e-06,7.264450256005972e-06,3.422033868879388e-05,,MB,1401.614336,1046.347776,0.0,629.1456,592.24832,s,10,0.262838659286499,0.026283865928649904,0.00033437420198730304,0.026199616432189942,0.0265708646774292,0.026870744228363035,0.02711064786911011,"[0.025845760345458983, 0.02650422477722168, 0.026157344818115234, 0.027170623779296875, 0.02610348892211914, 0.02625484848022461, 0.026118656158447266, 0.02621286392211914, 0.026284479141235353, 0.026186368942260743]",tokens/s,9739.815318451889,kWh,7.652417729057633e-07,8.439221195687993e-08,4.93555514099496e-07,1.3431894989621391e-06,tokens/kWh,190591126.71578142,MB,1441.202176,1061.02784,0.0,643.825664,605.085696,s,10,13.67902734375,1.367902734375,0.004353706976141033,1.3682092895507814,1.3732679443359375,1.3739672241210938,1.3745266479492189,"[1.3710374755859376, 1.3671981201171874, 1.373112548828125, 1.368277099609375, 1.3681414794921876, 1.37466650390625, 1.368664306640625, 1.36397802734375, 1.3588216552734376, 1.365130126953125]",tokens/s,46.05590618165182,kWh,5.1841454566261775e-05,5.7177801905249995e-06,1.8949182262301657e-05,7.650841701908843e-05,tokens/kWh,823438.811762134,,s,630,13.673743951797496,0.021704355479043626,0.0003393185883234052,0.021631551742553712,0.021929119682312013,0.022142153072357177,0.022981150226593017,"[0.024559167861938475, 0.021752191543579102, 0.02149580764770508, 0.021538816452026367, 0.021514240264892577, 0.021462047576904297, 0.021683263778686523, 0.021548959732055666, 0.021474624633789064, 0.021705087661743165, 0.021620319366455077, 0.021545087814331055, 0.021480224609375, 0.021507871627807616, 0.02163920021057129, 0.02208665657043457, 0.021766176223754884, 0.021594623565673828, 0.0218668155670166, 0.021719200134277344, 0.02171494483947754, 0.021734624862670898, 0.021695072174072266, 0.021700895309448243, 0.02152969551086426, 0.021545888900756836, 0.021507360458374022, 0.021507968902587892, 0.021702463150024415, 0.021676992416381834, 0.021542911529541017, 0.02167398452758789, 0.021608064651489258, 0.021897600173950194, 0.02277689552307129, 0.02192902374267578, 0.02194553565979004, 0.022237888336181642, 0.021773920059204102, 0.021566911697387694, 0.021622016906738283, 0.02176348876953125, 0.021606719970703125, 0.02159814453125, 0.02161884880065918, 0.021561248779296875, 0.021643264770507813, 0.02182143974304199, 0.02166761589050293, 0.021899488449096678, 0.021985376358032226, 0.02189673614501953, 
0.02177020835876465, 0.021759647369384766, 0.021810943603515626, 0.021784799575805664, 0.0217096004486084, 0.021657855987548828, 0.021651296615600585, 0.02165305519104004, 0.0218175048828125, 0.021634496688842774, 0.021770559310913085, 0.021354560852050782, 0.02152511978149414, 0.021600255966186522, 0.021755903244018555, 0.021800960540771484, 0.021819391250610352, 0.02173708724975586, 0.021733663558959962, 0.021653600692749023, 0.021608640670776367, 0.02162067222595215, 0.021646303176879884, 0.021580896377563476, 0.021582944869995117, 0.021631839752197266, 0.021632896423339844, 0.021595455169677733, 0.021661951065063478, 0.02171254348754883, 0.021826240539550783, 0.02187923240661621, 0.021925535202026367, 0.02187264060974121, 0.02180940818786621, 0.021613887786865234, 0.021759904861450196, 0.021639711380004884, 0.02162073516845703, 0.0215285758972168, 0.021511808395385742, 0.02147760009765625, 0.021663904190063477, 0.02199295997619629, 0.02259030342102051, 0.022222496032714843, 0.02191974449157715, 0.021723104476928712, 0.021725343704223632, 0.02151750373840332, 0.02162144088745117, 0.021567487716674806, 0.02162892723083496, 0.02179199981689453, 0.021621503829956056, 0.021524288177490233, 0.022069440841674805, 0.02224332809448242, 0.021571584701538086, 0.021589120864868163, 0.021592960357666016, 0.0215285758972168, 0.02165555191040039, 0.02148975944519043, 0.021571424484252928, 0.02157369613647461, 0.02146076774597168, 0.02147555160522461, 0.021583871841430666, 0.021685312271118164, 0.021701568603515624, 0.021669471740722656, 0.0217576961517334, 0.021622848510742188, 0.021284543991088867, 0.02293017578125, 0.02187161636352539, 0.021808128356933593, 0.021803007125854493, 0.02159555244445801, 0.02151817512512207, 0.021662239074707032, 0.021655391693115235, 0.021498239517211915, 0.02153071975708008, 0.022636640548706056, 0.02164873504638672, 0.021623199462890624, 0.02153887939453125, 0.021514240264892577, 0.021507423400878908, 0.021574047088623045, 0.02169267272949219, 0.021606399536132814, 0.021585727691650392, 0.021666048049926757, 0.02163475227355957, 0.02165171241760254, 0.02166579246520996, 0.021605728149414062, 0.021657855987548828, 0.02219379234313965, 0.022186784744262694, 0.021755903244018555, 0.022173696517944336, 0.021557247161865235, 0.021627168655395507, 0.021657567977905273, 0.021753599166870117, 0.021564672470092774, 0.021654272079467775, 0.021612287521362305, 0.02176639938354492, 0.021876735687255858, 0.02238649559020996, 0.02168828773498535, 0.02179852867126465, 0.021712896347045898, 0.021680511474609376, 0.021745664596557617, 0.021715200424194336, 0.021679519653320312, 0.021606176376342774, 0.022358816146850587, 0.022573055267333983, 0.0219814395904541, 0.021792831420898436, 0.022326080322265626, 0.0217445125579834, 0.021657440185546876, 0.021567520141601564, 0.02169664001464844, 0.021589664459228514, 0.022097408294677736, 0.021786495208740233, 0.02206399917602539, 0.021962080001831055, 0.02140140724182129, 0.02187500762939453, 0.021778303146362303, 0.021712896347045898, 0.021661119461059572, 0.021721664428710936, 0.021929983139038087, 0.02158777618408203, 0.022239776611328126, 0.021806751251220703, 0.022013952255249023, 0.021702592849731445, 0.02156265640258789, 0.021580575942993164, 0.021600288391113283, 0.021622528076171876, 0.021530847549438476, 0.02292736053466797, 0.021748928070068358, 0.021598623275756835, 0.021574047088623045, 0.021706911087036134, 0.021649248123168947, 0.02172313690185547, 0.02189334487915039, 0.021720447540283204, 0.021662111282348632, 
0.021579776763916016, 0.021671392440795897, 0.02179724884033203, 0.0217576961517334, 0.02163337516784668, 0.021794912338256835, 0.021676000595092774, 0.02171494483947754, 0.02187468719482422, 0.021579776763916016, 0.021735424041748046, 0.021902559280395507, 0.021576032638549805, 0.021586496353149413, 0.021554208755493163, 0.0215665283203125, 0.021606176376342774, 0.02164121627807617, 0.021622783660888673, 0.02167807960510254, 0.021653408050537108, 0.02156153678894043, 0.021595935821533203, 0.021686399459838867, 0.021651456832885742, 0.02169036865234375, 0.021553152084350585, 0.021536575317382813, 0.021858495712280275, 0.021941951751708984, 0.021769887924194337, 0.021702911376953123, 0.021623199462890624, 0.02165350341796875, 0.021646751403808593, 0.02156368064880371, 0.021283199310302734, 0.02153228759765625, 0.021538400650024415, 0.021508512496948243, 0.021626880645751953, 0.021774112701416017, 0.02163324737548828, 0.021751808166503905, 0.02178860855102539, 0.021852224349975587, 0.021864160537719727, 0.022038015365600586, 0.021844768524169923, 0.021663936614990234, 0.021923168182373047, 0.021699039459228516, 0.021647327423095702, 0.021865856170654296, 0.02174633598327637, 0.021938175201416017, 0.021585920333862304, 0.02159119987487793, 0.021549919128417968, 0.021642751693725586, 0.021464767456054686, 0.02173593521118164, 0.022061376571655272, 0.021934080123901366, 0.021792768478393554, 0.02170172882080078, 0.02167900848388672, 0.021612640380859374, 0.021780384063720702, 0.021760000228881835, 0.021594432830810546, 0.021660831451416014, 0.02218556785583496, 0.021695423126220702, 0.021935808181762696, 0.0216180477142334, 0.021588287353515624, 0.02151487922668457, 0.021689920425415038, 0.02154694366455078, 0.021683712005615235, 0.021584896087646483, 0.021510143280029297, 0.021581823348999024, 0.021592063903808592, 0.021796640396118165, 0.021579135894775392, 0.02155606460571289, 0.021704160690307617, 0.021553695678710936, 0.021581823348999024, 0.021516063690185546, 0.021700960159301758, 0.021776256561279298, 0.02163302421569824, 0.021546239852905272, 0.022497888565063476, 0.021860511779785156, 0.021923360824584962, 0.02124630355834961, 0.021753503799438478, 0.021993728637695314, 0.021576223373413087, 0.021599903106689453, 0.021622848510742188, 0.021647455215454102, 0.0217109432220459, 0.021593280792236328, 0.0216092472076416, 0.02164134407043457, 0.02178656005859375, 0.021698495864868165, 0.021540191650390624, 0.021506591796875, 0.021762304306030274, 0.02191360092163086, 0.021917760848999022, 0.021936063766479493, 0.022169248580932617, 0.02172758483886719, 0.021712095260620116, 0.02162326431274414, 0.021520128250122072, 0.021572160720825195, 0.021503807067871094, 0.02147961616516113, 0.021482528686523436, 0.021466272354125977, 0.021534528732299805, 0.02148940849304199, 0.0214880313873291, 0.021837535858154296, 0.021503488540649415, 0.02163302421569824, 0.021512832641601563, 0.021443647384643556, 0.021910463333129883, 0.022910943984985353, 0.02201193618774414, 0.022165536880493164, 0.021880800247192383, 0.02186604881286621, 0.02163961601257324, 0.021947744369506837, 0.024476320266723632, 0.02298806381225586, 0.02170524787902832, 0.02173766326904297, 0.021720672607421877, 0.021604352951049805, 0.02171945571899414, 0.02152409553527832, 0.021407903671264647, 0.021518304824829103, 0.021428319931030275, 0.021465248107910156, 0.021563392639160156, 0.021699712753295897, 0.021586111068725586, 0.021643455505371095, 0.022964223861694336, 0.024291936874389648, 0.02133843231201172, 0.021587871551513673, 
0.02160451126098633, 0.021581760406494142, 0.021622783660888673, 0.021833728790283204, 0.021594112396240234, 0.02163692855834961, 0.021635135650634765, 0.021620288848876953, 0.02156915283203125, 0.02156470489501953, 0.021556032180786132, 0.021575712203979493, 0.021740255355834962, 0.021579519271850586, 0.021676383972167968, 0.021567487716674806, 0.02188697624206543, 0.024649728775024415, 0.02373980712890625, 0.02185481643676758, 0.022011232376098634, 0.02186204719543457, 0.021834144592285155, 0.021678464889526367, 0.021767776489257814, 0.021676671981811522, 0.021585344314575195, 0.021870975494384767, 0.021669151306152344, 0.021801055908203124, 0.021738304138183593, 0.021663135528564453, 0.02155926322937012, 0.021694944381713866, 0.021592063903808592, 0.021600095748901368, 0.021581119537353515, 0.02147430419921875, 0.021555200576782226, 0.021577472686767577, 0.02153273582458496, 0.02157702445983887, 0.021681024551391603, 0.021589536666870118, 0.021549535751342774, 0.021835775375366212, 0.021757951736450197, 0.021763296127319337, 0.021863199234008788, 0.021589248657226563, 0.02155356788635254, 0.02154070472717285, 0.021458816528320313, 0.02150655937194824, 0.02142630386352539, 0.021482719421386718, 0.021473119735717774, 0.021510271072387694, 0.021549312591552735, 0.0215219841003418, 0.021574848175048827, 0.021211135864257814, 0.021506048202514647, 0.021429920196533205, 0.02146143913269043, 0.021387168884277344, 0.0214138240814209, 0.02153887939453125, 0.021530624389648437, 0.021561311721801757, 0.021644895553588867, 0.021436864852905274, 0.021526527404785157, 0.021406719207763672, 0.021521408081054686, 0.021435808181762696, 0.021582080841064454, 0.02144000053405762, 0.021502368927001952, 0.021535167694091795, 0.02151136016845703, 0.021527360916137696, 0.022164703369140625, 0.02162726402282715, 0.021429887771606447, 0.02144041633605957, 0.021445472717285155, 0.02142620849609375, 0.02145894432067871, 0.021426368713378906, 0.02148067283630371, 0.021516191482543946, 0.021510656356811524, 0.021859519958496092, 0.02155404853820801, 0.021577856063842774, 0.021577247619628905, 0.021430591583251953, 0.0215512638092041, 0.021456575393676756, 0.021438432693481446, 0.021600608825683595, 0.021468416213989257, 0.021457664489746092, 0.02170684814453125, 0.022110111236572267, 0.023874719619750975, 0.022114591598510744, 0.021731903076171875, 0.021958015441894532, 0.02156403160095215, 0.021598207473754884, 0.021739519119262696, 0.021606399536132814, 0.021583839416503905, 0.021633056640625, 0.02182566452026367, 0.021587808609008788, 0.021559328079223634, 0.02201615905761719, 0.0215765438079834, 0.02267033576965332, 0.02205900764465332, 0.021866559982299805, 0.02144339179992676, 0.02180191993713379, 0.021736160278320312, 0.021561344146728514, 0.021532991409301757, 0.021456575393676756, 0.02146303939819336, 0.021559295654296876, 0.021518335342407227, 0.021493759155273438, 0.02150809669494629, 0.02164246368408203, 0.02167875289916992, 0.021599807739257813, 0.021965311050415038, 0.021522495269775392, 0.02152620887756348, 0.021500223159790038, 0.02153267288208008, 0.02160207939147949, 0.02147724723815918, 0.021393760681152344, 0.021520351409912108, 0.02159414482116699, 0.021521951675415037, 0.021506528854370117, 0.021481472015380858, 0.021604352951049805, 0.02149519920349121, 0.0215063362121582, 0.021438112258911134, 0.021504671096801757, 0.02146713638305664, 0.02151801681518555, 0.021637760162353515, 0.021569215774536132, 0.02145894432067871, 0.021497472763061524, 0.021541215896606444, 0.02155027198791504, 
0.021526527404785157, 0.021578912734985353, 0.021444608688354492, 0.021573312759399416, 0.02147737693786621, 0.02167795181274414, 0.021680192947387697, 0.02164726448059082, 0.02158812713623047, 0.02155507278442383, 0.021717119216918945, 0.02152560043334961, 0.021543807983398437, 0.02181532859802246, 0.02147532844543457, 0.021572864532470704, 0.021762815475463868, 0.0214466552734375, 0.0216494083404541, 0.021501951217651367, 0.021581375122070312, 0.021551584243774412, 0.021476320266723633, 0.021174560546875, 0.021547008514404296, 0.02145075225830078, 0.021416032791137695, 0.021536575317382813, 0.02164575958251953, 0.02192367935180664, 0.021759807586669924, 0.021744768142700197, 0.021582719802856445, 0.021518335342407227, 0.021510143280029297, 0.02153891181945801, 0.02151612854003906, 0.021558399200439452, 0.021576736450195314, 0.021903263092041016, 0.021768159866333008, 0.02177462387084961, 0.021662912368774413, 0.021525056838989257, 0.021515392303466798, 0.021828351974487306, 0.02167616081237793, 0.02165555191040039, 0.021669376373291017, 0.021706880569458006, 0.021475711822509767, 0.021583103179931642, 0.021651424407958985, 0.02184886360168457, 0.02170035171508789, 0.021790111541748047, 0.02159814453125, 0.021663679122924804, 0.021582304000854494, 0.021729408264160158, 0.02148384094238281, 0.02173958396911621, 0.021546239852905272, 0.022010623931884767, 0.021597600936889647, 0.02157423973083496, 0.021741567611694337, 0.02159382438659668, 0.021631263732910157, 0.0220731201171875, 0.02166374397277832, 0.021672256469726564, 0.021618688583374023, 0.021718944549560547, 0.02150115203857422, 0.02165353584289551, 0.021630783081054688, 0.021754816055297853, 0.021559072494506837, 0.02173891258239746, 0.021692384719848634, 0.021623743057250976, 0.021620319366455077, 0.022312799453735353, 0.021561824798583984, 0.021991296768188475]",tokens/s,46.07370170312303,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1059.762176,912.130048,0.0,509.607936,491.434496,s,1,7.85234912109375,7.85234912109375,0.0,7.85234912109375,7.85234912109375,7.85234912109375,7.85234912109375,[7.85234912109375],,kWh,2.405287149998306e-05,2.646042185613344e-06,7.292783612000697e-06,3.39916972975971e-05,,MB,1381.920768,1025.376256,0.0,608.17408,592.24832,s,10,0.19996665763854982,0.019996665763854983,0.000131748681444734,0.01998686408996582,0.02008093795776367,0.02019747714996338,0.020290708503723144,"[0.02005504035949707, 0.020047040939331056, 0.02003971290588379, 0.019862207412719726, 0.020009376525878905, 0.020314016342163087, 0.019800159454345705, 0.019929088592529298, 0.019964351654052734, 0.01994566345214844]",tokens/s,12802.13426694031,kWh,5.840438472750217e-07,6.44096236234857e-08,3.6833473911201953e-07,1.016788210010527e-06,tokens/kWh,251773178.99599716,MB,1395.699712,1040.05632,0.0,622.854144,605.085696,s,10,10.51122424316406,1.0511224243164061,0.008739214203431933,1.0486399536132813,1.0635650024414063,1.0646485290527345,1.065515350341797,"[1.057982421875, 1.06332421875, 1.056958984375, 1.044591064453125, 1.0473111572265625, 1.042011962890625, 1.04996875, 1.041940673828125, 1.0414029541015626, 1.0657320556640626]",tokens/s,59.93592995694277,kWh,3.037873132938943e-05,3.350275178322624e-06,1.2106258573885142e-05,4.5835265081597193e-05,tokens/kWh,1374487.523696998,,s,630,10.505954067230212,0.016676117567032103,0.0003330798190631205,0.016607407569885255,0.01693511657714844,0.017105914974212644,0.017864472084045416,"[0.0163786563873291, 0.016881120681762694, 0.01668899154663086, 0.016775775909423828, 0.016633056640625, 0.01662447929382324, 0.016636127471923827, 0.016643808364868163, 0.01698521614074707, 0.01774224090576172, 0.01695555114746094, 0.016858816146850586, 0.016792192459106445, 0.016719871520996094, 0.016769023895263673, 0.016736415863037108, 0.016672607421875, 0.016950592041015625, 0.01675334358215332, 0.016763999938964845, 0.016693727493286133, 0.016664255142211915, 0.016906240463256835, 0.01709337615966797, 0.016959680557250976, 0.016735519409179687, 0.016595487594604493, 0.01662566375732422, 0.016701440811157226, 0.01683443260192871, 0.016754816055297852, 0.016744447708129884, 0.016750848770141602, 0.01660492706298828, 0.01666633605957031, 0.016806175231933593, 0.016600896835327148, 0.01663404846191406, 0.016571392059326173, 0.01667558479309082, 0.01672422409057617, 0.016731647491455077, 0.01665011215209961, 0.01668592071533203, 0.017293088912963866, 0.016687103271484375, 0.01661747169494629, 0.01661952018737793, 0.01659596824645996, 0.01660825538635254, 0.016539167404174805, 0.016727872848510742, 0.016983840942382814, 
0.01690435218811035, 0.01674131202697754, 0.01658233642578125, 0.016640256881713868, 0.01672380828857422, 0.016873472213745116, 0.016955455780029296, 0.017143999099731445, 0.01779871940612793, 0.017024991989135742, 0.016321407318115235, 0.016663679122924803, 0.016652063369750978, 0.017554559707641602, 0.01742732810974121, 0.01802047920227051, 0.016994176864624025, 0.01956662368774414, 0.019191776275634766, 0.016884735107421875, 0.016840927124023436, 0.016883871078491212, 0.016982656478881836, 0.016772544860839844, 0.016773855209350586, 0.01665007972717285, 0.016732160568237304, 0.016969728469848632, 0.016723424911499023, 0.01663030433654785, 0.016652288436889647, 0.016629600524902345, 0.016611488342285156, 0.016600831985473633, 0.016611263275146483, 0.01658310317993164, 0.016635744094848633, 0.01685001564025879, 0.016873504638671873, 0.016642175674438476, 0.01680668830871582, 0.016611328125, 0.016578559875488282, 0.016658079147338866, 0.016631135940551756, 0.016679744720458984, 0.016777088165283203, 0.016630079269409178, 0.016689151763916017, 0.016689151763916017, 0.016660480499267577, 0.016650495529174806, 0.01662950325012207, 0.01691152000427246, 0.016540000915527344, 0.016607776641845703, 0.01663587188720703, 0.01660723114013672, 0.016623264312744142, 0.01694905662536621, 0.01709110450744629, 0.01676288032531738, 0.016744447708129884, 0.01679580879211426, 0.01651696014404297, 0.016636032104492188, 0.01677507209777832, 0.017276575088500976, 0.017144128799438475, 0.017338367462158204, 0.01676873588562012, 0.016812255859375, 0.016664447784423827, 0.016662752151489258, 0.01654537582397461, 0.016786880493164062, 0.016593664169311524, 0.016556032180786134, 0.016614559173583985, 0.016527807235717774, 0.016664608001708985, 0.016679264068603514, 0.016637632369995117, 0.016584672927856446, 0.01661292839050293, 0.020450111389160155, 0.016725311279296873, 0.016951711654663085, 0.016615711212158202, 0.016698848724365233, 0.016542240142822264, 0.016613056182861328, 0.016521055221557616, 0.01682099151611328, 0.016629247665405272, 0.016721887588500975, 0.01672012710571289, 0.01660723114013672, 0.016598335266113283, 0.016898752212524414, 0.016698432922363282, 0.01669526481628418, 0.01662460708618164, 0.016604991912841798, 0.0167521915435791, 0.01664499282836914, 0.016598751068115234, 0.016601119995117188, 0.01656131172180176, 0.016696159362792968, 0.016508544921875, 0.016625152587890626, 0.016493600845336916, 0.01662566375732422, 0.016704639434814452, 0.016630495071411133, 0.016608415603637697, 0.01661628723144531, 0.01677507209777832, 0.01660108757019043, 0.016875455856323242, 0.017057952880859376, 0.016639999389648438, 0.016683008193969725, 0.016866527557373046, 0.016933664321899414, 0.016934911727905275, 0.01732601547241211, 0.01765711975097656, 0.017260576248168947, 0.01680460739135742, 0.016660415649414062, 0.016637664794921875, 0.016678239822387694, 0.01655705642700195, 0.01665023994445801, 0.016229631423950196, 0.016578432083129882, 0.016524192810058593, 0.01663372802734375, 0.016545791625976563, 0.016589088439941405, 0.016598751068115234, 0.016489599227905275, 0.016771039962768554, 0.01647609519958496, 0.01666908836364746, 0.01676950454711914, 0.016848352432250975, 0.016634496688842773, 0.01649072074890137, 0.01647724723815918, 0.01657110404968262, 0.01664204788208008, 0.016773056030273438, 0.01651308822631836, 0.016424736022949218, 0.01664224052429199, 0.016599071502685546, 0.01660108757019043, 0.01664022445678711, 0.016549663543701174, 0.016463872909545898, 0.016338687896728515, 
0.016586175918579103, 0.016578784942626955, 0.01647369575500488, 0.01646080017089844, 0.016416767120361327, 0.016484352111816408, 0.01652511978149414, 0.016410272598266603, 0.016479808807373045, 0.016606176376342773, 0.01674617576599121, 0.016537055969238282, 0.016509855270385742, 0.01653548812866211, 0.01643929672241211, 0.016602783203125, 0.016501087188720703, 0.016809247970581056, 0.016653024673461914, 0.016731967926025392, 0.016678655624389648, 0.016996288299560548, 0.016568832397460938, 0.016605344772338868, 0.016562015533447265, 0.016686880111694335, 0.016464096069335937, 0.016794815063476562, 0.016542528152465822, 0.01652467155456543, 0.01648089599609375, 0.016547679901123047, 0.016478368759155274, 0.01651424026489258, 0.016452415466308594, 0.016229120254516602, 0.01654368019104004, 0.016504032135009766, 0.01663862419128418, 0.016512351989746092, 0.016436031341552734, 0.016436607360839843, 0.01657904052734375, 0.016486400604248046, 0.016527263641357422, 0.016653600692749022, 0.016444223403930664, 0.01651705551147461, 0.01650694465637207, 0.016553983688354493, 0.01656012725830078, 0.016515071868896485, 0.01646940803527832, 0.016577119827270507, 0.016537599563598633, 0.01640003204345703, 0.01670582389831543, 0.016781375885009765, 0.016592159271240234, 0.016548576354980468, 0.016809696197509764, 0.016621856689453124, 0.016721920013427736, 0.016506879806518555, 0.01640415954589844, 0.016419103622436523, 0.016525344848632814, 0.016483808517456056, 0.016797887802124024, 0.016540000915527344, 0.016451520919799803, 0.01640777587890625, 0.016680959701538087, 0.01645244789123535, 0.01662566375732422, 0.016533504486083983, 0.016701440811157226, 0.016558080673217773, 0.01692643165588379, 0.0169800968170166, 0.01688153648376465, 0.01666076850891113, 0.01664112091064453, 0.016722944259643553, 0.016625568389892577, 0.016730112075805666, 0.016760831832885743, 0.017098751068115235, 0.016553983688354493, 0.01644361686706543, 0.01668499183654785, 0.016734048843383788, 0.01680588722229004, 0.016508928298950197, 0.016695295333862305, 0.01664787292480469, 0.016611839294433595, 0.01743244743347168, 0.016620000839233397, 0.01643519973754883, 0.016451456069946288, 0.01672819137573242, 0.016492544174194337, 0.01665023994445801, 0.016778432846069335, 0.016493375778198243, 0.0164454402923584, 0.016736255645751954, 0.016484352111816408, 0.016508127212524416, 0.016466720581054688, 0.016451583862304688, 0.016446847915649414, 0.01653209686279297, 0.016504352569580077, 0.01655855941772461, 0.016490495681762696, 0.01647724723815918, 0.016434112548828126, 0.01653590393066406, 0.01653536033630371, 0.01653936004638672, 0.016455808639526368, 0.01679155158996582, 0.01652908706665039, 0.01655593681335449, 0.01648873519897461, 0.01644761657714844, 0.016459232330322267, 0.01645417594909668, 0.016576671600341798, 0.016559392929077148, 0.016525888442993165, 0.016447231292724608, 0.016656639099121094, 0.01640671920776367, 0.016485343933105467, 0.016564800262451173, 0.01663942337036133, 0.01675948715209961, 0.016565887451171876, 0.01644393539428711, 0.01649564743041992, 0.016436288833618164, 0.016537504196166994, 0.016447488784790038, 0.016504831314086914, 0.016553056716918944, 0.016491424560546874, 0.016504127502441405, 0.016583072662353517, 0.016492832183837892, 0.016622655868530272, 0.01650169563293457, 0.016689151763916017, 0.016488447189331054, 0.01658470344543457, 0.016504831314086914, 0.01655193519592285, 0.016430463790893555, 0.016488544464111327, 0.01623289680480957, 0.017002847671508788, 0.016899744033813478, 
0.016854272842407227, 0.017040063858032226, 0.016982080459594727, 0.01743052864074707, 0.016697536468505858, 0.01674630355834961, 0.01656012725830078, 0.01656947135925293, 0.016509824752807618, 0.01659449577331543, 0.016545984268188478, 0.016498943328857422, 0.016740224838256837, 0.016533279418945314, 0.016584287643432616, 0.01662835121154785, 0.01657206344604492, 0.016562656402587892, 0.0164977912902832, 0.0164401912689209, 0.016504640579223632, 0.016508224487304688, 0.016552831649780272, 0.016496288299560548, 0.016506752014160155, 0.016570848464965822, 0.016478208541870116, 0.01789132881164551, 0.016517120361328123, 0.01699782371520996, 0.016534080505371095, 0.016652288436889647, 0.016515071868896485, 0.01680998420715332, 0.016864383697509765, 0.017021087646484374, 0.016831039428710937, 0.016605344772338868, 0.016592096328735352, 0.01663171195983887, 0.016988895416259767, 0.016583871841430665, 0.0166242561340332, 0.01645724868774414, 0.016457727432250976, 0.016707456588745118, 0.016468255996704102, 0.016515743255615233, 0.016515039443969728, 0.016445472717285158, 0.01651308822631836, 0.01653753662109375, 0.016424095153808594, 0.016964448928833007, 0.016662111282348634, 0.016538015365600588, 0.016475679397583008, 0.01662607955932617, 0.016475488662719726, 0.01668783950805664, 0.01620172882080078, 0.016416767120361327, 0.016566368103027345, 0.01658425521850586, 0.016496992111206053, 0.016561983108520507, 0.016423072814941406, 0.01651430320739746, 0.016607583999633788, 0.016490943908691408, 0.016541439056396483, 0.016462080001831053, 0.016457727432250976, 0.016482559204101563, 0.016568063735961914, 0.016475135803222657, 0.016579584121704103, 0.01662067222595215, 0.01640505599975586, 0.016513343811035155, 0.016673887252807617, 0.016563232421875, 0.016682111740112304, 0.016468671798706053, 0.016615392684936524, 0.016506879806518555, 0.016496736526489256, 0.016438688278198242, 0.016464479446411134, 0.016695295333862305, 0.016408575057983397, 0.016355520248413087, 0.01655900764465332, 0.016456607818603516, 0.016623424530029296, 0.01655027198791504, 0.01674835205078125, 0.016639999389648438, 0.01660326385498047, 0.016535776138305664, 0.016909120559692382, 0.01665119934082031, 0.016490144729614256, 0.016445695877075197, 0.016453279495239257, 0.016511327743530275, 0.01647113609313965, 0.016628000259399416, 0.01655036735534668, 0.016461088180541993, 0.01648115158081055, 0.016528608322143555, 0.016550239562988282, 0.016590816497802734, 0.016500991821289064, 0.016417024612426757, 0.016494815826416015, 0.01654755210876465, 0.01652876853942871, 0.016519840240478516, 0.016611328125, 0.016508928298950197, 0.01652124786376953, 0.016139776229858398, 0.01646214485168457, 0.01649273681640625, 0.016408384323120116, 0.016475679397583008, 0.016463552474975586, 0.016519264221191408, 0.016452287673950194, 0.01644476890563965, 0.016451871871948243, 0.01644697570800781, 0.016479103088378907, 0.016453632354736326, 0.016647327423095704, 0.016449728012084962, 0.016466592788696287, 0.016555648803710937, 0.016613471984863282, 0.016551584243774415, 0.01654438400268555, 0.01641062355041504, 0.016488544464111327, 0.016524864196777345, 0.016507232666015625, 0.01645120048522949, 0.016842815399169923, 0.0164150390625, 0.016365568161010743, 0.01644339179992676, 0.016463872909545898, 0.016475967407226563, 0.016537792205810548, 0.016521215438842773, 0.016483936309814453, 0.01657494354248047, 0.01665567970275879, 0.016703807830810546, 0.01675267219543457, 0.016535839080810546, 0.01641267204284668, 0.016414464950561522, 
0.01649007987976074, 0.016458400726318358, 0.016440832138061523, 0.01656284713745117, 0.01665827178955078, 0.01662156867980957, 0.016527360916137695, 0.01654911994934082, 0.01704012870788574, 0.016517120361328123, 0.016523263931274415, 0.016492319107055665, 0.01659017562866211, 0.016536640167236327, 0.016578176498413085, 0.01652960014343262, 0.01647542381286621, 0.01655062484741211, 0.016615423202514648, 0.01656012725830078, 0.016574464797973632, 0.016519168853759765, 0.016147903442382813, 0.016570911407470704, 0.016889856338500975, 0.016939008712768554, 0.016846111297607422, 0.01676697540283203, 0.0169335994720459, 0.016953344345092772, 0.016893951416015626, 0.016863231658935548, 0.016676864624023437, 0.016633087158203125, 0.016722688674926756, 0.016752096176147462, 0.016800287246704102, 0.01686697578430176, 0.01688438415527344, 0.016848031997680663, 0.016681695938110353, 0.01675872039794922, 0.016697216033935546, 0.016936960220336913, 0.01665843200683594, 0.01666281509399414, 0.01652707290649414, 0.016506879806518555, 0.016816032409667968, 0.016543840408325194, 0.01669081687927246, 0.016519519805908205, 0.01756572723388672, 0.01859584045410156, 0.016750591278076172, 0.017161376953125, 0.01729827117919922, 0.01698918342590332, 0.016763872146606445, 0.016680992126464844, 0.01660723114013672, 0.016705631256103515, 0.016546815872192384, 0.017515424728393555, 0.016747711181640625, 0.017057888031005858, 0.01711177635192871, 0.017575103759765624, 0.017582527160644533, 0.017535200119018556, 0.017660064697265623, 0.017666271209716797, 0.01822287940979004, 0.01725644874572754, 0.016910335540771485, 0.016824512481689452, 0.016786815643310547, 0.01668342399597168, 0.016594879150390623, 0.01661756706237793, 0.016625280380249022, 0.01650726318359375, 0.016664575576782227, 0.016740352630615234, 0.016639999389648438]",tokens/s,59.96599604076628,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, 
in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1516.163072,1350.434816,0.0,981.467136,978.805248,s,1,8.3325400390625,8.3325400390625,0.0,8.3325400390625,8.3325400390625,8.3325400390625,8.3325400390625,[8.3325400390625],,kWh,3.8126878737504434e-05,4.198042305263677e-06,1.1770287193985096e-05,5.409520823675321e-05,,MB,1629.294592,1526.595584,0.0,1109.393408,1064.45312,s,10,0.5448845062255859,0.054488450622558596,0.00035772993408945606,0.05462516784667969,0.054786217880249025,0.05480843601226807,0.0548262105178833,"[0.05375833511352539, 0.054522529602050784, 0.054359710693359375, 0.0538930549621582, 0.05474764633178711, 0.05483065414428711, 0.05478128051757813, 0.054740959167480466, 0.05458499145507813, 0.05466534423828125]",tokens/s,4698.243335515475,kWh,1.560762001715623e-06,1.7212397160919612e-07,1.033411403080306e-06,2.7662973764051256e-06,tokens/kWh,92542472.90386349,MB,1631.002624,1610.481664,0.0,1193.279488,1117.180928,s,10,14.827466064453123,1.4827466064453123,0.0058437859367857035,1.4808502807617185,1.4896680541992187,1.4927709777832032,1.4952533166503907,"[1.4958739013671876, 1.481080810546875, 1.47506494140625, 1.48024267578125, 1.488978515625, 1.4868951416015626, 1.4812662353515624, 1.4769952392578125, 1.4804488525390624, 1.4806197509765624]",tokens/s,42.48871636336712,kWh,3.313050732661795e-05,3.653835636957376e-06,1.6357824732118522e-05,5.314216769569385e-05,tokens/kWh,1185499.2510044887,,s,630,14.824115055084235,0.023530341357276552,0.0005934390800975151,0.023418272018432617,0.023780060386657715,0.023972012901306152,0.02561443174362183,"[0.024063232421875, 0.023460351943969726, 0.02398847961425781, 0.025544704437255858, 0.0341319694519043, 0.023613439559936524, 0.023398399353027344, 0.023440704345703126, 0.02359891128540039, 0.023732864379882812, 0.02342732810974121, 0.023455007553100586, 0.023855104446411132, 0.024265439987182617, 0.02333286476135254, 0.02333638381958008, 0.023290111541748048, 0.023474496841430666, 0.024202463150024413, 0.027228256225585938, 0.023413440704345704, 0.023586559295654296, 0.023318464279174805, 0.023600576400756836, 0.023275680541992187, 0.023243488311767577, 0.023349248886108398, 0.023201791763305665, 0.02331648063659668, 0.02365235137939453, 0.023578367233276366, 0.02343881607055664, 0.023493408203125, 0.023408191680908203, 0.023372224807739258, 0.023265087127685546, 0.0234105281829834, 0.023325023651123048, 0.02346598434448242, 0.02334886360168457, 0.02340083122253418, 0.023736320495605468, 0.023644159317016602, 0.0236440315246582, 0.02339558410644531, 0.02332352066040039, 0.02331648063659668, 0.023371776580810546, 0.023324031829833985, 0.023414527893066406, 0.023360383987426757, 0.023640064239501952, 0.023387584686279297, 0.02326585578918457, 0.023427072525024413, 0.02324803161621094, 
0.02330300712585449, 0.023254751205444336, 0.02326483154296875, 0.02331622314453125, 0.023641056060791015, 0.02333695983886719, 0.02365235137939453, 0.023247072219848633, 0.023308895111083985, 0.023351423263549803, 0.02342092704772949, 0.02327961540222168, 0.023373504638671876, 0.023208255767822265, 0.023640064239501952, 0.023670783996582033, 0.023408639907836915, 0.023404544830322265, 0.023391584396362304, 0.023482847213745116, 0.023533439636230467, 0.023279199600219725, 0.02331622314453125, 0.023286752700805664, 0.023426399230957032, 0.023386783599853515, 0.023620864868164063, 0.02654640007019043, 0.023779136657714844, 0.023750879287719726, 0.024687103271484375, 0.02346188735961914, 0.023523231506347657, 0.02344099235534668, 0.023732736587524415, 0.023676128387451173, 0.02355407905578613, 0.02335206413269043, 0.023453184127807617, 0.023734399795532227, 0.02360972785949707, 0.023301759719848634, 0.02326924705505371, 0.023484256744384764, 0.02345974349975586, 0.02335545539855957, 0.023472831726074218, 0.023289056777954103, 0.023429920196533203, 0.023528640747070312, 0.02327414321899414, 0.023225919723510742, 0.023308576583862303, 0.023466304779052736, 0.023442848205566406, 0.023247295379638672, 0.023292064666748047, 0.023797632217407227, 0.023495935440063478, 0.023583776473999025, 0.023347040176391602, 0.023576576232910155, 0.02328780746459961, 0.023334911346435547, 0.02327756881713867, 0.02332841682434082, 0.023478176116943358, 0.023390432357788087, 0.02332899284362793, 0.023346687316894533, 0.02345369529724121, 0.023371807098388674, 0.023449567794799803, 0.023414783477783203, 0.02326937675476074, 0.023541759490966797, 0.023459840774536132, 0.023375072479248048, 0.023384864807128907, 0.02343731117248535, 0.02331999969482422, 0.023267648696899415, 0.02344576072692871, 0.023388160705566406, 0.023391328811645507, 0.023356319427490235, 0.02341449546813965, 0.023316768646240233, 0.023360767364501954, 0.023380735397338866, 0.023369728088378908, 0.023248895645141602, 0.023275007247924806, 0.023282175064086915, 0.02344988822937012, 0.023303583145141603, 0.02328607940673828, 0.023262336730957033, 0.023763263702392578, 0.023384639739990234, 0.023355072021484374, 0.02335478401184082, 0.023343296051025392, 0.023200223922729492, 0.02332819175720215, 0.02323744010925293, 0.023358495712280273, 0.02338096046447754, 0.02370560073852539, 0.023492416381835936, 0.023283071517944336, 0.024103679656982423, 0.023504192352294923, 0.023923456192016603, 0.02330419158935547, 0.023468032836914062, 0.023347200393676756, 0.02326937675476074, 0.023557952880859375, 0.02326300811767578, 0.02331484794616699, 0.0233570556640625, 0.023892160415649413, 0.023488704681396484, 0.02327347183227539, 0.02332876777648926, 0.023285184860229492, 0.023435392379760743, 0.023392704010009764, 0.02342905616760254, 0.023398271560668947, 0.023510271072387696, 0.023433631896972656, 0.023337535858154297, 0.023594688415527344, 0.023531839370727538, 0.023440607070922853, 0.023300895690917967, 0.02334671974182129, 0.023992799758911134, 0.02368921661376953, 0.023597055435180665, 0.023379072189331055, 0.0233624324798584, 0.02329190444946289, 0.023345151901245118, 0.02331020736694336, 0.023922719955444337, 0.023451744079589845, 0.02334275245666504, 0.023361888885498047, 0.02333407974243164, 0.023341344833374023, 0.023395999908447266, 0.023475296020507814, 0.02350057601928711, 0.02341856002807617, 0.02335366439819336, 0.023533567428588868, 0.023793088912963868, 0.023601728439331053, 0.02326118469238281, 0.023504831314086913, 0.024449087142944335, 
0.023325792312622072, 0.02329884719848633, 0.023423072814941406, 0.02338559913635254, 0.02324720001220703, 0.0233371524810791, 0.023328351974487304, 0.023403072357177736, 0.02359075164794922, 0.023330816268920897, 0.02329599952697754, 0.02328166389465332, 0.023422975540161133, 0.02347007942199707, 0.02343731117248535, 0.023394304275512694, 0.023713760375976563, 0.02385103988647461, 0.024205024719238282, 0.023823040008544922, 0.023889503479003905, 0.02399168014526367, 0.023754911422729494, 0.023505504608154298, 0.023383487701416017, 0.02331216049194336, 0.023239328384399415, 0.023385280609130858, 0.023497343063354492, 0.02326857566833496, 0.02323747253417969, 0.023337024688720703, 0.023562368392944337, 0.026116191864013674, 0.023615007400512696, 0.023603679656982422, 0.02489139175415039, 0.024788639068603516, 0.02366703987121582, 0.02360655975341797, 0.028541631698608398, 0.023597087860107422, 0.023547679901123046, 0.02330406379699707, 0.02332640075683594, 0.023368352890014647, 0.023334720611572265, 0.023417375564575196, 0.02324239921569824, 0.023345151901245118, 0.023265087127685546, 0.023249088287353517, 0.023364959716796876, 0.023368223190307617, 0.02337366485595703, 0.023314720153808595, 0.023257087707519532, 0.023234560012817384, 0.023682687759399416, 0.023590944290161134, 0.02343065643310547, 0.023491424560546877, 0.02351702308654785, 0.023572639465332033, 0.02348624038696289, 0.02331056022644043, 0.023320287704467774, 0.023906591415405274, 0.02441046333312988, 0.023355039596557617, 0.023408639907836915, 0.023455743789672853, 0.023759103775024413, 0.02338707160949707, 0.02345599937438965, 0.02355833625793457, 0.023812480926513672, 0.023675935745239258, 0.023415775299072267, 0.023409696578979493, 0.023430112838745118, 0.023349248886108398, 0.0234715518951416, 0.023427616119384764, 0.023287839889526367, 0.023382015228271484, 0.023348352432250977, 0.02349964714050293, 0.023494335174560548, 0.02343961524963379, 0.02339779281616211, 0.023384735107421874, 0.023473312377929687, 0.023249759674072265, 0.02330828857421875, 0.02352742385864258, 0.02369536018371582, 0.023478271484375, 0.02347417640686035, 0.023842815399169923, 0.02345699119567871, 0.023442207336425783, 0.023599103927612306, 0.023417984008789063, 0.023314559936523437, 0.023419071197509765, 0.023517599105834962, 0.02342032051086426, 0.023495424270629884, 0.023747615814208985, 0.023301088333129882, 0.02326688003540039, 0.024531391143798827, 0.023275360107421875, 0.023349407196044922, 0.023518592834472656, 0.023299936294555665, 0.023790496826171875, 0.023696575164794922, 0.02345235252380371, 0.023314367294311522, 0.023904319763183593, 0.02350284767150879, 0.02331772804260254, 0.02332342338562012, 0.023410335540771484, 0.023375232696533202, 0.02330473518371582, 0.023622079849243163, 0.02371798324584961, 0.02346486473083496, 0.023442432403564452, 0.023352319717407227, 0.023658815383911132, 0.0238721923828125, 0.023952863693237306, 0.02410099220275879, 0.024029600143432618, 0.023760351181030273, 0.02343503952026367, 0.02345417594909668, 0.023279903411865234, 0.023862560272216796, 0.02346575927734375, 0.02339731216430664, 0.023391904830932616, 0.023224672317504882, 0.023299360275268556, 0.02350476837158203, 0.023313247680664062, 0.02327756881713867, 0.023390207290649414, 0.023302143096923827, 0.02517919921875, 0.026559423446655274, 0.023554048538208007, 0.02348441505432129, 0.023453311920166017, 0.02343731117248535, 0.023684383392333985, 0.02346995162963867, 0.023421791076660155, 0.02351513671875, 0.023406015396118165, 
0.023513631820678713, 0.02327350425720215, 0.023221439361572265, 0.023324480056762697, 0.023298336029052735, 0.023321184158325195, 0.02326095962524414, 0.02330611228942871, 0.023261663436889648, 0.023369728088378908, 0.023330623626708985, 0.02328985595703125, 0.023314624786376952, 0.023379968643188476, 0.023481407165527345, 0.023595968246459962, 0.023779199600219725, 0.024022815704345703, 0.023987680435180663, 0.023904800415039062, 0.023596511840820313, 0.023630720138549804, 0.023600736618041993, 0.023499168395996094, 0.023393728256225585, 0.023560768127441407, 0.02331648063659668, 0.02332262420654297, 0.023343103408813477, 0.024182783126831055, 0.025642911911010743, 0.023894111633300782, 0.023516191482543945, 0.023438304901123048, 0.02344927978515625, 0.023314687728881837, 0.023776416778564454, 0.02337593650817871, 0.023419647216796874, 0.023947359085083008, 0.023576543807983397, 0.02339638328552246, 0.023377727508544922, 0.023490623474121095, 0.023431135177612306, 0.023317823410034178, 0.023388128280639648, 0.02347097587585449, 0.023359296798706054, 0.023306432723999022, 0.02327961540222168, 0.023293727874755858, 0.023371360778808595, 0.023414400100708006, 0.023391231536865235, 0.02333286476135254, 0.023367679595947266, 0.02370137596130371, 0.0236627197265625, 0.02346134376525879, 0.023472671508789063, 0.02335673522949219, 0.023392032623291016, 0.0233371524810791, 0.023280351638793946, 0.023394304275512694, 0.023404544830322265, 0.023371135711669922, 0.023304800033569335, 0.023330848693847658, 0.02332057571411133, 0.02348784065246582, 0.02338268852233887, 0.023236608505249022, 0.024130687713623047, 0.023327615737915038, 0.02364355278015137, 0.02337455940246582, 0.023275392532348633, 0.023375871658325196, 0.02332464027404785, 0.02350214385986328, 0.0234237117767334, 0.023398399353027344, 0.023326496124267578, 0.023287839889526367, 0.02331443214416504, 0.023328672409057616, 0.02335526466369629, 0.023376256942749023, 0.023279647827148437, 0.02331648063659668, 0.023443456649780273, 0.023435264587402343, 0.02333078384399414, 0.023449151992797852, 0.023302719116210936, 0.023241727828979493, 0.023268255233764648, 0.02326323127746582, 0.023318527221679687, 0.02336067199707031, 0.023270240783691408, 0.02324412727355957, 0.023255584716796875, 0.023323776245117188, 0.023299072265625, 0.023346656799316405, 0.023276063919067384, 0.02328166389465332, 0.024982976913452148, 0.024568384170532226, 0.02381999969482422, 0.02384515190124512, 0.023414783477783203, 0.023222272872924804, 0.02341231918334961, 0.023490943908691407, 0.023353055953979494, 0.023611711502075194, 0.023853408813476563, 0.023375232696533202, 0.023483200073242186, 0.023820287704467775, 0.02349875259399414, 0.023367328643798826, 0.023400543212890625, 0.023384319305419923, 0.023412736892700195, 0.023331968307495118, 0.023407487869262694, 0.02333695983886719, 0.02330944061279297, 0.023438207626342772, 0.023367679595947266, 0.02342905616760254, 0.02352649688720703, 0.02360371208190918, 0.023514591217041015, 0.02343622398376465, 0.023504831314086913, 0.02324678421020508, 0.02333247947692871, 0.02330067253112793, 0.02345510482788086, 0.02386089515686035, 0.023591232299804688, 0.02347279930114746, 0.023355712890625, 0.02345747184753418, 0.023336767196655273, 0.023496896743774413, 0.023465503692626954, 0.02328828811645508, 0.023422367095947267, 0.02338262367248535, 0.023549951553344727, 0.023379039764404298, 0.023505088806152343, 0.024122079849243163, 0.02371993637084961, 0.023729568481445314, 0.023523935317993162, 0.02348179244995117, 
0.02345631980895996, 0.023406591415405274, 0.02354617691040039, 0.023695039749145507, 0.023629823684692384, 0.023588863372802735, 0.02364959907531738, 0.023562944412231446, 0.023558143615722657, 0.023814144134521483, 0.023555360794067382, 0.023451776504516603, 0.023378528594970704, 0.023369119644165038, 0.023439296722412108, 0.023435199737548828, 0.023556255340576173, 0.02338240051269531, 0.023363744735717774, 0.02327881622314453, 0.023438240051269533, 0.023367679595947266, 0.023330816268920897, 0.023407840728759767, 0.023572639465332033, 0.023597696304321288, 0.023605247497558594, 0.02351923179626465, 0.023433120727539062, 0.023427007675170898, 0.023638175964355468, 0.02349056053161621, 0.023443359375, 0.023384159088134765, 0.023431167602539063, 0.023475456237792968, 0.02357686424255371, 0.023452480316162108, 0.023342655181884765, 0.023867488861083985, 0.023467199325561523, 0.023474336624145508, 0.023433887481689453, 0.023367103576660157, 0.0234968318939209, 0.02354630470275879, 0.02349273681640625, 0.023460960388183592, 0.023642879486083984, 0.023498783111572264, 0.023533279418945313, 0.02378780746459961, 0.02342086410522461, 0.023453760147094726, 0.023363584518432616, 0.02352720069885254, 0.0233842887878418, 0.023396352767944335, 0.02345369529724121, 0.0233670711517334, 0.023426847457885744, 0.023326879501342775, 0.023345279693603515, 0.02336128044128418, 0.02345062446594238, 0.023412511825561522, 0.023367679595947266, 0.023472127914428712, 0.023377920150756838, 0.02341584014892578, 0.0233154239654541, 0.023404544830322265, 0.023390207290649414, 0.023318496704101563, 0.023569728851318358, 0.02356502342224121, 0.023746591567993164, 0.02375267219543457, 0.023948768615722656, 0.02373686408996582, 0.023812095642089845, 0.023916543960571288]",tokens/s,42.49832098975303,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,879.448064,647.888896,0.0,260.046848,258.555392,s,1,7.8527939453125,7.8527939453125,0.0,7.8527939453125,7.8527939453125,7.8527939453125,7.8527939453125,[7.8527939453125],,kWh,1.5225077583333283e-05,1.672210803189071e-06,4.538892519986848e-06,2.1436180906509204e-05,,MB,1310.605312,759.037952,0.0,341.835776,317.950464,s,18,0.1996141767501831,0.011089676486121284,0.00011796392470782542,0.011055184364318849,0.011199401378631593,0.011332230377197265,0.011437722930908204,"[0.010964768409729004, 0.011021056175231934, 0.011035743713378907, 0.01102022361755371, 0.011152447700500489, 0.010991711616516114, 0.011068384170532226, 0.011464096069335938, 0.011044992446899413, 0.010987711906433105, 0.011045056343078613, 0.011077504158020019, 0.011089920043945312, 0.01112700843811035, 0.011065312385559082, 0.011034655570983886, 0.0111146240234375, 
0.0113089599609375]",tokens/s,23084.532747225196,kWh,4.156658072411516e-07,4.583220738520306e-08,1.9724256045298298e-07,6.587405750793376e-07,tokens/kWh,388620360.8593076,MB,1324.593152,786.300928,0.0,369.098752,317.953024,s,18,10.26638409423828,0.5703546719021269,0.002953374153126736,0.5709329223632813,0.5733548583984375,0.5744043029785156,0.5761015637207031,"[0.5730655517578125, 0.5728255004882813, 0.571229736328125, 0.57109521484375, 0.572139404296875, 0.5660000610351562, 0.57652587890625, 0.567683837890625, 0.5707706298828125, 0.5740299072265626, 0.5716837768554688, 0.5695390625, 0.5675003051757812, 0.5672777099609375, 0.5703414916992188, 0.5653462524414062, 0.5724132690429687, 0.56691650390625]",tokens/s,110.4575856105389,kWh,2.214961834083761e-05,2.4426860927560373e-06,7.998725534778504e-06,3.259102996837216e-05,tokens/kWh,1933047.223764886,,s,1134,10.257575246810896,0.009045480817293575,0.0002845528273059231,0.008989727973937989,0.009139334201812744,0.009240865707397461,0.010659195566177383,"[0.008824831962585449, 0.008984576225280762, 0.009033727645874023, 0.009118911743164062, 0.009225312232971192, 0.009236191749572754, 0.009191424369812011, 0.009148415565490722, 0.009139967918395995, 0.009076992034912109, 0.009066495895385742, 0.00902143955230713, 0.009048064231872559, 0.009017120361328125, 0.009055871963500977, 0.009022047996520996, 0.009340415954589844, 0.009185952186584473, 0.009137696266174316, 0.009173024177551269, 0.009058112144470215, 0.009036064147949219, 0.009011391639709473, 0.008996864318847657, 0.009076767921447754, 0.009062432289123535, 0.008977375984191894, 0.008940511703491211, 0.009054112434387206, 0.009009023666381837, 0.00899443244934082, 0.008974656105041504, 0.009012928009033203, 0.00908512020111084, 0.009079551696777344, 0.00908681583404541, 0.009107263565063476, 0.009052160263061524, 0.009202719688415528, 0.009141216278076172, 0.009013248443603515, 0.009506815910339356, 0.00918553638458252, 0.009190272331237793, 0.009091744422912598, 0.00902348804473877, 0.009130111694335937, 0.009134176254272462, 0.009027584075927735, 0.009009311676025391, 0.009107647895812988, 0.009432736396789551, 0.009070591926574707, 0.009091072082519532, 0.009160223960876466, 0.009187552452087402, 0.009032064437866211, 0.00903718376159668, 0.009046079635620118, 0.008970975875854492, 0.009128767967224121, 0.008989567756652833, 0.009017375946044922, 0.008761024475097657, 0.00905401611328125, 0.009236639976501464, 0.009026111602783203, 0.008990559577941894, 0.009113120079040528, 0.00896678352355957, 0.008957344055175781, 0.008970848083496094, 0.00893289566040039, 0.008982848167419434, 0.009015456199645996, 0.009198752403259278, 0.00912384033203125, 0.009040736198425292, 0.00901529598236084, 0.00898812770843506, 0.008962656021118164, 0.009125951766967774, 0.00921340847015381, 0.011413727760314941, 0.010834112167358398, 0.009102751731872558, 0.009097824096679688, 0.009096480369567872, 0.008952639579772948, 0.00891484832763672, 0.009098272323608398, 0.008995648384094238, 0.00901750373840332, 0.008988863945007324, 0.009310015678405762, 0.009037823677062988, 0.008992768287658692, 0.009486335754394531, 0.008916383743286132, 0.008972895622253419, 0.008955488204956055, 0.008944160461425781, 0.008931167602539062, 0.008989855766296387, 0.009034079551696777, 0.008939264297485351, 0.008909600257873535, 0.008935423851013183, 0.009006208419799804, 0.008940480232238769, 0.008918975830078125, 0.008993824005126954, 0.008987615585327149, 0.008951359748840333, 0.008928704261779784, 0.008969504356384278, 
0.008938591957092285, 0.008972512245178223, 0.008937151908874511, 0.009013983726501465, 0.009068544387817384, 0.009104991912841797, 0.009091232299804687, 0.00905023956298828, 0.00898198413848877, 0.008949631690979004, 0.008942079544067384, 0.00899891185760498, 0.008994815826416015, 0.009102592468261719, 0.008919424057006835, 0.008950143814086915, 0.008951264381408692, 0.008978143692016601, 0.008960831642150878, 0.008928288459777832, 0.008936544418334962, 0.008941408157348632, 0.008910304069519042, 0.00890732765197754, 0.00910483169555664, 0.009015872001647949, 0.00890880012512207, 0.008953696250915527, 0.009928416252136231, 0.009596384048461915, 0.010367679595947265, 0.009158944129943848, 0.009119487762451172, 0.009094688415527344, 0.00900547218322754, 0.009076992034912109, 0.009230175971984863, 0.00904582405090332, 0.009050527572631835, 0.008969504356384278, 0.009010016441345215, 0.008953215599060059, 0.008992608070373536, 0.009040543556213379, 0.00899449634552002, 0.00899078369140625, 0.008996992111206055, 0.00897036838531494, 0.008918304443359375, 0.009128671646118164, 0.009087200164794923, 0.008988320350646973, 0.009169024467468262, 0.009011199951171875, 0.009024576187133789, 0.009041152000427245, 0.009049792289733887, 0.008982848167419434, 0.008965824127197266, 0.008950016021728516, 0.009275263786315917, 0.009215456008911133, 0.009009568214416504, 0.008999199867248535, 0.008984064102172852, 0.008978591918945313, 0.00894364833831787, 0.00896617603302002, 0.00912179183959961, 0.00900499153137207, 0.00891443157196045, 0.009015872001647949, 0.008998208045959473, 0.008762751579284668, 0.008957056045532227, 0.008929056167602539, 0.009076576232910156, 0.009037983894348145, 0.010182271957397461, 0.012036479949951173, 0.00903769588470459, 0.009033280372619629, 0.009003583908081054, 0.008968064308166504, 0.00899289608001709, 0.00897436809539795, 0.00893881607055664, 0.008913567543029785, 0.008951807975769043, 0.008986944198608399, 0.008984512329101562, 0.008912639617919922, 0.008949664115905762, 0.008966239929199218, 0.00893449592590332, 0.008977312088012696, 0.009026592254638671, 0.009024479866027831, 0.009007424354553222, 0.008989983558654786, 0.008946080207824707, 0.00909216022491455, 0.009009663581848144, 0.009034175872802735, 0.009006431579589844, 0.009052831649780274, 0.009000672340393066, 0.008943424224853516, 0.00893996810913086, 0.00951097583770752, 0.009012895584106445, 0.008974656105041504, 0.009010687828063964, 0.009020000457763673, 0.009040800094604493, 0.008993184089660645, 0.009009152412414552, 0.008974944114685059, 0.00891808032989502, 0.008958975791931152, 0.008998847961425782, 0.008989919662475586, 0.008979104042053222, 0.009021344184875489, 0.008949983596801757, 0.008973312377929688, 0.0089584321975708, 0.00895849609375, 0.008936736106872558, 0.00899350357055664, 0.008962047576904298, 0.00894156837463379, 0.008933119773864746, 0.009144543647766113, 0.008907936096191406, 0.008962143898010254, 0.008814720153808594, 0.008988544464111329, 0.008971615791320801, 0.008968992233276367, 0.008984576225280762, 0.008964287757873534, 0.00896992015838623, 0.008986432075500489, 0.009668255805969238, 0.00896457576751709, 0.009019359588623047, 0.008980704307556151, 0.008955072402954102, 0.00899078369140625, 0.00894438362121582, 0.008927040100097657, 0.009002495765686035, 0.009011808395385743, 0.00900271987915039, 0.008961440086364746, 0.008956928253173829, 0.009063584327697754, 0.008964351654052734, 0.008974911689758301, 0.009029600143432617, 0.00912816047668457, 0.008976192474365234, 
0.0089169921875, 0.00894057559967041, 0.008974528312683106, 0.008946656227111816, 0.009022624015808105, 0.009222816467285157, 0.009005151748657226, 0.008968095779418945, 0.008965855598449706, 0.008974847793579101, 0.008962016105651855, 0.008949567794799804, 0.008975872039794922, 0.009052672386169434, 0.009010687828063964, 0.009017855644226073, 0.008996864318847657, 0.009043968200683594, 0.008974464416503906, 0.00907260799407959, 0.008953760147094727, 0.00894976043701172, 0.008964159965515136, 0.008941184043884278, 0.009055808067321777, 0.008950336456298828, 0.009115103721618653, 0.009001184463500976, 0.01083033561706543, 0.011249119758605958, 0.009349663734436036, 0.00909721565246582, 0.009121248245239258, 0.00900051212310791, 0.00897532844543457, 0.00894156837463379, 0.008746175765991212, 0.009067328453063964, 0.009004544258117676, 0.009038399696350098, 0.008998496055603027, 0.00898083209991455, 0.008974111557006836, 0.008947104454040527, 0.008989503860473633, 0.008937439918518066, 0.00898256015777588, 0.00899891185760498, 0.008981663703918457, 0.009091456413269043, 0.008997183799743652, 0.008958271980285645, 0.008947104454040527, 0.008985024452209472, 0.008955679893493652, 0.008913472175598144, 0.009120512008666991, 0.008928031921386718, 0.008980607986450196, 0.008965408325195313, 0.008939711570739747, 0.008939200401306153, 0.008940383911132813, 0.008951807975769043, 0.008945792198181153, 0.008900480270385742, 0.008931520462036133, 0.0089967041015625, 0.009021792411804199, 0.008916095733642578, 0.008898752212524414, 0.00893779182434082, 0.008937472343444825, 0.008914079666137695, 0.008954719543457031, 0.00904412841796875, 0.009126815795898437, 0.008999679565429687, 0.009021280288696289, 0.00900051212310791, 0.008991519927978515, 0.009002495765686035, 0.008947903633117676, 0.008900927543640138, 0.008945407867431641, 0.008932607650756836, 0.008926207542419434, 0.008972096443176269, 0.009004575729370117, 0.009034527778625488, 0.008996735572814942, 0.00899721622467041, 0.009176735877990722, 0.008988672256469727, 0.00894156837463379, 0.008962176322937011, 0.008963968276977539, 0.00899071979522705, 0.009021663665771484, 0.008823200225830078, 0.009004447937011719, 0.009206560134887696, 0.008980607986450196, 0.008959327697753906, 0.009006752014160156, 0.009367648124694825, 0.008960607528686524, 0.008998815536499023, 0.009031488418579101, 0.009036128044128417, 0.009025664329528808, 0.009023360252380371, 0.00898812770843506, 0.008980863571166993, 0.009421088218688966, 0.009178655624389648, 0.009238816261291504, 0.00902560043334961, 0.008990880012512206, 0.009031455993652343, 0.00901692771911621, 0.009094911575317383, 0.00898464012145996, 0.009137855529785157, 0.008951040267944336, 0.008967840194702148, 0.008957951545715333, 0.00917199993133545, 0.008948672294616699, 0.008959775924682617, 0.009027839660644531, 0.009220095634460449, 0.009342464447021484, 0.009299712181091309, 0.009506943702697754, 0.009324959754943848, 0.009171456336975097, 0.00909705638885498, 0.009418335914611817, 0.009103808403015137, 0.012152671813964844, 0.010419615745544434, 0.00908694362640381, 0.009067135810852051, 0.009054559707641602, 0.009018943786621094, 0.009036992073059082, 0.00905513572692871, 0.009052063941955566, 0.009015616416931152, 0.00900483226776123, 0.009019392013549805, 0.008951807975769043, 0.009040063858032226, 0.008962047576904298, 0.008974464416503906, 0.008974016189575195, 0.009055232048034668, 0.008973312377929688, 0.008986623764038085, 0.008992192268371581, 0.009108223915100098, 0.009638272285461425, 
0.009500608444213868, 0.00930403232574463, 0.009156448364257812, 0.009042271614074707, 0.009042240142822266, 0.009008831977844238, 0.00903388786315918, 0.009003583908081054, 0.008978207588195801, 0.008979647636413575, 0.009675840377807617, 0.008989760398864746, 0.008978879928588868, 0.008953248023986816, 0.0089649600982666, 0.009021280288696289, 0.008934656143188476, 0.008931903839111329, 0.008957759857177734, 0.008932031631469727, 0.008930560111999511, 0.008931936264038086, 0.008942879676818848, 0.008942208290100097, 0.008912768363952637, 0.009054431915283203, 0.008951807975769043, 0.008919039726257324, 0.008920096397399902, 0.008906911849975586, 0.008950816154479981, 0.008957728385925293, 0.008980480194091797, 0.008953856468200684, 0.008888319969177246, 0.008954943656921387, 0.008958559989929199, 0.008930720329284669, 0.008924096107482911, 0.009006815910339356, 0.008943903923034667, 0.008928256034851074, 0.008922112464904786, 0.008945664405822755, 0.009021408081054687, 0.008898591995239258, 0.008956255912780762, 0.009064127922058105, 0.009021311759948731, 0.009007200241088868, 0.009004287719726563, 0.009089792251586914, 0.008976384162902832, 0.008950976371765137, 0.00892191982269287, 0.008939167976379395, 0.008946016311645507, 0.008933568000793457, 0.008954879760742187, 0.008897279739379883, 0.008973759651184082, 0.008960639953613281, 0.008802304267883301, 0.008951040267944336, 0.00897920036315918, 0.009031167984008789, 0.009181695938110352, 0.00903171157836914, 0.00896735954284668, 0.00897923183441162, 0.008962400436401367, 0.008937312126159667, 0.008985535621643066, 0.008975168228149415, 0.008937536239624023, 0.008927040100097657, 0.008951999664306641, 0.008988287925720215, 0.008980863571166993, 0.008966143608093263, 0.00892518424987793, 0.009028703689575195, 0.00895254421234131, 0.00894153594970703, 0.008887776374816895, 0.008940287590026855, 0.009244671821594238, 0.008957951545715333, 0.011403264045715332, 0.010727423667907715, 0.009027551651000977, 0.008953887939453125, 0.008975839614868164, 0.00905065631866455, 0.008976384162902832, 0.008944992065429688, 0.008948384284973144, 0.008994048118591308, 0.00895257568359375, 0.008875231742858886, 0.008894847869873048, 0.008927935600280762, 0.008912223815917969, 0.008910880088806153, 0.008892800331115723, 0.00888764762878418, 0.008929920196533204, 0.00894105625152588, 0.00888419246673584, 0.009183808326721192, 0.010092576026916504, 0.009052063941955566, 0.008939167976379395, 0.008997247695922852, 0.008920191764831543, 0.008883071899414062, 0.008949407577514648, 0.009001119613647461, 0.008958175659179687, 0.008930815696716308, 0.0090098876953125, 0.008967935562133789, 0.008959903717041015, 0.00893126392364502, 0.009065983772277832, 0.008764575958251954, 0.008928128242492676, 0.008937439918518066, 0.008951680183410644, 0.008943936347961427, 0.008975584030151368, 0.008982303619384766, 0.008936256408691405, 0.008951871871948243, 0.009062335968017579, 0.009081855773925781, 0.00895299243927002, 0.008976192474365234, 0.008978464126586915, 0.008989695549011231, 0.008998944282531738, 0.009013888359069824, 0.009016863822937012, 0.008948224067687988, 0.009128255844116211, 0.009168224334716797, 0.008942208290100097, 0.008939295768737794, 0.00890659236907959, 0.008876447677612304, 0.008950976371765137, 0.008972736358642578, 0.008936991691589355, 0.008972895622253419, 0.00898646354675293, 0.00893619155883789, 0.008947360038757323, 0.00898252773284912, 0.009001248359680176, 0.00906214427947998, 0.010520671844482422, 0.011323007583618164, 
0.010008831977844239, 0.00911580753326416, 0.009051008224487305, 0.009028703689575195, 0.008963040351867676, 0.009255680084228515, 0.009014623641967773, 0.009098048210144044, 0.009037823677062988, 0.009627743721008301, 0.009077919960021972, 0.009865983963012696, 0.00913593578338623, 0.009082431793212891, 0.009043744087219238, 0.00900592041015625, 0.00910540771484375, 0.00899449634552002, 0.009090559959411621, 0.008984992027282715, 0.009000672340393066, 0.00900716781616211, 0.008927871704101562, 0.009064191818237305, 0.008978688240051269, 0.009027104377746582, 0.008763872146606445, 0.008894463539123536, 0.008941696166992188, 0.008990816116333008, 0.008976160049438476, 0.009122879981994628, 0.008956864356994628, 0.00900476837158203, 0.008965408325195313, 0.0092293119430542, 0.00901084804534912, 0.011481311798095703, 0.010864768028259277, 0.009003007888793945, 0.00898252773284912, 0.008976384162902832, 0.008994815826416015, 0.008916768074035644, 0.008913120269775391, 0.00899020767211914, 0.008956064224243164, 0.009100768089294433, 0.009003904342651367, 0.008953632354736328, 0.008941887855529785, 0.008933440208435058, 0.008889792442321777, 0.00901750373840332, 0.008947967529296875, 0.008937439918518066, 0.008906047821044922, 0.00890931224822998, 0.008984800338745116, 0.00905840015411377, 0.008966272354125976, 0.008961824417114258, 0.008972415924072266, 0.009519136428833008, 0.008966079711914062, 0.009150367736816406, 0.008965632438659669, 0.00892569637298584, 0.009193792343139648, 0.00898534393310547, 0.009143424034118653, 0.008926176071166992, 0.009052576065063477, 0.008988351821899413, 0.008931551933288574, 0.008944160461425781, 0.008916255950927734, 0.009183967590332031, 0.009033056259155273, 0.009034399986267089, 0.009080575942993165, 0.00900928020477295, 0.00893660831451416, 0.008913887977600097, 0.009074560165405273, 0.009048031806945802, 0.008964256286621093, 0.008955712318420411, 0.008935711860656738, 0.00870809555053711, 0.008970208168029785, 0.009154751777648925, 0.00903337574005127, 0.008949952125549316, 0.008976223945617676, 0.009023648262023926, 0.009003168106079101, 0.008955103874206543, 0.00916329574584961, 0.008993056297302246, 0.008963904380798339, 0.009027584075927735, 0.009175104141235351, 0.00902348804473877, 0.009000896453857422, 0.009025535583496093, 0.00901529598236084, 0.008965279579162598, 0.009010016441345215, 0.009197216033935546, 0.00908460807800293, 0.009020064353942871, 0.009047231674194335, 0.009030464172363282, 0.009048064231872559, 0.008988767623901368, 0.009010720252990722, 0.009023872375488282, 0.008993087768554687, 0.008984255790710449, 0.00905628776550293, 0.009007072448730469, 0.009084223747253418, 0.008962752342224121, 0.008989983558654786, 0.008962559700012206, 0.009025759696960449, 0.009107456207275391, 0.008982111930847168, 0.00897475242614746, 0.008961088180541992, 0.008921664237976074, 0.008949567794799804, 0.008968159675598145, 0.009095775604248046, 0.009005056381225587, 0.008933600425720214, 0.009045439720153809, 0.009527647972106933, 0.0098853759765625, 0.009118016242980958, 0.009115455627441406, 0.0090066556930542, 0.009064448356628419, 0.008947680473327637, 0.00891750431060791, 0.008892160415649414, 0.00895631980895996, 0.009074687957763672, 0.008929280281066895, 0.008973983764648438, 0.00899897575378418, 0.008844415664672851, 0.009003904342651367, 0.00902348804473877, 0.009000960350036622, 0.009119744300842286, 0.00906611156463623, 0.008969792366027832, 0.008961952209472657, 0.009005184173583985, 0.008962847709655762, 0.0089169921875, 
0.008915200233459472, 0.00897548770904541, 0.009067296028137207, 0.008885951995849609, 0.008915103912353516, 0.008949215888977052, 0.008935968399047851, 0.008904704093933105, 0.008924544334411621, 0.00892512035369873, 0.009097920417785645, 0.009000767707824707, 0.008962176322937011, 0.009011263847351075, 0.008916192054748535, 0.008909536361694335, 0.008922304153442383, 0.008966527938842773, 0.008919551849365234, 0.008965439796447754, 0.008934016227722169, 0.008941632270812988, 0.008994815826416015, 0.009008416175842285, 0.008960736274719239, 0.008891839981079102, 0.008983200073242187, 0.008961695671081543, 0.008972543716430664, 0.00899071979522705, 0.00896121597290039, 0.00893830394744873, 0.009015328407287597, 0.009521120071411133, 0.009004256248474122, 0.00898742389678955, 0.009678848266601562, 0.009066207885742188, 0.009017536163330078, 0.008986399650573731, 0.008980799674987792, 0.008964032173156737, 0.008942815780639648, 0.008952704429626465, 0.009029600143432617, 0.009158656120300293, 0.008994848251342773, 0.009084320068359375, 0.008958239555358887, 0.009144703865051269, 0.009002592086791992, 0.009045503616333007, 0.00869007968902588, 0.008966912269592285, 0.008958208084106445, 0.009034496307373047, 0.008993023872375489, 0.008971487998962403, 0.00901200008392334, 0.009226176261901855, 0.009102751731872558, 0.009045536041259766, 0.009063072204589843, 0.009041952133178712, 0.009003199577331544, 0.008994815826416015, 0.009093119621276855, 0.009004287719726563, 0.009003487586975098, 0.009044256210327149, 0.008960000038146973, 0.008999135971069336, 0.008988448143005371, 0.00894976043701172, 0.008929311752319336, 0.008899871826171874, 0.0088787841796875, 0.00893280029296875, 0.00893836784362793, 0.00910431957244873, 0.008991168022155762, 0.008986944198608399, 0.00901529598236084, 0.008912320137023927, 0.008915648460388184, 0.00888003158569336, 0.009077055931091309, 0.008965567588806152, 0.008933440208435058, 0.009074848175048828, 0.0091843843460083, 0.009114815711975097, 0.009121472358703614, 0.009260640144348145, 0.009039615631103516, 0.00903545570373535, 0.008983712196350097, 0.009059200286865235, 0.009036128044128417, 0.009015904426574708, 0.009033280372619629, 0.009052191734313965, 0.009025312423706055, 0.008941984176635743, 0.008956128120422364, 0.00894156837463379, 0.008943615913391113, 0.00891062355041504, 0.00887551975250244, 0.008956640243530274, 0.008947487831115722, 0.008931424140930176, 0.008929247856140137, 0.008947039604187012, 0.008974176406860351, 0.00883737564086914, 0.009066656112670898, 0.009120767593383788, 0.009184127807617187, 0.009070624351501464, 0.009064576148986817, 0.008966239929199218, 0.008988544464111329, 0.008939711570739747, 0.008933055877685546, 0.009017375946044922, 0.009011296272277832, 0.009005056381225587, 0.00897875213623047, 0.009108799934387207, 0.009075072288513184, 0.00897993564605713, 0.008927840232849121, 0.008927103996276855, 0.00898464012145996, 0.0089683837890625, 0.008959808349609376, 0.009050111770629882, 0.008953856468200684, 0.008918560028076172, 0.00893177604675293, 0.009176671981811524, 0.009113823890686035, 0.009167072296142578, 0.009745951652526855, 0.00907868766784668, 0.009034303665161133, 0.009023839950561524, 0.009004287719726563, 0.00905401611328125, 0.00897056007385254, 0.008985983848571777, 0.00892848014831543, 0.00915014362335205, 0.008945664405822755, 0.0090316801071167, 0.009099264144897461, 0.009058112144470215, 0.008925375938415527, 0.00901046371459961, 0.0089966402053833, 0.009062432289123535, 0.009046624183654785, 
0.009093440055847168, 0.009142271995544434, 0.009105440139770507, 0.009035167694091796, 0.009077312469482422, 0.00903987216949463, 0.009051872253417968, 0.009114944458007812, 0.009048895835876465, 0.009013407707214356, 0.009246303558349609, 0.009034144401550292, 0.009193471908569336, 0.009050111770629882, 0.009025535583496093, 0.008706591606140136, 0.008992480278015138, 0.008982975959777833, 0.008976320266723632, 0.008913023948669434, 0.008904576301574707, 0.008931327819824218, 0.00894268798828125, 0.008929920196533204, 0.008894399642944336, 0.00896025562286377, 0.008922528266906739, 0.008917087554931641, 0.008989279747009277, 0.008960160255432129, 0.008994784355163574, 0.008942655563354492, 0.00898745632171631, 0.008883584022521972, 0.008944607734680176, 0.008929183959960937, 0.008904447555541992, 0.00890060806274414, 0.008921119689941406, 0.00898249626159668, 0.008955936431884765, 0.008933216094970704, 0.009012607574462891, 0.008951744079589844, 0.008905471801757813, 0.008933440208435058, 0.00890454387664795, 0.008963264465332032, 0.008948703765869141, 0.008941247940063476, 0.009007424354553222, 0.0090316801071167, 0.00900438404083252, 0.008923808097839355, 0.009016511917114257, 0.00898953628540039, 0.008975392341613769, 0.008969375610351563, 0.008879903793334962, 0.00892470359802246, 0.008940064430236816, 0.008972224235534667, 0.00899071979522705, 0.008895711898803711, 0.00892188835144043, 0.008927424430847168, 0.009041728019714356, 0.00898252773284912, 0.009041631698608398, 0.009078144073486328, 0.009072768211364745, 0.009007904052734375, 0.009732383728027344, 0.008973888397216797, 0.008937408447265625, 0.00893564796447754, 0.008953184127807616, 0.008976736068725587, 0.00881049633026123, 0.009014464378356934, 0.008989503860473633, 0.00897433567047119, 0.00903104019165039, 0.009011839866638184, 0.009046015739440917, 0.008958239555358887, 0.008937184333801269, 0.009471391677856445, 0.009120351791381836, 0.009108896255493165, 0.00955247974395752, 0.011686240196228027, 0.00911292839050293, 0.009060480117797852, 0.009119359970092774, 0.009084671974182128, 0.008992959976196289, 0.008935104370117188, 0.009060576438903808, 0.008960736274719239, 0.008945664405822755, 0.009012576103210448, 0.009005215644836426, 0.00899839973449707, 0.009081503868103028, 0.009031295776367188, 0.009036224365234375, 0.009068832397460938, 0.009162816047668457, 0.009035327911376952, 0.008922495841979981, 0.009034751892089844, 0.009011199951171875, 0.009294976234436035, 0.00907875156402588, 0.009234463691711426, 0.009284704208374024, 0.008982432365417481, 0.008993791580200196, 0.008987039566040038, 0.009011679649353028, 0.009027584075927735, 0.009003007888793945, 0.008952927589416505, 0.008915871620178222, 0.008947423934936524, 0.009104736328125, 0.008950719833374024, 0.008920991897583008, 0.008939807891845702, 0.008976192474365234, 0.008933695793151855, 0.008958911895751953, 0.008948479652404785, 0.00902143955230713, 0.00905583953857422, 0.009096799850463867, 0.009040191650390625, 0.009044095993041993, 0.00892147159576416, 0.008919039726257324, 0.008717984199523925, 0.008907584190368653, 0.008931327819824218, 0.008945664405822755, 0.008928895950317383, 0.008945759773254394, 0.008988960266113281, 0.00893289566040039, 0.008913375854492188, 0.008949440002441406, 0.008976703643798828, 0.00897433567047119, 0.008907808303833007, 0.008930463790893555, 0.008951616287231446, 0.008980480194091797, 0.008999199867248535, 0.009090784072875977, 0.009136128425598144, 0.009082880020141602, 0.009105536460876465, 0.009150336265563965, 
0.009134336471557617, 0.009151264190673828, 0.009005439758300782, 0.008935392379760742, 0.008935711860656738, 0.008984928131103515, 0.009000224113464355, 0.008995552062988282, 0.009038847923278808, 0.009114720344543458, 0.009035072326660155, 0.009027935981750489, 0.008984224319458008, 0.00895792007446289, 0.008960864067077638, 0.008955360412597656, 0.008993087768554687, 0.008952896118164062, 0.009015263557434082, 0.008971232414245606, 0.009009152412414552, 0.009047103881835937, 0.00892204761505127, 0.008978143692016601, 0.009015168190002441, 0.008986271858215333, 0.008938240051269532, 0.00914579200744629, 0.008988608360290527, 0.009040575981140136, 0.009027520179748534, 0.008955904006958008, 0.008947744369506835, 0.008969311714172363, 0.008954815864562989, 0.008964032173156737, 0.00898252773284912, 0.00894976043701172, 0.008927136421203614, 0.009053471565246583, 0.009020159721374512]",tokens/s,110.55244272788165,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,879.788032,662.56896,0.0,260.046848,258.555392,s,1,7.6580693359375,7.6580693359375,0.0,7.6580693359375,7.6580693359375,7.6580693359375,7.6580693359375,[7.6580693359375],,kWh,1.546157191250283e-05,1.6983004759740771e-06,4.540003632008682e-06,2.169987602048559e-05,,MB,1341.857792,759.037952,0.0,341.835776,317.950464,s,14,0.19417334270477296,0.013869524478912354,0.00010517210179652958,0.013833615779876709,0.01401504602432251,0.014100852346420289,0.01414809329032898,"[0.013838239669799805, 0.013851840019226075, 0.014159903526306153, 0.013826239585876464, 0.013828991889953613, 0.01382083225250244, 0.013797727584838868, 0.014069055557250977, 0.013889023780822754, 0.013791104316711426, 0.013865728378295899, 0.013808032035827637, 0.01384284782409668, 0.01378377628326416]",tokens/s,18457.734465895366,kWh,4.0430467469416875e-07,4.458645456758525e-08,2.1527333319113485e-07,6.641644624528889e-07,tokens/kWh,385446699.5336397,MB,1381.986304,786.300928,0.0,369.098752,317.953024,s,14,10.099852355957031,0.7214180254255023,0.0033747384144426156,0.719999267578125,0.7265133544921875,0.7269929077148437,0.7273353002929688,"[0.7241568603515625, 0.7274208984375, 0.7212330322265625, 0.7190859985351562, 0.726762451171875, 0.7175048828125, 0.7199035034179687, 0.7200950317382813, 0.7180828857421875, 0.7185093994140626, 0.7182341918945313, 0.7239569702148437, 0.71897412109375, 0.72593212890625]",tokens/s,87.32800925350006,kWh,2.0938425898222758e-05,2.309153294804496e-06,7.70897903795113e-06,3.095655823097839e-05,tokens/kWh,2035109.9605432095,,s,882,10.09300582027433,0.011443317256546886,0.00018695391206423247,0.011403183937072753,0.011536511898040771,0.011651961660385131,0.012133155317306516,"[0.011290623664855956, 0.011498527526855469, 0.011461600303649903, 0.011411135673522949, 0.011386879920959473, 0.011376447677612304, 0.01141379165649414, 0.011340000152587891, 0.01142579174041748, 0.011341312408447265, 
0.012181599617004395, 0.012630784034729003, 0.011945631980895996, 0.011491552352905273, 0.011462431907653808, 0.011611392021179199, 0.011551487922668456, 0.011409695625305177, 0.011421407699584961, 0.011526304244995117, 0.011450464248657227, 0.0113918399810791, 0.011434911727905273, 0.011412544250488281, 0.011512255668640137, 0.011666272163391113, 0.011426976203918457, 0.011432448387145995, 0.011413439750671387, 0.011359328269958497, 0.011579456329345704, 0.011827872276306153, 0.011477248191833496, 0.011428000450134278, 0.011437888145446778, 0.01194752025604248, 0.0113951358795166, 0.01137507152557373, 0.011429887771606445, 0.011653120040893555, 0.011425567626953124, 0.011474271774291993, 0.01146348762512207, 0.011358271598815918, 0.011373760223388672, 0.011434528350830078, 0.011416000366210937, 0.011411295890808106, 0.011446271896362305, 0.011376799583435059, 0.011392864227294922, 0.011393152236938476, 0.01138649559020996, 0.011380191802978515, 0.01136515235900879, 0.011400544166564941, 0.011467424392700196, 0.011405055999755859, 0.011351455688476562, 0.011387104034423829, 0.011428480148315429, 0.01143120002746582, 0.01148134422302246, 0.011180255889892578, 0.011547807693481446, 0.01144217586517334, 0.011501536369323731, 0.011446944236755371, 0.01141759967803955, 0.011336000442504882, 0.01142137622833252, 0.011439871788024903, 0.011416959762573241, 0.01155571174621582, 0.01144422435760498, 0.011429887771606445, 0.011392704010009766, 0.01142518424987793, 0.011420576095581055, 0.011444576263427734, 0.011392448425292969, 0.01136473560333252, 0.011433119773864747, 0.011430591583251952, 0.011376543998718262, 0.01139516830444336, 0.011378848075866699, 0.01139846420288086, 0.011554752349853516, 0.011436575889587403, 0.011386688232421876, 0.011501824378967285, 0.011479040145874024, 0.011530240058898926, 0.011503616333007812, 0.011546303749084472, 0.012261759757995606, 0.013305791854858398, 0.013035519599914551, 0.011883968353271485, 0.011532032012939453, 0.011446335792541503, 0.011430656433105469, 0.011413760185241699, 0.011387935638427734, 0.011393535614013671, 0.011405535697937012, 0.011464703559875488, 0.011777855873107911, 0.01152019214630127, 0.011472895622253418, 0.01190841579437256, 0.011489983558654784, 0.011498559951782226, 0.011454751968383789, 0.011399519920349122, 0.012005727767944336, 0.011514047622680663, 0.011587583541870117, 0.011413408279418944, 0.011389120101928711, 0.011368320465087891, 0.01138486385345459, 0.011372320175170899, 0.01143398380279541, 0.011584768295288086, 0.0114585599899292, 0.011587776184082032, 0.011585023880004883, 0.011610431671142578, 0.011542367935180663, 0.011478816032409668, 0.011423871994018555, 0.011479455947875977, 0.011409248352050781, 0.011448320388793945, 0.01145036792755127, 0.011460512161254884, 0.011398880004882813, 0.011405695915222168, 0.01141759967803955, 0.011366399765014648, 0.0113536958694458, 0.011323519706726074, 0.011379103660583496, 0.011479264259338378, 0.011426560401916504, 0.01141648006439209, 0.01212179183959961, 0.011392607688903808, 0.011411647796630859, 0.01146729564666748, 0.011511808395385742, 0.011527392387390136, 0.011567071914672851, 0.011600704193115234, 0.011429951667785645, 0.011415488243103027, 0.011333727836608886, 0.011358112335205077, 0.011378687858581543, 0.0114782075881958, 0.011434783935546875, 0.011421728134155273, 0.011407360076904297, 0.01145644760131836, 0.01139414405822754, 0.011398112297058106, 0.011384991645812989, 0.011392448425292969, 0.011350432395935058, 0.011365792274475098, 0.011321503639221192, 
0.011564991950988769, 0.011371007919311523, 0.0113887996673584, 0.011415264129638673, 0.01137500762939453, 0.011380224227905274, 0.011377152442932128, 0.011425984382629395, 0.011384256362915039, 0.011395456314086914, 0.011378463745117187, 0.011399392127990723, 0.011364192008972167, 0.011428000450134278, 0.01143183994293213, 0.011446368217468262, 0.011593119621276855, 0.011704768180847168, 0.011485088348388671, 0.011405887603759766, 0.011466655731201172, 0.011501055717468261, 0.011385120391845704, 0.01139247989654541, 0.011364895820617676, 0.01136796760559082, 0.01139065647125244, 0.011416352272033692, 0.011771231651306153, 0.011467424392700196, 0.011400287628173827, 0.011363327980041504, 0.011376543998718262, 0.011370400428771972, 0.011362144470214844, 0.011391519546508789, 0.011363295555114747, 0.011391743659973145, 0.011511327743530274, 0.011413984298706055, 0.011409760475158692, 0.011443424224853515, 0.011375040054321288, 0.011397120475769042, 0.0114585599899292, 0.01140121555328369, 0.011414752006530762, 0.011434271812438964, 0.01137657642364502, 0.011376704216003418, 0.011399680137634278, 0.011333439826965332, 0.011331775665283202, 0.011364319801330566, 0.011321727752685546, 0.011450016021728515, 0.0113438720703125, 0.01135206413269043, 0.011310879707336425, 0.011343551635742187, 0.011378527641296387, 0.011372384071350097, 0.011375455856323243, 0.011378687858581543, 0.011421664237976074, 0.011382335662841797, 0.011465184211730957, 0.011390848159790039, 0.0115283203125, 0.011372735977172851, 0.011366208076477051, 0.01132307243347168, 0.011336000442504882, 0.011487232208251954, 0.011363391876220702, 0.011314111709594727, 0.011331583976745606, 0.0113603515625, 0.011355487823486329, 0.01115135955810547, 0.011401311874389648, 0.011544192314147949, 0.01143836784362793, 0.01140940761566162, 0.011425919532775879, 0.011484959602355956, 0.01142416000366211, 0.011423423767089843, 0.012073087692260741, 0.013186559677124024, 0.013435263633728028, 0.011816160202026367, 0.011825920104980469, 0.01168182373046875, 0.011590911865234374, 0.011574015617370606, 0.011759615898132325, 0.011507712364196777, 0.011423968315124511, 0.011435680389404297, 0.011427071571350098, 0.011489248275756836, 0.011530847549438476, 0.011460927963256836, 0.011421695709228515, 0.011423232078552246, 0.011366911888122559, 0.011472064018249512, 0.01152079963684082, 0.01150774383544922, 0.01147475242614746, 0.011447680473327638, 0.011418432235717773, 0.01139635181427002, 0.01189350414276123, 0.011487168312072754, 0.011412863731384277, 0.011375519752502441, 0.011377696037292481, 0.01138764762878418, 0.011374591827392578, 0.01142198371887207, 0.0115032958984375, 0.01137667179107666, 0.011396415710449219, 0.011411456108093262, 0.011350720405578614, 0.011448479652404786, 0.011421536445617676, 0.011402751922607422, 0.011360063552856445, 0.011369152069091797, 0.011384767532348633, 0.011426079750061035, 0.011574560165405274, 0.011483648300170898, 0.011472960472106934, 0.011400320053100587, 0.011369279861450195, 0.01132748794555664, 0.01135427188873291, 0.011324799537658692, 0.011204607963562012, 0.011403264045715332, 0.011370495796203613, 0.011433728218078613, 0.011383296012878418, 0.011447839736938477, 0.011389311790466308, 0.01140892791748047, 0.011405247688293457, 0.011370464324951172, 0.011411871910095215, 0.01140124797821045, 0.01140118408203125, 0.011423744201660157, 0.011409503936767578, 0.011390815734863282, 0.011322688102722168, 0.011377087593078613, 0.01132767963409424, 0.011360511779785157, 0.011351936340332032, 
0.011355584144592284, 0.011358271598815918, 0.011380224227905274, 0.011420672416687011, 0.011389151573181152, 0.011454239845275879, 0.011345919609069824, 0.011381888389587402, 0.011375552177429199, 0.011388575553894043, 0.011415840148925781, 0.011399168014526367, 0.011386879920959473, 0.011347135543823243, 0.011336095809936523, 0.011399231910705566, 0.011364383697509766, 0.011388447761535645, 0.011385248184204102, 0.011339584350585937, 0.011346048355102539, 0.01135865592956543, 0.011400927543640136, 0.011386624336242676, 0.011425888061523438, 0.011338175773620606, 0.01133078384399414, 0.011346719741821288, 0.01140940761566162, 0.011397024154663087, 0.011364447593688964, 0.011462656021118164, 0.0113787202835083, 0.011348095893859864, 0.011378656387329102, 0.011376031875610352, 0.01137507152557373, 0.011368608474731446, 0.011536224365234375, 0.011423744201660157, 0.011320639610290527, 0.011346624374389649, 0.01119372844696045, 0.011350560188293456, 0.011387328147888183, 0.011421279907226562, 0.01137990379333496, 0.011378815650939941, 0.011394816398620606, 0.011453632354736329, 0.01140681552886963, 0.011462719917297363, 0.01143836784362793, 0.011423040390014648, 0.011427712440490723, 0.011381823539733887, 0.01151142406463623, 0.011333696365356445, 0.011429439544677735, 0.011444448471069336, 0.011401503562927247, 0.011382783889770508, 0.011443648338317871, 0.011401791572570801, 0.011337727546691894, 0.011386879920959473, 0.011390975952148438, 0.01188684844970703, 0.011424736022949218, 0.011380576133728027, 0.011413599967956543, 0.011370976448059083, 0.011351872444152832, 0.011579808235168456, 0.011397600173950195, 0.011414752006530762, 0.011479071617126465, 0.011432703971862792, 0.011343328475952149, 0.011361696243286134, 0.011336288452148437, 0.011376864433288574, 0.011317248344421387, 0.011326496124267577, 0.01132857608795166, 0.011392000198364258, 0.011389023780822754, 0.01137718391418457, 0.011366687774658202, 0.011388928413391113, 0.011383872032165527, 0.01134102439880371, 0.011378399848937988, 0.011432095527648926, 0.011491168022155761, 0.011499775886535644, 0.011438143730163575, 0.01151148796081543, 0.01140121555328369, 0.011534336090087891, 0.011472288131713868, 0.011483839988708496, 0.011581119537353515, 0.01153654384613037, 0.011493696212768554, 0.011325663566589355, 0.011477824211120605, 0.011433823585510254, 0.011392704010009766, 0.011419967651367187, 0.011318559646606446, 0.011346303939819335, 0.011362208366394042, 0.011424127578735352, 0.011520064353942872, 0.011534239768981934, 0.011698431968688965, 0.011540608406066894, 0.011460479736328124, 0.011472448348999024, 0.011402591705322266, 0.011414463996887208, 0.011603967666625976, 0.011662816047668457, 0.012089568138122559, 0.011737600326538086, 0.01139187240600586, 0.011346943855285644, 0.011322784423828124, 0.01151039981842041, 0.011386367797851562, 0.011331999778747558, 0.01132748794555664, 0.011338815689086915, 0.011379743576049805, 0.011409184455871583, 0.01146399974822998, 0.0113754243850708, 0.011325440406799316, 0.011390080451965332, 0.011303487777709962, 0.011352383613586425, 0.011520000457763671, 0.011382368087768555, 0.011372960090637207, 0.011345919609069824, 0.01132953643798828, 0.011453503608703614, 0.01138150405883789, 0.01140332794189453, 0.01139129638671875, 0.011417440414428711, 0.011343839645385743, 0.011382783889770508, 0.011339936256408691, 0.01136355209350586, 0.011377280235290527, 0.011363360404968261, 0.011377632141113281, 0.011347968101501465, 0.011428159713745117, 0.011426591873168945, 
0.011495936393737792, 0.011402751922607422, 0.011336895942687988, 0.011370207786560059, 0.011318431854248048, 0.011346816062927246, 0.011187552452087402, 0.011413375854492187, 0.011431903839111328, 0.011393535614013671, 0.01140940761566162, 0.011391072273254395, 0.011347135543823243, 0.011442912101745606, 0.01140940761566162, 0.011374591827392578, 0.011354111671447753, 0.011380224227905274, 0.01135638427734375, 0.011358240127563476, 0.011374848365783691, 0.01141379165649414, 0.011365471839904785, 0.011432191848754884, 0.011350015640258788, 0.011329919815063476, 0.011371935844421387, 0.011290271759033204, 0.011469759941101075, 0.01150921630859375, 0.011551263809204102, 0.01144761562347412, 0.011504608154296875, 0.011409312248229981, 0.01138054370880127, 0.011406847953796387, 0.011388928413391113, 0.011375103950500488, 0.011393247604370117, 0.011347488403320312, 0.011757504463195801, 0.011420096397399902, 0.011376511573791505, 0.011371871948242188, 0.011401887893676757, 0.011393024444580077, 0.01136844825744629, 0.011595199584960938, 0.011370976448059083, 0.011353856086730956, 0.01143228816986084, 0.011367615699768066, 0.011385663986206055, 0.0113438720703125, 0.011355551719665527, 0.011364959716796874, 0.011395071983337402, 0.01135747241973877, 0.011365440368652344, 0.01134284782409668, 0.011412128448486328, 0.011325407981872558, 0.011325152397155761, 0.01137059211730957, 0.01135638427734375, 0.011323295593261718, 0.01136627197265625, 0.011303135871887207, 0.011337375640869141, 0.011173727989196778, 0.01134006404876709, 0.011404416084289552, 0.011369215965270995, 0.01139465618133545, 0.01142416000366211, 0.011431936264038087, 0.011397120475769042, 0.011419648170471192, 0.01135206413269043, 0.01138105583190918, 0.011363360404968261, 0.011387552261352539, 0.011399168014526367, 0.011408960342407227, 0.011847552299499511, 0.011935839653015137, 0.011524543762207032, 0.01137782382965088, 0.011360960006713867, 0.011374784469604491, 0.01136019229888916, 0.011335743904113769, 0.011373920440673828, 0.01135478401184082, 0.011382783889770508, 0.011353568077087402, 0.011372256278991699, 0.011366815567016601, 0.011371935844421387, 0.011393471717834473, 0.011362784385681153, 0.01143331241607666, 0.011332351684570313, 0.01133568000793457, 0.011479040145874024, 0.011411135673522949, 0.011409728050231934, 0.011681119918823242, 0.011471808433532715, 0.01140662384033203, 0.011346112251281739, 0.011354240417480468, 0.011407391548156738, 0.011350111961364746, 0.011382783889770508, 0.011358400344848633, 0.011370304107666016, 0.01136838436126709, 0.011329471588134766, 0.011382911682128906, 0.011358016014099121, 0.01143017578125, 0.011400159835815429, 0.011348544120788574, 0.011337887763977051, 0.011363807678222657, 0.011362208366394042, 0.011405183792114257, 0.011349087715148925, 0.011335200309753417, 0.011325887680053711, 0.011304160118103028, 0.011386591911315918, 0.011411456108093262, 0.01141055965423584, 0.011403424263000488, 0.011414239883422852, 0.011343520164489746, 0.011360608100891113, 0.011388128280639648, 0.011402015686035155, 0.01136415958404541, 0.011362239837646485, 0.011370623588562011, 0.011397279739379883, 0.011323360443115235, 0.011421695709228515, 0.011378687858581543, 0.011429920196533203, 0.011390048027038574, 0.01138368034362793, 0.011396160125732422, 0.011377375602722168, 0.011352160453796386, 0.011337823867797851, 0.011413536071777343, 0.011425951957702636, 0.011423583984375, 0.011419648170471192, 0.011337727546691894, 0.011354047775268555, 0.011448384284973145, 0.011413056373596192, 
0.011460895538330079, 0.011485343933105468, 0.011462656021118164, 0.011393280029296874, 0.011353856086730956, 0.011398528099060059, 0.011411744117736817, 0.01142569637298584, 0.011411904335021972, 0.011370495796203613, 0.011445247650146484, 0.011407615661621094, 0.01141750431060791, 0.011397631645202636, 0.011383456230163574, 0.011327168464660645, 0.011400832176208495, 0.011331968307495117, 0.011354111671447753, 0.011382399559020996, 0.0113503999710083, 0.011384832382202148, 0.011388575553894043, 0.011376480102539062, 0.011360544204711914, 0.011477215766906738, 0.011431936264038087, 0.01133897590637207, 0.01137123203277588, 0.011466815948486328, 0.011411456108093262, 0.011331583976745606, 0.011138079643249511, 0.01137939167022705, 0.01134620761871338, 0.011366399765014648, 0.011390975952148438, 0.013285440444946289, 0.012300095558166503, 0.011496640205383301, 0.011445183753967286, 0.011436320304870606, 0.011412832260131836, 0.01148761558532715, 0.011436032295227052, 0.011429984092712403, 0.011503520011901856, 0.011507648468017578, 0.011677760124206543, 0.011617568016052246, 0.011588319778442383, 0.01148908805847168, 0.011472288131713868, 0.01143887996673584, 0.011419648170471192, 0.011446271896362305, 0.011655167579650879, 0.011386783599853515, 0.011380831718444824, 0.011468352317810059, 0.011372287750244141, 0.011374815940856934, 0.011395680427551269, 0.011478912353515626, 0.011403103828430176, 0.011402655601501464, 0.011424415588378907, 0.011560192108154297, 0.011598688125610351, 0.011454208374023438, 0.011484640121459961, 0.011495488166809082, 0.011399904251098632, 0.011396863937377929, 0.011380895614624024, 0.011438176155090332, 0.01142182445526123, 0.011431039810180664, 0.011410176277160644, 0.01140121555328369, 0.011437088012695313, 0.011412384033203125, 0.011396703720092773, 0.011402048110961915, 0.01136911964416504, 0.011473216056823731, 0.011430591583251952, 0.011517951965332032, 0.011401151657104492, 0.011360608100891113, 0.011433631896972656, 0.011423808097839355, 0.011436032295227052, 0.011388928413391113, 0.011445504188537597, 0.011205344200134278, 0.011454336166381836, 0.0114236478805542, 0.01137001609802246, 0.011391679763793945, 0.011382783889770508, 0.011354304313659669, 0.011390784263610839, 0.011386879920959473, 0.011384223937988281, 0.011405023574829101, 0.011334527969360351, 0.011374239921569823, 0.01138697624206543, 0.011454527854919433, 0.011411647796630859, 0.011405311584472656, 0.011429344177246094, 0.011303456306457519, 0.01144985580444336, 0.011365056037902832, 0.011524160385131836, 0.011422687530517579, 0.011391776084899902, 0.011402560234069823, 0.011357024192810058, 0.011392864227294922, 0.011362591743469238, 0.01140492820739746, 0.011407135963439941, 0.011364064216613769, 0.01136684799194336, 0.011388319969177246, 0.011467359542846679, 0.01134716796875, 0.011362591743469238, 0.011350687980651855, 0.011343008041381835, 0.011363167762756348, 0.01136025619506836, 0.011380736351013183, 0.011386176109313965, 0.011363072395324707, 0.011408351898193359, 0.011518495559692383, 0.01154412841796875, 0.01136729621887207, 0.012017215728759766, 0.011440447807312012, 0.011460063934326171, 0.011456255912780761, 0.011377535820007324, 0.011327168464660645, 0.011440383911132812, 0.011388895988464355, 0.01140544033050537, 0.011371583938598632, 0.011402079582214356, 0.011382880210876465, 0.011397120475769042, 0.011370207786560059, 0.011434271812438964, 0.01140336036682129, 0.011218879699707031, 0.011379008293151855, 0.011460288047790528, 0.011828543663024903, 
0.011530112266540528, 0.01136518383026123, 0.01135580825805664, 0.011445695877075196, 0.011629471778869629, 0.011527584075927735, 0.011506336212158203, 0.011466976165771484, 0.011422752380371094, 0.011563712120056153, 0.011498559951782226, 0.01144108772277832, 0.011476448059082031, 0.011457056045532226, 0.01143398380279541, 0.011402432441711425, 0.011457280158996582, 0.011454431533813477, 0.011929408073425293, 0.01139247989654541, 0.011428544044494629, 0.011408672332763672, 0.011434176445007323, 0.011479328155517578, 0.011538816452026366, 0.011553983688354492, 0.01171504020690918, 0.011503647804260254, 0.011423551559448242, 0.011463232040405273, 0.011395008087158202, 0.01140652847290039, 0.011460831642150879, 0.011509632110595703, 0.011508159637451172, 0.011596063613891601, 0.011497440338134766, 0.011448063850402833, 0.011443519592285156, 0.011371487617492676, 0.011383040428161621, 0.011408960342407227, 0.011436223983764648, 0.011457728385925294, 0.0115513916015625, 0.011970720291137694, 0.012089056015014648, 0.011980416297912598, 0.011600543975830078, 0.011629952430725097, 0.011626463890075683, 0.011558719635009766, 0.011533151626586914, 0.011601087570190429, 0.011489855766296386, 0.01150592041015625, 0.011488639831542968, 0.011425472259521485, 0.011485856056213379]",tokens/s,87.38724773429537,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,2228.195328,2558.394368,0.0,2155.872256,2032.413184,s,1,8.8895615234375,8.8895615234375,0.0,8.8895615234375,8.8895615234375,8.8895615234375,8.8895615234375,[8.8895615234375],,kWh,5.174910428333987e-05,5.700924598974701e-06,1.6621957741991267e-05,7.407198662430584e-05,,MB,2251.853824,2835.218432,0.0,2418.016256,2280.154112,s,10,0.9366658630371092,0.09366658630371094,8.042570404870768e-05,0.09365460586547852,0.09377081909179688,0.0937755844116211,0.09377939666748046,"[0.09364790344238282, 0.0937803497314453, 0.09362393951416016, 0.09369862365722656, 0.09375603485107421, 0.09358163452148438, 0.09362413024902344, 0.09352217864990234, 0.09376976013183594, 0.09366130828857422]",tokens/s,2733.098430318877,kWh,2.7832190508332942e-06,3.0675981385820323e-07,1.8559062466286114e-06,4.945885111320109e-06,tokens/kWh,51760199.48665384,MB,2262.106112,2919.104512,0.0,2501.902336,2389.055488,s,10,24.521555908203126,2.4521555908203125,0.006199772254977471,2.4546346435546873,2.4580506835937497,2.458599560546875,2.459038662109375,"[2.456687744140625, 2.445426513671875, 2.4528740234375, 2.456395263671875, 2.4406396484375, 2.451732177734375, 2.4440927734375, 2.456630615234375, 2.4591484375, 
2.4579287109375]",tokens/s,25.691681325541335,kWh,7.129616252875116e-05,7.863884643074794e-06,3.3561677642970646e-05,0.0001127217248147966,tokens/kWh,558898.4741274134,,s,630,24.518189258575436,0.03891776072789752,0.0004411238680858104,0.03881636810302734,0.039373481369018554,0.039591349983215327,0.04075487628936768,"[0.03986227035522461, 0.041564926147460934, 0.039090175628662106, 0.03872972869873047, 0.03890380859375, 0.03897958374023437, 0.03909222412109375, 0.03924102401733399, 0.038668991088867184, 0.038472801208496096, 0.03871414566040039, 0.039236927032470705, 0.039133758544921876, 0.03902489471435547, 0.03899801635742187, 0.038973438262939454, 0.03891788864135742, 0.03939478302001953, 0.03937055969238281, 0.03931990432739258, 0.03948812866210937, 0.03936460876464844, 0.039008255004882815, 0.038937728881835935, 0.03903558349609375, 0.03911494445800781, 0.038808799743652346, 0.03881145477294922, 0.03888188934326172, 0.03884479904174805, 0.03867372894287109, 0.038926078796386716, 0.038672351837158205, 0.038634078979492184, 0.038585792541503905, 0.0388218879699707, 0.038730560302734376, 0.038798526763916014, 0.03886124801635742, 0.03903084945678711, 0.03897183990478516, 0.03884172821044922, 0.03870169448852539, 0.03868467330932617, 0.03870230484008789, 0.03860534286499023, 0.03863577651977539, 0.03912704086303711, 0.03882704162597656, 0.038855327606201175, 0.04078540802001953, 0.03963571166992187, 0.039386432647705076, 0.03892095947265625, 0.038717472076416015, 0.038786464691162106, 0.038711872100830075, 0.038629375457763675, 0.03867238235473633, 0.03873177719116211, 0.03860889434814453, 0.03859251022338867, 0.03856137466430664, 0.03870105743408203, 0.038776832580566405, 0.038520832061767575, 0.038501953125, 0.03849420928955078, 0.03855353546142578, 0.03843689727783203, 0.03846598434448242, 0.038790462493896484, 0.03861699295043945, 0.038529502868652345, 0.038743934631347655, 0.03880393600463867, 0.03846902465820313, 0.03853987121582031, 0.038629375457763675, 0.03889468765258789, 0.03874089431762695, 0.03910627365112305, 0.03893833541870117, 0.038852481842041015, 0.03866284942626953, 0.038618240356445316, 0.038635646820068356, 0.039160575866699215, 0.03853830337524414, 0.038564769744873044, 0.03869084930419922, 0.03869900894165039, 0.03870515060424805, 0.038596607208251955, 0.03877264022827148, 0.03867776107788086, 0.03856399917602539, 0.03878572845458984, 0.03941580963134766, 0.03906953430175781, 0.03880361557006836, 0.0412303352355957, 0.03892633438110352, 0.038830078125, 0.038744190216064456, 0.03877856063842773, 0.03878838348388672, 0.039029151916503906, 0.03865568161010742, 0.03847980880737305, 0.03857497787475586, 0.03875430297851563, 0.03897958374023437, 0.03933184051513672, 0.03888883209228516, 0.03872012710571289, 0.03868262481689453, 0.03863142395019531, 0.03974553680419922, 0.03931241607666015, 0.03876553726196289, 0.038485214233398436, 0.03862134552001953, 0.039242366790771484, 0.03885465621948242, 0.038816993713378906, 0.03861913681030273, 0.03847372817993164, 0.03896441650390625, 0.03863225555419922, 0.03835696029663086, 0.039096351623535155, 0.038561790466308594, 0.03854131317138672, 0.0387583999633789, 0.038807361602783204, 0.039176383972167966, 0.03906969451904297, 0.03884636688232422, 0.038760543823242184, 0.03923503875732422, 0.03877737426757812, 0.03861708831787109, 0.04034969711303711, 0.038572032928466796, 0.038493919372558597, 0.03850883102416992, 0.03874611282348633, 0.0392806396484375, 0.03936665725708008, 0.039802017211914065, 0.0388043212890625, 
0.03885260772705078, 0.03881574249267578, 0.03943423843383789, 0.03935641479492188, 0.04248748779296875, 0.039237056732177734, 0.03924671936035156, 0.038868736267089844, 0.0386868782043457, 0.03867596817016602, 0.03912931060791015, 0.03890966415405273, 0.03897411346435547, 0.03878054428100586, 0.0389615364074707, 0.03915980911254883, 0.038901153564453124, 0.038736480712890625, 0.038553600311279294, 0.03868819046020508, 0.03855203247070312, 0.039110240936279295, 0.038671966552734374, 0.038940799713134765, 0.038929183959960936, 0.038942718505859376, 0.03876851272583008, 0.03885587310791016, 0.038628192901611326, 0.03890358352661133, 0.03851295852661133, 0.03847782516479492, 0.03847564697265625, 0.03884659194946289, 0.03873785781860352, 0.03868371200561523, 0.038849342346191404, 0.038852127075195315, 0.03878883361816406, 0.03856422424316406, 0.03862163162231445, 0.038621185302734375, 0.03854131317138672, 0.03844054412841797, 0.03847721481323242, 0.0383449592590332, 0.03855846405029297, 0.03976380920410156, 0.03890192031860352, 0.03855142211914062, 0.03860281753540039, 0.038639678955078124, 0.03893350219726562, 0.03903180694580078, 0.03864303970336914, 0.0384293441772461, 0.038510238647460934, 0.0412388801574707, 0.038508544921875, 0.03857612609863281, 0.039456256866455076, 0.039559680938720705, 0.0394013442993164, 0.039426174163818356, 0.03886284637451172, 0.03866624069213867, 0.03874611282348633, 0.03926422500610351, 0.0391065902709961, 0.03924124908447266, 0.03907347106933594, 0.038730400085449215, 0.039198848724365236, 0.040235008239746094, 0.039411712646484375, 0.03900620651245117, 0.03904473495483399, 0.039282432556152345, 0.03938777542114258, 0.03926406478881836, 0.039516353607177736, 0.039002113342285157, 0.03864166259765625, 0.03891401672363281, 0.03918441772460937, 0.038766624450683594, 0.03905267333984375, 0.0387672004699707, 0.03910041427612305, 0.038643169403076175, 0.03863401412963867, 0.038629150390625, 0.038713569641113284, 0.03870515060424805, 0.03841164779663086, 0.038588191986083986, 0.039023456573486326, 0.03849123382568359, 0.040680126190185545, 0.040089824676513675, 0.039147518157958985, 0.039065601348876954, 0.038965248107910154, 0.03886796951293945, 0.03871846389770508, 0.03865932846069336, 0.03847449493408203, 0.03841228866577148, 0.03841347122192383, 0.038617919921875, 0.03868374252319336, 0.038884288787841795, 0.038629375457763675, 0.039117950439453125, 0.03901139068603516, 0.03902444839477539, 0.03897507095336914, 0.038599071502685545, 0.038441982269287106, 0.03897651290893555, 0.03861625671386719, 0.03895702362060547, 0.03856675338745117, 0.03857766342163086, 0.03925043106079101, 0.039822784423828125, 0.03877331161499024, 0.03860275268554687, 0.03851260757446289, 0.03849833679199219, 0.0385351676940918, 0.03855155181884766, 0.03845241546630859, 0.03847244644165039, 0.038345985412597657, 0.038674400329589846, 0.0387674560546875, 0.038899711608886715, 0.038330368041992184, 0.0384634895324707, 0.03842867279052734, 0.03840204620361328, 0.0383798713684082, 0.038610591888427734, 0.03829350280761719, 0.03893779373168945, 0.04027884674072266, 0.03924991989135742, 0.038980960845947266, 0.03865238571166992, 0.03859807968139648, 0.03869497680664063, 0.03866847991943359, 0.039062015533447264, 0.038421985626220706, 0.03893708801269531, 0.038445087432861326, 0.038563838958740236, 0.0386701774597168, 0.03866640090942383, 0.03871030426025391, 0.03869795227050781, 0.03861427307128906, 0.03868156814575195, 0.038391807556152346, 0.038327518463134765, 0.038451839447021484, 
0.03839542388916015, 0.03871603012084961, 0.03877248001098633, 0.038727935791015626, 0.03899299240112305, 0.038847007751464845, 0.03925411224365234, 0.03907379150390625, 0.03898780822753906, 0.03904332733154297, 0.038801406860351564, 0.03873747253417969, 0.03901279830932617, 0.038702846527099606, 0.038714942932128904, 0.03880006408691406, 0.03889676666259766, 0.039045631408691404, 0.038975841522216795, 0.03905129623413086, 0.038828033447265625, 0.03866828918457031, 0.03870924758911133, 0.038505630493164064, 0.038495071411132814, 0.038465152740478514, 0.038570369720458984, 0.0389222412109375, 0.03868672180175781, 0.03877801513671875, 0.038961822509765626, 0.038851966857910154, 0.03881452941894531, 0.038973438262939454, 0.03918438339233398, 0.03950387191772461, 0.03931059265136719, 0.038883838653564456, 0.03885696029663086, 0.03877273559570313, 0.03868467330932617, 0.03876764678955078, 0.038958049774169924, 0.038870529174804686, 0.038976001739501956, 0.03908537673950195, 0.03907859039306641, 0.039346176147460936, 0.03925196838378906, 0.039180286407470705, 0.03922700881958008, 0.039725440979003906, 0.03909427261352539, 0.03879731369018555, 0.03857372665405273, 0.03934038543701172, 0.03943833541870117, 0.03911065673828125, 0.03974886322021484, 0.03919247817993164, 0.03922972869873047, 0.038962783813476565, 0.038773151397705076, 0.03880080032348633, 0.03903343963623047, 0.038950912475585936, 0.0389119987487793, 0.0391495361328125, 0.03926019287109375, 0.039188480377197264, 0.03894476699829102, 0.03975958251953125, 0.03863580703735352, 0.03846758270263672, 0.038513729095458984, 0.03858118438720703, 0.03874342346191406, 0.03868940734863281, 0.039198368072509766, 0.0389939193725586, 0.03870896148681641, 0.03861699295043945, 0.038744800567626955, 0.03881974411010742, 0.03871065521240234, 0.03882876968383789, 0.03914547348022461, 0.0389378890991211, 0.03875503921508789, 0.03863552093505859, 0.03855462265014648, 0.038547584533691406, 0.038548351287841794, 0.03848361587524414, 0.03872953414916992, 0.03876099014282226, 0.03857148742675781, 0.0384312629699707, 0.03854070281982422, 0.03846335983276367, 0.03850636672973633, 0.03830662536621094, 0.03872975921630859, 0.038381088256835935, 0.03851516723632813, 0.03874579238891602, 0.03891231918334961, 0.038713088989257814, 0.03858451080322266, 0.038567264556884764, 0.0385830078125, 0.03848969650268555, 0.03905782318115234, 0.039550975799560545, 0.03973868942260742, 0.03876860809326172, 0.038578208923339845, 0.03840480041503906, 0.038578174591064454, 0.038647296905517575, 0.03893299102783203, 0.0388935661315918, 0.03907788848876953, 0.038919456481933595, 0.038677215576171875, 0.03867820739746094, 0.038596927642822264, 0.038674144744873046, 0.03937923049926758, 0.038838367462158206, 0.03869686508178711, 0.0387437744140625, 0.038704769134521484, 0.038847007751464845, 0.03877286529541016, 0.0389343376159668, 0.03918409729003906, 0.039772544860839844, 0.04098467254638672, 0.0388403205871582, 0.0388454704284668, 0.03903539276123047, 0.0393732795715332, 0.03930521774291992, 0.0392163200378418, 0.03901113510131836, 0.03887308883666992, 0.03910351943969727, 0.03908019256591797, 0.03905974578857422, 0.03918819046020508, 0.03895296096801758, 0.03870297622680664, 0.04091990280151367, 0.0390467529296875, 0.03885615921020508, 0.038529407501220705, 0.03864121627807617, 0.039182910919189455, 0.03920732879638672, 0.039235614776611326, 0.039370750427246096, 0.03882393646240234, 0.038877185821533204, 0.03869219207763672, 0.03865871810913086, 0.03862099075317383, 
0.03856320190429687, 0.038449985504150394, 0.0385731201171875, 0.03858051300048828, 0.03847439956665039, 0.03836928176879883, 0.03854336166381836, 0.03845119857788086, 0.03847782516479492, 0.03881488037109375, 0.03955766296386719, 0.039305534362792965, 0.040623390197753906, 0.03938172912597656, 0.039018497467041016, 0.03874784088134765, 0.03886687850952148, 0.03879062271118164, 0.03965801620483399, 0.039378944396972655, 0.039360511779785154, 0.03944243240356445, 0.038934528350830076, 0.03875628662109375, 0.038919681549072264, 0.03909270477294922, 0.039463008880615234, 0.03938070297241211, 0.03948102569580078, 0.039368480682373044, 0.03918316650390625, 0.03903833770751953, 0.03889215850830078, 0.03899094390869141, 0.03897743988037109, 0.038870014190673825, 0.038696704864501955, 0.03871059036254883, 0.03903379058837891, 0.03913113784790039, 0.03986636734008789, 0.03967942428588867, 0.03871977615356445, 0.03866857528686524, 0.038623233795166016, 0.03881574249267578, 0.03890176010131836, 0.03941312026977539, 0.039279232025146486, 0.03878863906860352, 0.038744544982910155, 0.03869203186035156, 0.03875104141235351, 0.03876249694824219, 0.03853017425537109, 0.03843958282470703, 0.038559551239013674, 0.03855606460571289, 0.03853638458251953, 0.040567615509033206, 0.03916185760498047, 0.03916550445556641, 0.03877318572998047, 0.038960990905761717, 0.038768798828125, 0.038981632232666014, 0.03927654266357422, 0.039247039794921876, 0.038849342346191404, 0.039008255004882815, 0.038742015838623044, 0.03879731369018555, 0.03883203125, 0.038860897064208984, 0.03907929611206055, 0.038863487243652343, 0.038991455078125, 0.039438751220703124, 0.03912396621704101, 0.0391929931640625, 0.03904934310913086, 0.03931270217895508, 0.039287166595458986, 0.03936249542236328, 0.03909667205810547, 0.03879638290405273, 0.038561729431152346, 0.039049919128417966, 0.03918057632446289, 0.039216159820556644, 0.0389249267578125, 0.03885408020019531, 0.03887392044067383, 0.038747520446777345, 0.039000064849853515, 0.03902028656005859, 0.03894492721557617, 0.03926713562011719, 0.039174144744873046, 0.039005374908447264, 0.03887772750854492, 0.03901468658447266, 0.03889561462402344, 0.03876812744140625, 0.038628929138183596, 0.03876729583740234, 0.03921920013427734, 0.04048102569580078, 0.03957526397705078, 0.039604511260986325, 0.040205310821533204, 0.039228416442871096, 0.03946656036376953, 0.03937529754638672, 0.039164161682128905, 0.03906480026245117, 0.03878908920288086, 0.03864387130737305, 0.03868918228149414, 0.038539264678955076, 0.03894208145141602, 0.03877747344970703, 0.03878448104858399, 0.03851728057861328, 0.03876435089111328, 0.038863040924072265, 0.03867343902587891, 0.03867500686645508, 0.03881001663208008, 0.03869900894165039, 0.03860425567626953, 0.03851523208618164, 0.038590560913085936, 0.038594463348388675, 0.038547294616699215, 0.03869712066650391, 0.03950592041015625, 0.03907174301147461, 0.0394090576171875, 0.0397092170715332, 0.03950393676757812, 0.039008255004882815, 0.0389304313659668, 0.03877478408813476]",tokens/s,25.695209110095774,, 
4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,2228.768768,2558.394368,0.0,2155.872256,2032.413184,s,1,8.82915234375,8.82915234375,0.0,8.82915234375,8.82915234375,8.82915234375,8.82915234375,[8.82915234375],,kWh,5.20656411916813e-05,5.735624447842144e-06,1.662556885598243e-05,7.442683449550588e-05,,MB,2281.263104,2835.218432,0.0,2418.016256,2279.563776,s,10,0.7813948440551757,0.07813948440551757,0.0002024961363432146,0.07812495803833008,0.07832449722290039,0.0784661350250244,0.07857944526672364,"[0.07829302215576171, 0.07809327697753907, 0.07799747467041016, 0.07791468811035156, 0.07785638427734375, 0.07807494354248047, 0.07860777282714844, 0.07816614532470703, 0.0782344970703125, 0.0781566390991211]",tokens/s,3276.192592613567,kWh,2.3349306698669654e-06,2.574976728916439e-07,1.5468523485921981e-06,4.139280691350808e-06,tokens/kWh,61846494.37640753,MB,2281.263104,2919.104512,0.0,2501.902336,2389.801984,s,10,18.997174194335937,1.8997174194335937,0.011290641251550229,1.8964168090820313,1.9193546752929689,1.9203277893066406,1.9211062805175783,"[1.919138427734375, 1.9213009033203126, 1.899252685546875, 1.8935809326171875, 1.9007515869140625, 1.8887833251953126, 1.9031004638671876, 1.88919775390625, 1.8914566650390625, 1.8906114501953124]",tokens/s,33.16282693179896,kWh,5.5700861302215156e-05,6.143544489834176e-06,2.862189400860593e-05,9.046629980065526e-05,tokens/kWh,696391.9176402932,,s,630,18.989331668853744,0.030141796299767876,0.0007460634415589727,0.029934239387512204,0.030612844848632813,0.030947160434722898,0.0340253570175171,"[0.030558048248291017, 0.030486528396606444, 0.030330783843994142, 0.030415103912353515, 0.030291967391967774, 0.030265344619750976, 0.030349311828613282, 0.030142208099365235, 0.0301646728515625, 0.03019219207763672, 0.030318592071533205, 0.030785152435302734, 0.03052787208557129, 0.030334943771362304, 0.030236703872680664, 0.030269439697265626, 0.030209888458251954, 0.030352800369262696, 0.030572736740112304, 0.030468671798706055, 0.030631328582763673, 0.03061190414428711, 0.030808191299438476, 0.03056643295288086, 0.030670656204223632, 0.03046623992919922, 0.030414848327636718, 0.03029395294189453, 0.030881120681762696, 0.030269792556762695, 0.030283679962158205, 0.030119808197021484, 0.030146495819091797, 0.030128799438476562, 0.03033462333679199, 0.030409055709838866, 0.03033087921142578, 0.03014656066894531, 0.0302174072265625, 0.03012681579589844, 0.03020956802368164, 0.030210624694824217, 0.03059097671508789, 0.030283584594726562, 0.030430879592895508, 0.03134927940368652, 0.030697471618652345, 0.030457792282104493, 0.03056195259094238, 0.030392608642578124, 0.03062131118774414, 0.030577152252197266, 0.030674495697021485, 0.030312896728515625, 0.030107391357421874, 0.031443199157714846, 0.031112512588500976, 0.03053843116760254, 0.030504959106445313, 0.030357503890991212, 0.030510080337524413, 
0.030745599746704103, 0.030478208541870118, 0.03100057601928711, 0.03058278465270996, 0.03052867126464844, 0.03038809585571289, 0.030105951309204102, 0.03027180862426758, 0.030095680236816406, 0.030476287841796876, 0.030699039459228517, 0.030599647521972657, 0.030474239349365235, 0.030164512634277343, 0.03247766494750977, 0.03461465454101562, 0.030491264343261718, 0.030385503768920897, 0.030507680892944335, 0.030338495254516602, 0.030540000915527343, 0.030255456924438477, 0.03024025535583496, 0.030046144485473634, 0.030282112121582033, 0.031941856384277344, 0.030317535400390627, 0.030105375289916993, 0.029866207122802736, 0.030304224014282226, 0.030746656417846678, 0.030468095779418947, 0.03055001640319824, 0.030521343231201172, 0.030498815536499024, 0.030201087951660155, 0.02996659278869629, 0.030117952346801757, 0.029940160751342773, 0.03031839942932129, 0.029862079620361328, 0.02965488052368164, 0.02975270462036133, 0.029897504806518555, 0.029684959411621095, 0.029585504531860353, 0.030086912155151368, 0.02983622360229492, 0.029822975158691405, 0.033889694213867186, 0.0298702392578125, 0.02979680061340332, 0.02977791976928711, 0.029894367218017578, 0.029942047119140624, 0.02987558364868164, 0.03020044708251953, 0.03027078437805176, 0.03843084716796875, 0.02998124885559082, 0.02987331199645996, 0.02972051239013672, 0.029842336654663085, 0.029906303405761718, 0.02965772819519043, 0.030539072036743165, 0.030067392349243164, 0.029863040924072267, 0.029825439453125, 0.02971696090698242, 0.02974515151977539, 0.029820928573608397, 0.029618175506591796, 0.029769376754760744, 0.029622400283813476, 0.029700063705444337, 0.029915391921997072, 0.0305664005279541, 0.03025676727294922, 0.03024662399291992, 0.034718368530273436, 0.030567903518676758, 0.029991455078125, 0.02996633529663086, 0.029891807556152342, 0.029799200057983397, 0.029931360244750977, 0.029851743698120117, 0.029642208099365235, 0.02978371238708496, 0.029602048873901367, 0.029672128677368164, 0.029572479248046873, 0.029784000396728516, 0.029967039108276368, 0.029841407775878907, 0.03295187377929688, 0.030300607681274416, 0.030060543060302734, 0.03006780815124512, 0.029968832015991213, 0.029945728302001953, 0.029843839645385742, 0.02982524871826172, 0.029715871810913085, 0.029777759552001952, 0.030029632568359374, 0.029797311782836913, 0.02976563262939453, 0.029724224090576172, 0.029661216735839845, 0.029727136611938477, 0.029705343246459962, 0.02977849578857422, 0.03042336082458496, 0.03326342391967774, 0.02993142318725586, 0.029792543411254882, 0.02977952003479004, 0.03130822372436524, 0.031069311141967773, 0.030122880935668946, 0.030050304412841795, 0.029937664031982423, 0.0299289608001709, 0.030128320693969726, 0.029903167724609374, 0.030332927703857423, 0.030072704315185547, 0.029730560302734375, 0.029593984603881837, 0.029714431762695313, 0.029607936859130858, 0.029675519943237305, 0.02968681526184082, 0.02958198356628418, 0.029724000930786133, 0.029572063446044922, 0.029677568435668947, 0.029602975845336915, 0.029620128631591795, 0.029543359756469725, 0.02979840087890625, 0.029937664031982423, 0.030224384307861327, 0.02984454345703125, 0.03368236923217773, 0.030552064895629883, 0.030000320434570314, 0.029845632553100587, 0.030028480529785156, 0.029703968048095702, 0.029722623825073242, 0.029647071838378905, 0.029654592514038087, 0.029716928482055663, 0.02978950309753418, 0.029937952041625977, 0.030103424072265624, 0.029742687225341798, 0.029864896774291994, 0.02964678382873535, 0.029761600494384765, 0.029648895263671874, 
0.029743104934692382, 0.029743104934692382, 0.029808639526367187, 0.03064944076538086, 0.030085599899291993, 0.029847135543823244, 0.029968927383422852, 0.02976790428161621, 0.029726816177368165, 0.02969705581665039, 0.029662176132202147, 0.031163455963134766, 0.029864896774291994, 0.029908992767333983, 0.02999622344970703, 0.029784896850585937, 0.03008633613586426, 0.034921279907226564, 0.030662656784057617, 0.030674943923950194, 0.030443519592285157, 0.03029337692260742, 0.030364288330078124, 0.02976153564453125, 0.029799903869628906, 0.02999760055541992, 0.030093311309814453, 0.030271360397338867, 0.030065439224243165, 0.03009449577331543, 0.02987094306945801, 0.02975129508972168, 0.029822111129760742, 0.029767776489257814, 0.030855712890625, 0.029944032669067384, 0.029759040832519533, 0.030032320022583006, 0.029975616455078125, 0.029862432479858397, 0.03124060821533203, 0.029995008468627928, 0.02994095993041992, 0.029891359329223634, 0.029902687072753908, 0.02993168067932129, 0.030207359313964843, 0.030276224136352538, 0.034631679534912106, 0.030257152557373046, 0.030134271621704102, 0.030023679733276368, 0.030078975677490235, 0.029945535659790037, 0.02981100845336914, 0.029867136001586914, 0.029698944091796874, 0.03011568069458008, 0.029679264068603516, 0.029719039916992186, 0.029792255401611328, 0.02987353515625, 0.029856000900268555, 0.029800832748413084, 0.03079897689819336, 0.029870975494384767, 0.029767679214477538, 0.029781951904296874, 0.029675424575805662, 0.029747360229492186, 0.030117887496948242, 0.02993120002746582, 0.029888832092285156, 0.02970364761352539, 0.029684255599975586, 0.029785728454589842, 0.030837120056152342, 0.029837312698364257, 0.02996633529663086, 0.029929471969604493, 0.029887744903564453, 0.02990070343017578, 0.03264188766479492, 0.03228374481201172, 0.030219167709350587, 0.03019753646850586, 0.03057276725769043, 0.030205343246459963, 0.030073440551757813, 0.029853696823120116, 0.030272256851196288, 0.029853696823120116, 0.029585407257080077, 0.02981888008117676, 0.029593599319458007, 0.029726240158081056, 0.029731296539306642, 0.029683712005615235, 0.029884416580200194, 0.029685760498046877, 0.02962227249145508, 0.029609983444213867, 0.029618175506591796, 0.029566591262817382, 0.02971072006225586, 0.029830432891845702, 0.02989948844909668, 0.02970412826538086, 0.029588991165161133, 0.0296265926361084, 0.02953660774230957, 0.029669376373291017, 0.02955264091491699, 0.03016908836364746, 0.034080768585205076, 0.030248031616210938, 0.02986892890930176, 0.03021548843383789, 0.029946592330932616, 0.029875680923461913, 0.02983580780029297, 0.02989849662780762, 0.029899007797241212, 0.02976972770690918, 0.02979635238647461, 0.029711904525756835, 0.02990742492675781, 0.03021785545349121, 0.030407039642333985, 0.030044160842895507, 0.029904895782470704, 0.029954048156738283, 0.029876224517822264, 0.029809759140014647, 0.029835775375366212, 0.029828895568847658, 0.029893247604370118, 0.029829120635986327, 0.02984457588195801, 0.030028703689575196, 0.029663232803344725, 0.029728160858154298, 0.029774431228637696, 0.029638656616210936, 0.029692928314208986, 0.029730976104736326, 0.02979311943054199, 0.030224384307861327, 0.03025686454772949, 0.034390174865722656, 0.029934911727905272, 0.029807424545288085, 0.02977952003479004, 0.03029257583618164, 0.030517248153686522, 0.029849599838256836, 0.029872127532958984, 0.02963599967956543, 0.029801055908203124, 0.029865535736083984, 0.029807039260864258, 0.029865888595581053, 0.029714527130126952, 0.029874176025390626, 
0.03026464080810547, 0.03030291175842285, 0.030303232192993163, 0.03024176025390625, 0.02988172721862793, 0.029829792022705078, 0.0328675537109375, 0.030268192291259766, 0.030659839630126952, 0.03044620704650879, 0.030227903366088868, 0.030628543853759765, 0.030303552627563478, 0.030281728744506835, 0.03014672088623047, 0.030980640411376954, 0.030894079208374024, 0.03049228858947754, 0.030036352157592774, 0.02977782440185547, 0.029887807846069335, 0.032774398803710934, 0.030758592605590822, 0.030567264556884764, 0.030638080596923828, 0.030551551818847656, 0.030130687713623046, 0.030029823303222656, 0.029871200561523436, 0.02997545623779297, 0.029857791900634766, 0.029868032455444334, 0.029882368087768556, 0.029865440368652345, 0.02977436828613281, 0.029861120223999022, 0.029876991271972655, 0.029724672317504884, 0.02977177619934082, 0.02962339210510254, 0.02965305519104004, 0.029585472106933595, 0.029856544494628906, 0.02978611183166504, 0.029812736511230467, 0.029732864379882814, 0.029671424865722655, 0.029730815887451172, 0.031139839172363282, 0.03176790428161621, 0.030114463806152344, 0.03016214370727539, 0.030884063720703125, 0.03066249656677246, 0.030123455047607422, 0.0300795841217041, 0.02971251106262207, 0.02985478401184082, 0.02988310432434082, 0.02984943962097168, 0.029820768356323243, 0.029727264404296874, 0.0299233283996582, 0.02975948715209961, 0.029855743408203125, 0.029800447463989257, 0.029920736312866212, 0.030126623153686524, 0.03059663963317871, 0.03032431983947754, 0.030071680068969726, 0.03014614486694336, 0.03020947265625, 0.029969375610351564, 0.029841407775878907, 0.029771360397338867, 0.02983772850036621, 0.030299903869628907, 0.030209856033325197, 0.03040300750732422, 0.03025436782836914, 0.029987552642822265, 0.02997248077392578, 0.030089216232299806, 0.029869600296020506, 0.02985990333557129, 0.029722368240356446, 0.029711008071899414, 0.029642751693725586, 0.029722623825073242, 0.029741056442260744, 0.029618175506591796, 0.02977996826171875, 0.029644256591796876, 0.029696224212646484, 0.029750751495361327, 0.029773920059204102, 0.03021286392211914, 0.029642688751220704, 0.02964271926879883, 0.029642047882080077, 0.02962646484375, 0.02961631965637207, 0.029745248794555663, 0.02962428855895996, 0.029630912780761718, 0.0297205753326416, 0.029676576614379883, 0.029899744033813475, 0.0297205753326416, 0.03079167938232422, 0.031522495269775394, 0.030355712890625, 0.030577791213989257, 0.030190528869628905, 0.030562271118164064, 0.030244895935058594, 0.029825023651123047, 0.030257152557373046, 0.029612031936645508, 0.02974883270263672, 0.0297903995513916, 0.029903072357177735, 0.02997657585144043, 0.02989779281616211, 0.029890623092651367, 0.029827871322631837, 0.029859935760498047, 0.029855743408203125, 0.029746496200561523, 0.029835968017578124, 0.02981888008117676, 0.029767679214477538, 0.029845504760742186, 0.02994576072692871, 0.0298374080657959, 0.02963046455383301, 0.029716480255126954, 0.02970147132873535, 0.029683391571044923, 0.029887359619140626, 0.029743200302124025, 0.0298024959564209, 0.029883392333984377, 0.029972799301147462, 0.02999776077270508, 0.02991209602355957, 0.02990959930419922, 0.030001535415649414, 0.03001753616333008, 0.030449663162231445, 0.030089216232299806, 0.029659040451049806, 0.029745248794555663, 0.029896703720092774, 0.030170976638793947, 0.03010985565185547, 0.03019366455078125, 0.029888320922851562, 0.030906240463256837, 0.029890880584716797, 0.029999103546142578, 0.02993356704711914, 0.030291904449462892, 0.029843040466308594, 
0.02970627212524414, 0.030037824630737304, 0.030745216369628906, 0.030298112869262695, 0.02993075180053711, 0.029985504150390627, 0.02987126350402832, 0.030131071090698243, 0.03019366455078125, 0.030125471115112306, 0.0304849910736084, 0.031034912109375, 0.031004703521728516, 0.03079360008239746, 0.030725088119506836, 0.030529535293579102, 0.030757919311523437, 0.03026019287109375, 0.029882272720336913, 0.029922847747802735, 0.030032447814941406, 0.03013222312927246, 0.029954048156738283, 0.029978624343872072, 0.02983078384399414, 0.029862272262573243, 0.029843456268310548, 0.02998886489868164, 0.02988559913635254, 0.029958015441894532, 0.029721471786499025, 0.029765727996826172, 0.029657087326049804, 0.029730335235595703, 0.029622751235961912, 0.029669376373291017, 0.029646848678588866, 0.029969696044921876, 0.02963324737548828, 0.029716032028198242, 0.029706687927246095, 0.030139488220214845, 0.02975619125366211, 0.029977760314941405, 0.029852640151977538, 0.029969823837280272, 0.029782623291015626, 0.030104608535766603, 0.030028127670288087, 0.030590879440307618, 0.030221023559570313, 0.030244768142700194, 0.029892704010009766, 0.03006460762023926, 0.029953311920166016, 0.029944576263427735, 0.02979020881652832, 0.02994790458679199, 0.02976972770690918, 0.02976051139831543, 0.02999193572998047, 0.030690303802490236, 0.029842432022094727, 0.029908992767333983, 0.029775871276855468, 0.02973676872253418, 0.029898944854736327, 0.03137126350402832, 0.029880256652832032, 0.029935680389404296, 0.02995814323425293, 0.02978611183166504, 0.029849599838256836, 0.03016048049926758, 0.030042400360107423, 0.02994803237915039]",tokens/s,33.176523059699036,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,2228.256768,2558.394368,0.0,2155.872256,2032.413184,s,1,9.1341005859375,9.1341005859375,0.0,9.1341005859375,9.1341005859375,9.1341005859375,9.1341005859375,[9.1341005859375],,kWh,5.169524065420319e-05,5.695051776205635e-06,1.6650013319990942e-05,7.404030575039976e-05,,MB,2285.228032,2835.218432,0.0,2418.016256,2279.563776,s,10,0.7810281829833985,0.07810281829833984,0.0001284550209585552,0.07809267044067383,0.07828038482666015,0.07829091110229493,0.07829933212280274,"[0.07827804565429687, 0.0779207992553711, 0.07806588745117188, 0.0779493408203125, 0.07820748901367187, 0.0779736328125, 0.078172607421875, 0.07803948974609375, 0.07830143737792969, 0.07811945343017578]",tokens/s,3277.730632230483,kWh,2.3386067667666187e-06,2.579053324702673e-07,1.5470967932318392e-06,4.143608892468724e-06,tokens/kWh,61781892.70355522,MB,2286.727168,2919.104512,0.0,2501.902336,2389.801984,s,10,18.702352172851565,1.8702352172851562,0.14610106509636314,1.9295911865234374,1.9375388061523438,1.9379331604003907,1.9382486437988282,"[1.4416275634765625, 1.8312318115234374, 1.91214794921875, 1.9103414306640625, 1.937451171875, 1.9356417236328125, 1.9276470947265625, 
1.9315352783203126, 1.936400634765625, 1.9383275146484376]",tokens/s,33.685602440665804,kWh,5.5891985026146625e-05,6.164648744589533e-06,2.8758371895569734e-05,9.08150056663059e-05,tokens/kWh,693717.9548442642,,s,630,18.69372937393187,0.029672586307828392,0.0025473643425977205,0.030441247940063476,0.030992185783386233,0.03132554359436036,0.03211103267669678,"[0.022968191146850586, 0.022790048599243166, 0.022491968154907227, 0.022609920501708985, 0.023149919509887696, 0.022624416351318358, 0.02258995246887207, 0.02261759948730469, 0.02261862373352051, 0.022449472427368163, 0.022608575820922853, 0.02262396812438965, 0.025937856674194334, 0.02303830337524414, 0.02274691200256348, 0.022546655654907228, 0.02272051239013672, 0.022632448196411133, 0.022645792007446288, 0.022581504821777343, 0.022741312026977538, 0.022698400497436523, 0.02265292739868164, 0.022640640258789063, 0.022763519287109374, 0.022648832321166993, 0.022637823104858398, 0.022794111251831055, 0.02266761589050293, 0.022569375991821287, 0.02263667106628418, 0.022814720153808594, 0.023029760360717775, 0.022804479598999023, 0.02289254379272461, 0.023178752899169923, 0.02306287956237793, 0.022790176391601562, 0.024536352157592773, 0.02341974449157715, 0.022951936721801756, 0.022863552093505858, 0.02259350395202637, 0.02267526435852051, 0.02273539161682129, 0.02264793586730957, 0.022594432830810546, 0.0224849910736084, 0.023810047149658203, 0.022603776931762694, 0.02265622329711914, 0.022993696212768554, 0.022719520568847656, 0.02317123222351074, 0.02322719955444336, 0.023252864837646485, 0.023024959564208983, 0.02296486473083496, 0.02315283203125, 0.02292259216308594, 0.02266716766357422, 0.022594303131103517, 0.022632448196411133, 0.023195295333862304, 0.022868000030517577, 0.022524192810058595, 0.022667648315429688, 0.022540288925170897, 0.022605823516845702, 0.02262771224975586, 0.022493824005126953, 0.022587392807006838, 0.022687744140625, 0.022822271347045897, 0.030532224655151367, 0.030689279556274415, 0.03074015998840332, 0.0304202880859375, 0.030307327270507813, 0.03037161636352539, 0.030256736755371095, 0.03028646469116211, 0.030263296127319338, 0.030121376037597656, 0.030322912216186524, 0.030402944564819335, 0.030137727737426758, 0.030175199508666994, 0.03006892776489258, 0.03036518478393555, 0.03025404739379883, 0.030193279266357422, 0.030372064590454103, 0.03135222434997559, 0.03010432052612305, 0.03064352035522461, 0.030685344696044923, 0.030133855819702147, 0.030763168334960938, 0.030321279525756837, 0.030152671813964842, 0.030140607833862305, 0.0301977596282959, 0.03020595169067383, 0.03021004867553711, 0.03012784004211426, 0.03031475257873535, 0.030427167892456055, 0.030717952728271485, 0.03089993667602539, 0.0307238712310791, 0.030441984176635743, 0.030180416107177734, 0.030604127883911134, 0.030234176635742186, 0.0302126407623291, 0.030105600357055663, 0.030422624588012696, 0.030207839965820313, 0.030991968154907228, 0.030989023208618165, 0.030348575592041016, 0.030516191482543944, 0.03051238441467285, 0.03028659248352051, 0.03036774444580078, 0.03027235221862793, 0.030351327896118163, 0.030242847442626952, 0.030623552322387695, 0.03019932746887207, 0.030298784255981447, 0.03039232063293457, 0.030164512634277343, 0.030255552291870116, 0.030268831253051756, 0.030351808547973633, 0.03024300765991211, 0.031235136032104493, 0.031046592712402343, 0.030509056091308592, 0.030310400009155275, 0.030480384826660156, 0.030306304931640625, 0.03021548843383789, 0.030223039627075194, 0.030143680572509764, 0.03027846336364746, 
0.030480384826660156, 0.03015679931640625, 0.030135360717773438, 0.030096319198608397, 0.030302207946777345, 0.030746368408203124, 0.03147782325744629, 0.030762624740600587, 0.03059529685974121, 0.030669151306152345, 0.030453216552734374, 0.030278175354003907, 0.030140384674072266, 0.03023174476623535, 0.03008140754699707, 0.03005036735534668, 0.030036384582519532, 0.030064640045166017, 0.030035968780517577, 0.03096575927734375, 0.030058496475219725, 0.03002572822570801, 0.03012393569946289, 0.030056543350219726, 0.03019980812072754, 0.03058892822265625, 0.03056003189086914, 0.030281951904296875, 0.03056537628173828, 0.03021516799926758, 0.030096511840820312, 0.030309055328369142, 0.03035759925842285, 0.030209983825683594, 0.03011123275756836, 0.030560863494873046, 0.03022854423522949, 0.030195487976074218, 0.03017545509338379, 0.03015475273132324, 0.030107072830200195, 0.03019366455078125, 0.030126079559326172, 0.03009916877746582, 0.03008950424194336, 0.030073888778686525, 0.030106592178344726, 0.030297792434692383, 0.03002191925048828, 0.03031657600402832, 0.03010668754577637, 0.030118751525878906, 0.030138463973999025, 0.030076608657836915, 0.030178720474243165, 0.030188032150268555, 0.0306712646484375, 0.030774784088134766, 0.030525951385498046, 0.03058278465270996, 0.03035696029663086, 0.030214208602905274, 0.03031907272338867, 0.0305581111907959, 0.030318592071533205, 0.030275680541992187, 0.030201343536376952, 0.030196128845214845, 0.030144607543945313, 0.030515199661254884, 0.030469951629638673, 0.030408287048339845, 0.030128223419189453, 0.030321151733398437, 0.030252767562866212, 0.03045404815673828, 0.03013795280456543, 0.030163360595703126, 0.03039641571044922, 0.030297439575195314, 0.030300319671630858, 0.030216480255126955, 0.030128063201904295, 0.030118175506591797, 0.03024028778076172, 0.030210527420043945, 0.030074880599975585, 0.03035955238342285, 0.030153919219970703, 0.030845760345458984, 0.03048431968688965, 0.030709503173828125, 0.030391872406005858, 0.030249631881713868, 0.030136032104492186, 0.030292448043823243, 0.030471775054931642, 0.030529951095581053, 0.031692800521850584, 0.03035126495361328, 0.030154848098754884, 0.030130176544189452, 0.03014656066894531, 0.030281728744506835, 0.03044166374206543, 0.030717567443847658, 0.03105830383300781, 0.031008447647094727, 0.030916288375854493, 0.03086809539794922, 0.030507007598876954, 0.030453760147094725, 0.030258975982666015, 0.030238943099975588, 0.030115840911865234, 0.0301711368560791, 0.030168224334716796, 0.0305446720123291, 0.030306367874145507, 0.03023855972290039, 0.030623903274536134, 0.03807436752319336, 0.03138559913635254, 0.030947135925292968, 0.030668991088867188, 0.030453760147094725, 0.030509056091308592, 0.03042918395996094, 0.03029305648803711, 0.03068204879760742, 0.030519296646118164, 0.03058687973022461, 0.03082854461669922, 0.0306296329498291, 0.030506591796875, 0.030450336456298827, 0.030464000701904297, 0.03030531120300293, 0.03087808036804199, 0.030882400512695314, 0.030930431365966796, 0.03058883285522461, 0.0305230712890625, 0.03057551956176758, 0.030440832138061525, 0.030670944213867186, 0.03068332862854004, 0.030615455627441408, 0.030380224227905272, 0.030398687362670897, 0.03038211250305176, 0.030422304153442385, 0.030532384872436525, 0.031190975189208985, 0.03213516616821289, 0.030840831756591795, 0.031068159103393556, 0.03095142364501953, 0.030654016494750976, 0.03044748878479004, 0.030415071487426757, 0.030826688766479492, 0.03044710350036621, 0.03062646484375, 0.030467456817626953, 
0.030732927322387697, 0.030523391723632814, 0.030846975326538087, 0.031070207595825194, 0.03077939224243164, 0.031110431671142576, 0.030903007507324217, 0.030717952728271485, 0.030654464721679688, 0.030406496047973634, 0.03033923149108887, 0.030375072479248047, 0.03064918327331543, 0.0307521915435791, 0.030517824172973634, 0.030509056091308592, 0.030510400772094725, 0.03039507293701172, 0.03057196807861328, 0.0305894718170166, 0.030484415054321288, 0.030423040390014647, 0.03079542350769043, 0.03073811149597168, 0.030544639587402344, 0.03058230400085449, 0.030542303085327148, 0.03035545539855957, 0.0304901123046875, 0.030407072067260742, 0.03062588882446289, 0.030644224166870116, 0.030885663986206055, 0.031096128463745116, 0.03097420883178711, 0.030627647399902345, 0.030559072494506834, 0.030648319244384766, 0.03058687973022461, 0.030875648498535156, 0.03081216049194336, 0.03057459259033203, 0.030810111999511718, 0.03078963279724121, 0.03076300811767578, 0.030586687088012696, 0.03051705551147461, 0.030652799606323243, 0.030489952087402343, 0.03226259231567383, 0.030912607192993165, 0.030940383911132813, 0.030743167877197265, 0.030617887496948243, 0.030674400329589843, 0.03066316795349121, 0.030894111633300782, 0.03059712028503418, 0.030557632446289062, 0.03038470458984375, 0.03082806396484375, 0.031042015075683594, 0.03101900863647461, 0.03099033546447754, 0.03098534393310547, 0.0312801284790039, 0.03072096061706543, 0.030554079055786134, 0.030596607208251952, 0.03041302490234375, 0.030470527648925782, 0.030676992416381835, 0.0307259521484375, 0.030793376922607422, 0.03063222312927246, 0.0308756160736084, 0.030837024688720703, 0.030922752380371094, 0.03075004768371582, 0.030507680892944335, 0.03019161605834961, 0.03055820846557617, 0.030711807250976563, 0.03077939224243164, 0.03085270309448242, 0.030943647384643554, 0.030744575500488282, 0.030740480422973632, 0.03033497619628906, 0.030182624816894533, 0.030118240356445312, 0.030191360473632814, 0.030476703643798828, 0.030236576080322267, 0.030142847061157226, 0.030312095642089844, 0.0303353271484375, 0.030302143096923827, 0.030246463775634766, 0.030202367782592773, 0.030123039245605467, 0.030303199768066405, 0.030430912017822265, 0.030351680755615236, 0.03037183952331543, 0.0304005126953125, 0.03042255973815918, 0.030460384368896483, 0.030408319473266603, 0.030361120223999023, 0.030288415908813475, 0.030442911148071287, 0.031110048294067383, 0.03146956825256347, 0.031204864501953124, 0.030834527969360353, 0.030652959823608397, 0.03051251220703125, 0.030650720596313477, 0.03087558364868164, 0.030802400588989257, 0.031178752899169923, 0.03084649658203125, 0.030586944580078126, 0.030476543426513673, 0.030685344696044923, 0.030515199661254884, 0.03058892822265625, 0.03095142364501953, 0.030830591201782227, 0.030537727355957032, 0.03056025505065918, 0.0304388484954834, 0.030634559631347657, 0.03061299133300781, 0.030330751419067385, 0.030376575469970704, 0.030330623626708984, 0.03035366439819336, 0.03048409652709961, 0.03053811264038086, 0.030795391082763673, 0.030739871978759766, 0.03049478340148926, 0.03075718307495117, 0.03076361656188965, 0.030515199661254884, 0.030365695953369142, 0.03024883270263672, 0.030371807098388673, 0.03025312042236328, 0.03024905586242676, 0.030840831756591795, 0.03057814407348633, 0.030822975158691406, 0.031317983627319336, 0.030801919937133788, 0.03062579154968262, 0.030613088607788087, 0.03066227149963379, 0.03038467216491699, 0.030353471755981444, 0.030326976776123046, 0.03024025535583496, 0.030198272705078126, 
0.03031449508666992, 0.030531583786010744, 0.030482431411743165, 0.03133807945251465, 0.030941600799560546, 0.030298015594482423, 0.03022447967529297, 0.030203903198242187, 0.03027257537841797, 0.030377920150756837, 0.030315231323242188, 0.030540063858032228, 0.032057022094726564, 0.031170879364013672, 0.03077120018005371, 0.03059507179260254, 0.030631935119628906, 0.03060531234741211, 0.03169484710693359, 0.03298822402954102, 0.030659135818481446, 0.03069171142578125, 0.03068623924255371, 0.030704448699951172, 0.03068943977355957, 0.03055001640319824, 0.030629728317260744, 0.030554399490356446, 0.03051091194152832, 0.030767200469970703, 0.03065251159667969, 0.030674144744873046, 0.030657312393188477, 0.030337024688720703, 0.030553695678710937, 0.03047875213623047, 0.03044883155822754, 0.03033375930786133, 0.030444992065429687, 0.03060383987426758, 0.030703615188598633, 0.030627456665039063, 0.030552288055419922, 0.03094281578063965, 0.03109267234802246, 0.03102934455871582, 0.030503456115722655, 0.030455711364746094, 0.03026543998718262, 0.030195711135864257, 0.030160415649414064, 0.030255584716796874, 0.030672096252441407, 0.03061020851135254, 0.030732000350952148, 0.030376224517822265, 0.030150047302246095, 0.030024192810058595, 0.03018044853210449, 0.030077951431274414, 0.03004412841796875, 0.030222368240356446, 0.0304389762878418, 0.030560703277587892, 0.03071311950683594, 0.030527584075927733, 0.03064076805114746, 0.030461919784545897, 0.03030428886413574, 0.030203903198242187, 0.030074880599975585, 0.030066688537597655, 0.030468095779418947, 0.031078399658203124, 0.03138150405883789, 0.031524768829345705, 0.03148195266723633, 0.03145078468322754, 0.03143100738525391, 0.031244255065917968, 0.03152899169921875, 0.03211468887329102, 0.03194870376586914, 0.03132835197448731, 0.03144851112365723, 0.03141279983520508, 0.031913312911987304, 0.031124128341674804, 0.031039424896240234, 0.03179177665710449, 0.03137740707397461, 0.03121971130371094, 0.031322111129760744, 0.03152678489685059, 0.031047807693481446, 0.031106943130493163, 0.03111315155029297, 0.0310581111907959, 0.030838464736938475, 0.030994144439697266, 0.03148041534423828, 0.030943231582641603, 0.03058073616027832, 0.03123404884338379, 0.03342540740966797, 0.03428303909301758, 0.03073276710510254, 0.03060531234741211, 0.03058687973022461, 0.03026697540283203, 0.030359968185424805, 0.030681087493896485, 0.030498815536499024, 0.030459615707397462, 0.0302957763671875, 0.03062022399902344, 0.0305947208404541, 0.030547359466552734, 0.030239648818969726, 0.030253087997436524, 0.030095359802246095, 0.030332160949707032, 0.030185888290405274, 0.030447967529296877, 0.030259199142456054, 0.030338655471801756, 0.03061801528930664, 0.03026265525817871, 0.03031667137145996, 0.030165504455566407, 0.0305166072845459, 0.03065100860595703, 0.03054755210876465, 0.030345632553100587, 0.03015996742248535, 0.0302347526550293, 0.031537952423095705, 0.032102081298828126, 0.030492992401123048, 0.030450815200805663, 0.030352256774902345, 0.030248960494995116, 0.030410751342773438, 0.030199392318725586, 0.03065603256225586, 0.03043724822998047, 0.030257631301879882, 0.030244735717773436, 0.03059779167175293, 0.030480224609375, 0.030756160736083983, 0.030653120040893555]",tokens/s,33.70114049465834,, 
4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load 
self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,4294.496256,4886.233088,0.0,4483.710976,4465.672704,s,1,10.7436259765625,10.7436259765625,0.0,10.7436259765625,10.7436259765625,10.7436259765625,10.7436259765625,[10.7436259765625],,kWh,0.00010669827799168464,1.1762235266076216e-05,3.3638638021998846e-05,0.0001520991512797597,,MB,2152.845312,5309.857792,0.0,4892.655616,4837.669376,s,10,1.8159718017578124,0.18159718017578125,0.0004328877022416853,0.18151288604736326,0.1820882095336914,0.18225849075317382,0.18239471572875976,"[0.1809141387939453, 0.18121597290039063, 0.1814171142578125, 0.1812464599609375, 0.18155850219726563, 0.18146726989746093, 0.1816407928466797, 0.18242877197265625, 0.18203240966796874, 0.1820503692626953]",tokens/s,1409.7135195172018,kWh,5.323411342348636e-06,5.870750273224289e-07,3.516093721963396e-06,9.426580091634461e-06,tokens/kWh,27157250.828132786,MB,2157.498368,5477.629952,0.0,5060.427776,5014.227968,s,10,15.907208740234374,1.5907208740234373,0.0014287310827142909,1.5904212646484375,1.592446325683594,1.5929980041503906,1.5934393469238282,"[1.5904366455078125, 1.5901727294921875, 1.5904058837890624, 1.588873779296875, 1.5897474365234374, 1.5935496826171875, 1.5888824462890625, 1.59232373046875, 1.590826416015625, 1.591989990234375]",tokens/s,39.604685541501084,kWh,4.919065063765526e-05,5.425514434583151e-06,3.2807518670239434e-05,8.742368374247784e-05,tokens/kWh,720628.5219641145,,s,630,15.904182754516606,0.025244734530978735,0.0002521348008896871,0.025230415344238283,0.025554061698913576,0.02562706956863403,0.02580793775558472,"[0.025482976913452148, 0.025077856063842774, 0.024987199783325195, 0.024844928741455077, 0.024714656829833984, 0.025843744277954103, 0.02469536018371582, 0.02472256088256836, 0.02476063919067383, 0.02501206398010254, 0.024976032257080078, 0.02500204849243164, 0.025004032135009766, 0.025109888076782227, 0.0251331844329834, 0.025141759872436522, 0.025059328079223633, 0.025067520141601563, 0.02507366371154785, 0.025070655822753907, 0.025012672424316405, 0.025206464767456055, 0.02519500732421875, 0.025221439361572267, 0.02518764877319336, 0.02513580894470215, 0.02516099166870117, 0.025176799774169922, 0.0251144962310791, 0.02527244758605957, 0.025260032653808592, 0.025174016952514647, 0.02513484764099121, 0.025141248703002928, 0.025157152175903322, 0.025148128509521483, 0.025099615097045898, 0.025339584350585937, 0.02538732719421387, 0.025385631561279296, 0.02533171272277832, 0.025311168670654298, 0.025305152893066406, 0.02531648063659668, 0.02530393600463867, 0.02526608085632324, 0.02549884796142578, 0.02537353515625, 0.025319456100463867, 0.025403392791748046, 0.025527776718139647, 0.025518239974975584, 0.025512319564819336, 0.02546073532104492, 0.02547711944580078, 0.025431968688964843, 
0.025378528594970702, 0.02550137519836426, 0.025614463806152343, 0.025596351623535157, 0.025714208602905273, 0.025709152221679688, 0.025587711334228515, 0.025292640686035157, 0.024971424102783205, 0.024938495635986328, 0.024805376052856445, 0.024778623580932618, 0.024937728881835937, 0.025027456283569335, 0.02498684883117676, 0.024942848205566408, 0.024988191604614258, 0.025351360321044923, 0.02497823905944824, 0.024983552932739257, 0.024993215560913086, 0.024977983474731444, 0.02489753532409668, 0.024885248184204102, 0.024895456314086913, 0.025218528747558595, 0.025216991424560548, 0.025366336822509765, 0.025033088684082033, 0.02500239944458008, 0.024993471145629883, 0.02496748733520508, 0.02496672058105469, 0.025219520568847655, 0.025251232147216796, 0.025286304473876954, 0.025205503463745116, 0.02515727996826172, 0.02516022491455078, 0.025161184310913087, 0.02511311912536621, 0.025219072341918947, 0.02522915267944336, 0.02527609634399414, 0.02527609634399414, 0.02546512031555176, 0.025495168685913085, 0.025443199157714844, 0.02534947204589844, 0.025335487365722657, 0.025415903091430665, 0.025475839614868163, 0.025537599563598634, 0.025562047958374023, 0.025484575271606445, 0.025334495544433594, 0.0253798713684082, 0.02552931213378906, 0.02553856086730957, 0.025483264923095703, 0.02548086357116699, 0.02546928024291992, 0.025490848541259766, 0.02543881607055664, 0.025434112548828124, 0.02555904006958008, 0.02553241539001465, 0.02550489616394043, 0.02560089683532715, 0.02557542419433594, 0.025468063354492188, 0.02500079917907715, 0.02502374458312988, 0.024976127624511717, 0.02491596794128418, 0.024852479934692383, 0.024975360870361327, 0.024979455947875977, 0.024987648010253907, 0.024891263961791994, 0.024894880294799804, 0.024922847747802734, 0.02501206398010254, 0.024981664657592773, 0.025126911163330077, 0.025126815795898438, 0.025127008438110353, 0.025025856018066405, 0.025053312301635742, 0.02511929512023926, 0.02531328010559082, 0.025046016693115233, 0.0250644474029541, 0.02505251121520996, 0.025258623123168945, 0.024931520462036134, 0.025319263458251952, 0.025033727645874023, 0.025115999221801757, 0.025170591354370116, 0.02515558433532715, 0.025132768630981444, 0.025131296157836915, 0.0251409912109375, 0.025400768280029296, 0.025518911361694336, 0.025503679275512694, 0.025294912338256835, 0.025257728576660157, 0.025360639572143556, 0.025339296340942383, 0.02523561668395996, 0.025385215759277345, 0.02537081527709961, 0.0253515510559082, 0.02535183906555176, 0.025357088088989257, 0.025319616317749025, 0.025389055252075195, 0.025464832305908205, 0.02549760055541992, 0.025470975875854493, 0.025436159133911132, 0.025405439376831054, 0.025403392791748046, 0.025453983306884767, 0.02555120086669922, 0.025647327423095702, 0.025704063415527344, 0.025686431884765625, 0.02555904006958008, 0.025425920486450194, 0.025413631439208984, 0.02527145576477051, 0.024894304275512695, 0.024864767074584963, 0.024877056121826172, 0.024880800247192383, 0.024797536849975585, 0.024958976745605467, 0.024991743087768553, 0.024995840072631836, 0.024942495346069335, 0.024825536727905273, 0.02494095993041992, 0.02494054412841797, 0.02493440055847168, 0.025014272689819338, 0.02507776069641113, 0.025071264266967774, 0.025071968078613283, 0.025022432327270507, 0.02504297637939453, 0.02518560028076172, 0.02504159927368164, 0.02493846321105957, 0.02506959915161133, 0.02517100715637207, 0.025117631912231445, 0.025016319274902343, 0.025241216659545897, 0.025216640472412108, 0.025479936599731447, 0.025161727905273438, 
0.0250666561126709, 0.025164640426635743, 0.025214975357055663, 0.02515328025817871, 0.025218591690063477, 0.025322080612182617, 0.025349952697753905, 0.025281919479370116, 0.02526470375061035, 0.0252542724609375, 0.025278303146362306, 0.02522947120666504, 0.02546601676940918, 0.025570144653320314, 0.02557855987548828, 0.025502656936645506, 0.025430015563964844, 0.025364479064941405, 0.025272319793701172, 0.025349632263183593, 0.02538947105407715, 0.025421920776367186, 0.025399295806884766, 0.02553036880493164, 0.025683967590332032, 0.02567987251281738, 0.025595008850097658, 0.025540639877319336, 0.025537343978881837, 0.025413312911987306, 0.025413984298706054, 0.025612287521362305, 0.02546659278869629, 0.025020288467407226, 0.024939008712768555, 0.024938880920410158, 0.025333791732788085, 0.024893440246582032, 0.024815616607666017, 0.024766271591186523, 0.024796703338623046, 0.024861343383789064, 0.024893440246582032, 0.02500982475280762, 0.0250412483215332, 0.024997535705566405, 0.024991424560546874, 0.02515011215209961, 0.0251146240234375, 0.02511052894592285, 0.024969215393066405, 0.02490777587890625, 0.025017343521118163, 0.025019392013549805, 0.025010175704956054, 0.0251342716217041, 0.025255775451660155, 0.02516067123413086, 0.02517523193359375, 0.02522604751586914, 0.025202688217163087, 0.025116672515869142, 0.025135103225708007, 0.025157632827758788, 0.02514739227294922, 0.025166879653930663, 0.025319520950317382, 0.025250688552856445, 0.025340959548950194, 0.02531427192687988, 0.025255935668945313, 0.025434112548828124, 0.02549350357055664, 0.025351903915405274, 0.025337343215942384, 0.025393632888793944, 0.02545631980895996, 0.025308832168579102, 0.025289695739746095, 0.025427520751953123, 0.025444799423217774, 0.02545814323425293, 0.025367071151733398, 0.025462656021118163, 0.025490751266479494, 0.025506624221801756, 0.025437503814697265, 0.025415552139282226, 0.025485952377319335, 0.025481407165527343, 0.025392608642578127, 0.025661983489990235, 0.02569215965270996, 0.025633792877197265, 0.025621503829956056, 0.025135103225708007, 0.025229215621948242, 0.025067039489746094, 0.02492678451538086, 0.024816864013671874, 0.024895872116088867, 0.024917984008789064, 0.024957376480102537, 0.024974655151367188, 0.02505081558227539, 0.024972288131713868, 0.024838144302368165, 0.02498883247375488, 0.02498031997680664, 0.024985599517822265, 0.024961023330688475, 0.024888896942138673, 0.025100383758544922, 0.02511702346801758, 0.025392736434936523, 0.025835935592651366, 0.025044992446899415, 0.025038240432739257, 0.02497529602050781, 0.025004703521728514, 0.025100288391113282, 0.02507980728149414, 0.025133056640625, 0.025231359481811523, 0.025219072341918947, 0.025169919967651368, 0.025092096328735353, 0.025169919967651368, 0.02527631950378418, 0.025316864013671874, 0.025279071807861327, 0.02531123161315918, 0.02533692741394043, 0.025332639694213867, 0.025251136779785157, 0.025256351470947267, 0.025342239379882812, 0.025409536361694338, 0.02532044792175293, 0.026850303649902343, 0.025411487579345703, 0.025455839157104494, 0.025553792953491212, 0.025577472686767577, 0.025547775268554687, 0.025739391326904296, 0.025530656814575194, 0.025535072326660156, 0.02550169563293457, 0.025423871994018556, 0.025629791259765625, 0.025611103057861326, 0.02553862380981445, 0.02567532730102539, 0.025682367324829102, 0.025636863708496094, 0.0255283203125, 0.0261079044342041, 0.025251295089721678, 0.025010623931884766, 0.024860511779785155, 0.02483839988708496, 0.024928224563598632, 0.024938528060913085, 
0.024977344512939453, 0.02496054458618164, 0.024916095733642576, 0.02504300880432129, 0.02504710388183594, 0.0250534725189209, 0.025044767379760743, 0.025005535125732423, 0.02494540786743164, 0.024975360870361327, 0.024991104125976563, 0.025070207595825195, 0.02510374450683594, 0.025104448318481444, 0.02514796829223633, 0.02512076759338379, 0.025078912734985352, 0.024963327407836914, 0.025049152374267577, 0.025112447738647462, 0.025202880859375, 0.025114528656005858, 0.025082080841064454, 0.025133440017700195, 0.025113824844360352, 0.025040735244750978, 0.025191360473632813, 0.025159679412841796, 0.025154815673828126, 0.025194847106933593, 0.025450847625732423, 0.025442079544067384, 0.02544041633605957, 0.025393184661865235, 0.02526755142211914, 0.025376640319824218, 0.02534480094909668, 0.025217119216918944, 0.025358335494995117, 0.025403392791748046, 0.02532352066040039, 0.025417728424072264, 0.02546246337890625, 0.02540729522705078, 0.025295295715332032, 0.02537055969238281, 0.02548748779296875, 0.02550579261779785, 0.025448383331298827, 0.02541526412963867, 0.025481184005737303, 0.025577280044555666, 0.025584320068359374, 0.02560550308227539, 0.025563135147094726, 0.025514623641967774, 0.025523296356201174, 0.025169088363647462, 0.02525472068786621, 0.02491939163208008, 0.024828575134277345, 0.024860511779785155, 0.024942495346069335, 0.024971519470214844, 0.024999935150146483, 0.024911392211914064, 0.024979936599731446, 0.024922111511230468, 0.024847423553466797, 0.024941503524780275, 0.025028255462646483, 0.02504870414733887, 0.025022592544555664, 0.024966815948486328, 0.02507257652282715, 0.02508799934387207, 0.025036800384521486, 0.025019968032836914, 0.025102399826049806, 0.02511267280578613, 0.025083648681640626, 0.025133600234985353, 0.025218303680419923, 0.025496320724487306, 0.02508799934387207, 0.025108480453491212, 0.025499296188354493, 0.025191904067993164, 0.0251278076171875, 0.025202688217163087, 0.025196544647216795, 0.025160736083984375, 0.025242591857910158, 0.025341951370239257, 0.025341951370239257, 0.025412960052490233, 0.02540336036682129, 0.025345727920532225, 0.025396223068237304, 0.025397247314453125, 0.025324735641479492, 0.02595680046081543, 0.025637247085571288, 0.02556723213195801, 0.025480384826660155, 0.025518911361694336, 0.025617759704589845, 0.02554323196411133, 0.025438304901123046, 0.025556480407714844, 0.025532928466796875, 0.025556991577148438, 0.025550079345703126, 0.02551612854003906, 0.025696928024291993, 0.025591808319091795, 0.02566511917114258, 0.025713056564331056, 0.025630720138549806, 0.0255283203125, 0.02552217674255371, 0.0252620792388916, 0.02490096092224121, 0.024849056243896484, 0.02487436866760254, 0.02496575927734375, 0.024969215393066405, 0.02495692825317383, 0.024987648010253907, 0.024904767990112306, 0.024888256072998046, 0.024816928863525392, 0.024941280364990236, 0.02503638458251953, 0.0250351676940918, 0.02505308723449707, 0.02508195114135742, 0.024999584197998047, 0.024981376647949218, 0.024979167938232422, 0.025213695526123046, 0.025257984161376954, 0.025173759460449217, 0.02508121681213379, 0.025080320358276367, 0.02506380844116211, 0.025177120208740234, 0.02514838409423828, 0.025204063415527344, 0.025211551666259765, 0.025228288650512694, 0.025175039291381835, 0.025235456466674806, 0.025271392822265624, 0.025318048477172853, 0.02526028823852539, 0.02523750305175781, 0.025384960174560548, 0.025358335494995117, 0.02530508804321289, 0.025376096725463867, 0.025377439498901366, 0.025640960693359374, 0.025225215911865235, 
0.025403392791748046, 0.0255283203125, 0.02548121643066406, 0.025358335494995117, 0.02535580825805664, 0.025356767654418945, 0.025374719619750977, 0.02532147216796875, 0.025392255783081054, 0.02551897621154785, 0.025643007278442383, 0.025617759704589845, 0.02557151985168457, 0.025577472686767577, 0.025553375244140624, 0.0256060791015625, 0.0255795841217041, 0.025623743057250976, 0.025674560546875, 0.025342687606811524, 0.02511824035644531, 0.024986240386962892, 0.024872703552246092, 0.024883455276489257, 0.0249234561920166, 0.024994304656982422, 0.02492844772338867, 0.024893440246582032, 0.024979455947875977, 0.024995840072631836, 0.025019775390625, 0.024969856262207032, 0.025004032135009766, 0.025026559829711914, 0.025064895629882813, 0.024983423233032227, 0.025041568756103517, 0.02508188819885254, 0.02512076759338379, 0.025108480453491212, 0.025024511337280272, 0.02507776069641113, 0.025200639724731445, 0.02508799934387207, 0.025091775894165037, 0.02511289596557617, 0.025116031646728515, 0.02504972839355469, 0.025308256149291993, 0.02548624038696289, 0.025371904373168944, 0.025240320205688477, 0.025285856246948242, 0.025246496200561522, 0.025269983291625976, 0.02523753547668457, 0.025342208862304687, 0.025440256118774415, 0.025472799301147462, 0.025394464492797853, 0.025373632431030274, 0.025411584854125976, 0.025391103744506836, 0.025333759307861328, 0.025341951370239257, 0.025397247314453125, 0.025453760147094728, 0.02547590446472168, 0.026191871643066408, 0.025390975952148436, 0.025468448638916015, 0.02528316879272461, 0.025500959396362304, 0.025498111724853514, 0.025453920364379882, 0.02615180778503418, 0.025391103744506836, 0.0254783992767334, 0.025512704849243163, 0.02561337661743164, 0.025709056854248048, 0.025704896926879883]",tokens/s,39.61222086819188,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = 
backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,872.648704,601.751552,0.0,199.22944,181.241344,s,1,7.62974609375,7.62974609375,0.0,7.62974609375,7.62974609375,7.62974609375,7.62974609375,[7.62974609375],,kWh,1.5839093170833013e-05,1.735011818051321e-06,4.55639253400808e-06,2.2130497522892415e-05,,MB,1345.232896,658.374656,0.0,241.17248,225.803264,s,13,0.2078934392929077,0.015991803022531365,0.0001516011805989282,0.01594268798828125,0.016125178146362305,0.016220512771606445,0.01633095100402832,"[0.015738271713256837, 0.01612848091125488, 0.016100799560546875, 0.015918496131896973, 0.01594268798828125, 0.01602694320678711, 0.01585247993469238, 0.015911999702453612, 0.01611196708679199, 0.01601443290710449, 0.01591158390045166, 0.01587673568725586, 0.01635856056213379]",tokens/s,16008.201179023616,kWh,3.7532006402102423e-07,4.139127851199241e-08,2.1282364926478213e-07,6.295349917977988e-07,tokens/kWh,406649357.5979411,MB,1385.091072,673.05472,0.0,255.852544,225.805824,s,13,10.247889221191407,0.7882991708608775,0.005680795175853167,0.7881617431640625,0.7948011596679687,0.7981488037109374,0.8018119384765625,"[0.7881617431640625, 0.7883017578125, 0.7817357788085938, 0.7851591186523438, 0.7893953857421875, 0.7831097412109375, 0.7844913940429687, 0.78282275390625, 0.7849459228515625, 0.79509619140625, 0.7883206787109375, 0.8027277221679687, 0.7936210327148437]",tokens/s,79.91889669400466,kWh,1.7405593295293662e-05,1.9193962539178246e-06,6.656185113910894e-06,2.5981174663122382e-05,tokens/kWh,2424832.6265795077,,s,819,10.23985273742676,0.012502872695270768,0.00030264183369250086,0.012435551643371581,0.012743321800231933,0.012822761726379393,0.013601171455383289,"[0.01198198413848877, 0.012350208282470703, 0.012357215881347656, 0.012525312423706055, 0.012591967582702637, 0.012636128425598145, 0.012521408081054687, 0.012443648338317872, 0.012475839614868164, 0.012512160301208495, 0.01245967960357666, 0.012568767547607423, 0.012465248107910156, 0.012393183708190918, 0.012388352394104005, 0.012414400100708007, 0.012417599678039551, 0.012373151779174805, 0.012397727966308593, 0.012549823760986328, 0.013082847595214844, 0.012875679969787598, 0.012486751556396485, 0.012463680267333984, 0.01237007999420166, 0.01239244842529297, 0.01235750389099121, 0.012382399559020997, 0.012383968353271484, 0.01239641571044922, 0.012435872077941895, 0.012693056106567383, 0.01236950397491455, 0.01252233600616455, 0.012355584144592285, 0.01243068790435791, 0.012581536293029785, 0.012406720161437988, 0.012914752006530762, 0.012416255950927735, 0.012354304313659668, 0.012879103660583495, 0.013402175903320313, 0.013077088356018066, 0.012494943618774413, 0.012539487838745117, 0.01256220817565918, 0.012573311805725098, 0.012411999702453613, 0.012369983673095703, 0.012523776054382324, 
0.012497504234313965, 0.01243785572052002, 0.012424863815307618, 0.012392607688903809, 0.012433247566223144, 0.012406975746154784, 0.012400575637817383, 0.012435327529907227, 0.012314623832702636, 0.012435456275939942, 0.012507136344909669, 0.01252137565612793, 0.01205020809173584, 0.012647520065307618, 0.01250387191772461, 0.01244371223449707, 0.012386528015136719, 0.012518783569335938, 0.01240550422668457, 0.012389472007751465, 0.012396991729736328, 0.012387999534606934, 0.012345888137817384, 0.012447584152221679, 0.012392319679260255, 0.012370304107666015, 0.012495743751525878, 0.01257369613647461, 0.012445695877075195, 0.012509471893310547, 0.012457183837890625, 0.012392992019653321, 0.012380127906799316, 0.012402688026428223, 0.012548095703125, 0.012506560325622558, 0.012390975952148437, 0.012438559532165527, 0.012380288124084473, 0.012352352142333984, 0.012331007957458496, 0.012430879592895508, 0.012313055992126464, 0.012360960006713868, 0.012410847663879395, 0.012399392127990723, 0.012389856338500977, 0.012607935905456544, 0.013056096076965331, 0.012377728462219238, 0.012519359588623047, 0.012484576225280762, 0.012462559700012208, 0.012428544044494628, 0.012460415840148925, 0.012402336120605468, 0.012370911598205567, 0.012379903793334961, 0.012404831886291504, 0.01258892822265625, 0.012490367889404297, 0.012485024452209472, 0.01246003246307373, 0.01238374423980713, 0.012538528442382813, 0.012413824081420898, 0.012609631538391113, 0.01248863983154297, 0.013198271751403808, 0.01374617576599121, 0.012725983619689941, 0.013637920379638672, 0.01246399974822998, 0.012441439628601074, 0.012467904090881348, 0.011947744369506836, 0.012431615829467774, 0.012388383865356446, 0.012335264205932617, 0.012379008293151856, 0.012375007629394532, 0.012504351615905763, 0.012352224349975585, 0.012310015678405761, 0.012316351890563964, 0.01232102394104004, 0.012603615760803223, 0.012357888221740722, 0.01238003158569336, 0.01244745635986328, 0.012798463821411133, 0.012468447685241699, 0.012482336044311523, 0.012364864349365234, 0.012431839942932128, 0.012329376220703125, 0.012426688194274902, 0.0123439359664917, 0.012463583946228027, 0.012333503723144531, 0.012527711868286133, 0.012285056114196778, 0.012400832176208496, 0.012364319801330566, 0.012406944274902344, 0.012470272064208985, 0.01243945598602295, 0.012404831886291504, 0.012349504470825196, 0.012328895568847656, 0.012364928245544433, 0.0122990083694458, 0.012333344459533692, 0.012348992347717285, 0.0123602876663208, 0.012390080451965333, 0.012373344421386719, 0.01239311981201172, 0.012290047645568849, 0.012354592323303222, 0.012340512275695801, 0.012369600296020508, 0.012363776206970215, 0.01237606430053711, 0.012375519752502442, 0.012403231620788574, 0.012656736373901366, 0.013035264015197754, 0.012396639823913574, 0.012400704383850098, 0.012448927879333497, 0.012395359992980958, 0.012395808219909668, 0.012421759605407715, 0.01232630443572998, 0.01235424041748047, 0.012347359657287598, 0.012360960006713868, 0.012040191650390625, 0.012465920448303223, 0.012430784225463867, 0.012524352073669434, 0.012565631866455079, 0.012648832321166993, 0.012497152328491211, 0.01239641571044922, 0.012401056289672852, 0.012384415626525879, 0.012385791778564453, 0.012456255912780762, 0.01258512020111084, 0.012426176071166993, 0.012389280319213868, 0.012421119689941406, 0.012419072151184082, 0.012377823829650879, 0.012541536331176759, 0.012391103744506835, 0.01236787223815918, 0.012369919776916503, 0.012416095733642578, 0.013433759689331054, 0.012470272064208985, 
0.012374015808105468, 0.012428959846496583, 0.012351840019226075, 0.012361536026000977, 0.012376511573791504, 0.012356512069702149, 0.012403072357177735, 0.012435935974121093, 0.012574720382690429, 0.012476415634155273, 0.012593152046203614, 0.012371968269348145, 0.01248799991607666, 0.012351615905761719, 0.012485183715820313, 0.012474176406860351, 0.012464320182800293, 0.012384384155273438, 0.012949376106262208, 0.01244985580444336, 0.012508416175842285, 0.0125632963180542, 0.012422464370727538, 0.012536191940307617, 0.012392607688903809, 0.01237600040435791, 0.012347455978393554, 0.012373215675354004, 0.012362367630004882, 0.012327072143554687, 0.012405920028686523, 0.012548959732055665, 0.012506848335266113, 0.01236201572418213, 0.012373408317565919, 0.01238096046447754, 0.012397919654846191, 0.012429408073425293, 0.01209062385559082, 0.012446240425109863, 0.012421343803405762, 0.012705151557922362, 0.013056639671325684, 0.01234227180480957, 0.012391424179077149, 0.012569952011108398, 0.012458975791931153, 0.012455615997314453, 0.012404735565185548, 0.012370047569274902, 0.012369536399841308, 0.012392224311828614, 0.01241254425048828, 0.012555007934570312, 0.012435551643371581, 0.012465600013732911, 0.012341407775878907, 0.012667296409606933, 0.01392198371887207, 0.014188672065734863, 0.012578047752380372, 0.012529664039611817, 0.012485055923461915, 0.01247929573059082, 0.013117312431335449, 0.012392255783081054, 0.012378111839294433, 0.012503264427185058, 0.012474143981933594, 0.012464320182800293, 0.012558239936828614, 0.012507040023803711, 0.012459168434143067, 0.012395135879516602, 0.01238588809967041, 0.0123439359664917, 0.012848544120788574, 0.012433535575866699, 0.012525856018066406, 0.012359871864318847, 0.012333056449890138, 0.012638208389282226, 0.012373344421386719, 0.012395327568054199, 0.012341216087341308, 0.01239452838897705, 0.012380000114440917, 0.012412927627563476, 0.01256595230102539, 0.012439488410949707, 0.012503680229187011, 0.01257203197479248, 0.012336031913757324, 0.01240617561340332, 0.01237980842590332, 0.012381055831909179, 0.01240236759185791, 0.012615679740905762, 0.012312671661376954, 0.012469951629638671, 0.012458304405212402, 0.012097023963928222, 0.012489695549011231, 0.012529664039611817, 0.012504927635192871, 0.012435615539550781, 0.012480511665344238, 0.012426912307739258, 0.012424991607666016, 0.012509056091308594, 0.012417599678039551, 0.012445183753967285, 0.012429951667785644, 0.01242521572113037, 0.012429311752319335, 0.012382080078125, 0.012406975746154784, 0.012472576141357421, 0.012529472351074218, 0.012404000282287598, 0.012425824165344239, 0.013084671974182128, 0.012445792198181153, 0.012339327812194824, 0.012392064094543458, 0.012490912437438965, 0.012369888305664063, 0.012341279983520508, 0.012411935806274414, 0.012477408409118653, 0.012443648338317872, 0.012394495964050293, 0.012337151527404786, 0.012380160331726075, 0.01251097583770752, 0.0123721923828125, 0.012469280242919921, 0.012345696449279785, 0.012425248146057128, 0.012378687858581544, 0.012384415626525879, 0.012384160041809082, 0.012359999656677246, 0.012330047607421875, 0.01236838436126709, 0.012391584396362305, 0.012348480224609375, 0.012387359619140625, 0.01234774398803711, 0.012601887702941895, 0.012492735862731934, 0.012468223571777343, 0.01241215991973877, 0.012388575553894042, 0.012420895576477051, 0.012456704139709472, 0.012404159545898437, 0.01236032009124756, 0.012312576293945313, 0.01242262363433838, 0.012316512107849122, 0.012348095893859863, 0.012390399932861328, 
0.01232425594329834, 0.011966464042663574, 0.012464127540588378, 0.012406592369079589, 0.01242950439453125, 0.01243558406829834, 0.012589119911193847, 0.01244745635986328, 0.012472415924072265, 0.012381919860839844, 0.012403264045715331, 0.01227952003479004, 0.012461855888366699, 0.01234556770324707, 0.012629471778869628, 0.01249728012084961, 0.01254319953918457, 0.012483872413635255, 0.012379615783691407, 0.01243391990661621, 0.012358783721923828, 0.012354111671447754, 0.012373503684997558, 0.012335616111755371, 0.012431488037109376, 0.01240447998046875, 0.012488191604614257, 0.012479104042053223, 0.01235267162322998, 0.012341535568237305, 0.0123472318649292, 0.012337663650512695, 0.012356224060058594, 0.012434207916259765, 0.012366880416870117, 0.01234886360168457, 0.012428704261779786, 0.01318179225921631, 0.012441439628601074, 0.012470687866210937, 0.012346240043640136, 0.012413920402526856, 0.012333056449890138, 0.012289600372314453, 0.012327615737915038, 0.012370847702026367, 0.012435808181762695, 0.012545791625976563, 0.012711968421936035, 0.012839551925659179, 0.012743807792663575, 0.012546431541442872, 0.012628704071044923, 0.012443455696105957, 0.012447168350219726, 0.012483200073242188, 0.012361984252929687, 0.012418815612792969, 0.01234124755859375, 0.012480511665344238, 0.012388511657714844, 0.012514240264892578, 0.012380352020263672, 0.012417695999145507, 0.012034175872802734, 0.012362624168395996, 0.012402624130249023, 0.012726271629333496, 0.01242521572113037, 0.012435392379760742, 0.012388511657714844, 0.01242841625213623, 0.01232975959777832, 0.012339200019836426, 0.012421279907226563, 0.01274454402923584, 0.012443231582641602, 0.012421536445617675, 0.012434720039367676, 0.012405471801757812, 0.012476448059082032, 0.012442655563354492, 0.012856063842773437, 0.012635968208312988, 0.012689791679382324, 0.012889920234680175, 0.012621376037597656, 0.012472543716430664, 0.012447584152221679, 0.012363327980041503, 0.01236620807647705, 0.012333024024963378, 0.0123155517578125, 0.012400480270385742, 0.012381504058837891, 0.01233897590637207, 0.01233199977874756, 0.012375935554504395, 0.012357760429382324, 0.012311391830444336, 0.012315008163452148, 0.012357695579528809, 0.012431936264038086, 0.012328895568847656, 0.012303584098815918, 0.012526432037353515, 0.012400992393493652, 0.012332703590393066, 0.012346464157104492, 0.012321696281433106, 0.0123571195602417, 0.012357695579528809, 0.0123788480758667, 0.012336864471435546, 0.012269344329833985, 0.012646592140197754, 0.012388447761535644, 0.012285887718200683, 0.012297727584838868, 0.012348256111145019, 0.01245353603363037, 0.012312543869018555, 0.01231868839263916, 0.012293184280395508, 0.012526592254638673, 0.01234329605102539, 0.01242031955718994, 0.012142335891723633, 0.012440064430236816, 0.012351327896118165, 0.012417183876037598, 0.012394399642944335, 0.01246009635925293, 0.01237564754486084, 0.01234716796875, 0.01252406406402588, 0.012423295974731446, 0.01239459228515625, 0.012359328269958496, 0.012367424011230468, 0.012454560279846191, 0.012345439910888672, 0.012435392379760742, 0.012482239723205566, 0.012555680274963378, 0.012452768325805665, 0.012326911926269531, 0.012380000114440917, 0.012437248229980468, 0.012560000419616699, 0.01244883155822754, 0.01254371166229248, 0.01242473602294922, 0.012425248146057128, 0.012514816284179688, 0.012465087890625, 0.012496607780456543, 0.012428863525390625, 0.012536255836486817, 0.01372755241394043, 0.012492704391479491, 0.012683327674865724, 0.012730879783630371, 
0.0124203519821167, 0.012413824081420898, 0.012405887603759765, 0.012423935890197755, 0.012609888076782226, 0.01234124755859375, 0.012311200141906738, 0.012355680465698243, 0.012358816146850585, 0.012406240463256836, 0.012311840057373047, 0.012346367835998535, 0.01235968017578125, 0.012593024253845215, 0.012540032386779785, 0.01235919952392578, 0.012388128280639649, 0.012427552223205567, 0.012376128196716308, 0.012362079620361328, 0.01237007999420166, 0.012600192070007324, 0.012446687698364258, 0.012355584144592285, 0.01238755226135254, 0.01234819221496582, 0.0123689603805542, 0.012066816329956055, 0.012415167808532714, 0.01240764808654785, 0.012814847946166993, 0.012362208366394043, 0.012438976287841796, 0.012398816108703614, 0.012359007835388184, 0.012412128448486328, 0.012456959724426269, 0.012380767822265625, 0.012363967895507812, 0.012351263999938964, 0.012412672042846679, 0.012567008018493652, 0.012586336135864257, 0.012580703735351563, 0.012563263893127442, 0.01274454402923584, 0.01285750389099121, 0.012555520057678222, 0.012511391639709473, 0.012446304321289063, 0.012460224151611328, 0.012483615875244141, 0.012436351776123046, 0.012629311561584472, 0.012546848297119141, 0.012430399894714355, 0.01234934425354004, 0.012349632263183594, 0.012316320419311523, 0.012376928329467773, 0.012458144187927246, 0.012589056015014649, 0.012431360244750977, 0.01236086368560791, 0.012566975593566895, 0.012355744361877441, 0.012536064147949218, 0.012334303855895996, 0.012551967620849609, 0.01245900821685791, 0.012705792427062988, 0.012416095733642578, 0.012432064056396484, 0.012496800422668456, 0.012446016311645507, 0.012439680099487305, 0.013960800170898437, 0.01851011276245117, 0.01386291217803955, 0.012566783905029296, 0.012526975631713868, 0.012537535667419433, 0.01245206356048584, 0.01247110366821289, 0.012453408241271972, 0.01243926429748535, 0.01234985637664795, 0.012412927627563476, 0.01235324764251709, 0.012560928344726562, 0.012049504280090332, 0.012434335708618164, 0.0124552001953125, 0.012558783531188965, 0.012468480110168456, 0.012455967903137207, 0.012430527687072753, 0.01250387191772461, 0.012515328407287597, 0.012404959678649902, 0.012535584449768067, 0.012417311668395997, 0.01250483226776123, 0.012506272315979004, 0.012605600357055664, 0.012694432258605956, 0.012780575752258301, 0.012594176292419433, 0.012662495613098144, 0.012453887939453125, 0.012578911781311035, 0.012451904296875, 0.012504735946655273, 0.012556480407714845, 0.012494239807128906, 0.012515935897827148, 0.012404352188110352, 0.012353664398193359, 0.012383551597595214, 0.012486783981323243, 0.012448575973510742, 0.01238755226135254, 0.012450592041015625, 0.013008128166198731, 0.012403136253356933, 0.012335424423217773, 0.012385600090026856, 0.012407232284545898, 0.012345600128173827, 0.01229759979248047, 0.012320927619934082, 0.0123721923828125, 0.012536064147949218, 0.012478464126586914, 0.01239247989654541, 0.01247555160522461, 0.012343680381774903, 0.012423359870910644, 0.012390656471252441, 0.01246003246307373, 0.012372096061706543, 0.012810111999511719, 0.013035039901733398, 0.012484064102172851, 0.012399104118347168, 0.012531455993652343, 0.012688128471374512, 0.012619168281555175, 0.012730208396911621, 0.01277340793609619, 0.012781791687011719, 0.012584959983825684, 0.01248044776916504, 0.012170656204223633, 0.012594079971313477, 0.012591296195983887, 0.012846495628356934, 0.01294927978515625, 0.012888799667358399, 0.012767231941223145, 0.012572064399719238, 0.012510848045349122, 0.012544992446899415, 
0.012613632202148438, 0.012512991905212403, 0.012462080001831055, 0.012531999588012696, 0.012526944160461425, 0.012632736206054688, 0.012545408248901367, 0.01249788761138916, 0.012649727821350097, 0.01250761604309082, 0.01267296028137207, 0.012690848350524902, 0.01270025634765625, 0.01269375991821289, 0.01278649616241455, 0.01271395206451416, 0.012806400299072266, 0.012778207778930665, 0.012811264038085938, 0.01280016040802002, 0.012729439735412598, 0.012756287574768066, 0.012756447792053222, 0.012760031700134277, 0.012775424003601075, 0.012928159713745117, 0.012810976028442382, 0.012744480133056641, 0.012701375961303711, 0.012744799613952636, 0.012743200302124023, 0.012785696029663085, 0.012672991752624511, 0.01276473617553711, 0.012771391868591308, 0.012790176391601562, 0.012785663604736328, 0.012726271629333496, 0.012793919563293458, 0.012732064247131348, 0.0127675199508667, 0.012904064178466797, 0.012864224433898926, 0.012707488059997558, 0.012781567573547363, 0.01276518440246582, 0.013930144309997559, 0.012804448127746582, 0.012775424003601075, 0.01277337646484375, 0.012769280433654785, 0.01275276756286621, 0.012785856246948241, 0.01240764808654785, 0.012931296348571777, 0.012926719665527343, 0.0128123197555542, 0.012771295547485352, 0.012758655548095703, 0.012631839752197265, 0.012459936141967774, 0.012958399772644043, 0.01264185619354248, 0.012540351867675782, 0.012685312271118163, 0.012688672065734864, 0.012628288269042968, 0.01282089614868164, 0.01264579200744629, 0.012569184303283692, 0.012548095703125, 0.012546175956726075, 0.012697471618652343, 0.012709888458251953, 0.012650208473205566, 0.012514911651611327, 0.012486656188964844, 0.012636863708496093, 0.012638208389282226, 0.012576416015625, 0.012582240104675292, 0.012741632461547851, 0.012640255928039551, 0.012716032028198243, 0.012645471572875976, 0.012681632041931153, 0.012702431678771973, 0.012621600151062012, 0.012693535804748536, 0.012718048095703125, 0.01269257640838623, 0.012671903610229492, 0.012639455795288086, 0.012585247993469238, 0.012549792289733887, 0.012604224205017089, 0.012625951766967774, 0.012533760070800782, 0.01241215991973877, 0.01236832046508789, 0.012371520042419433, 0.012569472312927247, 0.013086591720581055, 0.012331007957458496, 0.012343328475952148, 0.012349120140075684, 0.012310336112976075, 0.012265952110290527, 0.012406784057617188, 0.012350912094116212, 0.012379743576049805, 0.012403200149536133, 0.012489184379577637, 0.012383359909057617, 0.012323712348937988, 0.01234329605102539]",tokens/s,79.98161897451388,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = 
launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1060.069376,912.130048,0.0,509.607936,491.434496,s,1,8.07005810546875,8.07005810546875,0.0,8.07005810546875,8.07005810546875,8.07005810546875,8.07005810546875,[8.07005810546875],,kWh,2.5786774854153314e-05,2.8370980970060674e-06,8.215839906011713e-06,3.68397128571711e-05,,MB,1384.59136,1046.347776,0.0,629.1456,592.24832,s,10,0.33173187255859377,0.033173187255859375,0.0003151173575147231,0.03304300880432129,0.0334689956665039,0.033747505950927736,0.0339703141784668,"[0.034026016235351564, 0.0334071044921875, 0.03305481719970703, 0.03294736099243164, 0.033019775390625, 0.03303120040893555, 0.03298323059082031, 0.03316534423828125, 0.032916191101074216, 0.03318083190917969]",tokens/s,7717.075782484023,kWh,9.6787689624711e-07,1.0673981451579888e-07,5.570635435165666e-07,1.6316802542794755e-06,tokens/kWh,156893484.08095163,MB,1424.1792,1061.02784,0.0,643.825664,605.085696,s,10,18.10406884765625,1.8104068847656252,0.005429275622437041,1.8084608154296875,1.8154996826171874,1.8190633178710935,1.8219142260742187,"[1.814707763671875, 1.813886962890625, 1.807641357421875, 1.80636328125, 1.8037791748046874, 1.8055064697265626, 1.822626953125, 1.8134154052734375, 1.8068612060546876, 1.8092802734375]",tokens/s,34.79880712459617,kWh,5.2695541612919345e-05,5.811851654905267e-06,1.9231618954084674e-05,7.77390122219093e-05,tokens/kWh,810403.9168926387,,s,630,18.09740370750428,0.02872603763095916,0.0006436883955033683,0.028597968101501465,0.0290340030670166,0.029228936004638672,0.031079838600158723,"[0.028856319427490236, 0.02893824005126953, 0.028479007720947264, 0.028586143493652343, 0.02841971206665039, 0.028465856552124025, 0.02851020812988281, 0.028449087142944335, 0.028401344299316407, 0.028771392822265623, 0.028488576889038084, 0.028407871246337892, 0.028592159271240234, 0.02882671928405762, 0.030316991806030272, 0.028801471710205077, 0.028763359069824218, 0.028560319900512696, 0.02849500846862793, 0.028749759674072266, 0.02843942451477051, 0.028458240509033204, 0.02890220832824707, 0.02910927963256836, 0.028936416625976562, 0.02875062370300293, 0.02860540771484375, 0.02850918388366699, 0.028806976318359375, 0.02895427131652832, 0.02870694351196289, 0.02937875175476074, 0.028769920349121094, 0.028545440673828124, 0.02859187126159668, 0.028544479370117188, 0.0285579833984375, 0.028442752838134765, 0.02859212875366211, 0.028487680435180664, 0.02867795181274414, 0.031322111129760744, 0.028657855987548827, 0.028550912857055664, 0.028571903228759767, 0.02868412780761719, 0.028619935989379883, 0.028543264389038085, 0.029229440689086915, 0.030486688613891602, 0.02882374382019043, 0.028817407608032225, 0.02875775909423828, 
0.028606719970703125, 0.028665855407714845, 0.028600032806396485, 0.02874332809448242, 0.028514944076538085, 0.028858367919921874, 0.028786687850952147, 0.029605535507202147, 0.028895584106445313, 0.0290119686126709, 0.028492191314697265, 0.028786687850952147, 0.029297920227050783, 0.028754688262939452, 0.028588031768798827, 0.028531776428222657, 0.028425216674804688, 0.028570655822753907, 0.028579839706420897, 0.028507232666015625, 0.028591712951660155, 0.028532960891723632, 0.028528608322143555, 0.028614591598510743, 0.02866419219970703, 0.028537759780883788, 0.02852128028869629, 0.02895427131652832, 0.02862508773803711, 0.028614816665649415, 0.0286167049407959, 0.028546655654907226, 0.0286231689453125, 0.028558975219726564, 0.02851443290710449, 0.03210505676269531, 0.02871072006225586, 0.028539840698242187, 0.028455360412597656, 0.02854355239868164, 0.0285883846282959, 0.028683935165405273, 0.028665855407714845, 0.028709184646606444, 0.028611648559570314, 0.028572288513183594, 0.028859872817993164, 0.02940310478210449, 0.02883030319213867, 0.02852249526977539, 0.02858336067199707, 0.028471519470214843, 0.028618240356445314, 0.028523040771484376, 0.028449087142944335, 0.028666112899780275, 0.029159231185913084, 0.02853059196472168, 0.028598207473754883, 0.028635040283203125, 0.028538240432739257, 0.029078304290771486, 0.03334348678588867, 0.029075199127197266, 0.028958080291748046, 0.028519264221191408, 0.028645408630371093, 0.02855116844177246, 0.028591487884521486, 0.02845350456237793, 0.02842835235595703, 0.028464895248413086, 0.028963008880615235, 0.028922143936157226, 0.028877887725830078, 0.028854751586914064, 0.02922831916809082, 0.029172639846801757, 0.028635135650634767, 0.028528640747070313, 0.02910335922241211, 0.028804191589355467, 0.028663455963134767, 0.02847551918029785, 0.028489599227905272, 0.028597728729248047, 0.028437023162841798, 0.02840403175354004, 0.028638687133789063, 0.028877023696899415, 0.028462080001831053, 0.028516351699829103, 0.028480512619018555, 0.028475391387939454, 0.028521919250488283, 0.02850668716430664, 0.028467456817626954, 0.02842198371887207, 0.02839241600036621, 0.028479583740234377, 0.028477567672729492, 0.028427167892456053, 0.028493312835693358, 0.0285863037109375, 0.02842748832702637, 0.028468000411987306, 0.02840985679626465, 0.02854105567932129, 0.02838435173034668, 0.028392032623291017, 0.0284202880859375, 0.02870204734802246, 0.028815328598022463, 0.028792736053466796, 0.028801952362060547, 0.028638784408569335, 0.029481279373168946, 0.02904800033569336, 0.0289370231628418, 0.02878086471557617, 0.02878825569152832, 0.028639392852783205, 0.028690431594848635, 0.028819456100463867, 0.02894339179992676, 0.02866419219970703, 0.02849967956542969, 0.028511104583740236, 0.0285732479095459, 0.028774431228637695, 0.028610143661499023, 0.028805952072143554, 0.028639232635498047, 0.029879423141479493, 0.028610431671142578, 0.029089056015014648, 0.02823776054382324, 0.028576032638549805, 0.028370431900024414, 0.029018688201904296, 0.028543392181396485, 0.02860406494140625, 0.028636735916137697, 0.02847750473022461, 0.028418079376220703, 0.028477472305297853, 0.02863580894470215, 0.028435871124267577, 0.0284432315826416, 0.02914080047607422, 0.028592319488525392, 0.028729343414306642, 0.029081600189208984, 0.02874982452392578, 0.02858415985107422, 0.02853071975708008, 0.028413951873779295, 0.02857382392883301, 0.02859993553161621, 0.028516416549682618, 0.02848953628540039, 0.02851238441467285, 0.028520448684692383, 0.02854707145690918, 
0.02854252815246582, 0.028510175704956054, 0.028439008712768554, 0.028552223205566406, 0.028559680938720702, 0.028580511093139648, 0.02850201606750488, 0.028649471282958985, 0.02853215980529785, 0.02849385643005371, 0.028436895370483398, 0.028454303741455078, 0.028393184661865235, 0.028861440658569337, 0.029095039367675782, 0.029944192886352538, 0.029112831115722656, 0.02910207939147949, 0.02894438362121582, 0.028692384719848633, 0.02920479965209961, 0.02860598373413086, 0.028723455429077147, 0.028840160369873045, 0.028802976608276368, 0.028860416412353516, 0.02844086456298828, 0.028569183349609374, 0.028624191284179687, 0.02954310417175293, 0.028704767227172853, 0.028405023574829102, 0.028510047912597657, 0.028494016647338867, 0.02850681686401367, 0.028075904846191407, 0.028477184295654295, 0.02859657669067383, 0.028476160049438478, 0.02854185676574707, 0.028391424179077147, 0.028672159194946287, 0.02843836784362793, 0.0285614070892334, 0.028894432067871095, 0.0286046085357666, 0.0284835205078125, 0.028553407669067384, 0.028475872039794924, 0.02853068733215332, 0.028659711837768553, 0.02882476806640625, 0.028754751205444337, 0.028626943588256838, 0.029394176483154295, 0.028564287185668946, 0.02859017562866211, 0.028716896057128908, 0.02857164764404297, 0.028790143966674803, 0.028617055892944335, 0.028569120407104492, 0.028664575576782227, 0.028487104415893555, 0.028531007766723633, 0.02847932815551758, 0.028613407135009764, 0.028735456466674806, 0.028870304107666014, 0.028784543991088866, 0.028721408843994142, 0.028509504318237306, 0.028954944610595702, 0.028585664749145506, 0.028647968292236328, 0.028596223831176756, 0.02858393669128418, 0.02849897575378418, 0.028467519760131836, 0.028484512329101562, 0.028494720458984376, 0.028406784057617186, 0.02842166328430176, 0.028666208267211914, 0.02866111946105957, 0.02865545654296875, 0.028649824142456055, 0.028691232681274412, 0.02846067237854004, 0.029126688003540038, 0.028464384078979492, 0.028678815841674806, 0.028642847061157228, 0.028594751358032227, 0.028544384002685545, 0.02870457649230957, 0.02862115287780762, 0.028975584030151366, 0.028079263687133788, 0.028518367767333984, 0.028532672882080078, 0.028375936508178712, 0.028724319458007814, 0.028652639389038087, 0.028611967086791992, 0.028600992202758788, 0.028623008728027345, 0.0284932804107666, 0.028393760681152343, 0.02844870376586914, 0.028766239166259765, 0.02840928077697754, 0.028668447494506834, 0.028518655776977538, 0.02848863983154297, 0.02841881561279297, 0.028387104034423828, 0.02834454345703125, 0.028472448348999025, 0.028467296600341797, 0.028523359298706055, 0.028479488372802734, 0.028552928924560548, 0.02981507110595703, 0.028966848373413085, 0.02918729591369629, 0.02921558380126953, 0.029021568298339843, 0.028670879364013673, 0.028388320922851564, 0.028643295288085936, 0.02889193534851074, 0.02854707145690918, 0.028422143936157225, 0.028459007263183594, 0.029563936233520507, 0.028570592880249022, 0.02855673599243164, 0.028598848342895507, 0.02951116752624512, 0.030081695556640625, 0.02880905532836914, 0.02862054443359375, 0.028565759658813476, 0.028483680725097656, 0.028771520614624024, 0.028471935272216798, 0.02838947105407715, 0.02866387176513672, 0.028578880310058594, 0.0284671688079834, 0.028391839981079102, 0.028382848739624024, 0.02850099182128906, 0.028444543838500976, 0.028417152404785158, 0.02853772735595703, 0.028487680435180664, 0.029210687637329102, 0.02856153678894043, 0.02843219184875488, 0.02822867202758789, 0.02851081657409668, 0.028440832138061523, 
0.028648799896240234, 0.02884000015258789, 0.032350688934326174, 0.03720451354980469, 0.028966592788696288, 0.02878463935852051, 0.028500160217285155, 0.02855507278442383, 0.02927129554748535, 0.036795135498046874, 0.028704639434814452, 0.028563583374023437, 0.028524192810058593, 0.028432512283325197, 0.028729888916015626, 0.028370208740234375, 0.028618656158447265, 0.028557823181152343, 0.028493824005126952, 0.02836070442199707, 0.02836419105529785, 0.02833417510986328, 0.02842265510559082, 0.028377023696899414, 0.028469600677490235, 0.02839727973937988, 0.028318912506103515, 0.028973888397216797, 0.02837513542175293, 0.02842399978637695, 0.028672063827514648, 0.028659168243408202, 0.02872332763671875, 0.02881955146789551, 0.028620704650878907, 0.02862758445739746, 0.028728607177734376, 0.028852672576904298, 0.028604415893554686, 0.028687616348266602, 0.02861961555480957, 0.028766559600830077, 0.02875935935974121, 0.028619104385375977, 0.028536352157592773, 0.028799423217773436, 0.028618783950805665, 0.028513504028320313, 0.028660512924194335, 0.02861836814880371, 0.028596607208251953, 0.028525856018066405, 0.028564064025878907, 0.028489376068115236, 0.028471296310424804, 0.028481151580810545, 0.028527456283569334, 0.028624895095825196, 0.028653312683105468, 0.028612863540649413, 0.02836534309387207, 0.029236671447753906, 0.028820032119750975, 0.028762111663818358, 0.02875596809387207, 0.028906911849975587, 0.02890713691711426, 0.02875859260559082, 0.028809503555297853, 0.028790271759033204, 0.02887718391418457, 0.028944416046142576, 0.02898147201538086, 0.028909759521484377, 0.028700735092163084, 0.03034316825866699, 0.02883967971801758, 0.02865679931640625, 0.028867136001586913, 0.028555551528930665, 0.028534048080444335, 0.028531328201293945, 0.02868003273010254, 0.028532384872436523, 0.028632959365844726, 0.028816095352172853, 0.02870681571960449, 0.029063167572021483, 0.02895408058166504, 0.028705024719238283, 0.028846208572387694, 0.02906947135925293, 0.0289814395904541, 0.028725055694580077, 0.028752960205078126, 0.028576704025268556, 0.029212671279907225, 0.028643007278442382, 0.028350784301757814, 0.028661760330200195, 0.028722272872924805, 0.028638111114501954, 0.02841427230834961, 0.028572799682617188, 0.028801599502563478, 0.028504064559936523, 0.02871286392211914, 0.02903183937072754, 0.029122655868530273, 0.029053312301635742, 0.028897504806518554, 0.028497343063354493, 0.02870265579223633, 0.028639871597290038, 0.02917100715637207, 0.02869219207763672, 0.02862998390197754, 0.028638879776000978, 0.028527967453002928, 0.028476415634155275, 0.02855116844177246, 0.028565536499023436, 0.028450143814086913, 0.028219392776489258, 0.028499807357788086, 0.028700864791870118, 0.028669919967651367, 0.028646400451660156, 0.028496896743774414, 0.02884988784790039, 0.02912838363647461, 0.028910335540771485, 0.02847327995300293, 0.028565343856811524, 0.028591455459594725, 0.028579647064208985, 0.02856643295288086, 0.028630239486694336, 0.028680992126464844, 0.02849990463256836, 0.02843244743347168, 0.028931520462036134, 0.029557376861572265, 0.029308544158935548, 0.02874729537963867, 0.028500320434570313, 0.030367807388305666, 0.028555648803710938, 0.028624895095825196, 0.028525856018066405, 0.029020896911621095, 0.028692447662353515, 0.029439552307128906, 0.028895391464233398, 0.028693952560424806, 0.02862499237060547, 0.02852739143371582, 0.028457056045532225, 0.028593887329101564, 0.02863942337036133, 0.028669952392578125, 0.028651935577392578, 0.028788320541381834, 
0.02854537582397461, 0.02917340850830078, 0.02849184036254883, 0.028513696670532225, 0.02840630340576172, 0.028564863204956055, 0.028762271881103516, 0.028532575607299805, 0.02860723114013672, 0.028706687927246094, 0.028406015396118166, 0.02838844871520996, 0.02842822456359863, 0.0284150390625, 0.0283787841796875, 0.02865100860595703, 0.028590719223022462, 0.02850931167602539, 0.02840447998046875, 0.028360799789428712, 0.02850966453552246, 0.028428735733032225, 0.028398752212524414, 0.028092096328735352, 0.02849065589904785, 0.028409664154052734, 0.028385568618774414, 0.02866713523864746, 0.02909257507324219, 0.02880512046813965, 0.02876367950439453, 0.029028863906860353, 0.02881328010559082, 0.028620800018310546, 0.02840118408203125, 0.02909811210632324, 0.028530431747436524, 0.028557504653930664, 0.02832633590698242, 0.02849168014526367, 0.02853455924987793, 0.028504671096801756, 0.028390304565429687, 0.02993414306640625, 0.028532991409301756, 0.028372896194458007, 0.028411680221557618, 0.028553375244140623, 0.028636831283569336, 0.028758527755737305, 0.02866486358642578, 0.028744287490844726, 0.028752256393432617, 0.028458784103393555, 0.028604000091552735, 0.028438272476196288, 0.028661663055419923, 0.03275465774536133, 0.029657087326049804, 0.029032447814941405, 0.02854707145690918, 0.02852454376220703, 0.028593215942382812, 0.02850422477722168, 0.02878544044494629, 0.02856755256652832, 0.02857516860961914, 0.028592319488525392, 0.028479616165161134, 0.02849363136291504, 0.02917344093322754, 0.028527360916137695, 0.02835251235961914, 0.028588031768798827, 0.028473344802856446, 0.028642751693725585, 0.028612863540649413, 0.02856118392944336, 0.028755840301513673, 0.028474016189575194, 0.028447839736938478, 0.02845372772216797, 0.02848092842102051, 0.029218463897705077, 0.028666879653930662, 0.028564544677734376]",tokens/s,34.81162326830142,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1584.603136,1604.190208,0.0,1201.668096,1189.151232,s,1,8.3048056640625,8.3048056640625,0.0,8.3048056640625,8.3048056640625,8.3048056640625,8.3048056640625,[8.3048056640625],,kWh,3.692426027498641e-05,4.0654078609166135e-06,1.1942787332006044e-05,5.293245546790907e-05,,MB,1615.224832,1799.225344,0.0,1382.023168,1351.367168,s,10,0.49314117813110353,0.04931411781311035,0.0005387807762566702,0.049204910278320316,0.049985800933837886,0.05012710914611816,0.05024015571594238,"[0.04842873764038086, 0.04926041412353516, 0.04927507019042969, 0.049923168182373044, 0.049954399108886716, 0.04914940643310547, 0.04890201568603516, 0.04907807922363281, 0.048901470184326175, 
0.05026841735839844]",tokens/s,5191.21118561187,kWh,1.4354283868464054e-06,1.5830207674527977e-07,9.477077298430566e-07,2.5414381934347417e-06,tokens/kWh,100730366.23960437,MB,1621.979136,1841.168384,0.0,1423.966208,1407.328256,s,10,18.014220581054687,1.8014220581054687,0.11444368025304653,1.820716064453125,1.8762670776367187,1.8825643859863281,1.8876022326660156,"[1.4675819091796876, 1.8641611328125, 1.8888616943359375, 1.87486767578125, 1.838138671875, 1.818370361328125, 1.82248828125, 1.8098170166015626, 1.81894384765625, 1.810989990234375]",tokens/s,34.97237069821175,kWh,4.036273550898625e-05,4.451640172260774e-06,1.8804446960756822e-05,6.361882264200383e-05,tokens/kWh,990272.9629957776,,s,630,18.008997428894048,0.028585710204593717,0.001989006076888536,0.02893305587768555,0.030049209213256836,0.030249720191955567,0.030651383075714113,"[0.02265011215209961, 0.022601760864257813, 0.02278873634338379, 0.02324643135070801, 0.022796159744262696, 0.022604671478271485, 0.02279408073425293, 0.022865215301513673, 0.022794336318969727, 0.022706687927246092, 0.02291916847229004, 0.02286796760559082, 0.02290278434753418, 0.02261142349243164, 0.02266374397277832, 0.023689184188842773, 0.02314854431152344, 0.02401840019226074, 0.022813024520874022, 0.022729183197021486, 0.0224105281829834, 0.022411712646484373, 0.022675455093383787, 0.022388608932495117, 0.022132287979125975, 0.02227872085571289, 0.022597631454467772, 0.02227516746520996, 0.022006752014160157, 0.022217920303344726, 0.02214374351501465, 0.022173696517944336, 0.022240320205688478, 0.022150079727172853, 0.02214067268371582, 0.021976543426513673, 0.02209071922302246, 0.02183353614807129, 0.021964479446411132, 0.022126911163330078, 0.021864288330078124, 0.022024351119995116, 0.022040672302246093, 0.022219776153564453, 0.022071712493896483, 0.022347776412963868, 0.022342144012451173, 0.02226585578918457, 0.022329343795776366, 0.021989280700683594, 0.022024160385131837, 0.021956735610961915, 0.021960704803466798, 0.02208355140686035, 0.02402921676635742, 0.02899279975891113, 0.02926665687561035, 0.02896294403076172, 0.02873740768432617, 0.02873916816711426, 0.028931583404541016, 0.02870710372924805, 0.028756607055664064, 0.028702816009521483, 0.02884329605102539, 0.028699359893798827, 0.0313666877746582, 0.029974016189575195, 0.029049407958984374, 0.0288734073638916, 0.028779455184936523, 0.029757728576660158, 0.028756479263305663, 0.028827775955200197, 0.028634016036987304, 0.02878121566772461, 0.02930713653564453, 0.02882975959777832, 0.02872319984436035, 0.028680416107177736, 0.02900559997558594, 0.029061119079589845, 0.02915328025817871, 0.029276159286499022, 0.029212127685546874, 0.029297183990478516, 0.029329408645629884, 0.029378559112548826, 0.029393056869506835, 0.029712223052978517, 0.03275775909423828, 0.029708288192749024, 0.03022643280029297, 0.03021824073791504, 0.03008665657043457, 0.030023328781127928, 0.030151647567749025, 0.030216064453125, 0.030088415145874025, 0.030407455444335936, 0.030228479385375977, 0.030354463577270507, 0.030049087524414063, 0.02994764709472656, 0.029981088638305665, 0.02988764762878418, 0.030133087158203124, 0.02996428871154785, 0.030050304412841795, 0.02998271942138672, 0.029898591995239258, 0.0294169921875, 0.029592191696166992, 0.02934783935546875, 0.029163520812988283, 0.029278207778930664, 0.030191104888916017, 0.02930956840515137, 0.029383871078491212, 0.029211328506469725, 0.029717632293701172, 0.02951603126525879, 0.029495296478271486, 0.0294017276763916, 0.029470975875854493, 
0.029283967971801758, 0.02921945571899414, 0.029323135375976563, 0.029362464904785158, 0.02957107162475586, 0.029535871505737304, 0.029561216354370118, 0.02958336067199707, 0.030336959838867188, 0.02967558479309082, 0.03028201675415039, 0.030117055892944337, 0.030258720397949218, 0.030116640090942382, 0.030433727264404298, 0.030367519378662108, 0.030657535552978517, 0.030284799575805665, 0.030311744689941408, 0.030104288101196287, 0.030130144119262695, 0.030045696258544922, 0.029970943450927736, 0.030238719940185548, 0.029953983306884764, 0.029767391204833984, 0.029686111450195313, 0.030154304504394533, 0.029790304183959962, 0.029698400497436522, 0.029603296279907227, 0.029703840255737305, 0.02941222381591797, 0.030410751342773438, 0.029454336166381836, 0.029490304946899415, 0.02926595115661621, 0.029725568771362305, 0.02980246353149414, 0.02966752052307129, 0.02985148811340332, 0.030590944290161133, 0.030535680770874023, 0.02961408042907715, 0.029884544372558594, 0.029750688552856445, 0.029470592498779296, 0.0300795841217041, 0.029898752212524415, 0.02996019172668457, 0.029609983444213867, 0.02996019172668457, 0.030009248733520507, 0.03013033676147461, 0.03028780746459961, 0.030302463531494142, 0.03049616050720215, 0.03007535934448242, 0.030530656814575195, 0.030101696014404298, 0.0301712646484375, 0.030861087799072266, 0.030296031951904296, 0.030669536590576172, 0.029436159133911132, 0.029691648483276368, 0.029728639602661134, 0.030183551788330078, 0.02944233512878418, 0.029394752502441408, 0.029543968200683595, 0.029551231384277343, 0.02941961669921875, 0.029433504104614257, 0.029239295959472656, 0.02958758354187012, 0.030535551071166993, 0.030160287857055663, 0.029657472610473634, 0.02963657569885254, 0.02969011116027832, 0.029447839736938475, 0.029644832611083985, 0.030266752243041994, 0.02953926467895508, 0.029634559631347656, 0.029691904067993165, 0.02966691207885742, 0.03018364715576172, 0.030021408081054687, 0.029940128326416016, 0.030007295608520508, 0.03006233596801758, 0.03063632011413574, 0.030176416397094726, 0.03014956855773926, 0.030044000625610353, 0.029988895416259764, 0.030583967208862306, 0.02975984001159668, 0.029727231979370116, 0.02964630317687988, 0.029564512252807616, 0.02964371109008789, 0.02948841667175293, 0.029952543258666992, 0.02999519920349121, 0.029845855712890626, 0.02973654365539551, 0.029499616622924805, 0.029535903930664062, 0.029583616256713866, 0.02944812774658203, 0.029505344390869142, 0.029411327362060546, 0.02928223991394043, 0.02961408042907715, 0.029554943084716796, 0.02939084815979004, 0.029454336166381836, 0.029499391555786132, 0.029768863677978517, 0.02990937614440918, 0.029728256225585937, 0.02978096008300781, 0.029773120880126954, 0.03001532745361328, 0.029785663604736327, 0.030091712951660157, 0.030108768463134764, 0.030458784103393553, 0.030066688537597655, 0.029910400390625, 0.02981337547302246, 0.03001491165161133, 0.029626943588256835, 0.02965888023376465, 0.02953446388244629, 0.02954854393005371, 0.029551776885986328, 0.029563743591308592, 0.029583583831787108, 0.029300512313842772, 0.02920857620239258, 0.029388799667358398, 0.02919785690307617, 0.029178335189819337, 0.029243392944335936, 0.02910207939147949, 0.02907939147949219, 0.029062463760375978, 0.029063711166381834, 0.029002304077148437, 0.02901750373840332, 0.02919590377807617, 0.028934879302978514, 0.02894438362121582, 0.028814464569091796, 0.028683135986328125, 0.028717056274414062, 0.02935398483276367, 0.028945760726928713, 0.028723583221435547, 0.02885251235961914, 
0.029390752792358397, 0.02856150436401367, 0.02856345558166504, 0.028631103515625, 0.02867807960510254, 0.02902835273742676, 0.028733440399169922, 0.02898739242553711, 0.02866579246520996, 0.028565248489379882, 0.028601055145263673, 0.028699583053588867, 0.028488351821899415, 0.02856483268737793, 0.028652351379394533, 0.02865135955810547, 0.028548831939697265, 0.028698720932006837, 0.029235328674316406, 0.029184064865112304, 0.029401151657104493, 0.02944607925415039, 0.029220863342285155, 0.029212415695190428, 0.02949760055541992, 0.029308063507080078, 0.028859264373779298, 0.02896940803527832, 0.028927711486816405, 0.02942630386352539, 0.029124607086181642, 0.02888672065734863, 0.0322808952331543, 0.028999679565429686, 0.028763200759887694, 0.0284902400970459, 0.02838547134399414, 0.02902412796020508, 0.02868467140197754, 0.028614112854003906, 0.028731136322021483, 0.028789695739746095, 0.028736799240112305, 0.028703264236450195, 0.028667552947998047, 0.028677919387817382, 0.02856188774108887, 0.02874166488647461, 0.02877663993835449, 0.028834911346435548, 0.029218784332275392, 0.029223615646362305, 0.029008031845092774, 0.0290645751953125, 0.029147775650024414, 0.02900979232788086, 0.02891983985900879, 0.02882147216796875, 0.028792287826538084, 0.02839414405822754, 0.028315135955810547, 0.02857187271118164, 0.02867363166809082, 0.028728256225585936, 0.029321151733398436, 0.02898841667175293, 0.029159679412841796, 0.030002912521362304, 0.029348352432250976, 0.028584287643432616, 0.028476991653442384, 0.028465599060058595, 0.028344352722167968, 0.02858799934387207, 0.02885840034484863, 0.029418943405151367, 0.028586528778076173, 0.028596223831176756, 0.02858559989929199, 0.028582271575927735, 0.02854832077026367, 0.028773151397705077, 0.028477439880371092, 0.028374528884887694, 0.028534879684448244, 0.0286429443359375, 0.028402271270751952, 0.028512319564819335, 0.029301887512207032, 0.028208927154541017, 0.0287457275390625, 0.02876416015625, 0.02846886444091797, 0.028389280319213867, 0.028506591796875, 0.02849715232849121, 0.02853494453430176, 0.028893184661865235, 0.02878838348388672, 0.028470207214355468, 0.02842624092102051, 0.02831167984008789, 0.02828620719909668, 0.02830761528015137, 0.02853731155395508, 0.028827423095703124, 0.028889312744140624, 0.02933945655822754, 0.02909382438659668, 0.029112447738647462, 0.028971136093139647, 0.029198112487792968, 0.02917398452758789, 0.029155231475830077, 0.02914518356323242, 0.029970495223999024, 0.029388511657714844, 0.029081823348999024, 0.029337600708007814, 0.029509632110595704, 0.029061151504516602, 0.02919215965270996, 0.029019968032836914, 0.028983007431030272, 0.028994016647338867, 0.029440031051635743, 0.02901968002319336, 0.02884886360168457, 0.02903830337524414, 0.02898739242553711, 0.029056896209716798, 0.02894041633605957, 0.030424863815307616, 0.030468544006347655, 0.029101856231689455, 0.029020320892333983, 0.028906463623046875, 0.028812416076660158, 0.028796672821044922, 0.028665727615356445, 0.0287642879486084, 0.028881919860839843, 0.02902876853942871, 0.029014623641967774, 0.02874367904663086, 0.02865974426269531, 0.02863920021057129, 0.028684288024902343, 0.028661535263061522, 0.028733055114746095, 0.028578079223632813, 0.028625215530395508, 0.028395168304443358, 0.02859791946411133, 0.02854707145690918, 0.028635200500488282, 0.028476255416870117, 0.02845414352416992, 0.028679935455322266, 0.028642303466796876, 0.028559263229370118, 0.028524511337280272, 0.02853696060180664, 0.028583744049072265, 0.028649375915527343, 
0.028542751312255858, 0.02924345588684082, 0.028753440856933595, 0.028666528701782226, 0.028681631088256835, 0.02864828872680664, 0.028548831939697265, 0.02852236747741699, 0.028574111938476563, 0.028579551696777342, 0.02864566421508789, 0.028846368789672852, 0.028739295959472656, 0.028734464645385743, 0.02864963150024414, 0.028618976593017577, 0.028568511962890626, 0.02857542419433594, 0.02859212875366211, 0.02871023941040039, 0.028701343536376954, 0.02885856056213379, 0.028630847930908202, 0.02858393669128418, 0.02854934310913086, 0.02858576011657715, 0.02858982467651367, 0.028817087173461913, 0.028861152648925782, 0.028538719177246093, 0.028497919082641602, 0.028520448684692383, 0.02853887939453125, 0.028721151351928712, 0.02899760055541992, 0.028827680587768554, 0.029150239944458006, 0.028857215881347657, 0.028917856216430664, 0.0287457275390625, 0.028717056274414062, 0.028626943588256838, 0.028628959655761718, 0.02843177604675293, 0.028486303329467774, 0.028757984161376954, 0.028807008743286133, 0.029018272399902345, 0.03161894416809082, 0.029021568298339843, 0.028649248123168946, 0.02859657669067383, 0.028634336471557616, 0.028856800079345702, 0.028518592834472656, 0.02879692840576172, 0.028741823196411134, 0.028794143676757814, 0.028936128616333007, 0.028842592239379884, 0.02878463935852051, 0.02910771179199219, 0.02888502311706543, 0.028842464447021484, 0.029083648681640626, 0.029490591049194336, 0.028951135635375977, 0.028903423309326173, 0.02915123176574707, 0.029142431259155274, 0.030067615509033203, 0.029081247329711915, 0.028972288131713868, 0.029000480651855468, 0.028876447677612306, 0.02907935905456543, 0.02896067237854004, 0.02887295913696289, 0.028934528350830078, 0.028911615371704103, 0.029865856170654296, 0.02911177635192871, 0.02917558479309082, 0.028986240386962892, 0.029108224868774416, 0.02903654479980469, 0.0285631046295166, 0.028452863693237306, 0.028387903213500976, 0.02973673629760742, 0.02873139190673828, 0.028848127365112306, 0.028628992080688476, 0.028366847991943358, 0.02836070442199707, 0.028512256622314453, 0.02880512046813965, 0.028391103744506836, 0.02835852813720703, 0.02853638458251953, 0.028625791549682617, 0.028661792755126952, 0.029001792907714843, 0.028892063140869142, 0.02899398422241211, 0.02918252754211426, 0.028846080780029298, 0.028659711837768553, 0.028686336517333984, 0.028460607528686524, 0.028757984161376954, 0.028587583541870118, 0.028777376174926757, 0.03031644821166992, 0.029130655288696287, 0.028821599960327147, 0.02883635139465332, 0.02851820755004883, 0.028452287673950194, 0.02835772705078125, 0.028337024688720704, 0.028308256149291992, 0.02836275291442871, 0.02840166473388672, 0.02843235206604004, 0.02853891181945801, 0.028358655929565428, 0.028435583114624022, 0.02837388801574707, 0.028740991592407228, 0.028430976867675782, 0.028553216934204102, 0.02851020812988281, 0.028499967575073244, 0.028575872421264647, 0.0288275203704834, 0.02842732810974121, 0.02841619110107422, 0.029102848052978515, 0.029045888900756836, 0.028642175674438476, 0.028753919601440428, 0.02879852867126465, 0.028707584381103515, 0.028434112548828126, 0.02845635223388672, 0.02842508888244629, 0.028716768264770508, 0.02868400001525879, 0.028725536346435546, 0.029018272399902345, 0.02913865661621094, 0.02923734474182129, 0.028819583892822267, 0.028831872940063476, 0.028516128540039064, 0.02855311965942383, 0.02846691131591797, 0.028604799270629883, 0.02860188865661621, 0.028758079528808593, 0.029149343490600586, 0.028812768936157227, 0.02873321533203125, 
0.029248384475708007, 0.02875539207458496, 0.028758880615234374, 0.028778335571289063, 0.028892799377441405, 0.02891404724121094, 0.028647424697875977, 0.029398752212524415, 0.028539007186889648, 0.029167295455932617, 0.02894428825378418, 0.029682432174682617]",tokens/s,34.98251374000497,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,4296.548352,4886.233088,0.0,4483.710976,4465.672704,s,1,10.7379111328125,10.7379111328125,0.0,10.7379111328125,10.7379111328125,10.7379111328125,10.7379111328125,[10.7379111328125],,kWh,0.00010794066158750865,1.1899220575926714e-05,3.3526693487995196e-05,0.00015336657565143057,,MB,2153.914368,5309.857792,0.0,4892.655616,4841.339904,s,10,1.9641685333251955,0.19641685333251954,0.00045467831309295995,0.1965788803100586,0.1968275390625,0.19697163848876953,0.19708691802978515,"[0.19561196899414063, 0.19575485229492187, 0.19661724853515625, 0.1961144256591797, 0.19654051208496093, 0.19622055053710938, 0.19679551696777345, 0.19711573791503906, 0.1967484130859375, 0.19664930725097657]",tokens/s,1303.3504796383766,kWh,5.775228835375177e-06,6.369021732562968e-07,3.8252427116076765e-06,1.023737372023915e-05,tokens/kWh,25006413.46069954,MB,2159.026176,5477.629952,0.0,5060.427776,5012.931584,s,10,18.77374719238281,1.877374719238281,0.004906811295751002,1.8758689575195313,1.88391943359375,1.8859052124023437,1.8874938354492186,"[1.8726434326171875, 1.8878909912109374, 1.8749144287109376, 1.876823486328125, 1.8713726806640625, 1.8744427490234374, 1.8804154052734374, 1.8775953369140626, 1.8834781494140624, 1.8741705322265625]",tokens/s,33.55749885966366,kWh,7.074144245045491e-05,7.802359644343153e-06,4.5506157864592e-05,0.00012404995995939005,tokens/kWh,507859.89790423284,,s,630,18.770692888259887,0.029794750616285535,0.00037116582118075714,0.029753759384155275,0.030097993278503418,0.03028514232635498,0.03113627298355103,"[0.030451520919799805, 0.030077024459838866, 0.02974224090576172, 0.02972271919250488, 0.029774751663208008, 0.029726463317871092, 0.029574880599975584, 0.029427263259887697, 0.02961916732788086, 0.02982268714904785, 0.02953411293029785, 0.02962371253967285, 0.029535200119018553, 0.029655040740966795, 0.029577215194702147, 0.02956883239746094, 0.029625984191894533, 0.029628992080688477, 0.029456384658813478, 0.029851648330688478, 0.029808639526367187, 0.029652992248535157, 0.029505151748657227, 0.02956326484680176, 0.029564416885375977, 0.029444608688354492, 0.02944371223449707, 0.029444480895996095, 0.029538272857666015, 0.029595680236816406, 0.02983065605163574, 0.029809312820434572, 0.02974857521057129, 0.029651456832885743, 0.029663135528564453, 0.029526111602783203, 0.02977382469177246, 0.029940927505493164, 0.0297357120513916, 0.02964406394958496, 0.029512447357177736, 0.029896703720092774, 0.02977164840698242, 0.02992140769958496, 0.029978624343872072, 0.029702144622802733, 0.029655040740966795, 0.029480960845947264, 0.029708288192749024, 0.02981068801879883, 0.029728096008300783, 0.029713056564331056, 0.029667327880859375, 0.029861312866210937, 0.0300118408203125, 0.030111007690429688, 
0.0299036808013916, 0.02976361656188965, 0.02967523193359375, 0.0298621768951416, 0.030006784439086914, 0.02992793655395508, 0.02983024024963379, 0.030214879989624025, 0.030154176712036133, 0.03013222312927246, 0.030087039947509765, 0.029792959213256837, 0.029921279907226563, 0.029576416015625, 0.02973161506652832, 0.029823999404907226, 0.030225408554077147, 0.0306845760345459, 0.02991551971435547, 0.0301592960357666, 0.030307775497436525, 0.030259552001953124, 0.029855743408203125, 0.02994790458679199, 0.02999295997619629, 0.03094438362121582, 0.029907840728759766, 0.029861888885498046, 0.029650495529174804, 0.02968783950805664, 0.02969171142578125, 0.029614208221435546, 0.03096419143676758, 0.029658271789550782, 0.029588319778442382, 0.030035968780517577, 0.029874176025390626, 0.029908607482910157, 0.02983318328857422, 0.029792671203613282, 0.030027776718139648, 0.03065353584289551, 0.029746080398559572, 0.029972032546997072, 0.029847999572753907, 0.02993561553955078, 0.02995814323425293, 0.029929279327392578, 0.02979158401489258, 0.029772640228271485, 0.0299683837890625, 0.029996639251708986, 0.02986844825744629, 0.029827199935913085, 0.02984284782409668, 0.02985526466369629, 0.029881280899047853, 0.029945600509643556, 0.030030080795288086, 0.029839359283447265, 0.029903968811035155, 0.029901727676391602, 0.029890560150146486, 0.02976972770690918, 0.029880319595336914, 0.029903936386108398, 0.029930431365966795, 0.029840768814086913, 0.029840000152587892, 0.030158975601196288, 0.030060543060302734, 0.029801599502563475, 0.029610368728637697, 0.029413728713989257, 0.02972457695007324, 0.02952422332763672, 0.02957107162475586, 0.030496768951416016, 0.02962188720703125, 0.029571456909179686, 0.029521184921264647, 0.02949612808227539, 0.029475967407226564, 0.029455135345458985, 0.029286399841308593, 0.029497472763061524, 0.029441919326782227, 0.02956492805480957, 0.029462528228759766, 0.029787519454956054, 0.029807231903076173, 0.029698175430297853, 0.029674848556518554, 0.029626688003540038, 0.02950681686401367, 0.029466943740844728, 0.02955721664428711, 0.029579456329345704, 0.029552160263061525, 0.029708063125610352, 0.029674175262451172, 0.02957017517089844, 0.02959654426574707, 0.029628416061401368, 0.02954035186767578, 0.029873376846313478, 0.029901023864746093, 0.029686336517333985, 0.029906944274902345, 0.029749248504638674, 0.02969584083557129, 0.029659296035766603, 0.03000054359436035, 0.02992188835144043, 0.02978611183166504, 0.02973526382446289, 0.030025375366210937, 0.030103551864624024, 0.030130176544189452, 0.030164831161499022, 0.03007913589477539, 0.029937664031982423, 0.029816831588745117, 0.029767679214477538, 0.02998476791381836, 0.029947359085083006, 0.02977846336364746, 0.029918880462646485, 0.029882720947265625, 0.029784063339233398, 0.029868032455444334, 0.03020185661315918, 0.03075391960144043, 0.030172767639160155, 0.031145952224731446, 0.029937440872192383, 0.02979033660888672, 0.02955753517150879, 0.029660160064697266, 0.029587839126586916, 0.029597343444824217, 0.029395679473876953, 0.029341440200805664, 0.02944175910949707, 0.029544607162475586, 0.029607295989990234, 0.02963929557800293, 0.029481311798095704, 0.029593503952026368, 0.02936025619506836, 0.02956492805480957, 0.02951148796081543, 0.029773439407348633, 0.029676095962524414, 0.029460479736328125, 0.02977382469177246, 0.029913087844848633, 0.02995769691467285, 0.029710784912109375, 0.030050304412841795, 0.030668800354003906, 0.029863199234008788, 0.02981961631774902, 0.029777088165283204, 
0.029754175186157226, 0.02977791976928711, 0.029457504272460938, 0.03021446418762207, 0.029641311645507814, 0.029849599838256836, 0.02980659294128418, 0.029736000061035155, 0.029589567184448242, 0.02948931121826172, 0.029782751083374023, 0.029709791183471678, 0.029585792541503907, 0.029531648635864258, 0.029514400482177735, 0.02949337577819824, 0.02965872001647949, 0.029591840744018556, 0.029687231063842773, 0.029880800247192384, 0.031954944610595705, 0.0297762565612793, 0.02963987159729004, 0.02993174362182617, 0.029835552215576173, 0.02990083122253418, 0.029939552307128907, 0.02994806480407715, 0.02977903938293457, 0.029891679763793946, 0.02984940719604492, 0.029982048034667967, 0.030318464279174805, 0.029675039291381836, 0.029469152450561524, 0.02939084815979004, 0.02930892753601074, 0.02932326316833496, 0.029269760131835937, 0.029393152236938478, 0.0295280647277832, 0.029553951263427733, 0.029360864639282228, 0.0293703670501709, 0.029419519424438476, 0.02945852851867676, 0.029456287384033202, 0.02948409652709961, 0.029397951126098634, 0.029281696319580077, 0.029415231704711914, 0.029512479782104493, 0.02957107162475586, 0.02981827163696289, 0.031202943801879882, 0.029865215301513672, 0.030199520111083983, 0.02958745574951172, 0.03022435188293457, 0.02967705535888672, 0.029757984161376955, 0.029684032440185547, 0.02973459243774414, 0.02996566390991211, 0.029581024169921876, 0.02948601531982422, 0.029595232009887694, 0.029536991119384765, 0.029605567932128905, 0.029894559860229493, 0.02972598457336426, 0.029577695846557617, 0.02962214469909668, 0.03001388740539551, 0.02981603240966797, 0.02993577575683594, 0.029811359405517577, 0.029959199905395507, 0.02987923240661621, 0.029930912017822265, 0.029721216201782228, 0.029789407730102538, 0.029815584182739257, 0.029691904067993165, 0.029634559631347656, 0.029788127899169924, 0.030115167617797853, 0.02976838493347168, 0.029684736251831056, 0.02972774314880371, 0.029741056442260744, 0.029738431930541993, 0.02957779121398926, 0.029638656616210936, 0.029961919784545897, 0.03045452880859375, 0.029914880752563478, 0.029591583251953126, 0.030306079864501952, 0.02952851104736328, 0.029800447463989257, 0.029378559112548826, 0.029489152908325194, 0.029368064880371095, 0.02955904006958008, 0.02939289665222168, 0.02957926368713379, 0.029470720291137696, 0.02951372718811035, 0.02950156784057617, 0.029532032012939455, 0.029562496185302736, 0.02964518356323242, 0.029825023651123047, 0.029878271102905272, 0.02979430389404297, 0.02963046455383301, 0.029849599838256836, 0.029535232543945314, 0.02948761558532715, 0.02938070487976074, 0.029598432540893553, 0.02970355224609375, 0.029775327682495117, 0.030311264038085938, 0.029786176681518554, 0.029793632507324218, 0.02956319999694824, 0.02960598373413086, 0.029835424423217773, 0.029689119338989257, 0.029552480697631837, 0.029644927978515624, 0.02972537612915039, 0.029663328170776368, 0.029878175735473633, 0.029507680892944334, 0.029739007949829102, 0.02976563262939453, 0.02978755187988281, 0.02955120086669922, 0.029673471450805664, 0.029747200012207032, 0.029751232147216797, 0.029769311904907225, 0.030021440505981444, 0.030075551986694336, 0.02977702331542969, 0.02967635154724121, 0.029752511978149414, 0.029854591369628907, 0.029898752212524415, 0.030105600357055663, 0.03066249656677246, 0.030097503662109375, 0.029964384078979493, 0.029877504348754882, 0.030017791748046876, 0.030410816192626953, 0.03043132781982422, 0.030050048828125, 0.02992665672302246, 0.029911359786987304, 0.0297807674407959, 
0.02971116828918457, 0.029671455383300783, 0.02962326431274414, 0.029599456787109374, 0.029626495361328126, 0.029603103637695312, 0.029473663330078125, 0.03091632080078125, 0.030005279541015624, 0.029617792129516603, 0.029864576339721678, 0.030310400009155275, 0.02958745574951172, 0.029807872772216797, 0.029683488845825196, 0.029955039978027342, 0.029459808349609377, 0.02951750373840332, 0.029555679321289063, 0.029467744827270506, 0.029901311874389647, 0.03214080047607422, 0.030055328369140624, 0.029902847290039062, 0.029847360610961913, 0.029698240280151368, 0.029752511978149414, 0.029576000213623048, 0.029659135818481445, 0.029684864044189452, 0.029655935287475586, 0.029521568298339844, 0.02965679931640625, 0.029528671264648438, 0.029460512161254882, 0.02953215980529785, 0.02951372718811035, 0.029628416061401368, 0.029568735122680663, 0.02973651123046875, 0.029945920944213868, 0.02991375923156738, 0.029994176864624023, 0.030020416259765623, 0.02975334358215332, 0.029661184310913087, 0.03003968048095703, 0.030404319763183595, 0.02998134422302246, 0.029868032455444334, 0.029761280059814453, 0.029783359527587892, 0.029819328308105467, 0.029763967514038085, 0.029775999069213868, 0.029941055297851564, 0.03014463996887207, 0.030188703536987306, 0.02997228813171387, 0.029716512680053712, 0.029573087692260743, 0.029501440048217774, 0.029432863235473634, 0.029485376358032226, 0.029481632232666016, 0.029607936859130858, 0.029503488540649415, 0.029529184341430665, 0.029500320434570314, 0.029504608154296875, 0.029420448303222657, 0.029453407287597655, 0.02946345520019531, 0.029480960845947264, 0.029609983444213867, 0.029577215194702147, 0.02954035186767578, 0.029708288192749024, 0.029575040817260742, 0.029800575256347658, 0.02978748893737793, 0.029877119064331055, 0.029613855361938477, 0.029533824920654296, 0.029585792541503907, 0.029620159149169923, 0.029576351165771484, 0.029650911331176758, 0.029821887969970703, 0.02978201675415039, 0.029515775680541992, 0.030021631240844726, 0.030039072036743164, 0.029999616622924805, 0.030063072204589845, 0.029872352600097657, 0.029880096435546875, 0.02999075126647949, 0.02984976005554199, 0.02973401641845703, 0.02956991958618164, 0.029652000427246094, 0.029997152328491213, 0.030102399826049803, 0.029851520538330078, 0.02971660804748535, 0.03019913673400879, 0.03356444931030273, 0.029950847625732423, 0.029882368087768556, 0.029790111541748047, 0.029833311080932616, 0.02960380744934082, 0.02965711975097656, 0.030078880310058592, 0.030035808563232423, 0.029851743698120117, 0.029792224884033203, 0.029761728286743165, 0.03000444793701172, 0.029831647872924805, 0.029684032440185547, 0.029402336120605468, 0.029504255294799806, 0.02959516716003418, 0.029691551208496095, 0.029550815582275392, 0.029475263595581055, 0.029401023864746093, 0.029427967071533202, 0.029476608276367187, 0.029474815368652343, 0.02953379249572754, 0.030188095092773436, 0.03111257553100586, 0.03023232078552246, 0.029704927444458008, 0.029587135314941407, 0.02951945686340332, 0.02972336006164551, 0.029636608123779298, 0.029900447845458984, 0.029997247695922852, 0.029643968582153322, 0.02980963134765625, 0.0297259521484375, 0.02968828773498535, 0.029544736862182615, 0.029593183517456056, 0.029858207702636717, 0.029661184310913087, 0.029558784484863283, 0.029677183151245116, 0.030226816177368165, 0.030082239151000976, 0.0297828483581543, 0.02950454330444336, 0.02981177520751953, 0.03013532829284668, 0.03038640022277832, 0.03315155029296875, 0.030478431701660157, 0.030383167266845704, 
0.03030726432800293, 0.030218175888061524, 0.029898143768310546, 0.029880992889404295, 0.02978816032409668, 0.02978927993774414, 0.02961497688293457, 0.02997865676879883, 0.02984899139404297, 0.029644895553588867, 0.029829280853271484, 0.030103231430053713, 0.029920223236083985, 0.02978771209716797, 0.029980703353881834, 0.029871807098388672, 0.029780384063720702, 0.030135520935058592, 0.030087648391723634, 0.030168544769287108, 0.03038822364807129, 0.02995814323425293, 0.029380607604980468, 0.029398719787597657, 0.029281696319580077, 0.02931337547302246, 0.02935398483276367, 0.029452863693237304, 0.0295731201171875, 0.031870975494384765, 0.030136320114135744, 0.0297259521484375, 0.029816831588745117, 0.029601951599121094, 0.02952569580078125, 0.029489664077758788, 0.029445600509643555, 0.02947372817993164, 0.02935398483276367, 0.030045536041259764, 0.029745439529418945, 0.029796735763549805, 0.029647136688232423, 0.02946019172668457, 0.029452287673950195, 0.02941654396057129, 0.029772703170776366, 0.029693183898925782, 0.029686527252197267, 0.02949734306335449, 0.029574527740478515, 0.02947270393371582, 0.029582048416137697, 0.02943791961669922, 0.029476127624511718, 0.029557472229003907, 0.02959974479675293, 0.029800512313842772, 0.029781951904296874, 0.029794208526611327, 0.02969766426086426, 0.029542400360107423, 0.02976755142211914, 0.029958751678466795, 0.030061824798583985, 0.029835712432861327, 0.02995750427246094, 0.029878944396972657, 0.02985398483276367, 0.02978611183166504, 0.029902847290039062, 0.029859647750854493, 0.029966623306274413, 0.029898111343383788, 0.03003228759765625, 0.029958272933959963, 0.029767679214477538, 0.029673471450805664, 0.0298591365814209, 0.029948415756225585, 0.0299946231842041, 0.029961824417114258, 0.029915199279785157]",tokens/s,33.56295922320656,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,7436.419072,8048.738304,0.0,7646.216192,7627.584,s,1,12.9949267578125,12.9949267578125,0.0,12.9949267578125,12.9949267578125,12.9949267578125,12.9949267578125,[12.9949267578125],,kWh,0.00017278734016250232,1.9052442514780857e-05,5.561560004800625e-05,0.00024745538272528944,,MB,1761.652736,8732.409856,0.0,8315.20768,8191.863296,s,10,3.3556708984375003,0.33556708984375005,0.0007634867049992896,0.3355626220703125,0.3364967987060547,0.33655421600341795,0.3366001498413086,"[0.333852294921875, 0.33557867431640626, 0.3357731018066406, 0.33551687622070314, 0.33551300048828125, 0.3364840393066406, 0.33474517822265626, 0.33554656982421877, 0.33661163330078125, 
0.3360495300292969]",tokens/s,762.8876840073953,kWh,9.832132973056104e-06,1.0843030367387353e-06,6.513440395933268e-06,1.742987640572811e-05,tokens/kWh,14687424.85838103,MB,1764.708352,9046.982656,0.0,8629.78048,8480.067584,s,10,26.989277099609374,2.6989277099609374,0.004195169949303004,2.6994720458984376,2.703811279296875,2.7050911865234375,2.7061151123046874,"[2.69077001953125, 2.694811767578125, 2.696556884765625, 2.700678955078125, 2.697007080078125, 2.6997451171875, 2.699198974609375, 2.7006103515625, 2.70352685546875, 2.70637109375]",tokens/s,23.342603719056935,kWh,7.891598056610822e-05,8.704517592640453e-06,5.248802347186665e-05,0.0001401085216306153,tokens/kWh,449651.45065261883,,s,630,26.980206951141366,0.042825725319272,0.0004004493700450249,0.042804304122924804,0.043337841796875,0.04346133193969726,0.04374581111907959,"[0.04240771102905273, 0.042219070434570315, 0.042070785522460935, 0.042178558349609374, 0.04206310272216797, 0.04201958465576172, 0.04237107086181641, 0.04257791900634766, 0.04242432022094727, 0.042246143341064454, 0.04215398406982422, 0.04219465637207031, 0.04242665481567383, 0.04238950347900391, 0.04224518585205078, 0.04222246551513672, 0.042381374359130856, 0.04242227172851563, 0.042280384063720707, 0.04236899185180664, 0.042566177368164065, 0.04250636672973633, 0.042418113708496095, 0.042635265350341796, 0.04259430313110352, 0.04259443283081055, 0.042610561370849606, 0.04264700698852539, 0.04296121597290039, 0.04274537658691406, 0.04264006423950195, 0.04254719924926758, 0.04248358535766601, 0.04252278518676758, 0.04274774551391602, 0.042790912628173826, 0.04273984146118164, 0.04274892807006836, 0.0433746566772461, 0.04310931015014648, 0.04274380874633789, 0.04256959915161133, 0.04288524627685547, 0.04296278381347656, 0.04287408065795899, 0.04304991912841797, 0.04287887954711914, 0.042789985656738284, 0.042857185363769534, 0.04292809677124024, 0.043254112243652346, 0.043312160491943356, 0.043162559509277346, 0.04325750350952148, 0.043178337097167965, 0.04310220718383789, 0.043173824310302734, 0.043460224151611326, 0.04332774353027344, 0.04303683090209961, 0.04307462310791016, 0.04327318572998047, 0.04311651229858399, 0.042435073852539064, 0.042298881530761716, 0.04208051300048828, 0.04196537780761719, 0.04196099090576172, 0.042298271179199216, 0.04242009735107422, 0.042178688049316404, 0.04229254531860351, 0.04231180953979492, 0.042262527465820314, 0.042248321533203126, 0.04245139312744141, 0.04242227172851563, 0.04222873687744141, 0.04240876770019531, 0.04237740707397461, 0.04234684753417969, 0.04270431900024414, 0.042524574279785156, 0.04245945739746094, 0.042323200225830075, 0.04249174499511719, 0.042500255584716796, 0.042746719360351564, 0.04267804718017578, 0.042559616088867186, 0.04258201599121094, 0.042493568420410154, 0.04278656005859375, 0.04296768188476562, 0.042957855224609376, 0.04270956802368164, 0.042759681701660154, 0.043088798522949216, 0.04310630416870117, 0.04295065689086914, 0.042827617645263674, 0.042870849609375, 0.042842208862304686, 0.042799102783203126, 0.043201633453369144, 0.04315228652954101, 0.042967041015625, 0.04294655990600586, 0.04312659072875977, 0.043090110778808595, 0.04284332656860351, 0.043125343322753903, 0.04317001724243164, 0.043358207702636715, 0.043342910766601565, 0.04317033767700195, 0.04304732894897461, 0.04306534576416016, 0.04315039825439453, 0.04342879867553711, 0.04333772659301758, 0.04315955352783203, 0.04314041519165039, 0.04320230484008789, 0.04335308837890625, 0.0437037124633789, 0.042369022369384765, 
0.041901920318603514, 0.04215964889526367, 0.04257228851318359, 0.0425546875, 0.042427455902099606, 0.042313472747802734, 0.04232806396484375, 0.04237721633911133, 0.042194942474365234, 0.042272384643554685, 0.042291038513183596, 0.042433185577392576, 0.042649375915527345, 0.04272544097900391, 0.04242416000366211, 0.042514240264892575, 0.04264774322509766, 0.04262639999389648, 0.042574687957763674, 0.04255894470214844, 0.04255104064941406, 0.042681121826171876, 0.042633216857910154, 0.042700801849365234, 0.04271916961669922, 0.04269472122192383, 0.04259430313110352, 0.04254105758666992, 0.04255942535400391, 0.042547264099121095, 0.04259430313110352, 0.04328243255615234, 0.043186176300048826, 0.04310812759399414, 0.042948833465576174, 0.04282572937011719, 0.04287680053710938, 0.04331270217895508, 0.043186912536621096, 0.043122528076171875, 0.042915969848632815, 0.04287638473510742, 0.04303094482421875, 0.04308374404907227, 0.04295068740844726, 0.04307958221435547, 0.043057247161865236, 0.04295897674560547, 0.04302630233764648, 0.04309302520751953, 0.04291683197021484, 0.04291788864135742, 0.043030529022216796, 0.043194366455078126, 0.043165023803710935, 0.04339926528930664, 0.04328422546386719, 0.04307846450805664, 0.04302643203735351, 0.04318342590332031, 0.04339321517944336, 0.04330339050292969, 0.042619102478027346, 0.04248828887939453, 0.04230963134765625, 0.04223590469360351, 0.04265574264526367, 0.042409984588623044, 0.04216169738769531, 0.04206991958618164, 0.042388031005859375, 0.04230553436279297, 0.04241766357421875, 0.04264585494995117, 0.04273920059204102, 0.04260019302368164, 0.043015071868896484, 0.042727039337158206, 0.042469215393066403, 0.04227519989013672, 0.04239580917358399, 0.0435233268737793, 0.04235728073120117, 0.042494174957275394, 0.04251443099975586, 0.042409854888916014, 0.042565185546875, 0.04257984161376953, 0.04263187026977539, 0.04302643203735351, 0.043087871551513675, 0.04292812728881836, 0.04275814437866211, 0.04269875335693359, 0.04262838363647461, 0.04272732925415039, 0.042808128356933595, 0.04286640167236328, 0.04333596801757812, 0.0433144645690918, 0.043026943206787106, 0.04320892715454101, 0.043511520385742186, 0.043327136993408205, 0.04269939041137695, 0.042839969635009766, 0.04288441467285156, 0.042769184112548826, 0.0428851203918457, 0.042874622344970706, 0.043014400482177736, 0.043251361846923825, 0.043102558135986326, 0.043284481048583984, 0.04344591903686523, 0.04327452850341797, 0.04312070465087891, 0.04308329772949219, 0.043149856567382815, 0.043274177551269534, 0.04350566482543945, 0.04337254333496094, 0.043218944549560545, 0.04356300735473633, 0.04374937438964844, 0.042342784881591794, 0.04211097717285156, 0.0421124153137207, 0.042418174743652344, 0.042154590606689454, 0.042376766204833986, 0.042393409729003906, 0.04249049758911133, 0.04253286361694336, 0.042379264831542966, 0.04237107086181641, 0.04228300857543945, 0.042336254119873046, 0.042684417724609375, 0.0428851203918457, 0.04269465637207031, 0.04245209503173828, 0.042261375427246096, 0.042280960083007815, 0.042254337310791014, 0.04271430587768555, 0.04253984069824219, 0.04249980926513672, 0.04244425582885742, 0.0435552978515625, 0.04278822326660156, 0.04263625717163086, 0.04271513748168945, 0.04315955352783203, 0.04315750503540039, 0.04274585723876953, 0.04258611297607422, 0.04291161727905273, 0.042825344085693356, 0.042816001892089846, 0.043038719177246096, 0.04288716888427734, 0.04273152160644531, 0.04272537612915039, 0.04259244918823242, 0.04270880126953125, 0.04294041442871094, 
0.04296054458618164, 0.04320403289794922, 0.04308060836791992, 0.04304828643798828, 0.04300457763671875, 0.042943710327148436, 0.04293916702270508, 0.04327423858642578, 0.043319297790527345, 0.04331520080566406, 0.04312675094604492, 0.04319798278808594, 0.043347648620605465, 0.0432393913269043, 0.043131328582763674, 0.043318878173828124, 0.04339590454101563, 0.04333363342285156, 0.0430709457397461, 0.0430720329284668, 0.043112449645996094, 0.04311286544799805, 0.04261273574829102, 0.042522209167480465, 0.042449310302734376, 0.04232137680053711, 0.04220163345336914, 0.04226662445068359, 0.04228300857543945, 0.042270721435546874, 0.04277414321899414, 0.042707328796386716, 0.04253900909423828, 0.0424466552734375, 0.04227910232543945, 0.04228656005859375, 0.042408481597900394, 0.04233343887329102, 0.04238425445556641, 0.04247539138793945, 0.042643455505371096, 0.04248735809326172, 0.042463104248046876, 0.042453567504882814, 0.04274486541748047, 0.042707935333251956, 0.042600704193115235, 0.042813182830810544, 0.0429315185546875, 0.04288166427612305, 0.04269574356079102, 0.04296102523803711, 0.04258671951293945, 0.042754337310791014, 0.04272518539428711, 0.04293807983398437, 0.042996192932128904, 0.0430571517944336, 0.04285619354248047, 0.04290768051147461, 0.04296051025390625, 0.04291814422607422, 0.04288499069213867, 0.0428834228515625, 0.04297331237792969, 0.04298342514038086, 0.04332748794555664, 0.0432988166809082, 0.04284521484375, 0.04313113784790039, 0.043133663177490233, 0.04277388763427734, 0.043287105560302734, 0.04349292755126953, 0.04322537612915039, 0.04310860824584961, 0.04312675094604492, 0.04310220718383789, 0.043441215515136716, 0.04345951843261719, 0.04435968017578125, 0.043308704376220704, 0.04334175872802734, 0.0435142707824707, 0.04244851303100586, 0.04234073638916016, 0.04207526397705078, 0.042115966796875, 0.04218483352661133, 0.04245008087158203, 0.04247625732421875, 0.04237516784667969, 0.04279283142089844, 0.04273574447631836, 0.042417728424072265, 0.04227734375, 0.04233827209472656, 0.04233420944213867, 0.04246527862548828, 0.042659423828125, 0.042644992828369144, 0.04232080078125, 0.04234444808959961, 0.04250624084472656, 0.04273561477661133, 0.04264128112792969, 0.0422872314453125, 0.04241183853149414, 0.04280339050292969, 0.042774528503417966, 0.042729022979736325, 0.04272377777099609, 0.04268191909790039, 0.042866657257080075, 0.04269836807250976, 0.04281180953979492, 0.043014591217041015, 0.043109825134277344, 0.04290208053588867, 0.04377990341186523, 0.043165889739990235, 0.042962944030761716, 0.0427724494934082, 0.0426475830078125, 0.04272332763671875, 0.043157054901123044, 0.043800289154052735, 0.043068126678466795, 0.042842079162597656, 0.04282371139526367, 0.042854080200195314, 0.04309171295166016, 0.043141246795654294, 0.04307398223876953, 0.0433389778137207, 0.04318288040161133, 0.04296089553833008, 0.043044864654541014, 0.04333542251586914, 0.04331695938110352, 0.04364751815795898, 0.043503200531005856, 0.04346223831176758, 0.04326892852783203, 0.043112449645996094, 0.04337660980224609, 0.043591297149658204, 0.04244307327270508, 0.042485759735107424, 0.04243865585327149, 0.04234598541259765, 0.042111328125, 0.0422762565612793, 0.04238582229614258, 0.042326366424560544, 0.042434558868408204, 0.042590206146240234, 0.04271020889282227, 0.0425841293334961, 0.0426580810546875, 0.042750175476074216, 0.042893566131591794, 0.0425467529296875, 0.042158527374267576, 0.0424035530090332, 0.04242051315307617, 0.042426368713378904, 0.0424161262512207, 
0.04300185775756836, 0.042782016754150394, 0.042527423858642575, 0.04250624084472656, 0.04246262359619141, 0.04246384048461914, 0.04341955184936523, 0.042815582275390625, 0.04286259078979492, 0.04316719818115235, 0.04317033767700195, 0.042987518310546875, 0.042713024139404296, 0.04248912048339844, 0.042752799987792967, 0.043030529022216796, 0.04306284713745117, 0.04300598526000977, 0.04330486297607422, 0.04306175994873047, 0.04259625625610351, 0.042573760986328125, 0.04274192047119141, 0.04321484756469727, 0.043431934356689454, 0.043055103302001956, 0.04297318267822266, 0.04287491226196289, 0.04274991989135742, 0.04308377456665039, 0.043565055847167966, 0.043407360076904294, 0.04319641494750977, 0.0433388786315918, 0.04337696075439453, 0.04353286361694336, 0.04390911865234375, 0.04368384170532227, 0.04330464172363281, 0.043225120544433594, 0.043216705322265625, 0.04313516616821289, 0.042603038787841795, 0.04217484664916992, 0.042440769195556644, 0.04225574493408203, 0.042420799255371094, 0.04244614410400391, 0.04237382507324219, 0.042780670166015625, 0.04251238250732422, 0.04247296142578125, 0.04255385589599609, 0.042665985107421874, 0.042618881225585936, 0.04301824188232422, 0.042618881225585936, 0.042518527984619144, 0.04293427276611328, 0.0426126708984375, 0.042548576354980466, 0.042363616943359376, 0.04243046569824219, 0.04250944137573242, 0.04253168106079101, 0.042590240478515624, 0.042807422637939456, 0.0426638069152832, 0.04250419235229492, 0.04263484954833984, 0.042662303924560545, 0.0424917106628418, 0.04287916946411133, 0.04290313720703125, 0.04284867095947266, 0.043237377166748046, 0.04328208160400391, 0.04319881439208984, 0.04294246292114258, 0.04327340698242187, 0.042942272186279294, 0.04298649597167969, 0.043207809448242186, 0.0430208625793457, 0.043009471893310544, 0.04302495956420899, 0.04299177551269531, 0.043182239532470704, 0.04319612884521484, 0.04361568069458008, 0.04312697601318359, 0.04331587219238281, 0.04323328018188476, 0.04323942565917969, 0.04330691146850586, 0.0433419189453125, 0.04311964797973633, 0.04314726257324219, 0.04344249725341797, 0.043423999786376954, 0.04350812911987305, 0.04400128173828125, 0.04373708724975586, 0.04334796905517578, 0.04335340881347656, 0.04265878295898438, 0.042186561584472655, 0.0422360954284668, 0.04237311935424805, 0.04224777603149414, 0.042068382263183594, 0.04221747207641602, 0.04250598526000977, 0.0425432014465332, 0.04283523178100586, 0.04269964981079102, 0.04290755081176758, 0.04257596969604492, 0.0424447021484375, 0.04248601531982422, 0.042604385375976564, 0.04265372848510742, 0.04262499237060547, 0.04293222427368164, 0.04271068954467774, 0.04244278335571289, 0.04241027069091797, 0.04254646301269531, 0.042695423126220704, 0.042807201385498046, 0.04280521774291992, 0.042528606414794924, 0.04307766342163086, 0.04312086486816406, 0.04293593597412109, 0.04266377639770508, 0.0427088623046875, 0.04273788833618164, 0.043199008941650394, 0.043254913330078124, 0.04320703887939453, 0.04351155090332031, 0.04292665481567383, 0.04290063858032227, 0.043119583129882816, 0.04314931106567383, 0.04319846343994141, 0.04310630416870117, 0.043450366973876955, 0.04346006393432617, 0.04305148696899414, 0.043044769287109375, 0.04299792098999024, 0.04323728179931641, 0.043251808166503904, 0.043162689208984376, 0.043315265655517576, 0.04346537780761719, 0.04338217544555664, 0.043536865234375, 0.0434378547668457, 0.04367190551757812, 0.0435219841003418, 0.043143455505371096, 0.04331427383422851, 0.04370524978637695, 0.04372889709472656, 
0.043862014770507815]",tokens/s,23.35045098582347,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) 
File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,11044.757504,12553.4208,0.0,12150.898688,12116.742656,s,1,16.163751953125,16.163751953125,0.0,16.163751953125,16.163751953125,16.163751953125,16.163751953125,[16.163751953125],,kWh,0.0002646348572291724,2.9180462749588036e-05,8.516951258001615e-05,0.00037898483255877655,,MB,2008.809472,13513.916416,0.0,13096.71424,12892.965376,s,10,6.359149841308594,0.6359149841308593,0.0009729047114947553,0.6356495971679688,0.6363471740722657,0.6374991180419922,0.6384206732177734,"[0.635283203125, 0.6349862060546875, 0.6352772827148437, 0.6359114990234375, 0.6356328735351563, 0.635577880859375, 0.6356663208007812, 0.6386510620117187, 0.6360911865234375, 0.6360723266601562]",tokens/s,402.56953584745224,kWh,1.857343602916804e-05,2.0483146219586413e-06,1.2327058472750424e-05,3.294880912387711e-05,tokens/kWh,7769628.305457746,MB,2015.424512,13870.432256,0.0,13453.23008,13237.236736,s,10,43.336995117187506,4.33369951171875,0.004127981912586184,4.33511865234375,4.3372568359375,4.338267578125,4.339076171875,"[4.32694677734375, 4.32575927734375, 
4.33499462890625, 4.3370322265625, 4.33584033203125, 4.33420068359375, 4.33615087890625, 4.33524267578125, 4.3392783203125, 4.33154931640625]",tokens/s,14.537233102950907,kWh,0.00012665167289833352,1.3970375263824311e-05,8.414712981765007e-05,0.0002247691779798079,tokens/kWh,280287.5401611318,,s,630,43.31354483032221,0.068751658460829,0.00046645941135002854,0.0687289924621582,0.06930681762695312,0.06947806243896484,0.06990054779052735,"[0.06900150299072266, 0.0679708480834961, 0.06777446746826171, 0.06792806243896485, 0.06832745361328126, 0.06807753753662109, 0.06795574188232421, 0.0680212173461914, 0.067866943359375, 0.06803533172607422, 0.06792697906494141, 0.06813286590576172, 0.06796288299560548, 0.06793011474609376, 0.06854412841796875, 0.06831494140625, 0.06813954925537109, 0.06860189056396485, 0.06823677062988281, 0.06833821105957032, 0.06845442962646485, 0.06852191925048828, 0.06842121887207031, 0.06817836761474609, 0.06834381103515624, 0.06830694580078125, 0.06814080047607422, 0.06834611511230469, 0.06874111938476563, 0.06886768341064453, 0.06855651092529297, 0.06863737487792969, 0.06863193511962891, 0.06858812713623047, 0.0692408676147461, 0.06886144256591797, 0.07063385772705078, 0.06901475524902344, 0.06863311767578124, 0.0685141143798828, 0.06879238128662109, 0.0687586898803711, 0.06892015838623047, 0.06902694702148438, 0.06896473693847656, 0.06862019348144531, 0.06882284545898437, 0.06882969665527344, 0.06937423706054688, 0.06930636596679687, 0.06890812683105468, 0.06907766723632812, 0.06889702606201172, 0.06913999938964843, 0.06901193237304687, 0.06892704010009766, 0.0690588150024414, 0.06920006561279297, 0.06919487762451172, 0.06914669036865234, 0.06928623962402344, 0.06912617492675781, 0.06936815643310547, 0.06848687744140625, 0.06805117034912109, 0.06773395538330078, 0.06842982482910157, 0.06884146881103516, 0.06815948486328124, 0.06769398498535156, 0.0677894058227539, 0.06840729522705079, 0.06807091522216797, 0.06793472290039063, 0.06805020904541016, 0.06808444976806641, 0.0684150390625, 0.0683687973022461, 0.06818000030517578, 0.06818102264404297, 0.06865814208984375, 0.06851993560791016, 0.06814431762695312, 0.06774867248535156, 0.06827152252197266, 0.06812662506103516, 0.0681561279296875, 0.06851785278320313, 0.06819840240478516, 0.06847283172607421, 0.06843392181396485, 0.06858342742919922, 0.06861798095703125, 0.06859801483154297, 0.06886399841308594, 0.06894796752929687, 0.06872064208984376, 0.06867353820800781, 0.06870995330810546, 0.06860598754882813, 0.06873744201660156, 0.06865510559082032, 0.06865296173095703, 0.06882723236083985, 0.06872179412841797, 0.06866323089599609, 0.06855961608886718, 0.06898889923095704, 0.06894818878173828, 0.06908646392822265, 0.0691119384765625, 0.06890354919433593, 0.06919577789306641, 0.06872064208984376, 0.06905401611328126, 0.0689176025390625, 0.06895574188232421, 0.06934783935546875, 0.06928787231445313, 0.0692484130859375, 0.06893647766113281, 0.06931238555908203, 0.06918544006347656, 0.06943513488769532, 0.0694889907836914, 0.06956646728515625, 0.06836873626708985, 0.0683067855834961, 0.06799298858642579, 0.06827196502685547, 0.0684942398071289, 0.06832128143310547, 0.06821887969970702, 0.06790060424804688, 0.06796781158447265, 0.06789849853515625, 0.06833987426757812, 0.06801261138916016, 0.06817340850830078, 0.06838329315185547, 0.06833913421630859, 0.06809164428710937, 0.06848185729980469, 0.06867967987060547, 0.06888253021240234, 0.06856285095214844, 0.06855248260498047, 0.06840956878662109, 0.06871858978271485, 
0.06849104309082031, 0.06861993408203125, 0.06849513244628906, 0.06861103820800782, 0.06855023956298828, 0.06871036529541015, 0.0685835189819336, 0.06843750762939453, 0.06878224182128906, 0.06898944091796876, 0.06884352111816407, 0.06927897644042968, 0.06995795440673828, 0.06898323059082032, 0.06870130920410156, 0.06923715209960937, 0.06869001770019531, 0.06884390258789062, 0.06879225921630859, 0.06903814697265626, 0.06886195373535156, 0.06895206451416015, 0.06907289886474609, 0.06895359802246094, 0.06897270202636718, 0.06933744049072266, 0.06988390350341797, 0.06974463653564453, 0.06895820617675781, 0.06906674957275391, 0.06895410919189453, 0.06908313751220703, 0.06896640014648438, 0.06936486053466796, 0.06918643188476563, 0.06898178863525391, 0.06958732604980469, 0.06956502532958984, 0.06937548828125, 0.06923699188232423, 0.06836224365234375, 0.06808370971679688, 0.06827740478515625, 0.0680684814453125, 0.06812812805175782, 0.06876604461669922, 0.06833331298828126, 0.06835225677490234, 0.06800498962402343, 0.06823004913330079, 0.0679927978515625, 0.06807154846191406, 0.06817820739746094, 0.06831251525878906, 0.06854732513427735, 0.06856924438476562, 0.06848886108398437, 0.0685202865600586, 0.06849641418457031, 0.0685696029663086, 0.06873545837402344, 0.06858265686035156, 0.06885247802734375, 0.06863040161132812, 0.06867366027832031, 0.06842569732666015, 0.06838393402099609, 0.06865408325195313, 0.06867692565917968, 0.06933353424072265, 0.06871654510498047, 0.06878396606445313, 0.06876585388183594, 0.06885990142822265, 0.06889881896972656, 0.06874317169189453, 0.06895820617675781, 0.06894950103759766, 0.06915737915039062, 0.06877369689941407, 0.06862595367431641, 0.0685700454711914, 0.06905213165283203, 0.06929612731933593, 0.06912000274658203, 0.06944678497314453, 0.06921100616455078, 0.06887750244140625, 0.06923875427246094, 0.06911673736572266, 0.0692734375, 0.06932294464111328, 0.06950895690917969, 0.06939478302001953, 0.06895996856689453, 0.06938390350341797, 0.06927123260498047, 0.06921222686767578, 0.06947264099121093, 0.06940287780761718, 0.06955785369873047, 0.0692161636352539, 0.06962226867675782, 0.06849472045898437, 0.06810598754882813, 0.0683795166015625, 0.06817791748046875, 0.06851961517333985, 0.06822319793701172, 0.06814115142822266, 0.06847452545166016, 0.06825196838378907, 0.06862806701660157, 0.06831887817382812, 0.06800259399414063, 0.06807347106933594, 0.06819580841064453, 0.06793679809570312, 0.06825984191894531, 0.0685025634765625, 0.0684299545288086, 0.06828050994873047, 0.06809900665283203, 0.06832495880126953, 0.06875762939453126, 0.06868694305419921, 0.06863302612304688, 0.0684712677001953, 0.06844825744628906, 0.06845219421386718, 0.06895222473144531, 0.06860582733154297, 0.0689210205078125, 0.06907743835449219, 0.06864691162109375, 0.06883123016357422, 0.06891315460205077, 0.069010498046875, 0.06877689361572266, 0.06982963562011718, 0.06894627380371093, 0.0690121307373047, 0.06929759979248047, 0.06887216186523437, 0.0689889907836914, 0.06891574096679688, 0.06903504180908203, 0.06913260650634766, 0.06897731018066407, 0.0690851821899414, 0.06886809539794922, 0.06901484680175782, 0.06904694366455078, 0.07095094299316407, 0.0689459228515625, 0.0689760284423828, 0.06923734283447265, 0.06898051452636719, 0.06931417846679687, 0.06914012908935546, 0.06935228729248047, 0.06937760162353515, 0.06915740966796875, 0.06899017333984375, 0.06915129852294923, 0.0695031967163086, 0.0684229736328125, 0.06814176177978516, 0.06821485137939454, 0.06809532928466797, 
0.06822108459472656, 0.06821727752685547, 0.06864876556396485, 0.06803465270996094, 0.0682558364868164, 0.06806118774414062, 0.06832947540283203, 0.06843145751953125, 0.06798582458496094, 0.06803014373779297, 0.06841580963134766, 0.06870425415039062, 0.06844153594970703, 0.06833200073242188, 0.06845843505859375, 0.06862025451660156, 0.06870384216308593, 0.06826863861083984, 0.06831094360351563, 0.06828998565673829, 0.06844892883300781, 0.06867743682861328, 0.06861590576171875, 0.0687491226196289, 0.068776611328125, 0.06897782135009765, 0.068698974609375, 0.06900531005859376, 0.06883897399902343, 0.06901395416259766, 0.06877798461914063, 0.0689991683959961, 0.06881206512451171, 0.06856902313232421, 0.0686702423095703, 0.06901103973388673, 0.06931088256835938, 0.0688348159790039, 0.06877776336669922, 0.06894870758056641, 0.06890086364746094, 0.0692608642578125, 0.06916953277587891, 0.06922386932373047, 0.06907062530517578, 0.0693331527709961, 0.06904083251953125, 0.06882508850097656, 0.06896367645263672, 0.06909404754638672, 0.06918915557861328, 0.06898121643066406, 0.0696995849609375, 0.06929571533203124, 0.0692166748046875, 0.06954803466796874, 0.06918962860107422, 0.06962790679931641, 0.06949795532226563, 0.06900339508056641, 0.06843424224853516, 0.06827225494384766, 0.06840115356445313, 0.06854860687255859, 0.06808707427978515, 0.06846937561035156, 0.06816563415527344, 0.06855484771728515, 0.06818201446533204, 0.06847801971435546, 0.06839961242675781, 0.06825414276123047, 0.06829670715332031, 0.06833561706542969, 0.06841548919677734, 0.06836771392822266, 0.06899164581298828, 0.06870582580566406, 0.06850812530517578, 0.06822911834716797, 0.0685301742553711, 0.06847644805908203, 0.06853167724609376, 0.06869078063964844, 0.06832720184326171, 0.06866767883300781, 0.06876761627197266, 0.06866710662841796, 0.06845449829101563, 0.06891718292236328, 0.06900121307373047, 0.06890306854248048, 0.06914899444580078, 0.06895206451416015, 0.0686173095703125, 0.06896527862548828, 0.06928524780273437, 0.06901209259033203, 0.06863168334960937, 0.06926630401611328, 0.06896640014648438, 0.0689620132446289, 0.06916706848144531, 0.0688046417236328, 0.06884156799316406, 0.06915705871582031, 0.06924889373779297, 0.06914012908935546, 0.06907299041748047, 0.06909580993652344, 0.07061885070800782, 0.06927593231201172, 0.0691855010986328, 0.06927158355712891, 0.0691190414428711, 0.06894812774658203, 0.06924368286132812, 0.06930335998535156, 0.06918649291992188, 0.0694497299194336, 0.069359619140625, 0.06945094299316407, 0.06860160064697265, 0.06859750366210937, 0.06799001312255859, 0.06782943725585938, 0.06823299407958984, 0.06799747467041016, 0.06803456115722656, 0.06822988891601563, 0.0683499526977539, 0.06830694580078125, 0.06800982666015624, 0.06833776092529296, 0.06842784118652344, 0.06821456146240235, 0.06817609405517579, 0.06860594940185546, 0.06848684692382813, 0.06859808349609375, 0.06989209747314454, 0.06874908447265625, 0.06839727783203126, 0.06813491058349609, 0.06843516540527343, 0.06856716918945313, 0.06877865600585938, 0.06869827270507813, 0.06865699005126953, 0.0685322265625, 0.0684585952758789, 0.06895811462402343, 0.06855862426757812, 0.06853449249267578, 0.06883942413330078, 0.0699697265625, 0.06882733154296874, 0.06859503936767578, 0.06887286376953125, 0.06875955200195312, 0.06864659118652344, 0.0691630096435547, 0.06899727630615235, 0.06885801696777344, 0.0689510726928711, 0.06867222595214843, 0.069046142578125, 0.06896473693847656, 0.06927565002441406, 0.06869427490234375, 0.06905625915527344, 
0.06901350402832031, 0.06898892974853515, 0.06904422760009765, 0.06907698822021484, 0.06934454345703125, 0.06920060729980469, 0.06912818908691407, 0.06936985778808594, 0.06990399932861328, 0.06925350189208984, 0.06923011016845704, 0.06945184326171874, 0.06941903686523437, 0.06927932739257812, 0.06830899047851563, 0.06804275512695312, 0.06808595275878906, 0.06823273468017578, 0.06821670532226562, 0.06804851531982421, 0.06803282928466797, 0.06858758544921875, 0.0684361572265625, 0.06817404937744141, 0.06845849609375, 0.06829055786132812, 0.06822463989257813, 0.06839263916015625, 0.06833433532714844, 0.06847443389892578, 0.06843020629882812, 0.0685136947631836, 0.06870435333251954, 0.06846995544433594, 0.06825043487548828, 0.06838198089599609, 0.06847357177734376, 0.06865440368652344, 0.06847353363037109, 0.06890086364746094, 0.06864281463623047, 0.06895549011230469, 0.068917724609375, 0.06855289459228515, 0.06884146881103516, 0.06869401550292968, 0.06902925109863281, 0.06898252868652344, 0.06858432006835938, 0.06891926574707032, 0.06901353454589844, 0.06923878479003906, 0.06895206451416015, 0.06880032348632813, 0.06952365112304687, 0.06926335906982421, 0.06989004516601563, 0.06880461120605469, 0.06909337615966797, 0.06905036926269531, 0.06918057250976563, 0.06896025848388672, 0.0693666229248047, 0.06893772888183594, 0.0690579833984375, 0.0695301742553711, 0.0704716796875, 0.0689315185546875, 0.06957266998291016, 0.06917485046386719, 0.06935308837890625, 0.06915328216552734, 0.069521728515625, 0.06928179168701172, 0.06957807922363281, 0.06946272277832032, 0.06939958190917969, 0.0685110092163086, 0.0683458251953125, 0.06796492767333985, 0.06784486389160156, 0.06816973114013672, 0.0678845443725586, 0.06827247619628907, 0.06828457641601562, 0.06871654510498047, 0.06872252655029297, 0.06846672058105469, 0.06830681610107422, 0.06832972717285156, 0.06838066864013671, 0.06834381103515624, 0.0685804443359375, 0.06817475128173828, 0.06828851318359375, 0.0683826904296875, 0.06843299102783203, 0.06858428955078125, 0.06866134643554687, 0.06863667297363281, 0.06860594940185546, 0.06866124725341796, 0.06906655883789062, 0.06859359741210938, 0.0686451187133789, 0.06855059051513672, 0.06874323272705078, 0.06860185241699218, 0.06915891265869141, 0.06883888244628907, 0.06844889831542969, 0.06868163299560547, 0.06908108520507812, 0.0689203872680664, 0.0690390396118164, 0.06895616149902344, 0.06855270385742188, 0.06900681304931641, 0.06895980834960938, 0.06880150604248046, 0.06887833404541016, 0.06887628936767579, 0.06902169799804687, 0.06898687744140625, 0.06906400299072266, 0.06897939300537109, 0.06913801574707032, 0.06897090911865235, 0.06922444915771485, 0.06913024139404297, 0.06898258972167969, 0.06922169494628906, 0.06901219177246094, 0.0692245101928711, 0.06961366271972656, 0.0692142105102539, 0.0692490234375, 0.0694824981689453, 0.06948863983154296, 0.06925107574462891]",tokens/s,14.545103672949887,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,6772.20352,7769.817088,0.0,7367.294976,7351.94368,s,1,12.590875,12.590875,0.0,12.590875,12.590875,12.590875,12.590875,[12.590875],,kWh,0.0001635466874708527,1.8026189208199503e-05,5.0951151871994615e-05,0.0002325240285510468,,MB,1478.303744,8380.08832,0.0,7962.886144,7872.44544,s,10,3.210407012939453,0.3210407012939453,0.000807979931147453,0.32093855285644535,0.32192711791992185,0.3223107879638672,0.32261772399902344,"[0.3194629821777344, 0.32087701416015624, 0.32062774658203125, 0.3210090942382812, 0.32062860107421876, 0.3210000915527344, 0.3207613220214844, 0.32184185791015624, 0.32150384521484376, 0.3226944580078125]",tokens/s,797.4066807361165,kWh,9.383221678906749e-06,1.0344282560913563e-06,6.212400803249762e-06,1.6630050738247867e-05,tokens/kWh,15393819.53966137,MB,1504.11264,8631.74656,0.0,8214.544384,8118.577152,s,10,27.203293212890628,2.7203293212890625,0.01649439509137784,2.7150029296875,2.72555498046875,2.747215844726562,2.7645445361328123,"[2.768876708984375, 2.71070458984375, 2.713300048828125, 2.7123681640625, 2.720741455078125, 2.719334228515625, 2.716846923828125, 2.716705810546875, 2.712340576171875, 2.71207470703125]",tokens/s,23.158960757790403,kWh,8.360070792901119e-05,9.220501548192092e-06,5.555381527635088e-05,0.00014837502475355415,tokens/kWh,424599.7606715878,,s,630,27.20015459060668,0.043174848556518555,0.0005240365914026493,0.04310723304748535,0.043551283645629886,0.043972430419921875,0.04518917556762696,"[0.04547404861450195, 0.04490156936645508, 0.04463494491577148, 0.044729984283447266, 0.0447696647644043, 0.04494950485229492, 0.046565376281738284, 0.04503081512451172, 0.04482844924926758, 0.044800159454345706, 0.04483865737915039, 0.04484716796875, 0.044896575927734376, 0.046123550415039065, 0.044849056243896485, 0.044882080078125, 0.04503910446166992, 0.04514003372192383, 0.04507388687133789, 0.04527590560913086, 0.045304065704345704, 0.045074337005615236, 0.04553055953979492, 0.04510163116455078, 0.04520924758911133, 0.04500825500488281, 0.04282262420654297, 0.04330905532836914, 0.04291945648193359, 0.04271558380126953, 0.042721473693847656, 0.04292291259765625, 0.04280937576293945, 0.04296771240234375, 0.042864768981933594, 0.042952831268310544, 0.04291584014892578, 0.04310835266113281, 0.04299980926513672, 0.042971134185791016, 0.043197921752929684, 0.043092254638671876, 0.04339510345458984, 0.043269535064697266, 0.04311737442016601, 0.04305065536499023, 0.04302678298950195, 0.04321852874755859, 0.04339241409301758, 0.04371148681640625, 0.043413791656494144, 0.04313430404663086, 0.04310054397583008, 0.04335615921020508, 0.043396480560302736, 0.043305408477783205, 0.04303683090209961, 0.043091999053955075, 0.04301123046875, 0.043170654296875, 0.04348518371582031, 0.04343145751953125, 0.043310848236083985, 0.04321465682983398, 0.043002239227294924, 0.042633342742919925, 0.042496414184570314, 0.04256668853759766, 0.04279526519775391, 0.042756481170654295, 0.04271923065185547, 0.042729503631591795, 0.04270646286010742, 0.04288095855712891, 0.04279177474975586, 0.042790912628173826, 0.04281145477294922, 0.04266521453857422, 0.04273174285888672, 0.042899585723876955, 0.0429567985534668, 0.042823936462402346, 0.04281068801879883, 0.04300377655029297, 0.04364550399780273, 0.04308412933349609, 0.04308992004394531, 0.04301004791259765, 0.042917728424072266, 0.04279721450805664, 
0.0430489616394043, 0.043007713317871094, 0.04280275344848633, 0.04295276641845703, 0.0429532470703125, 0.042882686614990236, 0.0428175048828125, 0.04285699081420898, 0.04279500961303711, 0.042855712890625, 0.043074241638183595, 0.043216705322265625, 0.043065792083740236, 0.04304873657226563, 0.04306041717529297, 0.043031360626220705, 0.043128833770751954, 0.04419350433349609, 0.04348956680297852, 0.04332966232299805, 0.04316697692871094, 0.04301919937133789, 0.0431715202331543, 0.042898944854736325, 0.043132606506347655, 0.04330780792236328, 0.0432619857788086, 0.04324687957763672, 0.04305145645141602, 0.043297054290771485, 0.04340031814575195, 0.04327459335327148, 0.04326863861083984, 0.04328268814086914, 0.04332108688354492, 0.04336435317993164, 0.0431957778930664, 0.04283660888671875, 0.04231913757324219, 0.04239206314086914, 0.04243206405639648, 0.042482177734375, 0.0425533447265625, 0.04277664184570312, 0.042834014892578126, 0.04274774551391602, 0.04260265731811524, 0.04266937637329102, 0.04282575988769531, 0.04310287857055664, 0.042694625854492185, 0.04273696136474609, 0.043218944549560545, 0.04279779052734375, 0.04319660949707031, 0.04362630462646484, 0.043251041412353516, 0.043167999267578125, 0.04304838562011719, 0.043095008850097656, 0.04307331085205078, 0.04298160171508789, 0.0429969596862793, 0.04287321472167969, 0.04275024032592773, 0.04281753540039063, 0.04312876892089844, 0.04297900772094727, 0.04315014266967773, 0.042888671875, 0.043253982543945316, 0.04305481719970703, 0.04305871963500976, 0.043036800384521484, 0.042880672454833985, 0.042964256286621094, 0.04297849655151367, 0.04314940643310547, 0.04318044662475586, 0.04319551849365234, 0.04349862289428711, 0.043412830352783205, 0.04341088104248047, 0.04325270462036133, 0.043256961822509765, 0.04315865707397461, 0.04337638473510742, 0.04313702392578125, 0.04334796905517578, 0.04368384170532227, 0.04324758529663086, 0.043313087463378905, 0.04326972961425781, 0.04323158264160156, 0.043440288543701175, 0.04345436859130859, 0.04344841766357422, 0.04338687896728516, 0.04367715072631836, 0.04334787368774414, 0.04311075210571289, 0.042593505859375, 0.04251305770874023, 0.042661823272705075, 0.04260636901855469, 0.04242179107666016, 0.04263417434692383, 0.042683712005615236, 0.04258652877807617, 0.042522911071777345, 0.0426618881225586, 0.042756000518798826, 0.04266153717041016, 0.04259468841552734, 0.042675777435302736, 0.04291411209106445, 0.042942657470703124, 0.04291977691650391, 0.043167903900146486, 0.04277657699584961, 0.04285001754760742, 0.04290793609619141, 0.04303462219238281, 0.043001697540283206, 0.04287913513183594, 0.04297727966308594, 0.04442844772338867, 0.042859390258789064, 0.04270441436767578, 0.04278726577758789, 0.0430931510925293, 0.042859649658203124, 0.04306118392944336, 0.043171104431152345, 0.043037185668945314, 0.04282755279541016, 0.043004127502441404, 0.04309987258911133, 0.04320489501953125, 0.04312406539916992, 0.04349721527099609, 0.04345529556274414, 0.04336044692993164, 0.04332278442382813, 0.043269630432128905, 0.04328339385986328, 0.043327327728271484, 0.04340943908691406, 0.0431943359375, 0.043412704467773434, 0.04329779052734375, 0.043278335571289066, 0.04310220718383789, 0.04325564956665039, 0.04329276657104492, 0.04321481704711914, 0.043308353424072264, 0.04355088043212891, 0.0433834228515625, 0.043474945068359375, 0.0433889274597168, 0.043325439453125, 0.04362444686889649, 0.04311654281616211, 0.042725025177001955, 0.042590015411376955, 0.04262351989746094, 0.04250419235229492, 
0.04257763290405273, 0.042839710235595706, 0.04266684722900391, 0.042575649261474606, 0.042739742279052736, 0.04283107376098633, 0.04271590423583985, 0.04298947143554688, 0.04294819259643555, 0.04285852813720703, 0.04283644866943359, 0.04282969665527344, 0.04281375885009766, 0.043206462860107424, 0.0431756477355957, 0.04322323226928711, 0.04339251327514648, 0.043753726959228516, 0.043211105346679685, 0.04310630416870117, 0.04313497543334961, 0.043286529541015625, 0.04311859130859375, 0.04303366470336914, 0.04313183975219727, 0.04319801712036133, 0.04310265731811523, 0.043259166717529295, 0.04303702545166015, 0.04301862335205078, 0.0431957778930664, 0.043294368743896486, 0.04334793472290039, 0.043321441650390625, 0.04332588958740234, 0.04345849609375, 0.043743873596191404, 0.04394780731201172, 0.043706497192382815, 0.0435868148803711, 0.04345727920532227, 0.04352380752563476, 0.043716896057128904, 0.043431934356689454, 0.043431198120117184, 0.04325449752807617, 0.0434442253112793, 0.043453857421875, 0.043378913879394534, 0.04325619125366211, 0.04315545654296875, 0.043235294342041014, 0.0432918701171875, 0.04321260833740234, 0.043221343994140626, 0.043482975006103514, 0.04373503875732422, 0.0433438720703125, 0.042869857788085934, 0.04244303894042969, 0.04281126403808594, 0.04281532669067383, 0.042861473083496096, 0.04284604644775391, 0.042551647186279296, 0.042618942260742185, 0.042727169036865235, 0.04256288146972656, 0.04246953582763672, 0.04285699081420898, 0.04296908950805664, 0.04283801651000976, 0.042663040161132815, 0.04285494232177734, 0.04300425720214844, 0.04329987335205078, 0.04294755172729492, 0.0431077766418457, 0.04337107086181641, 0.04399257659912109, 0.043065185546875, 0.04302499389648438, 0.04297324752807617, 0.04286259078979492, 0.04294451141357422, 0.042971134185791016, 0.04294179153442383, 0.04327596664428711, 0.04322617721557617, 0.043224990844726564, 0.04304620742797852, 0.04308652877807617, 0.04316908645629883, 0.04309676742553711, 0.04315321731567383, 0.043198143005371094, 0.04366592025756836, 0.04333158493041992, 0.043286529541015625, 0.0433807373046875, 0.04355072021484375, 0.043837440490722655, 0.0436317138671875, 0.043598751068115234, 0.043493377685546876, 0.043464702606201173, 0.04350339126586914, 0.04335164642333984, 0.043372318267822264, 0.04350553512573242, 0.04329964828491211, 0.043323585510253906, 0.04344828796386719, 0.04356095886230469, 0.04336640167236328, 0.04333932876586914, 0.04326649475097656, 0.0434031982421875, 0.04347475051879883, 0.043493824005126955, 0.0434466552734375, 0.042961406707763675, 0.04258816146850586, 0.04237491226196289, 0.0425904312133789, 0.04287472152709961, 0.04278620910644531, 0.042750751495361325, 0.042902751922607424, 0.04293097686767578, 0.042601631164550784, 0.042455230712890625, 0.0426420783996582, 0.04262092971801758, 0.042947681427001956, 0.04277945709228516, 0.04466201782226562, 0.043037151336669924, 0.04289984130859375, 0.043076961517333985, 0.04303699111938476, 0.04336470413208008, 0.043140350341796876, 0.04316032028198242, 0.04303804779052734, 0.04292470550537109, 0.04273775863647461, 0.04274319839477539, 0.04287744140625, 0.04291743850708008, 0.04319891357421875, 0.04297430419921875, 0.04307241439819336, 0.04291961669921875, 0.042983745574951174, 0.04280723190307617, 0.042842174530029295, 0.043081310272216795, 0.04309852981567383, 0.04304451370239258, 0.04316195297241211, 0.0431366081237793, 0.043380767822265624, 0.043302879333496094, 0.043356449127197265, 0.04349932861328125, 0.04349363327026367, 0.04365727996826172, 
0.04440883255004883, 0.043609569549560544, 0.04342335891723633, 0.04326902389526367, 0.04323433685302734, 0.04366640090942383, 0.04363241577148438, 0.043221214294433596, 0.04323657608032227, 0.04325046539306641, 0.043358207702636715, 0.04333158493041992, 0.0432632942199707, 0.04330361557006836, 0.04342761611938477, 0.04371388626098633, 0.043098785400390624, 0.042563201904296875, 0.04244057464599609, 0.042503841400146486, 0.04265804672241211, 0.04280790328979492, 0.04281939315795898, 0.042627391815185545, 0.04263718414306641, 0.04277248001098633, 0.042700542449951175, 0.04281756973266602, 0.04277881622314453, 0.04265577697753906, 0.042567680358886716, 0.042668094635009764, 0.04280313491821289, 0.0429936637878418, 0.04319641494750977, 0.04321033477783203, 0.04310467147827148, 0.04322880172729492, 0.04341798400878906, 0.04323638534545898, 0.043170753479003905, 0.04297846221923828, 0.042953601837158205, 0.04297040176391602, 0.042947296142578126, 0.04293017578125, 0.042831871032714845, 0.04287692642211914, 0.042901344299316406, 0.04303478240966797, 0.04291788864135742, 0.042891265869140625, 0.042937408447265624, 0.0429249267578125, 0.04334188842773438, 0.04316928100585937, 0.043332096099853515, 0.04335142517089844, 0.043520641326904294, 0.0436894416809082, 0.04355491256713867, 0.043565441131591796, 0.04345043182373047, 0.04344416046142578, 0.04338070297241211, 0.043286624908447265, 0.04347417449951172, 0.04339993667602539, 0.043493377685546876, 0.04340256118774414, 0.043401920318603515, 0.04323942565917969, 0.04311977767944336, 0.043397441864013675, 0.043363998413085934, 0.04349244689941406, 0.04340038299560547, 0.04480051040649414, 0.04347292709350586, 0.042930912017822266, 0.04253804779052734, 0.042633567810058594, 0.04266649627685547, 0.0425780143737793, 0.042784160614013675, 0.04270755386352539, 0.042634750366210936, 0.04253676986694336, 0.042662593841552736, 0.04255539321899414, 0.042705982208251954, 0.04276319885253906, 0.04299494552612305, 0.04333849716186523, 0.04275350570678711, 0.04280732727050781, 0.042791263580322265, 0.04305276870727539, 0.04317193603515625, 0.04294879913330078, 0.04318838500976562, 0.043177982330322266, 0.042995712280273435, 0.04293340682983399, 0.042797920227050784, 0.04273347091674805, 0.04314064025878906, 0.04302905654907226, 0.04290873718261719, 0.04298438262939453, 0.04276838302612305, 0.0427762565612793, 0.042974910736083984, 0.04303756713867188, 0.04307235336303711, 0.04315180969238281, 0.04300377655029297, 0.04308643341064453, 0.042954177856445314, 0.04308230209350586, 0.04315955352783203, 0.04314012908935547, 0.04346364974975586, 0.043495712280273435, 0.0433573112487793, 0.043315486907958986, 0.04331552124023438, 0.043292671203613284, 0.04334592056274414, 0.04326950454711914, 0.04332112121582031, 0.043192161560058596, 0.04305926513671875, 0.04319865417480469, 0.043346145629882815, 0.043248126983642575, 0.04320604705810547, 0.043460704803466796, 0.04378883361816406, 0.0435939826965332, 0.04356995010375977, 0.04345804977416992, 0.042869056701660156, 0.0426148796081543, 0.0424403190612793, 0.04246380615234375, 0.042390785217285155, 0.042557247161865236, 0.04263151931762695, 0.04278953552246094, 0.04264748764038086, 0.04280934524536133, 0.04256668853759766, 0.042468318939208986, 0.04252396774291992, 0.04256217575073242, 0.04256774520874024, 0.04284323120117187, 0.04304169464111328, 0.04309158325195313, 0.043106689453125, 0.043081729888916016, 0.04314112091064453, 0.042905025482177735, 0.04289388656616211, 0.04310220718383789, 0.043804672241210936, 
0.043069438934326174, 0.043038593292236325, 0.04280854415893555, 0.042844959259033207, 0.0428600959777832, 0.04294303894042969, 0.04299161529541016, 0.042909664154052736, 0.042926048278808596, 0.043347808837890626, 0.04315286254882812, 0.04312345504760742, 0.04296860885620117, 0.04306172943115234, 0.04319843292236328, 0.043292705535888674, 0.04332876968383789, 0.043289470672607425, 0.043299999237060544, 0.04336304092407227, 0.04334511947631836, 0.04338358306884765, 0.04333772659301758, 0.04340073776245117, 0.04325542449951172, 0.043141983032226563, 0.0432375373840332, 0.04329635238647461, 0.0432474250793457, 0.04322758483886719, 0.04330086517333984, 0.043630592346191405, 0.043351455688476564, 0.04330723190307617, 0.04334630584716797, 0.043235328674316405, 0.043541568756103516]",tokens/s,23.161633067246036,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,874.82368,601.751552,0.0,199.22944,181.241344,s,1,7.609712890625,7.609712890625,0.0,7.609712890625,7.609712890625,7.609712890625,7.609712890625,[7.609712890625],,kWh,1.523218246665389e-05,1.673019634453695e-06,4.512503609999108e-06,2.1417705711106694e-05,,MB,1331.22048,658.374656,0.0,241.17248,225.803264,s,16,0.20963401603698734,0.013102126002311705,6.758385017720097e-05,0.013090928077697754,0.013177184104919432,0.013189184188842773,0.01320124168395996,"[0.01291977596282959, 0.013167200088500976, 0.013060511589050293, 0.013204256057739257, 0.013160927772521973, 0.01305583953857422, 0.013170207977294922, 0.013070752143859863, 0.013088831901550293, 0.013119104385375977, 0.013043071746826172, 0.01308505630493164, 0.013129440307617188, 0.013184160232543944, 0.013081855773925781, 0.013093024253845216]",tokens/s,19538.81377379763,kWh,3.869660549458439e-07,4.26755320812715e-08,2.1465755457292492e-07,6.442991416000403e-07,tokens/kWh,397330965.49570817,MB,1370.812416,673.05472,0.0,255.852544,225.805824,s,16,9.79578155517578,0.6122363471984864,0.001720885806545329,0.612547576904297,0.6142755737304688,0.6150134277343751,0.615147314453125,"[0.6149576416015625, 0.6123853149414062, 0.6119273071289062, 0.6151807861328125, 0.6099966430664062, 0.6135643920898437, 0.6131492309570312, 0.613593505859375, 0.6134678955078126, 0.6127098388671876, 0.6115953369140625, 0.6128223876953125, 0.6104877319335937, 0.6095772705078125, 0.6095999755859375, 0.6107662963867188]",tokens/s,102.90143714642193,kWh,1.8656132445834464e-05,2.057373956881256e-06,6.97149402767747e-06,2.7685000430393195e-05,tokens/kWh,2275600.470312337,,s,1008,9.788004720687862,0.009710322143539549,0.00015099873226139787,0.009684304237365722,0.009800239849090576,0.009897614479064941,0.010486351537704459,"[0.009378527641296387, 0.00971078395843506, 0.00965283203125, 0.009756352424621582, 0.009669216156005859, 0.009766912460327149, 0.00964566421508789, 0.009698719978332519, 0.009649151802062989, 0.009621503829956055, 0.009695232391357422, 
0.0096910400390625, 0.00975267219543457, 0.00973414421081543, 0.009704959869384766, 0.009752608299255372, 0.009705727577209472, 0.009859104156494141, 0.00967420768737793, 0.009709504127502442, 0.009668831825256347, 0.00970143985748291, 0.009792223930358886, 0.009676639556884765, 0.009742591857910156, 0.009641183853149413, 0.009676639556884765, 0.011713408470153808, 0.01059609603881836, 0.009736191749572755, 0.009665727615356445, 0.009684991836547852, 0.009711808204650878, 0.009789088249206542, 0.009725184440612792, 0.009696703910827636, 0.009679488182067871, 0.009717408180236817, 0.009641792297363281, 0.009660927772521973, 0.009657952308654785, 0.010162079811096191, 0.009693056106567384, 0.009777471542358399, 0.009722975730895997, 0.009743264198303223, 0.00970137596130371, 0.009694879531860351, 0.009725855827331544, 0.009676480293273925, 0.009689855575561523, 0.009645824432373046, 0.009748224258422852, 0.009773568153381347, 0.009667872428894043, 0.009752863883972169, 0.009667167663574219, 0.009692352294921875, 0.009642848014831543, 0.009687199592590331, 0.009688799858093261, 0.00973423957824707, 0.009786496162414551, 0.009465855598449707, 0.009729887962341309, 0.0097009916305542, 0.009723808288574219, 0.009672927856445312, 0.009810015678405762, 0.009774944305419923, 0.009798015594482422, 0.009827872276306153, 0.009826144218444825, 0.009812704086303711, 0.009777183532714845, 0.00976863956451416, 0.009724191665649414, 0.009695455551147461, 0.009778271675109864, 0.009743103981018067, 0.009729984283447266, 0.009713760375976562, 0.009707712173461914, 0.009708352088928222, 0.009720895767211914, 0.009722784042358398, 0.00971872043609619, 0.009666560173034668, 0.009641983985900878, 0.00963817596435547, 0.00963145637512207, 0.009703424453735352, 0.009708864212036133, 0.009672639846801757, 0.009675616264343262, 0.009661503791809082, 0.009732288360595703, 0.009887840270996094, 0.009783103942871093, 0.009675519943237305, 0.009672575950622559, 0.009689120292663575, 0.00963798427581787, 0.009711615562438965, 0.009682559967041015, 0.009716095924377442, 0.009713664054870605, 0.00969324779510498, 0.009670080184936524, 0.009652735710144043, 0.009758720397949219, 0.009691136360168457, 0.009711615562438965, 0.009705408096313477, 0.009719391822814942, 0.009708000183105468, 0.009672703742980958, 0.009711808204650878, 0.009697216033935547, 0.009725055694580077, 0.00967347240447998, 0.009685088157653808, 0.009685088157653808, 0.009785152435302735, 0.009702848434448243, 0.00970400047302246, 0.009390111923217773, 0.009686495780944824, 0.009698016166687012, 0.009662431716918945, 0.00973516845703125, 0.009737024307250977, 0.009739551544189453, 0.009670623779296875, 0.009603167533874512, 0.009681568145751954, 0.009631039619445801, 0.009681599617004395, 0.009684991836547852, 0.009686816215515136, 0.00983420753479004, 0.009763551712036132, 0.009785120010375976, 0.009759072303771973, 0.009799327850341797, 0.009836544036865234, 0.009844799995422364, 0.009795167922973632, 0.009740863800048828, 0.009652000427246094, 0.009713248252868652, 0.009722271919250488, 0.009625920295715332, 0.00967795181274414, 0.009716287612915039, 0.009727487564086914, 0.009758720397949219, 0.009705599784851074, 0.009746975898742676, 0.009657855987548827, 0.00969257640838623, 0.009759807586669922, 0.009687071800231933, 0.00971555233001709, 0.009772895812988281, 0.009674336433410645, 0.009687295913696289, 0.009727968215942382, 0.00971401596069336, 0.009678144454956055, 0.009713567733764649, 0.009666720390319824, 0.009687583923339843, 
0.009672800064086913, 0.009692352294921875, 0.009687135696411133, 0.00969929599761963, 0.009710335731506347, 0.009733535766601563, 0.009683039665222168, 0.009654784202575683, 0.009742048263549805, 0.00972390365600586, 0.009642271995544434, 0.009713664054870605, 0.0096495361328125, 0.009709983825683595, 0.009686431884765624, 0.009675583839416503, 0.009451807975769043, 0.009762944221496582, 0.009787103652954102, 0.009836895942687988, 0.009799615859985351, 0.009801695823669434, 0.009777183532714845, 0.009736191749572755, 0.010002623558044434, 0.00970956802368164, 0.00978105640411377, 0.00979535961151123, 0.00991215991973877, 0.009752991676330567, 0.009734111785888672, 0.009791040420532226, 0.009771455764770507, 0.009762975692749023, 0.009639360427856446, 0.009750944137573242, 0.009750304222106933, 0.00972208023071289, 0.009676192283630371, 0.009680543899536133, 0.009804608345031739, 0.009667840003967285, 0.009718560218811035, 0.009699328422546387, 0.00965334415435791, 0.00970684814453125, 0.009673600196838378, 0.009771039962768555, 0.009626367568969726, 0.009756735801696777, 0.009678784370422364, 0.009735296249389649, 0.009764063835144042, 0.009780991554260253, 0.009725855827331544, 0.009684703826904296, 0.009764927864074707, 0.009641792297363281, 0.009669183731079101, 0.009623295783996582, 0.00961900806427002, 0.009695775985717773, 0.009668607711791993, 0.009635968208312988, 0.009651200294494629, 0.010290047645568848, 0.010686464309692383, 0.010522527694702149, 0.00970259189605713, 0.009735072135925293, 0.009750528335571289, 0.009696288108825684, 0.00983465576171875, 0.009679840087890626, 0.009624928474426269, 0.009593343734741211, 0.009682559967041015, 0.009613247871398925, 0.00968883228302002, 0.009385984420776367, 0.009863167762756348, 0.009660415649414063, 0.00972332763671875, 0.009640159606933594, 0.009556127548217773, 0.009639776229858399, 0.009627840042114259, 0.009641663551330566, 0.009690624237060547, 0.009669440269470215, 0.009661760330200195, 0.00967148780822754, 0.0097542724609375, 0.009692895889282226, 0.009729951858520507, 0.009786111831665039, 0.00962713623046875, 0.009658368110656738, 0.009660927772521973, 0.009902079582214356, 0.009654272079467773, 0.00964844799041748, 0.009598943710327148, 0.009576512336730956, 0.009609919548034668, 0.009632736206054688, 0.00960256004333496, 0.009620991706848145, 0.009593855857849122, 0.009633664131164552, 0.009600255966186523, 0.009620351791381836, 0.009866975784301757, 0.0096527042388916, 0.009826208114624023, 0.00975436782836914, 0.009658047676086426, 0.009640416145324708, 0.00978121566772461, 0.0097357759475708, 0.0097489595413208, 0.009622912406921386, 0.009645759582519531, 0.009718879699707032, 0.009641632080078125, 0.009644191741943359, 0.009938943862915038, 0.00965452766418457, 0.009625632286071778, 0.009635231971740722, 0.00964236831665039, 0.009599072456359863, 0.009652064323425294, 0.0096046724319458, 0.009628095626831056, 0.009736191749572755, 0.0097958402633667, 0.009848896026611329, 0.00964303970336914, 0.009634559631347656, 0.009631520271301269, 0.009652352333068848, 0.009379167556762696, 0.009737088203430175, 0.009769280433654786, 0.009727359771728516, 0.009683520317077636, 0.009702976226806641, 0.00990227222442627, 0.00966204833984375, 0.009707903861999512, 0.009657759666442872, 0.009663104057312012, 0.009713664054870605, 0.00972390365600586, 0.009684896469116211, 0.00970588779449463, 0.009662240028381348, 0.009731552124023438, 0.0097325439453125, 0.009672703742980958, 0.00963696002960205, 0.009741215705871583, 
0.009663871765136719, 0.009679488182067871, 0.009973183631896972, 0.009757247924804688, 0.010553695678710938, 0.010306719779968262, 0.010275551795959473, 0.009700960159301757, 0.009666303634643554, 0.009701343536376953, 0.00963811206817627, 0.009687295913696289, 0.009787391662597657, 0.009652223587036133, 0.009776448249816895, 0.009668864250183106, 0.00966256046295166, 0.009685343742370605, 0.00962723159790039, 0.009797183990478516, 0.00967750358581543, 0.009666720390319824, 0.009666848182678222, 0.009632543563842773, 0.00970963191986084, 0.009659520149230957, 0.009644191741943359, 0.009944671630859376, 0.009821184158325195, 0.009910271644592286, 0.009620479583740234, 0.009744383811950684, 0.009641983985900878, 0.009645792007446289, 0.009623776435852051, 0.009631808280944825, 0.009627936363220214, 0.009629407882690429, 0.00967024040222168, 0.009679519653320312, 0.00973299217224121, 0.00964851188659668, 0.009414655685424805, 0.009716927528381348, 0.009662816047668457, 0.009712096214294433, 0.009662464141845703, 0.00971731185913086, 0.009671104431152345, 0.010227711677551269, 0.009782719612121581, 0.009702015876770019, 0.009939904212951661, 0.00982528018951416, 0.009684991836547852, 0.009828255653381348, 0.009750816345214844, 0.009703231811523437, 0.00974847984313965, 0.009670528411865234, 0.009754752159118652, 0.009682944297790527, 0.00969315242767334, 0.009875712394714355, 0.009665727615356445, 0.00963747215270996, 0.009651200294494629, 0.009652383804321289, 0.00978927993774414, 0.009744383811950684, 0.009655712127685546, 0.009654879570007324, 0.009764863967895507, 0.009631584167480469, 0.009715871810913086, 0.00961945629119873, 0.009701312065124512, 0.009664575576782226, 0.010124768257141113, 0.009679360389709473, 0.009760800361633301, 0.009659711837768554, 0.009778047561645508, 0.009678848266601562, 0.009682751655578614, 0.009637791633605957, 0.009699584007263184, 0.009671680450439453, 0.00971072006225586, 0.009917951583862305, 0.009664640426635743, 0.009704575538635254, 0.00963868808746338, 0.00976905632019043, 0.009667840003967285, 0.00971452808380127, 0.009698687553405761, 0.009721664428710938, 0.00992131233215332, 0.009641087532043457, 0.009739328384399414, 0.009639552116394043, 0.009750752449035645, 0.00973209571838379, 0.009675840377807617, 0.00939680004119873, 0.009863167762756348, 0.009658271789550782, 0.009668704032897948, 0.009629856109619141, 0.009675968170166015, 0.009648799896240234, 0.009700960159301757, 0.009646495819091798, 0.009872511863708496, 0.009806464195251465, 0.00967903995513916, 0.00969257640838623, 0.009705696105957031, 0.009691712379455566, 0.009729920387268066, 0.009688384056091308, 0.00972275161743164, 0.009706368446350097, 0.009626560211181641, 0.009664511680603028, 0.009627967834472657, 0.009666272163391113, 0.010286144256591797, 0.011024736404418945, 0.009785792350769044, 0.009709823608398437, 0.009754495620727539, 0.009771007537841797, 0.00960643196105957, 0.009684703826904296, 0.009636863708496094, 0.009652223587036133, 0.009676799774169922, 0.009687040328979492, 0.009724032402038575, 0.009635552406311036, 0.00967404842376709, 0.009758591651916504, 0.009683648109436034, 0.00971395206451416, 0.009691136360168457, 0.009666560173034668, 0.00971571159362793, 0.009660415649414063, 0.009746432304382324, 0.009646080017089843, 0.00965993595123291, 0.009661087989807129, 0.0099585599899292, 0.009898655891418457, 0.009711615562438965, 0.009668095588684082, 0.009603584289550781, 0.00971776008605957, 0.009675040245056152, 0.009707167625427246, 0.009744447708129883, 
0.009701472282409668, 0.009771167755126953, 0.009907967567443847, 0.0096943359375, 0.009666751861572266, 0.009380767822265625, 0.009678239822387696, 0.009658975601196289, 0.009659456253051759, 0.00967910385131836, 0.009748191833496093, 0.009747679710388184, 0.009639936447143555, 0.009663519859313964, 0.009664287567138672, 0.009608127593994141, 0.009713664054870605, 0.009693087577819825, 0.009693280220031738, 0.009646080017089843, 0.009758175849914551, 0.00969372844696045, 0.009705599784851074, 0.00966748809814453, 0.009630144119262695, 0.009634559631347656, 0.009849760055541992, 0.009661151885986327, 0.009652576446533202, 0.009643839836120606, 0.009705632209777833, 0.009822303771972657, 0.009626751899719238, 0.009701631546020507, 0.00962598419189453, 0.010499872207641602, 0.01065187168121338, 0.010602496147155761, 0.00977468776702881, 0.009875071525573731, 0.009734272003173828, 0.00969382381439209, 0.00968502426147461, 0.009836095809936524, 0.009880000114440918, 0.009707072257995605, 0.009608927726745606, 0.009659104347229004, 0.009618656158447265, 0.009608063697814941, 0.009668831825256347, 0.009676128387451173, 0.009668959617614745, 0.009646080017089843, 0.00960102367401123, 0.009808128356933593, 0.009631487846374511, 0.009633088111877441, 0.009661120414733887, 0.009710847854614258, 0.009648480415344239, 0.009658464431762695, 0.009682432174682617, 0.009733216285705566, 0.009643808364868164, 0.00983836841583252, 0.009707327842712402, 0.009671008110046387, 0.009339808464050293, 0.009689087867736817, 0.009683039665222168, 0.009686079978942871, 0.009620320320129395, 0.00972390365600586, 0.009641311645507812, 0.009650400161743164, 0.009652576446533202, 0.009584735870361329, 0.009647808074951171, 0.009621824264526367, 0.00964025592803955, 0.009715456008911133, 0.009781184196472168, 0.009738240242004394, 0.009748543739318848, 0.00974841594696045, 0.009907808303833008, 0.009718527793884277, 0.009658271789550782, 0.009716608047485352, 0.009689984321594238, 0.009665535926818849, 0.009728863716125488, 0.009932415962219238, 0.009851424217224122, 0.009796607971191406, 0.009698304176330566, 0.009648127555847168, 0.009687040328979492, 0.009653471946716308, 0.009677599906921387, 0.009705471992492675, 0.00989568042755127, 0.009758048057556153, 0.009705471992492675, 0.009732224464416505, 0.009644736289978027, 0.009754719734191895, 0.009684991836547852, 0.010029024124145507, 0.009733407974243165, 0.009636608123779297, 0.00971132755279541, 0.009652128219604492, 0.009660256385803223, 0.009685279846191407, 0.009656000137329101, 0.009691712379455566, 0.009770976066589356, 0.009717791557312012, 0.009668607711791993, 0.009653951644897461, 0.009744704246520996, 0.009634143829345703, 0.009682368278503418, 0.00965766429901123, 0.0098721923828125, 0.009916095733642579, 0.010150303840637207, 0.009770400047302246, 0.009682815551757812, 0.009576128005981446, 0.00970751953125, 0.009728320121765137, 0.009761759757995606, 0.009677536010742187, 0.009668160438537597, 0.009760736465454101, 0.00967523193359375, 0.009692288398742676, 0.00964083194732666, 0.00982630443572998, 0.009670816421508788, 0.009631263732910155, 0.009699040412902833, 0.009736672401428222, 0.009750656127929688, 0.009727392196655273, 0.009639967918395996, 0.009672767639160156, 0.009708383560180663, 0.009737215995788574, 0.009630208015441894, 0.009680288314819336, 0.009648896217346192, 0.009613247871398925, 0.009686880111694336, 0.009713184356689454, 0.009698304176330566, 0.009755552291870117, 0.009654656410217285, 0.009666303634643554, 
0.009738592147827149, 0.00968735980987549, 0.009684096336364746, 0.00969974422454834, 0.009654751777648926, 0.00967632007598877, 0.009752287864685058, 0.009650943756103515, 0.009680895805358887, 0.009619392395019532, 0.009685248374938964, 0.009628767967224122, 0.009648223876953126, 0.009708095550537109, 0.009648192405700683, 0.00962326431274414, 0.00964022445678711, 0.00963913631439209, 0.009677599906921387, 0.01009663963317871, 0.009727999687194825, 0.009832448005676269, 0.009668607711791993, 0.009656160354614259, 0.009715295791625977, 0.00971014404296875, 0.00967302417755127, 0.009873087882995606, 0.009781248092651367, 0.009731648445129394, 0.009678751945495605, 0.009791071891784669, 0.009849472045898438, 0.00992972755432129, 0.009734368324279786, 0.010288064002990722, 0.009733792304992675, 0.00979372787475586, 0.009733375549316406, 0.009751296043395996, 0.010035200119018555, 0.00973020839691162, 0.009705599784851074, 0.009696224212646484, 0.009757535934448242, 0.009741600036621095, 0.00977779197692871, 0.009746784210205079, 0.00973910427093506, 0.009861951828002929, 0.009823488235473634, 0.00965503978729248, 0.009666560173034668, 0.009727999687194825, 0.00965129566192627, 0.009675616264343262, 0.009674240112304687, 0.009691712379455566, 0.009698752403259278, 0.009654911994934082, 0.009662752151489257, 0.009643679618835449, 0.00970787239074707, 0.009786016464233398, 0.009683967590332031, 0.009686271667480468, 0.009766752243041993, 0.009775487899780274, 0.009664159774780273, 0.009680959701538086, 0.009771488189697265, 0.009828672409057618, 0.009675968170166015, 0.009677663803100586, 0.00965993595123291, 0.009656800270080566, 0.009665632247924804, 0.009659168243408203, 0.0096014404296875, 0.00960854434967041, 0.009605504035949708, 0.009711808204650878, 0.009723711967468261, 0.010065312385559083, 0.00968992042541504, 0.009596672058105469, 0.00966374397277832, 0.00961411190032959, 0.009606656074523925, 0.009675104141235352, 0.009574048042297363, 0.009624064445495606, 0.00960524845123291, 0.009597824096679688, 0.00961023998260498, 0.009421216011047364, 0.009784704208374023, 0.009763456344604493, 0.01004748821258545, 0.009707232475280762, 0.009642271995544434, 0.009756159782409669, 0.009632224082946778, 0.009642016410827637, 0.010061823844909668, 0.00959062385559082, 0.009685152053833008, 0.009703136444091797, 0.009652511596679687, 0.009859199523925782, 0.009690912246704101, 0.009656415939331055, 0.00961945629119873, 0.009637887954711915, 0.009623871803283692, 0.009640928268432616, 0.009612000465393067, 0.009613311767578125, 0.009596927642822266, 0.00961235237121582, 0.009595840454101563, 0.009624671936035157, 0.009568511962890625, 0.009722528457641601, 0.009586784362792969, 0.009596832275390625, 0.009676799774169922, 0.009627584457397462, 0.009625663757324219, 0.00960905647277832, 0.009692671775817872, 0.009665184020996094, 0.00970963191986084, 0.009676959991455078, 0.009614751815795899, 0.009982208251953124, 0.009915840148925782, 0.009728704452514648, 0.009660160064697266, 0.009627903938293457, 0.00970137596130371, 0.009646080017089843, 0.009930720329284668, 0.009683135986328126, 0.00968892765045166, 0.009637887954711915, 0.00960102367401123, 0.00963327980041504, 0.009683327674865723, 0.009635968208312988, 0.009647456169128418, 0.009673376083374024, 0.00964025592803955, 0.009752256393432617, 0.009668607711791993, 0.009672703742980958, 0.009646080017089843, 0.00970531177520752, 0.009380448341369628, 0.009729184150695801, 0.009707615852355958, 0.009707551956176758, 0.009677536010742187, 
0.009702560424804687, 0.009599648475646972, 0.009625951766967773, 0.009840703964233399, 0.00963587188720703, 0.00974841594696045, 0.00965782356262207, 0.00967305564880371, 0.0096845121383667, 0.009745951652526855, 0.009655424118041992, 0.009714752197265626, 0.009652064323425294, 0.009587519645690918, 0.009599072456359863, 0.009655712127685546, 0.009624159812927247, 0.00960041618347168, 0.009545344352722168, 0.009612256050109864, 0.009605119705200196, 0.009629695892333985, 0.00959228801727295, 0.009553664207458496, 0.009602848052978516, 0.009675135612487793, 0.00959552001953125, 0.009574399948120118, 0.009578304290771484, 0.009641152381896972, 0.009607583999633788, 0.009623135566711426, 0.009624768257141113, 0.00957420825958252, 0.009682944297790527, 0.009794591903686523, 0.009710559844970703, 0.009597023963928223, 0.009582719802856444, 0.009625215530395508, 0.009607616424560548, 0.009588128089904785, 0.009584799766540527, 0.009582719802856444, 0.009702783584594726, 0.00966329574584961, 0.009594623565673827, 0.009864352226257323, 0.010728384017944336, 0.009596351623535157, 0.009574975967407227, 0.009573984146118163, 0.009621919631958008, 0.009598976135253906, 0.009584383964538574, 0.009599231719970703, 0.009612959861755371, 0.010572159767150879, 0.009334912300109863, 0.00972531223297119, 0.009654784202575683, 0.009666144371032715, 0.009703359603881835, 0.009662943840026856, 0.009705471992492675, 0.009758912086486816, 0.009710559844970703, 0.009648032188415527, 0.009720704078674317, 0.009655455589294433, 0.009657247543334961, 0.009694560050964356, 0.009644031524658203, 0.009939519882202149, 0.009871359825134277, 0.009735520362854005, 0.009808992385864258, 0.00973910427093506, 0.009677696228027343, 0.009619199752807617, 0.010004192352294923, 0.009644512176513673, 0.009670047760009766, 0.009637920379638672, 0.009677375793457031, 0.00965552043914795, 0.009620320320129395, 0.009657376289367676, 0.009622431755065919, 0.009655360221862794, 0.009654560089111329, 0.009576543807983399, 0.00970793628692627, 0.009612575531005859, 0.009775360107421876, 0.009703424453735352, 0.009699584007263184, 0.009634400367736816, 0.009629471778869629, 0.009654175758361817, 0.009575776100158691, 0.009642623901367187, 0.009602815628051757, 0.009595264434814453, 0.009809920310974121, 0.00961740779876709, 0.009614656448364258, 0.009646783828735352, 0.00961945629119873, 0.009629471778869629, 0.009692640304565429, 0.009658623695373535, 0.009624064445495606, 0.009590784072875976, 0.009613311767578125, 0.00962559986114502, 0.009729824066162109, 0.009619680404663086, 0.009607040405273437, 0.009592960357666016, 0.009588319778442383, 0.009335776329040528, 0.009822208404541016, 0.009659744262695313, 0.009716352462768555, 0.009672672271728515, 0.009618528366088867, 0.00965452766418457, 0.0096428804397583, 0.009650176048278808, 0.009770175933837891, 0.009646464347839355, 0.00968876838684082, 0.009615967750549317, 0.00962764835357666, 0.009736191749572755, 0.009815423965454102, 0.009763232231140137, 0.00963532829284668, 0.009665216445922852, 0.00967683219909668, 0.00969651222229004, 0.009640128135681152, 0.009808064460754395, 0.009743935585021972, 0.00967353630065918, 0.009694239616394044, 0.009630687713623047, 0.00958291244506836, 0.00967033576965332, 0.009763968467712403, 0.00966540813446045, 0.009633024215698243, 0.009656224250793457, 0.009644895553588868, 0.009691136360168457, 0.009750528335571289, 0.009667936325073243, 0.009695679664611816, 0.009652447700500489, 0.009635456085205078, 0.00962716770172119, 0.009593855857849122, 
0.009752415657043456, 0.009681920051574706, 0.009651424407958985, 0.00968172836303711, 0.009662655830383301, 0.009691935539245605, 0.009567808151245118, 0.009683615684509277, 0.00964185619354248, 0.009648032188415527, 0.009653759956359862, 0.009639904022216797, 0.009726719856262207, 0.009750528335571289, 0.010079168319702148, 0.00971452808380127, 0.009762816429138184, 0.009699328422546387, 0.009695232391357422, 0.009984224319458009, 0.009682720184326171]",tokens/s,102.9831951214222,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 1015, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 840, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 467, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 397, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 252, in _flash_attention_forward attn_output_unpad = flash_attn_varlen_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 1124, in flash_attn_varlen_func return FlashAttnVarlenFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 620, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_varlen_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 90, in _flash_attn_varlen_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.varlen_fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = 
backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,7346.364416,8048.738304,0.0,7646.216192,7627.584,s,1,13.3150380859375,13.3150380859375,0.0,13.3150380859375,13.3150380859375,13.3150380859375,13.3150380859375,[13.3150380859375],,kWh,0.00017400211561249154,1.918628030314691e-05,5.574587793000241e-05,0.00024893427384564085,,MB,1640.206336,8732.409856,0.0,8315.20768,8191.863296,s,10,3.347322021484375,0.3347322021484375,0.001556680523422081,0.33537840270996094,0.3357929138183594,0.3359058258056641,0.3359961553955078,"[0.3356960754394531, 0.3349137878417969, 0.33501318359375, 0.3321338500976562, 0.3312918701171875, 0.3352266845703125, 0.335767822265625, 0.33553012084960937, 0.33572988891601563, 0.33601873779296876]",tokens/s,764.7904753617831,kWh,9.794899431527078e-06,1.0802078404072887e-06,6.506495945933499e-06,1.7381603217867866e-05,tokens/kWh,14728215.619192032,MB,1645.154304,9046.982656,0.0,8629.78048,8480.067584,s,10,26.841802001953123,2.6841802001953123,0.004277086541023966,2.6841704101562502,2.6892189208984374,2.689655847167969,2.690005388183594,"[2.676899169921875, 2.67719921875, 2.683130126953125, 2.683345458984375, 2.683413818359375, 2.684927001953125, 2.68529443359375, 2.689121826171875, 2.688378173828125, 2.6900927734375]",tokens/s,23.47085340820853,kWh,7.84914445972188e-05,8.657679412901368e-06,5.208691203986565e-05,0.00013923603604998585,tokens/kWh,452469.0718527993,,s,630,26.833035770416267,0.042592120270502,0.00040777624677249474,0.04256488037109375,0.04314727554321289,0.043258581733703616,0.043506920623779304,"[0.041979839324951175, 0.04193862533569336, 0.04200688171386719, 0.04189801788330078, 0.04225843048095703, 0.04190003204345703, 0.04186246490478516, 0.04202089691162109, 0.041920352935791015, 0.04185990524291992, 0.041909408569335935, 0.04188655853271484, 0.041972927093505856, 0.04208108901977539, 0.04220284652709961, 0.042176799774169924, 0.04210019302368164, 0.042039329528808594, 0.04239411163330078, 0.04243046569824219, 0.042385406494140625, 0.04230144119262695, 0.04219062423706055, 0.04204886245727539, 0.0422716178894043, 0.04234649658203125, 0.042264575958251956, 0.04257510375976563, 0.042300159454345704, 0.04242950439453125, 0.04258092880249023, 0.042458560943603514, 0.04245145416259766, 0.04254422378540039, 0.042619873046875, 0.042403839111328126, 0.04258611297607422, 0.042641407012939454, 0.04274288177490235, 0.04248463821411133, 0.04252467346191406, 0.04277657699584961, 0.04271513748168945, 0.0429356803894043, 0.04311286544799805, 0.042890750885009765, 0.042938144683837894, 0.04277139282226562, 0.04274720001220703, 0.042584766387939454, 0.04261004638671875, 0.04280793762207031, 0.04308582305908203, 0.04299980926513672, 0.043052223205566405, 0.04279993438720703, 0.04271084976196289, 0.04289932632446289, 
0.04298681640625, 0.04295167922973633, 0.04302438354492188, 0.043225086212158204, 0.043218944549560545, 0.04233830261230469, 0.04193833541870117, 0.04205014419555664, 0.04183635330200195, 0.04186438369750976, 0.04182710266113281, 0.04189795303344727, 0.041742591857910155, 0.04171980667114258, 0.04184473419189453, 0.042090496063232424, 0.04208822250366211, 0.04202313613891601, 0.04237107086181641, 0.042352638244628905, 0.04231167984008789, 0.042526241302490234, 0.04223433685302734, 0.04215193557739258, 0.04211004638671875, 0.04225276947021484, 0.04225606536865235, 0.04223862457275391, 0.04254710388183594, 0.04244908905029297, 0.042239742279052736, 0.042194942474365234, 0.0422413444519043, 0.04228396987915039, 0.04226662445068359, 0.04243046569824219, 0.042420223236083986, 0.042434558868408204, 0.04272742462158203, 0.04263228988647461, 0.042554271697998046, 0.04244883346557617, 0.0427108154296875, 0.04269903945922852, 0.04295475387573242, 0.0429854736328125, 0.04264249420166016, 0.042576831817626955, 0.04261215972900391, 0.042692161560058596, 0.04270332717895508, 0.04267200088500977, 0.04267865753173828, 0.04326607894897461, 0.043159103393554686, 0.04304966354370117, 0.04287065505981445, 0.042778560638427734, 0.04292323303222656, 0.042724319458007816, 0.042616256713867186, 0.042821887969970704, 0.042977535247802734, 0.04300191879272461, 0.043261951446533206, 0.043515903472900394, 0.04313494491577148, 0.04292816162109375, 0.04245100784301758, 0.04212108612060547, 0.04195974349975586, 0.04179190444946289, 0.04225619125366211, 0.04223980712890625, 0.04196799850463867, 0.04196460723876953, 0.04225657653808594, 0.04205644989013672, 0.04207001495361328, 0.04232787322998047, 0.04222355270385742, 0.04224844741821289, 0.042387454986572266, 0.04229324722290039, 0.04220883178710937, 0.042495582580566404, 0.04279177474975586, 0.04236492919921875, 0.04222908782958985, 0.04226934432983399, 0.042305374145507814, 0.04241628646850586, 0.04221667098999023, 0.04228774261474609, 0.04260192108154297, 0.04251849746704101, 0.04242099380493164, 0.04243619155883789, 0.042368671417236325, 0.04255001449584961, 0.04243251037597656, 0.04243865585327149, 0.04252671813964844, 0.04266582489013672, 0.04319427108764649, 0.042991455078125, 0.042819999694824216, 0.04261833572387695, 0.042618976593017575, 0.04265788650512695, 0.04267852783203125, 0.04278486251831055, 0.04295475387573242, 0.043242782592773435, 0.04305379104614258, 0.04289497756958008, 0.04307187271118164, 0.04287075042724609, 0.04269875335693359, 0.042667552947998046, 0.042813953399658204, 0.04334796905517578, 0.04319232177734375, 0.04295987319946289, 0.04267926406860351, 0.04270012664794922, 0.04279571151733398, 0.042997760772705076, 0.04308281707763672, 0.04309670257568359, 0.04345888137817383, 0.043087871551513675, 0.042487808227539066, 0.04213676834106445, 0.04196435165405273, 0.042000385284423826, 0.04186675262451172, 0.042231647491455075, 0.04203955078125, 0.04193072128295899, 0.04184108734130859, 0.041893280029296875, 0.041934497833251955, 0.042116031646728516, 0.04265071868896484, 0.042552223205566404, 0.04224409484863281, 0.042069950103759766, 0.04216019058227539, 0.042172416687011716, 0.04225843048095703, 0.04215398406982422, 0.04220905685424805, 0.04233443069458008, 0.04256358337402344, 0.042585121154785154, 0.04253494262695313, 0.04270995330810547, 0.042708992004394535, 0.042436607360839845, 0.042471424102783206, 0.042657791137695314, 0.04254227066040039, 0.0425109748840332, 0.04260678482055664, 0.04246473693847656, 0.04305964660644531, 
0.04288111877441406, 0.042614463806152345, 0.042365249633789064, 0.04230144119262695, 0.04271104049682617, 0.04271308898925781, 0.04297100830078125, 0.04278607940673828, 0.0427856330871582, 0.043165630340576175, 0.04310185623168945, 0.042705150604248045, 0.04282329559326172, 0.042807422637939456, 0.04283184051513672, 0.04310800170898438, 0.043162399291992185, 0.04298886489868164, 0.042979808807373045, 0.04289945602416992, 0.043171199798583984, 0.04317862319946289, 0.04312067031860352, 0.04340118408203125, 0.04302175903320313, 0.0427874870300293, 0.04273936080932617, 0.04195884704589844, 0.042334110260009765, 0.04220755386352539, 0.042052959442138674, 0.04222159957885742, 0.04223654556274414, 0.042273120880126955, 0.04237516784667969, 0.04217641448974609, 0.0421561279296875, 0.042209121704101564, 0.0419780158996582, 0.04213350296020508, 0.042024959564208986, 0.042075553894042966, 0.04225289535522461, 0.042297344207763675, 0.04237516784667969, 0.042237953186035154, 0.04222265625, 0.04234463882446289, 0.0424189453125, 0.04221887969970703, 0.04237910461425781, 0.04237548828125, 0.0422149772644043, 0.04236355209350586, 0.0423897590637207, 0.04262675094604492, 0.04278681564331055, 0.04255881500244141, 0.04257804870605469, 0.042583934783935545, 0.04259664154052734, 0.04249001693725586, 0.042564128875732424, 0.042608638763427735, 0.04257382583618164, 0.042821632385253904, 0.042700416564941404, 0.04254348754882813, 0.04252057647705078, 0.04265158462524414, 0.04253200149536133, 0.04273004913330078, 0.043353439331054684, 0.04299235153198242, 0.04280271911621094, 0.04269929504394531, 0.04287510299682617, 0.04291584014892578, 0.04275814437866211, 0.04299161529541016, 0.04337596893310547, 0.043412128448486326, 0.04326710510253906, 0.042902305603027345, 0.043071136474609376, 0.042885440826416016, 0.0433337287902832, 0.04370854568481446, 0.04301004791259765, 0.04303792190551758, 0.042523296356201175, 0.042229759216308595, 0.04205926513671875, 0.04224822235107422, 0.04213113784790039, 0.042253055572509766, 0.04205686569213867, 0.041963550567626955, 0.042135841369628904, 0.04232777786254883, 0.04219987106323242, 0.04233577728271484, 0.04222003173828125, 0.04237503814697265, 0.042197120666503905, 0.041990142822265625, 0.042004287719726564, 0.04197548675537109, 0.042146305084228515, 0.04221263885498047, 0.04266169738769531, 0.04258671951293945, 0.04247145462036133, 0.042508449554443356, 0.04236710357666015, 0.042420223236083986, 0.042439903259277344, 0.04245552062988281, 0.04239555358886719, 0.04246726226806641, 0.0428098258972168, 0.0426445426940918, 0.042447296142578125, 0.04242278289794922, 0.04266150283813477, 0.0430428466796875, 0.04280969619750977, 0.042590110778808594, 0.04284214401245117, 0.042767742156982425, 0.04265811157226562, 0.04253852844238281, 0.04254966354370117, 0.04290399932861328, 0.04298070526123047, 0.04292873764038086, 0.04302758407592774, 0.04285945510864258, 0.042813438415527344, 0.04272332763671875, 0.04269833755493164, 0.04296745681762695, 0.04310563278198242, 0.04299388885498047, 0.04317542266845703, 0.0432256965637207, 0.043068992614746095, 0.04382748794555664, 0.04330752182006836, 0.04301004791259765, 0.042980384826660153, 0.04292681503295898, 0.043284736633300784, 0.042352672576904296, 0.04193628692626953, 0.041982559204101565, 0.04195094299316406, 0.0419284782409668, 0.042236415863037106, 0.04207206344604492, 0.042112926483154296, 0.04220435333251953, 0.042390430450439456, 0.04217619323730469, 0.04230995178222656, 0.04231958389282227, 0.04250614547729492, 0.04226015853881836, 
0.04219887924194336, 0.04217958450317383, 0.04214531326293945, 0.04245331192016601, 0.04223577499389648, 0.04222784042358398, 0.04267136001586914, 0.04242480087280273, 0.04230377578735352, 0.04228710556030273, 0.04219625473022461, 0.04259667205810547, 0.04240963363647461, 0.0423878402709961, 0.042492286682128906, 0.04267212677001953, 0.04293836975097656, 0.042891265869140625, 0.0426695671081543, 0.042609153747558595, 0.04279439926147461, 0.042813343048095705, 0.04251308822631836, 0.04231987380981445, 0.04268646240234375, 0.0429117431640625, 0.04275379180908203, 0.0431514892578125, 0.04296857452392578, 0.04284444808959961, 0.04275439834594726, 0.042702079772949215, 0.0428223991394043, 0.043302913665771485, 0.04330495834350586, 0.04307763290405273, 0.04307331085205078, 0.0427624626159668, 0.042692607879638675, 0.043127872467041015, 0.04317279815673828, 0.04309196853637695, 0.043484928131103516, 0.04329289627075195, 0.04304694366455078, 0.04301004791259765, 0.043225086212158204, 0.043284481048583984, 0.04225843048095703, 0.04221132659912109, 0.042213375091552735, 0.04214156723022461, 0.042081729888916015, 0.04265644836425781, 0.042334144592285156, 0.0420145263671875, 0.042084320068359375, 0.04232825469970703, 0.04211516952514648, 0.04214988708496094, 0.04238278579711914, 0.04236326217651367, 0.0421638069152832, 0.04288572692871094, 0.04273971176147461, 0.04253286361694336, 0.04231167984008789, 0.04207567977905274, 0.04205001449584961, 0.042223617553710936, 0.04252048110961914, 0.0425492172241211, 0.04243059158325195, 0.04226591873168945, 0.04240864181518555, 0.04261273574829102, 0.04245270538330078, 0.04226019287109375, 0.04236140823364258, 0.042678207397460935, 0.043026496887207034, 0.042912960052490234, 0.04254515075683594, 0.04244972610473633, 0.042460704803466795, 0.04280124664306641, 0.04303203201293945, 0.04322921752929688, 0.04317273712158203, 0.042881023406982424, 0.04282572937011719, 0.042665985107421874, 0.04260454559326172, 0.04298956680297852, 0.04306739044189453, 0.04298649597167969, 0.04292095947265625, 0.042807296752929686, 0.04338687896728516, 0.04335411071777344, 0.04308992004394531, 0.04284415817260742, 0.04321414566040039, 0.04325446319580078, 0.042992862701416015, 0.043709217071533205, 0.043448318481445314, 0.04304659271240235, 0.04298748779296875, 0.043254112243652346, 0.043276287078857424, 0.04219027328491211, 0.0418903694152832, 0.042003456115722655, 0.04208127975463867, 0.04218198394775391, 0.04218537521362305, 0.04199590301513672, 0.042574207305908204, 0.04256911849975586, 0.042176799774169924, 0.04202323150634766, 0.04207206344604492, 0.042231807708740236, 0.04261068725585938, 0.04244070434570312, 0.04238240051269531, 0.04291376113891601, 0.042535743713378905, 0.04231568145751953, 0.04224844741821289, 0.04223590469360351, 0.04208844757080078, 0.042487808227539066, 0.04250624084472656, 0.04242393493652344, 0.042399681091308594, 0.042375232696533205, 0.04224822235107422, 0.0426187858581543, 0.04262137603759766, 0.042716320037841794, 0.04308259201049805, 0.04300595092773438, 0.04266393661499023, 0.04256563186645508, 0.04248371124267578, 0.04247552108764648, 0.042932193756103514, 0.04295478439331055, 0.042932193756103514, 0.04289539337158203, 0.04279856109619141, 0.04341404724121094, 0.04317788696289063, 0.04299305725097656, 0.04313772964477539, 0.04292601776123047, 0.04267833709716797, 0.042883071899414066, 0.04301619338989258, 0.04290560150146484, 0.04288838577270508, 0.042833984375, 0.04311724853515625, 0.043036415100097654, 0.0431962890625, 0.04324806213378906, 
0.04314726257324219, 0.04339494323730469, 0.04314739227294922, 0.04313087844848633, 0.04312623977661133, 0.04318601608276367, 0.04237516784667969, 0.041885311126708985, 0.04206016159057617, 0.04207001495361328, 0.041990142822265625, 0.042248191833496096, 0.04231078338623047, 0.042188926696777346, 0.0423199348449707, 0.04221747207641602, 0.042330814361572267, 0.04236492919921875, 0.04260992050170898, 0.0426558723449707, 0.04251916885375977, 0.042436607360839845, 0.0423298225402832, 0.042244384765625, 0.04230758285522461, 0.04210441589355469, 0.042014110565185545, 0.04224665451049805, 0.04279142379760742, 0.042534912109375, 0.04225228881835937, 0.04244038391113281, 0.04240374374389649, 0.04244838333129883, 0.04250483322143555, 0.04244012832641601, 0.04246988677978516, 0.042817726135253906, 0.043003681182861325, 0.04306060791015625, 0.042791519165039066, 0.04295721435546875, 0.04371974563598633, 0.04314822387695313, 0.04270809555053711, 0.04278976058959961, 0.04316774368286133, 0.04322099304199219, 0.042923614501953124, 0.04281180953979492, 0.042657665252685543, 0.04264720153808594, 0.04255097579956055, 0.04254390335083008, 0.0427125129699707, 0.04316831970214844, 0.04314521789550781, 0.04302428817749023, 0.04282540893554688, 0.04284419250488281, 0.042969470977783204, 0.043136096954345705, 0.043172767639160156, 0.04311155319213867, 0.043537281036376954, 0.0433070068359375, 0.04322880172729492, 0.04331763076782227, 0.043893856048583986]",tokens/s,23.478521229960215,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,6767.353856,7769.817088,0.0,7367.294976,7351.94368,s,1,12.8867509765625,12.8867509765625,0.0,12.8867509765625,12.8867509765625,12.8867509765625,12.8867509765625,[12.8867509765625],,kWh,0.00016327587415413896,1.8003184592868308e-05,5.1368652206007503e-05,0.00023264771095301477,,MB,1645.051904,8380.08832,0.0,7962.886144,7872.44544,s,10,3.2418509521484373,0.3241850952148438,0.0015105411650276274,0.32444235229492185,0.3255603240966797,0.3261459915161133,0.3266145254516602,"[0.3210010070800781, 0.322678955078125, 0.3233010559082031, 0.32389852905273436, 0.3247224426269531, 0.3267316589355469, 0.32543017578125, 0.3248792419433594, 0.3241622619628906, 0.32504562377929686]",tokens/s,789.6723315744784,kWh,9.446926927554129e-06,1.0417319137976308e-06,6.295587473741944e-06,1.6784246315093702e-05,tokens/kWh,15252397.706400724,MB,1656.262656,8631.74656,0.0,8214.544384,8118.577152,s,10,27.217691650390627,2.7217691650390625,0.005172989513998363,2.721640869140625,2.725898095703125,2.7291671875,2.7317824609374997,"[2.71280712890625, 2.717132080078125, 2.72190673828125, 2.724744873046875, 2.725171630859375, 2.732436279296875, 2.724072021484375, 2.716677978515625, 2.721367919921875, 
2.721375]",tokens/s,23.14670943048024,kWh,7.960932990744507e-05,8.779261182958402e-06,5.269984861145714e-05,0.00014108843970186063,tokens/kWh,446528.4337478514,,s,630,27.214150100708,0.04319706365191747,0.0003528519712173712,0.04319479942321777,0.043600189208984376,0.043674482154846185,0.044100844688415526,"[0.04315951919555664, 0.04282080078125, 0.042427009582519534, 0.04227635192871094, 0.042466014862060544, 0.042657791137695314, 0.04270195388793945, 0.042689407348632816, 0.04267327880859375, 0.04271590423583985, 0.04253504180908203, 0.04262297439575195, 0.04268851089477539, 0.04254617691040039, 0.04247180938720703, 0.04258060836791992, 0.04273971176147461, 0.04302204895019531, 0.04285673522949219, 0.04311040115356445, 0.04292124938964844, 0.042996448516845705, 0.04297318267822266, 0.04314028930664063, 0.04326073455810547, 0.043227264404296875, 0.04265167999267578, 0.04281641769409179, 0.04299235153198242, 0.0431126708984375, 0.04299280166625977, 0.04316223907470703, 0.043120384216308594, 0.04311702346801758, 0.043173152923583986, 0.04322172927856445, 0.042962944030761716, 0.043053054809570314, 0.043105953216552736, 0.04313942337036133, 0.04317744064331055, 0.04306179046630859, 0.04338051223754883, 0.04334198379516602, 0.043173694610595705, 0.0432243537902832, 0.04329571151733398, 0.043144512176513675, 0.04309695816040039, 0.043132736206054685, 0.0433172492980957, 0.04331520080566406, 0.04329471969604492, 0.04322086334228516, 0.04317401504516601, 0.04332921600341797, 0.04356143951416016, 0.04369996643066406, 0.04410192108154297, 0.04413420867919922, 0.04343603134155274, 0.04361644744873047, 0.04337558364868164, 0.04331110382080078, 0.04270489501953125, 0.042505599975585937, 0.04272505569458008, 0.04254105758666992, 0.04243734359741211, 0.04252809524536133, 0.042507137298583984, 0.042683902740478515, 0.04318611145019531, 0.04290790557861328, 0.04281107330322265, 0.04273136138916016, 0.04280956649780274, 0.04265584182739258, 0.04275836944580078, 0.04286860656738281, 0.042888702392578124, 0.04299212646484375, 0.04306572723388672, 0.04304076766967774, 0.043286529541015625, 0.04332953643798828, 0.04332134246826172, 0.04306697463989258, 0.04287120056152344, 0.043030529022216796, 0.04366131210327148, 0.04306739044189453, 0.04297727966308594, 0.04288675308227539, 0.042702816009521485, 0.0428807373046875, 0.04300054550170898, 0.04297727966308594, 0.04323942565917969, 0.04313087844848633, 0.04317580795288086, 0.043065311431884766, 0.043222305297851565, 0.043119678497314455, 0.043554622650146486, 0.04320665740966797, 0.043655166625976564, 0.04355276870727539, 0.0434031982421875, 0.043313343048095705, 0.04328396987915039, 0.043159934997558595, 0.04313497543334961, 0.043493377685546876, 0.04352000045776367, 0.0433889274597168, 0.04346038436889649, 0.043490943908691404, 0.04351446533203125, 0.0436157112121582, 0.04374787139892578, 0.04367887878417969, 0.04346966552734375, 0.043473983764648436, 0.04356332778930664, 0.04348582458496094, 0.043355358123779296, 0.04303155136108398, 0.0425571517944336, 0.04234348678588867, 0.04252774429321289, 0.04289945602416992, 0.042616832733154295, 0.04265145492553711, 0.042614849090576175, 0.0426740493774414, 0.042784160614013675, 0.0427402572631836, 0.04272364807128906, 0.04316569519042969, 0.04305017471313476, 0.04274873733520508, 0.04272246551513672, 0.0447672004699707, 0.04298403167724609, 0.04305302429199219, 0.043125057220458986, 0.043055103302001956, 0.04320035171508789, 0.04322694396972656, 0.04313123321533203, 0.04291993713378906, 0.04292099380493164, 
0.04296393585205078, 0.04319427108764649, 0.04322313690185547, 0.043030529022216796, 0.04313478469848633, 0.04294412612915039, 0.042892063140869144, 0.04305487823486328, 0.043274177551269534, 0.04329068756103516, 0.04309731292724609, 0.04327651214599609, 0.043162174224853515, 0.043118495941162106, 0.04334406280517578, 0.04334956741333008, 0.04327664184570312, 0.04448988723754883, 0.04360425567626953, 0.043469375610351565, 0.043603649139404295, 0.04361161422729492, 0.043479103088378906, 0.04348912048339844, 0.04337145614624023, 0.0434442253112793, 0.04355686569213867, 0.04350966262817383, 0.04328457641601562, 0.043218944549560545, 0.04345849609375, 0.04339737701416015, 0.04359167861938477, 0.043310016632080076, 0.04336422348022461, 0.0447760009765625, 0.043302913665771485, 0.042823680877685545, 0.042625022888183595, 0.04250419235229492, 0.04252876663208008, 0.04286873626708984, 0.04275804901123047, 0.04278041458129883, 0.0430022087097168, 0.04290556716918945, 0.04286876678466797, 0.04269055938720703, 0.042708545684814456, 0.04276819229125976, 0.0428361930847168, 0.0426907844543457, 0.042973537445068356, 0.04306108856201172, 0.04292937469482422, 0.04312073516845703, 0.04299030303955078, 0.04311036682128906, 0.04330640029907227, 0.043434558868408205, 0.04332137680053711, 0.043165409088134765, 0.04309635162353516, 0.043012096405029294, 0.043065662384033206, 0.043226303100585936, 0.04322150421142578, 0.04335599899291992, 0.043228927612304686, 0.04313087844848633, 0.043035041809082034, 0.043061023712158204, 0.04310374450683594, 0.04329264068603516, 0.043261951446533206, 0.04527180862426758, 0.043256961822509765, 0.04334681701660156, 0.04347475051879883, 0.04348947143554688, 0.0436610221862793, 0.043648990631103515, 0.04371209716796875, 0.043622718811035154, 0.043501983642578124, 0.04349756622314453, 0.043585025787353515, 0.043641246795654294, 0.04389068984985352, 0.04363792037963867, 0.043584159851074215, 0.043518142700195314, 0.04362387084960938, 0.04338336181640625, 0.04348928070068359, 0.04333964920043945, 0.043657344818115236, 0.04377190399169922, 0.04365212631225586, 0.04347856140136719, 0.043055328369140625, 0.042793632507324215, 0.04282102584838867, 0.0429574089050293, 0.043011871337890625, 0.04339529418945313, 0.042917343139648435, 0.04286291122436523, 0.043278560638427735, 0.04265574264526367, 0.04285200119018555, 0.042873184204101564, 0.04296499252319336, 0.04301619338989258, 0.04284723281860352, 0.04288166427612305, 0.04292441558837891, 0.042950496673583985, 0.043058784484863284, 0.043337886810302734, 0.04318864059448242, 0.04310630416870117, 0.04311366271972656, 0.043192127227783206, 0.04297625732421875, 0.04316774368286133, 0.04326822280883789, 0.04304828643798828, 0.04305155181884766, 0.04339244842529297, 0.04289388656616211, 0.042913791656494144, 0.043081729888916016, 0.043118366241455076, 0.04329904174804688, 0.043169792175292966, 0.04337062454223633, 0.04347014236450195, 0.0435918083190918, 0.04365142440795899, 0.04358364868164062, 0.04368729782104492, 0.04358160018920899, 0.04350812911987305, 0.04356915283203125, 0.04394803237915039, 0.04360806274414063, 0.04358758544921875, 0.04346700668334961, 0.043552513122558596, 0.04349708938598633, 0.04341798400878906, 0.04337059020996094, 0.04344412612915039, 0.04331315231323242, 0.0433623046875, 0.04352614212036133, 0.04349542236328125, 0.043579391479492184, 0.04362163162231445, 0.04349580764770508, 0.043581825256347656, 0.043923423767089846, 0.04315929412841797, 0.042705184936523435, 0.04272048187255859, 0.04272739028930664, 
0.042780960083007816, 0.04273001480102539, 0.042646526336669925, 0.04270083236694336, 0.042721473693847656, 0.04276508712768555, 0.04293427276611328, 0.042880481719970706, 0.04429059219360352, 0.043111873626708985, 0.04296352005004883, 0.043186176300048826, 0.043547935485839843, 0.04355759811401367, 0.04361811065673828, 0.043337120056152346, 0.043453216552734375, 0.043714561462402345, 0.04357324981689453, 0.04353238296508789, 0.043202465057373046, 0.043186176300048826, 0.043177982330322266, 0.04296908950805664, 0.04354662322998047, 0.04315900802612305, 0.043224769592285155, 0.04325155258178711, 0.04329366302490235, 0.04325759887695312, 0.043358497619628907, 0.043585601806640624, 0.043503551483154296, 0.04351715087890625, 0.04334652709960937, 0.04340505599975586, 0.04349996948242187, 0.0436756477355957, 0.043671550750732424, 0.04366745758056641, 0.0437022705078125, 0.04360806274414063, 0.043837440490722655, 0.043684993743896484, 0.0437072639465332, 0.04355596923828125, 0.043471393585205076, 0.043508064270019534, 0.04371865463256836, 0.04355398559570312, 0.04356998443603516, 0.0435483512878418, 0.043487552642822266, 0.04359372711181641, 0.04347903823852539, 0.04367305755615234, 0.043783935546875, 0.0437911376953125, 0.04349734497070312, 0.04297289657592773, 0.04275651168823242, 0.04283331298828125, 0.04287075042724609, 0.04289804840087891, 0.04285232162475586, 0.04281455993652344, 0.04290041732788086, 0.04279404830932617, 0.043094974517822265, 0.04331087875366211, 0.04300780868530273, 0.04311081695556641, 0.042991329193115234, 0.04292607879638672, 0.04285468673706055, 0.04305059051513672, 0.04297769546508789, 0.04311449432373047, 0.04334905624389648, 0.04324591827392578, 0.04340316772460937, 0.04345708847045898, 0.043292800903320314, 0.04319641494750977, 0.043151039123535156, 0.04318649673461914, 0.04315135955810547, 0.043097118377685546, 0.04298441696166992, 0.04310630416870117, 0.04313670349121094, 0.04315372848510742, 0.04302643203735351, 0.04316159820556641, 0.04336140823364258, 0.04317273712158203, 0.043210750579833986, 0.04328857421875, 0.043243423461914066, 0.04331903839111328, 0.04329084777832031, 0.04324764633178711, 0.043460704803466796, 0.04362406539916992, 0.043646751403808595, 0.04365167999267578, 0.04372480010986328, 0.04336374282836914, 0.04327052688598633, 0.04330889511108398, 0.04360966491699219, 0.04357923126220703, 0.04355171203613281, 0.04389641571044922, 0.043501983642578124, 0.04339507293701172, 0.04337776184082031, 0.04354550552368164, 0.04344153594970703, 0.043375232696533206, 0.04359987258911133, 0.043608158111572266, 0.04308867263793945, 0.042684417724609375, 0.04251238250732422, 0.042608638763427735, 0.04264755249023437, 0.042712894439697266, 0.04288940811157226, 0.042504032135009764, 0.04255350494384766, 0.042790912628173826, 0.04271923065185547, 0.042674175262451174, 0.04258518218994141, 0.04277920150756836, 0.04289308929443359, 0.04285708618164062, 0.04280928039550781, 0.0429793930053711, 0.04310409545898437, 0.043063392639160154, 0.04312268829345703, 0.04314847946166992, 0.043121086120605466, 0.04294620895385742, 0.04332822418212891, 0.0431321907043457, 0.043045600891113284, 0.04303385543823242, 0.043635456085205075, 0.0429031982421875, 0.042909408569335936, 0.04296700668334961, 0.04293904113769531, 0.043184127807617184, 0.04313497543334961, 0.043096065521240234, 0.042921024322509764, 0.04303763198852539, 0.04335753631591797, 0.044098209381103516, 0.04357120132446289, 0.043216896057128903, 0.04338227081298828, 0.043430305480957034, 0.043304798126220706, 
0.04333587265014648, 0.04337875366210937, 0.04332134246826172, 0.04333772659301758, 0.043382816314697266, 0.04332249450683594, 0.04311964797973633, 0.043337535858154294, 0.04341974258422852, 0.043394432067871094, 0.04324515151977539, 0.043195327758789065, 0.04347251129150391, 0.04327462387084961, 0.04346886444091797, 0.04352342224121094, 0.04377686309814453, 0.043312896728515626, 0.042902881622314454, 0.04273459243774414, 0.04253839874267578, 0.04268889617919922, 0.042674304962158204, 0.0425513916015625, 0.04270064163208008, 0.04260796737670899, 0.04251526260375976, 0.04299942398071289, 0.042971519470214846, 0.04298940658569336, 0.04308627319335938, 0.042992862701416015, 0.04299622344970703, 0.04286873626708984, 0.04285603332519531, 0.04302275085449219, 0.04307894515991211, 0.04309673690795898, 0.04345862579345703, 0.043409408569335936, 0.04337206268310547, 0.043253280639648437, 0.04305574417114258, 0.04305740737915039, 0.04317190551757812, 0.042974239349365236, 0.04294976043701172, 0.0432342414855957, 0.0429925422668457, 0.042891265869140625, 0.043698238372802733, 0.04316972732543945, 0.043224063873291016, 0.04308889770507812, 0.043503200531005856, 0.0432492790222168, 0.043243358612060544, 0.043582401275634765, 0.04345814514160156, 0.043387294769287106, 0.04349747085571289, 0.043448318481445314, 0.04329849624633789, 0.04346255874633789, 0.04336387252807617, 0.043348575592041014, 0.043329822540283204, 0.04339257431030273, 0.04319916915893555, 0.043346721649169924, 0.043400161743164065, 0.04335411071777344, 0.04348137664794922, 0.04388220977783203, 0.04407523345947266, 0.043519775390625, 0.043490623474121096, 0.043456321716308595, 0.04349017715454102, 0.043603038787841795, 0.04338687896728516, 0.043022335052490236, 0.043003841400146486, 0.04275948715209961, 0.04267910385131836, 0.04281727981567383, 0.042830337524414064, 0.042720958709716796, 0.04269465637207031, 0.042696510314941406, 0.043122753143310544, 0.043106239318847654, 0.042942657470703124, 0.04314051055908203, 0.04302912139892578, 0.042742816925048825, 0.0428515510559082, 0.043028030395507816, 0.043093215942382815, 0.04304991912841797, 0.043202560424804685, 0.043243518829345705, 0.043232383728027346, 0.04313516616821289, 0.04331590270996094, 0.04318320083618164, 0.04310108947753906, 0.04302643203735351, 0.043126686096191406, 0.04294851303100586, 0.04294675064086914, 0.04345167922973633, 0.04333615875244141, 0.04322076797485352, 0.043006431579589846, 0.043300193786621095, 0.043452510833740236, 0.04345455932617188, 0.04330713653564453, 0.04343228912353515, 0.043241504669189454, 0.04320428848266602, 0.04336054229736328, 0.04348108673095703, 0.043568737030029295, 0.04342630386352539, 0.04342998504638672, 0.043473888397216796, 0.0434813117980957, 0.0433397445678711, 0.04318479919433594, 0.04328857421875, 0.043433982849121096, 0.04341862487792969, 0.04338108825683594, 0.04335887908935547, 0.04325785446166992, 0.04317388916015625, 0.043327617645263675, 0.04342771148681641, 0.04352204895019531, 0.04364287948608398, 0.04352000045776367]",tokens/s,23.14972165835191,, 
4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,2230.239232,2558.394368,0.0,2155.872256,2032.413184,s,1,8.849880859375,8.849880859375,0.0,8.849880859375,8.849880859375,8.849880859375,8.849880859375,[8.849880859375],,kWh,5.069948461662837e-05,5.585193968730092e-06,1.5416401221990395e-05,7.170107980734885e-05,,MB,2298.92096,2835.218432,0.0,2418.016256,2280.154112,s,10,0.9324708175659179,0.0932470817565918,0.00012180643037577638,0.09319584274291992,0.09344761810302735,0.0934663360595703,0.09348131042480468,"[0.09348505401611328, 0.0934434585571289, 0.09312166595458984, 0.09317011260986328, 0.09320662689208985, 0.09316172790527344, 0.09312764739990234, 0.09318505859375, 0.09331346893310546, 0.09325599670410156]",tokens/s,2745.39422765263,kWh,2.769611799213855e-06,3.054406640319133e-07,1.8421555617734332e-06,4.917208025019201e-06,tokens/kWh,52062064.223731995,MB,2313.904128,2919.104512,0.0,2501.902336,2389.055488,s,10,18.272229736328125,1.8272229736328125,0.004028818296486248,1.8277330932617186,1.8326639526367188,1.833317694091797,1.8338406872558595,"[1.8243748779296876, 1.8225260009765625, 1.833971435546875, 1.8325186767578125, 1.82675390625, 1.828748779296875, 1.8228560791015624, 1.8219384765625, 1.8287122802734375, 1.8298292236328124]",tokens/s,34.478550734695446,kWh,5.357079897370606e-05,5.90859924818996e-06,2.7599229102426552e-05,8.707862732432257e-05,tokens/kWh,723484.0733692067,,s,630,18.269282154083232,0.02899886056203691,0.0004212822361993602,0.028892560005187987,0.02935540561676025,0.02950972194671631,0.031019194602966316,"[0.02936832046508789, 0.029171295166015625, 0.029054399490356445, 0.028795007705688477, 0.028957536697387695, 0.028795135498046874, 0.02892982482910156, 0.029145055770874024, 0.0287926082611084, 0.02875004768371582, 0.02897702407836914, 0.029155296325683595, 0.028884416580200196, 0.028773088455200196, 0.029148639678955077, 0.028981632232666015, 0.028879007339477538, 0.0291778564453125, 0.02919219207763672, 0.029105600357055665, 0.029057056427001952, 0.029309152603149414, 0.028952768325805664, 0.02902134323120117, 0.02902524757385254, 0.028884992599487305, 0.029165151596069337, 0.02938307189941406, 0.02897305679321289, 0.029116031646728515, 0.028708959579467775, 0.028876895904541015, 0.02880735969543457, 0.029233024597167968, 0.028894464492797853, 0.028930944442749025, 0.028748832702636718, 0.02879283142089844, 0.028867551803588867, 0.028829504013061523, 0.028913440704345702, 0.029016128540039064, 0.029143392562866213, 0.02887843132019043, 0.02880963134765625, 0.028923904418945313, 0.02866899108886719, 0.028937055587768556, 0.028763776779174806, 0.028978912353515626, 0.028872800827026368, 0.029212736129760743, 0.02878108787536621, 0.028835264205932618, 0.02865011215209961, 0.02874937629699707, 0.028792768478393554, 0.028748287200927734, 0.02863088035583496, 0.028991647720336914, 0.02891484832763672, 
0.02900387191772461, 0.029264095306396485, 0.028944128036499022, 0.028813823699951172, 0.030018560409545897, 0.028920095443725587, 0.028942815780639647, 0.028719104766845704, 0.028743648529052736, 0.028823328018188477, 0.02880073547363281, 0.028676191329956056, 0.028713504791259767, 0.02894838333129883, 0.028880895614624022, 0.028794879913330077, 0.030019584655761718, 0.028872703552246092, 0.028712959289550782, 0.02891366386413574, 0.02882537651062012, 0.028716480255126953, 0.02889193534851074, 0.029089599609375, 0.028976768493652345, 0.02889776039123535, 0.029464672088623047, 0.028835840225219726, 0.028692031860351564, 0.028715456008911133, 0.02881452751159668, 0.028833759307861327, 0.028703584671020507, 0.029048831939697265, 0.02959984016418457, 0.0289770565032959, 0.02880246353149414, 0.02860895919799805, 0.02879657554626465, 0.029187936782836914, 0.028945056915283204, 0.02899740791320801, 0.028788896560668947, 0.028921920776367186, 0.028667903900146483, 0.02880512046813965, 0.02883987236022949, 0.02896214485168457, 0.02868502426147461, 0.028803071975708007, 0.028856576919555663, 0.028700416564941406, 0.02870412826538086, 0.028817695617675783, 0.02909388732910156, 0.02907161521911621, 0.028721151351928712, 0.028891231536865233, 0.028891136169433593, 0.029020095825195314, 0.02887481689453125, 0.029402591705322265, 0.02906883239746094, 0.02915020751953125, 0.028827487945556642, 0.02893177604675293, 0.029077856063842774, 0.028998912811279295, 0.02903055953979492, 0.02890390396118164, 0.029096031188964845, 0.029558687210083007, 0.029183263778686522, 0.029227455139160155, 0.029118112564086914, 0.028771039962768554, 0.028871999740600587, 0.029098688125610353, 0.029100032806396486, 0.02880102348327637, 0.028915552139282225, 0.028912864685058593, 0.02882361602783203, 0.028745824813842774, 0.028787744522094726, 0.028992416381835938, 0.02906153678894043, 0.028770240783691406, 0.028815872192382814, 0.028884832382202148, 0.028760223388671874, 0.028853824615478516, 0.029429887771606444, 0.02920854377746582, 0.030882080078125, 0.029161535263061523, 0.028970144271850587, 0.028875616073608397, 0.028844032287597656, 0.0290729923248291, 0.02960220718383789, 0.028845407485961913, 0.028854944229125976, 0.02890547180175781, 0.02896895980834961, 0.029220863342285155, 0.02922256088256836, 0.028811616897583006, 0.02879897689819336, 0.029114368438720704, 0.029411327362060546, 0.029372415542602538, 0.029427711486816405, 0.029243392944335936, 0.028899328231811523, 0.028692415237426758, 0.0289068489074707, 0.029000415802001953, 0.029314111709594727, 0.02911747169494629, 0.02939689636230469, 0.029319007873535155, 0.029501535415649413, 0.029101568222045897, 0.02893881607055664, 0.028923904418945313, 0.030875648498535156, 0.029313024520874024, 0.029527103424072266, 0.029543359756469725, 0.02937228775024414, 0.029507328033447265, 0.029388992309570313, 0.029335775375366212, 0.029251552581787108, 0.029098207473754883, 0.028980192184448243, 0.029369152069091797, 0.028714303970336915, 0.028808895111083983, 0.028828224182128905, 0.028703168869018556, 0.028743423461914063, 0.028907840728759765, 0.02892972755432129, 0.028708351135253905, 0.02879974365234375, 0.028925952911376954, 0.028913919448852538, 0.028732704162597655, 0.028776575088500976, 0.029221216201782228, 0.029005823135375978, 0.028663808822631837, 0.02865679931640625, 0.028695392608642577, 0.028683391571044922, 0.031075199127197264, 0.030453855514526368, 0.028964799880981447, 0.029134016036987304, 0.028990207672119142, 0.028724512100219726, 0.02881920051574707, 
0.028786943435668944, 0.029383424758911134, 0.028792352676391603, 0.028785120010375975, 0.028878847122192384, 0.028782495498657225, 0.028760160446166992, 0.029175199508666993, 0.029477760314941405, 0.029121408462524413, 0.028957536697387695, 0.029007871627807616, 0.028872703552246092, 0.02873321533203125, 0.02872326469421387, 0.029081760406494142, 0.02931475257873535, 0.02922224044799805, 0.028925952911376954, 0.029342687606811524, 0.029560831069946288, 0.0295402889251709, 0.029339519500732422, 0.0290633602142334, 0.02930892753601074, 0.02923423957824707, 0.029084287643432617, 0.02959971237182617, 0.02934105682373047, 0.029487743377685546, 0.02909388732910156, 0.029050815582275392, 0.028989503860473633, 0.028903423309326173, 0.028672000885009766, 0.028708864212036132, 0.028638303756713866, 0.02869487953186035, 0.028772928237915038, 0.029394399642944335, 0.029089439392089845, 0.02879199981689453, 0.02875289535522461, 0.029231807708740235, 0.028935359954833983, 0.028822336196899414, 0.028847103118896485, 0.028879264831542968, 0.028750431060791014, 0.028581056594848633, 0.02888377571105957, 0.028657663345336915, 0.028720928192138673, 0.02904412841796875, 0.02895136070251465, 0.028688383102416993, 0.029798015594482422, 0.03125900840759278, 0.02906060791015625, 0.029125120162963865, 0.028820608139038088, 0.028906208038330078, 0.029247648239135744, 0.02913267135620117, 0.028914880752563477, 0.028879648208618165, 0.029058847427368164, 0.0291494083404541, 0.02869900894165039, 0.028815135955810547, 0.028839967727661134, 0.02873936080932617, 0.028690624237060546, 0.028640512466430665, 0.029313568115234376, 0.029092063903808595, 0.029024255752563476, 0.028897056579589842, 0.028804800033569337, 0.028793056488037108, 0.028676671981811522, 0.028706560134887694, 0.02877440071105957, 0.028843360900878905, 0.028758047103881836, 0.0288590087890625, 0.029023391723632812, 0.029540895462036133, 0.029477216720581054, 0.029112287521362305, 0.02958038330078125, 0.029139871597290038, 0.028964704513549804, 0.02889129638671875, 0.028893184661865235, 0.02911552047729492, 0.028953695297241212, 0.02885744094848633, 0.028961471557617188, 0.028790208816528322, 0.02866223907470703, 0.028739200592041016, 0.028789215087890625, 0.028635135650634767, 0.029007007598876953, 0.028646240234375, 0.029425952911376955, 0.03298275375366211, 0.028907743453979493, 0.02888598442077637, 0.029207136154174803, 0.02895689582824707, 0.02889900779724121, 0.028923871994018555, 0.02896931266784668, 0.028807167053222657, 0.028800512313842775, 0.028725376129150392, 0.02899523162841797, 0.029016799926757812, 0.029187360763549806, 0.02962076759338379, 0.029375904083251952, 0.029110847473144533, 0.028864736557006835, 0.029130752563476563, 0.02887424087524414, 0.02891187286376953, 0.02870297622680664, 0.028839103698730467, 0.02890015983581543, 0.02894771194458008, 0.029260799407958983, 0.02920979118347168, 0.028891712188720702, 0.02877644729614258, 0.02879046440124512, 0.02879929542541504, 0.028712480545043946, 0.02872777557373047, 0.02879283142089844, 0.029413375854492187, 0.02890547180175781, 0.02935398483276367, 0.0287455997467041, 0.02882918357849121, 0.02882115173339844, 0.028834783554077148, 0.02897942352294922, 0.029331232070922853, 0.028921344757080077, 0.02888140869140625, 0.02889507293701172, 0.02907935905456543, 0.02908220863342285, 0.02870604705810547, 0.02872159957885742, 0.02874073600769043, 0.02876710319519043, 0.02875596809387207, 0.02884160041809082, 0.028844095230102538, 0.02875596809387207, 0.028672319412231445, 
0.02881331253051758, 0.028888416290283204, 0.02879350471496582, 0.02874163246154785, 0.028803199768066407, 0.028685983657836915, 0.02871104049682617, 0.02862908744812012, 0.02914303970336914, 0.028866559982299804, 0.028862016677856445, 0.028856767654418945, 0.028880895614624022, 0.02879283142089844, 0.028827264785766603, 0.028662143707275392, 0.0288721923828125, 0.029932031631469725, 0.028856319427490236, 0.0287825927734375, 0.02895795249938965, 0.0288222713470459, 0.02870854377746582, 0.028717376708984374, 0.02879897689819336, 0.02874163246154785, 0.028681568145751953, 0.029125280380249023, 0.028860416412353516, 0.028839935302734376, 0.028728736877441406, 0.028770912170410157, 0.03156172752380371, 0.02931711959838867, 0.029450016021728517, 0.029200672149658203, 0.028905376434326172, 0.029052959442138673, 0.028981216430664064, 0.02887887954711914, 0.028962591171264648, 0.028834016799926757, 0.02892185592651367, 0.02878998374938965, 0.0291167049407959, 0.029075904846191405, 0.028933280944824218, 0.029049503326416017, 0.0291343994140625, 0.028805824279785158, 0.02875801658630371, 0.028704544067382813, 0.029051136016845704, 0.02885593605041504, 0.02865567970275879, 0.028795103073120117, 0.02870684814453125, 0.028660736083984374, 0.028664640426635742, 0.02996006393432617, 0.028979328155517577, 0.02894419288635254, 0.028960960388183594, 0.028854207992553713, 0.028876863479614257, 0.028807167053222657, 0.02874355125427246, 0.029040767669677735, 0.028669952392578125, 0.028872703552246092, 0.029083648681640626, 0.029083648681640626, 0.028805215835571288, 0.02897091293334961, 0.028800991058349608, 0.02872659111022949, 0.028842048645019533, 0.028871328353881835, 0.02882899284362793, 0.028998239517211914, 0.029065311431884764, 0.029337600708007814, 0.028740991592407228, 0.02877884864807129, 0.02883612823486328, 0.028700672149658202, 0.028882911682128906, 0.02879622459411621, 0.028642015457153322, 0.028979167938232422, 0.028651744842529296, 0.028786495208740236, 0.028676095962524413, 0.02879283142089844, 0.028723039627075196, 0.028694911956787108, 0.031139616012573243, 0.02878054428100586, 0.028696575164794923, 0.02875187110900879, 0.02895871925354004, 0.0287457275390625, 0.028700672149658202, 0.028687744140625, 0.028696575164794923, 0.02882828712463379, 0.028786687850952147, 0.02889321517944336, 0.02883516883850098, 0.02863539123535156, 0.02893008041381836, 0.029460351943969728, 0.029483488082885742, 0.02936819267272949, 0.029077632904052735, 0.02930803108215332, 0.02940438461303711, 0.029029727935791016, 0.029493919372558595, 0.029464031219482423, 0.028997472763061524, 0.028836320877075196, 0.028672000885009766, 0.03077939224243164, 0.02993971252441406, 0.02892736053466797, 0.028781248092651368, 0.02878803253173828, 0.02861510467529297, 0.028879039764404296, 0.02933046340942383, 0.029128768920898437, 0.029190143585205077, 0.02924959945678711, 0.029133663177490235, 0.029009920120239258, 0.028749439239501955, 0.028665983200073242, 0.028760351181030274, 0.028763872146606445, 0.028860671997070313, 0.028614656448364258, 0.028692480087280273, 0.028777568817138673, 0.028629663467407227, 0.028874847412109376, 0.028814943313598632, 0.028875328063964843, 0.028943424224853517, 0.028758975982666017, 0.028710912704467774, 0.028637184143066406, 0.029108224868774416, 0.028827648162841796, 0.02887295913696289, 0.028739328384399413, 0.029240671157836913, 0.028893856048583983, 0.03135052871704101, 0.02956723213195801, 0.02879897689819336, 0.028717056274414062, 0.02887059211730957, 0.028758079528808593, 
0.028612607955932616, 0.028622848510742187, 0.02914518356323242, 0.028727199554443358, 0.02870083236694336, 0.028723039627075196, 0.028874528884887694, 0.02899171257019043, 0.02965228843688965, 0.0291212158203125, 0.029263872146606446, 0.029163520812988283, 0.028991552352905275, 0.028999616622924804, 0.029511680603027345, 0.02950262451171875, 0.029457536697387696, 0.029553983688354494, 0.02931974411010742, 0.02925881576538086, 0.029245439529418944, 0.02888547134399414, 0.029003807067871094, 0.02900521659851074, 0.028906463623046875, 0.028921760559082032, 0.029144607543945312, 0.028809696197509765, 0.02894256019592285, 0.029010719299316406, 0.028818368911743164, 0.02887651252746582, 0.028821855545043944, 0.02891881561279297, 0.028893247604370117, 0.028766271591186523, 0.028793312072753905, 0.028800928115844726, 0.028923519134521486, 0.029090656280517577, 0.02902022361755371, 0.028972991943359373, 0.028870655059814454, 0.028717056274414062, 0.028712959289550782, 0.02878873634338379, 0.028819456100463867, 0.02873139190673828, 0.028721151351928712, 0.028771551132202148, 0.028859167098999022, 0.028907520294189453, 0.02901158332824707, 0.02908582305908203, 0.029327072143554688, 0.02881718444824219, 0.028717824935913086, 0.029345792770385744, 0.033331329345703126, 0.029343616485595702, 0.02920857620239258, 0.028917919158935546, 0.028888832092285155, 0.028890304565429688, 0.0287425594329834, 0.02884787178039551, 0.028811519622802734, 0.029251583099365236, 0.02894380760192871, 0.0288690242767334, 0.028825439453125, 0.028780736923217774, 0.028652992248535156, 0.02893062400817871, 0.028833919525146485, 0.02907686424255371, 0.02900752067565918]",tokens/s,34.48411353476155,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1585.860608,1604.190208,0.0,1201.668096,1189.151232,s,1,8.352587890625,8.352587890625,0.0,8.352587890625,8.352587890625,8.352587890625,8.352587890625,[8.352587890625],,kWh,3.722197351251755e-05,4.098652479497224e-06,1.1060564404016815e-05,5.238119039603159e-05,,MB,1704.194048,1801.322496,0.0,1384.12032,1351.367168,s,10,0.4133818244934082,0.04133818244934082,0.00021710415600139905,0.041259599685668946,0.0416843391418457,0.041755705642700196,0.04181279884338379,"[0.04182707214355469, 0.04121747207641602, 0.04109958267211914, 0.04123040008544922, 0.04134012985229492, 0.04129801559448242, 0.04118268966674805, 0.04166847991943359, 0.041229183197021485, 0.041288799285888675]",tokens/s,6192.821861815605,kWh,1.2516321108796287e-06,1.3802899662050067e-07,8.327475797778712e-07,2.222408687278001e-06,tokens/kWh,115190334.46253668,MB,1709.89568,1843.265536,0.0,1426.06336,1407.548416,s,10,10.648607055664064,1.064860705566406,0.003970466598262344,1.0655339965820312,1.0696396484375001,1.069945068359375,1.070189404296875,"[1.0657847900390625, 1.067237060546875, 1.06957177734375, 1.07025048828125, 1.064951904296875, 1.05922802734375, 1.065283203125, 
1.0594881591796874, 1.06758251953125, 1.0592291259765625]",tokens/s,59.162667634063844,kWh,3.110098283995666e-05,3.429957937523701e-06,1.577587681822061e-05,5.030681759570097e-05,tokens/kWh,1252315.352291013,,s,630,10.645323112487779,0.016897338273790148,0.00039187813689604384,0.01681591987609863,0.017147257232666014,0.017354007720947265,0.018099379463195803,"[0.01694918441772461, 0.01691103935241699, 0.01697587203979492, 0.01679155158996582, 0.0168222713470459, 0.017001663208007813, 0.016874303817749025, 0.016639392852783205, 0.016701087951660157, 0.016944128036499022, 0.016809247970581056, 0.01697212791442871, 0.017000064849853516, 0.016884416580200196, 0.016748544692993163, 0.017285024642944336, 0.01686947250366211, 0.01700556755065918, 0.017064960479736328, 0.01722368049621582, 0.01706188774108887, 0.017133567810058595, 0.017213024139404298, 0.017219871520996095, 0.01696329689025879, 0.01706435203552246, 0.01698406410217285, 0.017088512420654296, 0.016946207046508788, 0.017021919250488283, 0.016856224060058593, 0.016872255325317383, 0.016778400421142578, 0.016862079620361327, 0.016869375228881836, 0.01683456039428711, 0.01678303909301758, 0.016875423431396485, 0.01681654357910156, 0.016925952911376954, 0.016952064514160155, 0.016850944519042968, 0.01679974365234375, 0.016863231658935548, 0.01683046340942383, 0.01680384063720703, 0.017476768493652345, 0.016858015060424805, 0.016805824279785157, 0.017102848052978514, 0.01699635124206543, 0.016733728408813476, 0.01672444725036621, 0.016776960372924806, 0.016816032409667968, 0.016828767776489256, 0.016819232940673827, 0.01692755126953125, 0.01676713562011719, 0.016762176513671876, 0.01671443176269531, 0.01681817626953125, 0.016857088088989256, 0.016953855514526366, 0.016889440536499024, 0.01685558319091797, 0.017010175704956054, 0.016814592361450196, 0.01681407928466797, 0.01683865547180176, 0.016805824279785157, 0.016730079650878905, 0.017070175170898438, 0.017035263061523438, 0.017014400482177734, 0.016847232818603515, 0.01678281593322754, 0.016781600952148437, 0.01672831916809082, 0.01681203269958496, 0.017106943130493164, 0.016932863235473633, 0.016918495178222658, 0.016846879959106446, 0.016945152282714843, 0.01678745651245117, 0.016932863235473633, 0.01704550361633301, 0.017147775650024413, 0.01692470359802246, 0.016776479721069337, 0.017033376693725587, 0.01715017509460449, 0.01696607971191406, 0.017013792037963868, 0.017066976547241212, 0.01671347236633301, 0.016961280822753905, 0.016870048522949217, 0.016936735153198244, 0.017059904098510742, 0.017285120010375975, 0.017284927368164064, 0.017747648239135744, 0.017371648788452147, 0.017241279602050782, 0.017033792495727538, 0.017410303115844728, 0.016954368591308593, 0.016909055709838867, 0.016946847915649415, 0.016881568908691406, 0.016887872695922852, 0.016732799530029298, 0.01666201591491699, 0.01672857666015625, 0.016651487350463866, 0.016870176315307617, 0.016699199676513673, 0.016806079864501954, 0.016774879455566407, 0.01705603218078613, 0.01677142333984375, 0.016720640182495118, 0.01675152015686035, 0.016914432525634765, 0.017139711380004884, 0.018069280624389648, 0.01692624092102051, 0.01717318344116211, 0.01707375907897949, 0.017002912521362306, 0.016988000869750976, 0.016864479064941405, 0.016738399505615235, 0.016812416076660158, 0.016796031951904298, 0.016906335830688478, 0.016855039596557618, 0.017491968154907226, 0.0216944637298584, 0.017434623718261717, 0.01680303955078125, 0.01683951950073242, 0.016789375305175783, 0.016693311691284178, 0.016626752853393555, 
0.016900447845458983, 0.016738912582397462, 0.01683660888671875, 0.016949247360229493, 0.016873472213745116, 0.016861183166503906, 0.016823423385620116, 0.016806720733642578, 0.016822336196899414, 0.01683456039428711, 0.016655967712402343, 0.01727734375, 0.01715772819519043, 0.01675667190551758, 0.016822399139404295, 0.01687318420410156, 0.01692326354980469, 0.016910335540771485, 0.016879039764404295, 0.016736127853393554, 0.016849599838256835, 0.016682336807250977, 0.016732831954956055, 0.016844032287597656, 0.016769792556762694, 0.0167524471282959, 0.016810176849365234, 0.016766016006469726, 0.016658527374267578, 0.01670662307739258, 0.0167542724609375, 0.016715936660766602, 0.01702300834655762, 0.016789728164672852, 0.016664352416992188, 0.016728063583374024, 0.0168407039642334, 0.016797727584838867, 0.01807686424255371, 0.016742176055908203, 0.016670944213867188, 0.01681488037109375, 0.01683456039428711, 0.016726015090942382, 0.01687875175476074, 0.01684566307067871, 0.01679136085510254, 0.016798240661621094, 0.017088159561157227, 0.016703487396240235, 0.0167380485534668, 0.016983392715454102, 0.01670012855529785, 0.01680601692199707, 0.016880992889404298, 0.016926944732666014, 0.01708902359008789, 0.017051647186279297, 0.016924671173095703, 0.01677926445007324, 0.017010528564453124, 0.018108575820922852, 0.016992095947265626, 0.01689958381652832, 0.016953535079956054, 0.017080575942993163, 0.017049472808837892, 0.017248031616210937, 0.01726310348510742, 0.021069343566894532, 0.017454879760742188, 0.017160959243774414, 0.016912384033203123, 0.01736016082763672, 0.0170317440032959, 0.016830623626708983, 0.016999807357788086, 0.016990848541259766, 0.016951295852661134, 0.016876800537109375, 0.016720640182495118, 0.016752384185791017, 0.016662784576416016, 0.016693248748779296, 0.016668352127075195, 0.016611679077148438, 0.016798912048339845, 0.016801664352416992, 0.016677791595458985, 0.01669478416442871, 0.01683286476135254, 0.016883487701416015, 0.016844512939453125, 0.01699087905883789, 0.01676288032531738, 0.016719871520996094, 0.016703487396240235, 0.01669478416442871, 0.016937408447265625, 0.017122751235961915, 0.01720694351196289, 0.01706188774108887, 0.01685807991027832, 0.01677471923828125, 0.0167542724609375, 0.016790111541748046, 0.016898431777954102, 0.01665836715698242, 0.01668239974975586, 0.016634527206420897, 0.016742399215698242, 0.01757699203491211, 0.017550304412841798, 0.017147199630737305, 0.016961824417114257, 0.01683292770385742, 0.01683456039428711, 0.016764928817749023, 0.017092159271240234, 0.017086912155151367, 0.01709244728088379, 0.01693440055847168, 0.016891775131225587, 0.016943296432495116, 0.01752943992614746, 0.017354368209838867, 0.01710323143005371, 0.016895999908447267, 0.01696329689025879, 0.016840927124023436, 0.016961599349975588, 0.017006591796875, 0.016859136581420898, 0.01675811195373535, 0.01680860710144043, 0.016752639770507814, 0.01681817626953125, 0.016813600540161133, 0.016887519836425783, 0.01692710494995117, 0.01672640037536621, 0.016738304138183592, 0.016747840881347655, 0.016665279388427736, 0.0167293758392334, 0.016707679748535157, 0.016792192459106445, 0.016866912841796877, 0.017152416229248048, 0.016997888565063478, 0.01708624076843262, 0.016920703887939453, 0.016814687728881835, 0.01685196876525879, 0.01683113670349121, 0.016716127395629884, 0.016957439422607423, 0.016989280700683593, 0.016733087539672852, 0.016775007247924804, 0.01696169662475586, 0.016768800735473634, 0.016810207366943358, 0.016882944107055663, 
0.016767744064331055, 0.016955392837524414, 0.016732255935668947, 0.01664975929260254, 0.016699296951293945, 0.016758144378662108, 0.01666115188598633, 0.016653663635253908, 0.016648767471313476, 0.01664204788208008, 0.017989183425903322, 0.01680793571472168, 0.016906784057617186, 0.01705308723449707, 0.0170579833984375, 0.017017248153686524, 0.016887807846069337, 0.016934207916259766, 0.016898496627807617, 0.01696908760070801, 0.016832735061645506, 0.016781984329223634, 0.016726015090942382, 0.01711520004272461, 0.01678329658508301, 0.016748544692993163, 0.016639232635498047, 0.0167063045501709, 0.01661337661743164, 0.016680959701538087, 0.01668508720397949, 0.016666591644287108, 0.016602815628051756, 0.01666489601135254, 0.01666815948486328, 0.016639776229858398, 0.0167042236328125, 0.01663795280456543, 0.01668035125732422, 0.016724576950073244, 0.016921728134155274, 0.017050464630126952, 0.016850719451904295, 0.01682022476196289, 0.0168853759765625, 0.017037952423095703, 0.016840736389160157, 0.01688368034362793, 0.01668000030517578, 0.016786367416381835, 0.017047040939331053, 0.017027263641357423, 0.016801664352416992, 0.016791488647460936, 0.016748832702636718, 0.016644447326660157, 0.016737375259399414, 0.016677663803100585, 0.016618879318237304, 0.016726655960083006, 0.016815616607666017, 0.016853343963623046, 0.016650400161743163, 0.016732160568237304, 0.01669059181213379, 0.01703500747680664, 0.016749408721923827, 0.018356544494628906, 0.01692678451538086, 0.016886144638061523, 0.01673116874694824, 0.016688095092773438, 0.016748863220214842, 0.017055456161499023, 0.0168240966796875, 0.016988000869750976, 0.016843103408813478, 0.01696767997741699, 0.016900096893310547, 0.016773120880126953, 0.01702521514892578, 0.016926591873168945, 0.016746431350708007, 0.016699392318725585, 0.01676595115661621, 0.016722944259643553, 0.016672224044799806, 0.016695743560791017, 0.016655967712402343, 0.016777055740356445, 0.01667305564880371, 0.016796031951904298, 0.016655616760253907, 0.016718591690063477, 0.016748544692993163, 0.016699392318725585, 0.016709184646606444, 0.01664620780944824, 0.016715583801269532, 0.01677574348449707, 0.016578559875488282, 0.016740352630615234, 0.016678016662597658, 0.01682521629333496, 0.01717452812194824, 0.017243616104125975, 0.016720415115356446, 0.01676825523376465, 0.017250303268432618, 0.01686342430114746, 0.016876096725463866, 0.01683251190185547, 0.016998079299926756, 0.016902463912963867, 0.01684889602661133, 0.01682022476196289, 0.01697711944580078, 0.016792352676391602, 0.016742399215698242, 0.016742399215698242, 0.016608800888061524, 0.016648128509521486, 0.016732383728027343, 0.016832799911499025, 0.01672812843322754, 0.019907840728759764, 0.01756787109375, 0.017449567794799805, 0.016915712356567383, 0.016769792556762694, 0.017058528900146485, 0.01696281623840332, 0.016972223281860353, 0.016854911804199218, 0.018037151336669922, 0.01704140853881836, 0.01731705665588379, 0.016844640731811522, 0.016790496826171876, 0.01667411231994629, 0.016815807342529295, 0.016705631256103515, 0.01674118423461914, 0.016666688919067384, 0.016727743148803712, 0.016732511520385743, 0.016754304885864258, 0.016662431716918946, 0.016663007736206055, 0.016624799728393556, 0.016664640426635742, 0.016621664047241212, 0.016617696762084962, 0.016595424652099608, 0.01658639907836914, 0.01667228889465332, 0.016642879486083985, 0.01671548843383789, 0.016597280502319334, 0.01659926414489746, 0.01670729637145996, 0.01669740867614746, 0.016701440811157226, 0.016756736755371093, 
0.01677926445007324, 0.017014463424682616, 0.01686355209350586, 0.0167956485748291, 0.016880704879760743, 0.01675155258178711, 0.01683865547180176, 0.01670297622680664, 0.016892383575439453, 0.016654367446899413, 0.016695232391357423, 0.01668412780761719, 0.016700384140014648, 0.016668672561645507, 0.01662566375732422, 0.01669875144958496, 0.016720512390136718, 0.016668672561645507, 0.01722163200378418, 0.0166748161315918, 0.017057472229003907, 0.016687200546264647, 0.01718508720397949, 0.016646047592163087, 0.01744076728820801, 0.016733247756958006, 0.01666758346557617, 0.017328128814697266, 0.017059839248657227, 0.017109983444213866, 0.01712544059753418, 0.017131359100341796, 0.016887615203857422, 0.016693504333496093, 0.016684864044189455, 0.016654560089111328, 0.016672767639160157, 0.01665433692932129, 0.016699392318725585, 0.016629791259765624, 0.016672063827514648, 0.01662748718261719, 0.01662656021118164, 0.01661520004272461, 0.016584800720214843, 0.016603071212768553, 0.01666908836364746, 0.016639776229858398, 0.01747760009765625, 0.016744224548339844, 0.016726272583007813, 0.01675040054321289, 0.016611520767211913, 0.01660691261291504, 0.016679231643676757, 0.01661734390258789, 0.016735776901245118, 0.016874080657958986, 0.016936960220336913, 0.017303136825561522, 0.01682806396484375, 0.016743167877197266, 0.016726015090942382, 0.016670303344726564, 0.016644512176513672, 0.016871423721313478, 0.01677926445007324, 0.016723968505859374, 0.016643295288085936, 0.016672832489013672, 0.017232608795166016, 0.018875968933105468, 0.019986303329467773, 0.01736460876464844, 0.01716524887084961, 0.017491584777832032, 0.017405759811401366, 0.016873279571533204, 0.016734912872314454, 0.016684768676757812, 0.01679599952697754, 0.016821823120117187, 0.016933311462402345, 0.016943103790283204, 0.017057119369506837, 0.016777599334716797, 0.016777503967285157, 0.017922048568725587, 0.017165855407714845, 0.016822751998901368, 0.01688083267211914, 0.016835391998291014, 0.016857088088989256, 0.016725759506225586, 0.016772384643554687, 0.01672198486328125, 0.01676790428161621, 0.016768960952758788, 0.01672198486328125, 0.01677107238769531, 0.016695295333862305, 0.016739967346191407, 0.01669977569580078, 0.016919584274291993, 0.01683350372314453, 0.016740352630615234, 0.016756736755371093, 0.016707584381103514, 0.01680998420715332, 0.016842752456665038, 0.01665843200683594, 0.016746496200561522, 0.016728063583374024, 0.016680959701538087, 0.016803199768066407, 0.016890304565429688, 0.017178943634033203, 0.01734671974182129, 0.017428224563598632, 0.01695110321044922, 0.016898015975952148, 0.01697983932495117, 0.016873023986816407, 0.016894336700439452, 0.016654720306396486, 0.01676288032531738, 0.01669494438171387, 0.01683024024963379, 0.016640575408935546, 0.01678950309753418, 0.016788576126098635, 0.016874399185180664, 0.016774303436279298, 0.017353567123413086, 0.016713216781616212, 0.01666819190979004, 0.01667375946044922, 0.016686912536621093, 0.01665247917175293, 0.01678441619873047, 0.016739295959472655, 0.017137664794921875, 0.016790592193603515, 0.016885856628417968, 0.016794464111328126, 0.017035263061523438, 0.016713663101196288, 0.016680063247680663, 0.01660176086425781, 0.01670172882080078, 0.01659059143066406, 0.01671763229370117, 0.016550176620483397, 0.016684511184692382, 0.01670623970031738]",tokens/s,59.180918544497814,, 
4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,11045.216256,12553.4208,0.0,12150.898688,12116.742656,s,1,16.161283203125,16.161283203125,0.0,16.161283203125,16.161283203125,16.161283203125,16.161283203125,[16.161283203125],,kWh,0.00026584888338748746,2.9312749071534398e-05,8.613034668200625e-05,0.0003812919791410281,,MB,1978.150912,13505.527808,0.0,13088.325632,12892.834304,s,10,5.963045043945312,0.5963045043945313,0.001705738572441738,0.5955810546875,0.5984445556640625,0.5989901916503906,0.5994267004394531,"[0.5950630493164063, 0.5944014282226563, 0.5951193237304687, 0.595307373046875, 0.5967609252929688, 0.5945224609375, 0.5981566162109375, 0.595854736328125, 0.5995358276367188, 0.5983233032226563]",tokens/s,429.3108606649455,kWh,1.7397974058088253e-05,1.918670501034066e-06,1.1484257553411934e-05,3.0800902112534254e-05,tokens/kWh,8311444.874720803,MB,1982.164992,13883.015168,0.0,13465.812992,13237.636096,s,10,40.82478149414062,4.0824781494140625,0.006199130724574727,4.084245239257813,4.087447387695312,4.088314758300782,4.0890086547851565,"[4.067908203125, 4.0751396484375, 4.082970458984375, 4.080665283203125, 4.08552001953125, 4.082532470703125, 4.08918212890625, 4.086503662109375, 4.087254638671875, 4.08710498046875]",tokens/s,15.431803354303824,kWh,0.00011924196311983081,1.31530402459768e-05,7.93970095959875e-05,0.00021179201296179508,tokens/kWh,297461.64229226386,,s,630,40.81509881973267,0.06478587114243281,0.0005429811052452447,0.06481027221679687,0.06550202178955078,0.06565998191833496,0.06583570388793945,"[0.06345500946044921, 0.06436201477050782, 0.06378771209716796, 0.06349619293212891, 0.0634015998840332, 0.06446630096435547, 0.06484684753417969, 0.06407373046875, 0.06388460922241211, 0.06351327896118164, 0.06353228759765625, 0.06384716796875, 0.06404220581054687, 0.06424262237548828, 0.06377990341186524, 0.06370691299438476, 0.06379321670532226, 0.0645252456665039, 0.06446284484863281, 0.0639642562866211, 0.06425692749023437, 0.06425804901123047, 0.06451404571533204, 0.0648089599609375, 0.0647452163696289, 0.06442623901367188, 0.06430105590820312, 0.06452633666992187, 0.06456444549560547, 0.06434690856933593, 0.06439730834960937, 0.06435382080078125, 0.06442851257324218, 0.06467369842529297, 0.06487769317626953, 0.06495331573486328, 0.06487245178222656, 0.06469017791748047, 0.06508665466308594, 0.06486297607421875, 0.06461856079101562, 0.06464717102050781, 0.0652548828125, 0.0651719970703125, 0.06501331329345703, 0.06459347534179688, 0.06501055908203125, 0.06483926391601562, 0.06531231689453125, 0.06496524810791016, 0.06502620697021484, 0.06502816009521484, 0.06514486694335937, 0.06560355377197266, 0.06523085021972656, 
0.06514796447753907, 0.06508025360107422, 0.06508748626708985, 0.06505795288085937, 0.06574355316162109, 0.06520374298095703, 0.06483766174316406, 0.06482118225097656, 0.06349974441528321, 0.0644185562133789, 0.06387507247924805, 0.06384806442260742, 0.06429529571533203, 0.06435129547119141, 0.06420780944824218, 0.06401638031005859, 0.06368255996704102, 0.06378700637817383, 0.06412214660644532, 0.06398230361938477, 0.06414540863037109, 0.06424687957763672, 0.06382479858398438, 0.0641638412475586, 0.0642682876586914, 0.06446694183349609, 0.06418425750732422, 0.0643846435546875, 0.06452428436279296, 0.0644919662475586, 0.06454476928710938, 0.06412847900390625, 0.06414755249023438, 0.0644466552734375, 0.06468633270263671, 0.06514688110351563, 0.0647515869140625, 0.06417001342773437, 0.06456729888916016, 0.06470246124267579, 0.06486409759521485, 0.06440771484375, 0.06460825347900391, 0.06501990509033204, 0.0651325454711914, 0.0648622055053711, 0.06458534240722656, 0.06469430541992187, 0.06487206268310547, 0.06499811553955079, 0.06519193267822265, 0.06496614074707031, 0.06498560333251953, 0.06515618896484375, 0.0648221435546875, 0.06485199737548829, 0.06485964965820312, 0.0651146240234375, 0.06533324432373047, 0.0655804443359375, 0.06534921264648437, 0.06510038757324219, 0.06549954986572265, 0.06525949096679687, 0.06528950500488281, 0.06559616088867187, 0.0653803482055664, 0.06531068420410156, 0.06559283447265625, 0.06574671936035156, 0.06551423645019532, 0.06446316528320313, 0.06478233337402343, 0.0642799072265625, 0.06403247833251953, 0.06408201599121094, 0.06466851043701172, 0.06433990478515625, 0.06386870574951171, 0.06353251266479493, 0.06359494400024414, 0.06438291168212891, 0.06428886413574218, 0.06522096252441406, 0.06479462432861328, 0.06410444641113282, 0.06431712341308594, 0.06451436614990234, 0.06500556945800781, 0.06474137878417968, 0.06480076599121094, 0.06452428436279296, 0.06448121643066407, 0.06443218994140625, 0.0639733772277832, 0.06391398239135743, 0.06418022155761718, 0.064499267578125, 0.06469058990478516, 0.06501299285888672, 0.06463158416748047, 0.0647720947265625, 0.06494384002685546, 0.06531228637695312, 0.06518246459960937, 0.0647515869140625, 0.06465129852294922, 0.06533939361572266, 0.06498863983154297, 0.06457596588134766, 0.06455916595458984, 0.06497280120849609, 0.06480646514892578, 0.06494866943359374, 0.06531072235107421, 0.06508953857421874, 0.06498508453369141, 0.06562569427490235, 0.06526608276367188, 0.06507283020019532, 0.06494969940185547, 0.06554502105712891, 0.06540908813476562, 0.06523052978515625, 0.06529440307617188, 0.06545740509033203, 0.06492057800292969, 0.06485606384277344, 0.0652509765625, 0.0653950424194336, 0.06524912261962891, 0.06569999694824219, 0.0662138900756836, 0.06573465728759766, 0.06416694641113281, 0.06463581085205078, 0.06413033294677735, 0.06390044784545898, 0.06406729888916016, 0.06446723175048828, 0.0642023696899414, 0.06393894577026367, 0.06419455718994141, 0.06417203521728515, 0.06402569580078125, 0.06467052459716797, 0.06431922912597657, 0.06405551910400391, 0.06362044906616211, 0.06391817474365234, 0.06445938873291016, 0.06456297302246093, 0.06432588958740235, 0.06489500427246093, 0.0645406723022461, 0.0643683853149414, 0.0649808349609375, 0.06456156921386719, 0.06438706970214844, 0.06436819458007813, 0.06459613037109375, 0.06527529907226562, 0.06449651336669922, 0.06410649871826171, 0.06416793823242188, 0.06477350616455078, 0.06489561462402343, 0.06514006042480469, 0.06501033782958984, 0.06454271697998047, 
0.06497280120849609, 0.06518131256103515, 0.0649742431640625, 0.06494876861572266, 0.06477798461914062, 0.06530265808105469, 0.06505020904541016, 0.06514582061767578, 0.06509929656982422, 0.06488521575927735, 0.0650240020751953, 0.06506495666503906, 0.06513459014892578, 0.06548076629638672, 0.06575430297851563, 0.06542534637451172, 0.0651556167602539, 0.06526799774169922, 0.065617919921875, 0.06573260498046875, 0.06537766265869141, 0.064940673828125, 0.06565631866455078, 0.06566758728027344, 0.06528409576416015, 0.06491545867919922, 0.06548070526123047, 0.06400819396972657, 0.06442803192138671, 0.0641430435180664, 0.06388768005371094, 0.06388300704956054, 0.06410800170898437, 0.06428278350830079, 0.06393283081054688, 0.0639101104736328, 0.06443977355957031, 0.06408246612548828, 0.06379843139648438, 0.06436914825439453, 0.06446934509277344, 0.06426732635498048, 0.06401119995117187, 0.06407743835449219, 0.06412940979003906, 0.06498521423339844, 0.06499520111083984, 0.06471878051757812, 0.06463638305664063, 0.06417369842529297, 0.064629150390625, 0.06481158447265625, 0.06458573150634765, 0.06491693115234375, 0.06469232177734376, 0.06456777954101563, 0.06480806732177734, 0.06470658874511719, 0.06487039947509765, 0.06537302398681641, 0.06492160034179688, 0.06515692901611328, 0.06494841766357422, 0.06446080017089843, 0.06502153778076172, 0.06506861114501954, 0.06487741088867187, 0.06521568298339844, 0.06507807922363282, 0.06499100494384766, 0.06465756988525391, 0.06467913818359375, 0.06466441345214843, 0.06521446228027344, 0.06566297912597656, 0.06559107208251953, 0.06569554901123047, 0.06571356964111329, 0.06517616271972657, 0.0657473602294922, 0.06554828643798828, 0.06535782623291016, 0.06551667022705078, 0.06584937286376953, 0.06537059020996094, 0.06523123168945312, 0.06555648040771485, 0.06509686279296875, 0.06517436981201172, 0.06534963226318359, 0.06410963439941406, 0.06501686096191406, 0.0644525146484375, 0.06417606353759765, 0.06407344055175782, 0.06430665588378906, 0.06421923065185547, 0.06399267196655273, 0.06399516677856446, 0.06421750640869141, 0.06400434875488281, 0.06375177764892578, 0.064372802734375, 0.06449366760253906, 0.06399615859985351, 0.06417565155029296, 0.06400646209716797, 0.06432784271240234, 0.06491136169433594, 0.06509523010253906, 0.0649056625366211, 0.06447030639648438, 0.06432579040527343, 0.06466413116455078, 0.06470655822753907, 0.06454476928710938, 0.06437455749511718, 0.06415923309326171, 0.06444310760498047, 0.06458748626708985, 0.06456348419189453, 0.06456320190429687, 0.06466963195800782, 0.065159423828125, 0.06534124755859375, 0.06547840118408203, 0.06520038604736328, 0.06542047882080078, 0.06511289978027343, 0.06481100463867187, 0.06496256256103515, 0.06510562896728515, 0.06492179107666016, 0.06489449310302735, 0.0649836196899414, 0.06495231628417969, 0.06499305725097657, 0.06564649963378906, 0.06554176330566407, 0.06528396606445312, 0.06505328369140626, 0.06545619201660156, 0.0658597412109375, 0.06533939361572266, 0.06521030426025391, 0.06528620910644531, 0.06545817565917969, 0.06525516510009766, 0.06533548736572266, 0.06552780914306641, 0.06514080047607422, 0.06526726531982421, 0.06550777435302735, 0.06467145538330078, 0.06468991851806641, 0.06447977447509766, 0.06393180847167969, 0.06390403366088868, 0.06408009338378906, 0.06452028656005859, 0.06420486450195312, 0.0639507827758789, 0.0639815673828125, 0.06402252960205078, 0.06466764831542969, 0.06451814270019532, 0.06446604919433593, 0.06395379257202148, 0.06395699310302734, 0.06394012832641602, 
0.06442851257324218, 0.06451609802246094, 0.06486630249023438, 0.06470451354980469, 0.06462393951416015, 0.06465164947509766, 0.0644181137084961, 0.06431446075439454, 0.06449561309814453, 0.06445516967773438, 0.06450176239013672, 0.06478479766845703, 0.06509503936767579, 0.06465145874023437, 0.06444486236572265, 0.0648656005859375, 0.06491539001464844, 0.0650165786743164, 0.06500556945800781, 0.06524723052978515, 0.0655093765258789, 0.06519123077392579, 0.06488134765625, 0.06524028778076171, 0.06500784301757813, 0.06515155029296875, 0.06548684692382813, 0.06512249755859376, 0.06496441650390625, 0.06514035034179687, 0.06523750305175781, 0.06483340454101562, 0.06530662536621094, 0.06602342224121094, 0.06538240051269531, 0.0655417251586914, 0.06576512145996094, 0.06533776092529296, 0.0655400619506836, 0.06553628540039062, 0.06567731475830078, 0.06592454528808593, 0.0655038070678711, 0.06547046661376953, 0.06553782653808594, 0.06570393371582031, 0.06445951843261719, 0.06456886291503906, 0.06415408325195313, 0.0641638412475586, 0.06445875549316406, 0.06465945434570312, 0.06452838134765625, 0.06415769958496094, 0.06401229095458984, 0.06443417358398437, 0.06459391784667969, 0.0641638412475586, 0.06412879943847656, 0.0638400001525879, 0.06409613037109375, 0.06428118133544922, 0.0647515869140625, 0.06491753387451171, 0.06431475067138671, 0.06451264190673828, 0.06491545867919922, 0.06452019500732421, 0.06449561309814453, 0.06431334686279297, 0.06439116668701172, 0.06497068786621094, 0.06455017852783203, 0.06464320373535157, 0.064283203125, 0.0645199966430664, 0.0646761245727539, 0.06487216186523438, 0.06486249542236328, 0.06440755462646484, 0.0652759017944336, 0.0655250244140625, 0.06538880157470703, 0.06476438140869141, 0.06473932647705079, 0.06508745574951172, 0.06543363189697265, 0.0650096664428711, 0.06462435150146484, 0.06490287780761719, 0.06503807830810547, 0.06500434875488281, 0.0653311996459961, 0.06536192321777344, 0.0651878433227539, 0.06569983673095703, 0.06571385955810546, 0.06577798461914063, 0.06542950439453125, 0.06513394927978515, 0.06549155426025391, 0.06589033508300782, 0.06548255920410156, 0.06529043579101562, 0.06519987487792969, 0.06579634857177734, 0.06593087768554687, 0.06566336059570313, 0.06530048370361329, 0.06374399948120117, 0.06455910491943359, 0.06463692474365235, 0.06463868713378906, 0.06437097930908203, 0.06448947143554687, 0.06401638031005859, 0.06443827056884766, 0.06395904159545898, 0.06409830474853516, 0.06428995513916015, 0.06417494201660157, 0.06409011077880859, 0.06523075103759765, 0.06437693023681641, 0.06385843276977539, 0.06405286407470703, 0.06440409851074219, 0.06496665954589843, 0.06540665435791015, 0.06494649505615234, 0.06441964721679687, 0.06458386993408204, 0.06469420623779297, 0.06452025604248048, 0.06449971008300781, 0.06453218841552734, 0.06481743621826172, 0.06529811096191407, 0.06469379425048828, 0.06429718780517578, 0.06453862762451172, 0.06467763519287109, 0.06512429046630859, 0.0653436508178711, 0.06528467559814453, 0.06511427307128906, 0.06529843139648438, 0.06501580810546875, 0.06504646301269532, 0.06485612487792969, 0.0654028778076172, 0.06512025451660156, 0.0651325454711914, 0.0650096664428711, 0.06503369903564453, 0.06553577423095704, 0.06521663665771485, 0.06495219421386719, 0.06545843505859375, 0.06548326110839844, 0.0655645751953125, 0.06531491088867188, 0.06577561950683594, 0.06543309020996094, 0.0652715835571289, 0.06509391784667969, 0.06548652648925782, 0.06556076812744141, 0.0653399658203125, 0.06525542449951172, 
0.06540083312988282, 0.06564774322509766, 0.06395084762573242, 0.06467151641845703, 0.06417021179199218, 0.0638218231201172, 0.06402662658691406, 0.0646983642578125, 0.06420684814453125, 0.06413926696777343, 0.06416998291015626, 0.06398511886596679, 0.06419222259521484, 0.06469446563720703, 0.06447740936279298, 0.06432953643798828, 0.06379292678833008, 0.06401238250732422, 0.06453936004638672, 0.06487245178222656, 0.06458118438720703, 0.06489542388916016, 0.06507315063476563, 0.06492332458496093, 0.06459347534179688, 0.06462089538574219, 0.06458185577392578, 0.06463507080078125, 0.06503756713867187, 0.06466441345214843, 0.06459327697753907, 0.06466000366210937, 0.06453453063964844, 0.06495846557617188, 0.06487449645996093, 0.06479666900634766, 0.06489497375488282, 0.0651325454711914, 0.06535167694091797, 0.06497599792480468, 0.065162109375, 0.06504857635498047, 0.06517145538330078, 0.06510797119140625, 0.06532505798339844, 0.065185791015625, 0.06504608154296875, 0.06472716522216797, 0.06476774597167968, 0.06480953979492188, 0.06577561950683594, 0.06543974304199218, 0.06580223846435547, 0.06532915496826172, 0.06568950653076172, 0.06567657470703125, 0.06545491027832032, 0.06564800262451172, 0.06550182342529297, 0.06569535827636719, 0.06555391693115234, 0.06530137634277344, 0.06534553527832031, 0.06535897827148437, 0.06565094757080078]",tokens/s,15.435464282042043,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1587.03616,1604.190208,0.0,1201.668096,1189.151232,s,1,8.3159912109375,8.3159912109375,0.0,8.3159912109375,8.3159912109375,8.3159912109375,8.3159912109375,[8.3159912109375],,kWh,3.634801674583438e-05,4.001643686964657e-06,1.1978065137988048e-05,5.232772557078708e-05,,MB,1640.620032,1801.322496,0.0,1384.12032,1351.367168,s,10,0.4135705299377441,0.041357052993774415,0.00017739890895268108,0.041310447692871094,0.04144774856567383,0.041655554962158205,0.0418218000793457,"[0.041863361358642576, 0.04124623870849609, 0.0412476806640625, 0.041315967559814454, 0.04140156936645508, 0.041304927825927734, 0.04135299301147461, 0.04129644775390625, 0.041203166961669924, 0.04133817672729492]",tokens/s,6189.996178850953,kWh,1.2652971018940093e-06,1.3953843145637994e-07,8.379287607705967e-07,2.2427642941209858e-06,tokens/kWh,114144852.7030055,MB,1645.121536,1843.265536,0.0,1426.06336,1407.548416,s,10,10.62936572265625,1.0629365722656252,0.004183777323095977,1.062955078125,1.06710595703125,1.0691208496093751,1.070732763671875,"[1.065916748046875, 1.0711357421875, 1.059759765625, 1.066658203125, 1.0625648193359376, 1.0633453369140624, 1.05744677734375, 1.056900146484375, 1.06080908203125, 
1.0648291015625]",tokens/s,59.26976420212631,kWh,4.184492875435695e-05,4.615108470122712e-06,1.935508739362835e-05,6.581512461810802e-05,tokens/kWh,957226.782833843,,s,630,10.626221904754635,0.016867018896435933,0.00024727151265993124,0.016802687644958497,0.017083324241638182,0.01725263395309448,0.0180522176361084,"[0.016684864044189455, 0.016785600662231445, 0.01675468826293945, 0.01678335952758789, 0.016697343826293946, 0.017145856857299805, 0.017501760482788085, 0.01744895935058594, 0.01849795150756836, 0.01746895980834961, 0.017252864837646483, 0.01710041618347168, 0.01700217628479004, 0.0169902400970459, 0.016926399230957033, 0.016825471878051758, 0.016753536224365234, 0.01674710464477539, 0.01670591926574707, 0.01678335952758789, 0.017311744689941407, 0.01702707290649414, 0.017151168823242188, 0.01674505615234375, 0.016761056900024413, 0.0167521915435791, 0.01686582374572754, 0.016693151473999024, 0.0168441276550293, 0.017377952575683593, 0.016834592819213866, 0.016914112091064453, 0.016793888092041017, 0.01681612777709961, 0.01696767997741699, 0.01683772850036621, 0.016892831802368165, 0.0167956485748291, 0.016756736755371093, 0.01681612777709961, 0.016766176223754883, 0.01679420852661133, 0.016726144790649412, 0.016775327682495116, 0.016891807556152345, 0.01680179214477539, 0.016803295135498046, 0.016855712890625, 0.01679759979248047, 0.01695676803588867, 0.016792383193969727, 0.016867136001586912, 0.016770912170410157, 0.016808256149291993, 0.016770624160766603, 0.016830368041992186, 0.016868928909301757, 0.016841312408447266, 0.01680099105834961, 0.01677824020385742, 0.016932863235473633, 0.01679158401489258, 0.016858335494995116, 0.016876287460327148, 0.01693414306640625, 0.01747430419921875, 0.01680998420715332, 0.016717824935913086, 0.016776512145996094, 0.017226560592651367, 0.017315263748168944, 0.016796096801757813, 0.01677248001098633, 0.017029760360717773, 0.01692982482910156, 0.016832992553710936, 0.016848800659179687, 0.016904800415039063, 0.01676697540283203, 0.016748544692993163, 0.016848608016967772, 0.016865568161010744, 0.016934911727905275, 0.016959007263183595, 0.016782848358154297, 0.01680240058898926, 0.01711756706237793, 0.01698329544067383, 0.01705174446105957, 0.017164064407348634, 0.016776063919067382, 0.017035135269165037, 0.016860607147216797, 0.01696224021911621, 0.01689097595214844, 0.017046272277832033, 0.017038623809814454, 0.01685798454284668, 0.01682569694519043, 0.01681167984008789, 0.016884735107421875, 0.016897119522094727, 0.01675152015686035, 0.01676697540283203, 0.01684432029724121, 0.016787519454956056, 0.01683260726928711, 0.01685273551940918, 0.01720582389831543, 0.01730143928527832, 0.017406015396118163, 0.017268735885620116, 0.017071168899536134, 0.017013696670532225, 0.01703910446166992, 0.016962879180908202, 0.01713862419128418, 0.016873472213745116, 0.016776384353637694, 0.01739788818359375, 0.01801491165161133, 0.017072128295898437, 0.01693401527404785, 0.01684979248046875, 0.018550111770629884, 0.017031583786010742, 0.016734207153320312, 0.016859136581420898, 0.016921600341796874, 0.016763904571533202, 0.01667625617980957, 0.016752511978149413, 0.016750656127929687, 0.016699296951293945, 0.016917247772216797, 0.01673539161682129, 0.016716800689697265, 0.016745376586914062, 0.016772031784057617, 0.016709632873535156, 0.016796800613403322, 0.016640895843505858, 0.01665951919555664, 0.016657119750976564, 0.016795072555541992, 0.016712480545043946, 0.016676864624023437, 0.016912351608276366, 0.016842815399169923, 0.016805856704711915, 
0.016752639770507814, 0.016774175643920898, 0.016718624114990234, 0.01669340705871582, 0.01681001663208008, 0.01680384063720703, 0.016822559356689453, 0.01674166488647461, 0.01685139274597168, 0.01674835205078125, 0.016744640350341795, 0.016738016128540038, 0.01686895942687988, 0.016792255401611327, 0.016728063583374024, 0.0167587833404541, 0.01677107238769531, 0.016930816650390625, 0.016804927825927733, 0.016810943603515625, 0.01684480094909668, 0.01682022476196289, 0.016965215682983398, 0.01713961601257324, 0.016961183547973633, 0.016925535202026366, 0.01700399971008301, 0.017185312271118164, 0.017180416107177736, 0.016976127624511717, 0.016868768692016603, 0.01683011245727539, 0.016919200897216796, 0.01686966323852539, 0.01679769515991211, 0.016721920013427736, 0.0168175048828125, 0.016828672409057617, 0.016951711654663085, 0.01681875228881836, 0.016835903167724608, 0.017015487670898437, 0.016784383773803712, 0.016888832092285155, 0.01679974365234375, 0.01678108787536621, 0.016750816345214845, 0.017031167984008787, 0.016875520706176757, 0.016767040252685547, 0.016877504348754884, 0.016811904907226564, 0.01698409652709961, 0.01682032012939453, 0.016959232330322267, 0.016799999237060548, 0.016750303268432618, 0.01686147117614746, 0.01717635154724121, 0.016862880706787108, 0.018469024658203125, 0.017827583312988282, 0.01684147262573242, 0.01676483154296875, 0.016764928817749023, 0.016719871520996094, 0.01703321647644043, 0.016752639770507814, 0.016805055618286133, 0.01674118423461914, 0.01683216094970703, 0.016786943435668944, 0.01687843132019043, 0.01684480094909668, 0.016887807846069337, 0.01679155158996582, 0.016846847534179688, 0.01680588722229004, 0.016952352523803713, 0.017015167236328125, 0.017224288940429686, 0.017360895156860352, 0.01719500732421875, 0.016987968444824218, 0.01696767997741699, 0.016806079864501954, 0.016840320587158203, 0.01698240089416504, 0.016867328643798828, 0.01677471923828125, 0.016744895935058592, 0.016732160568237304, 0.01679769515991211, 0.0167314567565918, 0.01674720001220703, 0.01699635124206543, 0.016856800079345702, 0.016916767120361328, 0.01700044822692871, 0.016961536407470702, 0.01721548843383789, 0.016893951416015626, 0.017100799560546876, 0.017115135192871094, 0.016885759353637696, 0.016819456100463866, 0.016818944931030273, 0.01718272018432617, 0.017164287567138673, 0.01713324737548828, 0.017123071670532227, 0.016831039428710937, 0.016746496200561522, 0.016728063583374024, 0.0167969913482666, 0.01699705505371094, 0.016859136581420898, 0.016777215957641603, 0.016727680206298827, 0.016783744812011718, 0.016736255645751954, 0.016815391540527344, 0.016804576873779297, 0.0167890567779541, 0.016751039505004884, 0.016756736755371093, 0.01670262336730957, 0.016796512603759764, 0.016735776901245118, 0.01676278305053711, 0.016673343658447266, 0.016965503692626952, 0.01681011199951172, 0.01684889602661133, 0.01681407928466797, 0.016943328857421874, 0.01696476745605469, 0.01676310348510742, 0.016700992584228514, 0.016794368743896483, 0.016715871810913087, 0.01680179214477539, 0.016774303436279298, 0.01678998374938965, 0.016771039962768554, 0.01681654357910156, 0.016871423721313478, 0.016846847534179688, 0.01676697540283203, 0.01683865547180176, 0.01679155158996582, 0.016965631484985352, 0.017001663208007813, 0.016869823455810548, 0.016736448287963866, 0.016754079818725585, 0.01692956733703613, 0.016928703308105468, 0.016862720489501954, 0.016943679809570313, 0.017043455123901367, 0.016990207672119142, 0.016977920532226562, 0.016988256454467773, 
0.016814048767089845, 0.01697110366821289, 0.016883039474487306, 0.016769088745117188, 0.01675971221923828, 0.016690431594848634, 0.01671014404296875, 0.01675270462036133, 0.018353439331054686, 0.016872352600097656, 0.01678982353210449, 0.016725696563720704, 0.016731712341308595, 0.016675264358520507, 0.01687331199645996, 0.016780799865722656, 0.01679417610168457, 0.01666396713256836, 0.016704191207885744, 0.016670719146728515, 0.01681350326538086, 0.016831039428710937, 0.01678102493286133, 0.01666486358642578, 0.016713727951049806, 0.016695167541503905, 0.01680601692199707, 0.01742848014831543, 0.0168154239654541, 0.016775232315063476, 0.016822912216186522, 0.016697343826293946, 0.01718675231933594, 0.01699184036254883, 0.01674083137512207, 0.01681135940551758, 0.016685728073120118, 0.018067455291748045, 0.01678950309753418, 0.017086463928222655, 0.016687103271484375, 0.016730304718017577, 0.016742080688476563, 0.016656639099121094, 0.016768863677978516, 0.01702412796020508, 0.016714048385620118, 0.016699199676513673, 0.016751264572143553, 0.01726678466796875, 0.017030176162719728, 0.01701785659790039, 0.016920576095581053, 0.01743449592590332, 0.016765056610107423, 0.016743616104125978, 0.01708297538757324, 0.016756959915161133, 0.01683456039428711, 0.01683251190185547, 0.016850944519042968, 0.016803199768066407, 0.016797344207763673, 0.0168353271484375, 0.017010719299316406, 0.016857248306274414, 0.016769184112548827, 0.016725055694580077, 0.016706207275390624, 0.01670547294616699, 0.016781312942504883, 0.016819776535034178, 0.016724416732788086, 0.016794912338256834, 0.016782047271728516, 0.0167096004486084, 0.016795679092407225, 0.01690380859375, 0.016738496780395507, 0.016789695739746095, 0.016719423294067382, 0.016685440063476564, 0.017223392486572266, 0.01679759979248047, 0.016778976440429687, 0.016808671951293944, 0.01681612777709961, 0.016889120101928713, 0.016926591873168945, 0.016759647369384765, 0.016901760101318358, 0.01676736068725586, 0.016680959701538087, 0.01681808090209961, 0.01682236862182617, 0.01715376091003418, 0.01680182456970215, 0.016689151763916017, 0.016697343826293946, 0.016661056518554686, 0.01671340751647949, 0.01666771125793457, 0.016861343383789064, 0.016671520233154297, 0.016989856719970702, 0.016693599700927736, 0.01683865547180176, 0.016750591278076172, 0.016738304138183592, 0.01680188751220703, 0.016852895736694337, 0.0167871036529541, 0.016689344406127928, 0.01675894355773926, 0.016853120803833006, 0.016789375305175783, 0.01677516746520996, 0.016719711303710937, 0.01670979118347168, 0.016732160568237304, 0.01671603202819824, 0.016710399627685547, 0.0166997127532959, 0.01666486358642578, 0.01665011215209961, 0.01664463996887207, 0.016728063583374024, 0.016807903289794923, 0.01667068862915039, 0.01675267219543457, 0.016660703659057616, 0.016624992370605468, 0.016671167373657227, 0.016717824935913086, 0.016730112075805666, 0.01665433692932129, 0.01662931251525879, 0.016722368240356444, 0.016652288436889647, 0.016728063583374024, 0.016683231353759764, 0.016813407897949217, 0.016751039505004884, 0.01665023994445801, 0.016711679458618164, 0.01671139144897461, 0.01666227149963379, 0.016734752655029297, 0.016701440811157226, 0.0166297607421875, 0.016672704696655275, 0.01663609504699707, 0.016790912628173827, 0.01671014404296875, 0.01679974365234375, 0.016608383178710936, 0.01663065528869629, 0.016719520568847655, 0.016695327758789062, 0.016676191329956055, 0.016722719192504884, 0.016731487274169923, 0.01684115219116211, 0.016797536849975585, 
0.017044031143188476, 0.01677916717529297, 0.016845983505249025, 0.016790687561035158, 0.016711679458618164, 0.016776992797851564, 0.01687936019897461, 0.016817695617675783, 0.01714044761657715, 0.016741472244262694, 0.016733087539672852, 0.016678911209106445, 0.01679769515991211, 0.01682636833190918, 0.016797088623046876, 0.016804447174072267, 0.01685091209411621, 0.017209152221679687, 0.01701411247253418, 0.01701356887817383, 0.016922687530517577, 0.01698374366760254, 0.01692038345336914, 0.016860736846923827, 0.016802751541137695, 0.01681942367553711, 0.016779232025146484, 0.016786527633666993, 0.016710079193115235, 0.01676969528198242, 0.01672515106201172, 0.016793952941894532, 0.016795967102050783, 0.01680998420715332, 0.01677017593383789, 0.016823135375976562, 0.01673423957824707, 0.01677027130126953, 0.016717695236206056, 0.016675392150878907, 0.016722335815429687, 0.016713119506835936, 0.017016735076904297, 0.01914124870300293, 0.01721958351135254, 0.016964832305908204, 0.016827007293701172, 0.016799232482910157, 0.016734880447387697, 0.01670479965209961, 0.01671446418762207, 0.016715103149414063, 0.01682633590698242, 0.016734912872314454, 0.016746496200561522, 0.016752639770507814, 0.01679155158996582, 0.01687868881225586, 0.01693996810913086, 0.016891263961791993, 0.01692323112487793, 0.016840288162231445, 0.016697759628295897, 0.016949247360229493, 0.016854496002197267, 0.01683523178100586, 0.016801536560058592, 0.017168512344360353, 0.016868799209594727, 0.01674435234069824, 0.016917152404785155, 0.016891904830932617, 0.016764928817749023, 0.01674998474121094, 0.016804447174072267, 0.016644096374511717, 0.01665433692932129, 0.016645151138305662, 0.01662870407104492, 0.01662745666503906, 0.016633600234985352, 0.016690752029418946, 0.01669215965270996, 0.01665017509460449, 0.01694451141357422, 0.01686783981323242, 0.016846111297607422, 0.016835487365722657, 0.016909471511840822, 0.016715423583984375, 0.016666976928710938, 0.016639999389648438, 0.016715551376342775, 0.01668070411682129, 0.01664828872680664, 0.01668953514099121, 0.016705184936523437, 0.016699743270874024, 0.016828256607055662, 0.017410207748413085, 0.017988639831542967, 0.018238431930541994, 0.01696767997741699, 0.016902143478393555, 0.01700655937194824, 0.017256479263305664, 0.016711679458618164, 0.016646047592163087, 0.016815744400024413, 0.016804319381713867, 0.016671743392944336, 0.016755680084228515, 0.016745920181274413, 0.01706847953796387, 0.016949407577514647, 0.016887807846069337, 0.016715423583984375, 0.016685216903686524, 0.016736064910888672, 0.016692928314208984, 0.016921279907226562, 0.01677926445007324, 0.016770240783691406, 0.016802623748779298, 0.01685683250427246, 0.016795904159545898, 0.01678745651245117, 0.016766496658325195, 0.01678531265258789, 0.016715551376342775, 0.01675071907043457, 0.016734176635742188, 0.016673471450805662, 0.016777215957641603, 0.016707456588745118, 0.016693376541137697, 0.016662527084350585, 0.01665996742248535, 0.016716287612915038, 0.016842752456665038, 0.01700864028930664, 0.017297407150268555, 0.017299455642700197, 0.01724825668334961, 0.017252351760864256, 0.017380640029907225, 0.01731043243408203, 0.017034847259521483, 0.01689846420288086, 0.016837791442871095, 0.016956256866455077, 0.016815839767456056]",tokens/s,59.28729944159272,, 
4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl 
return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,6770.151424,7769.817088,0.0,7367.294976,7351.94368,s,1,12.8343544921875,12.8343544921875,0.0,12.8343544921875,12.8343544921875,12.8343544921875,12.8343544921875,[12.8343544921875],,kWh,0.00016262083862498002,1.7930771709125052e-05,5.188948595599707e-05,0.00023244109629010215,,MB,1428.455424,8411.5456,0.0,7994.343424,7863.794176,s,10,3.007503631591797,0.3007503631591797,0.0007778213261312478,0.3005518493652344,0.3018658843994141,0.30186633758544923,0.30186670013427735,"[0.2992444152832031, 0.30049298095703125, 0.3002258605957031, 0.3003512878417969, 0.3006107177734375, 0.30155010986328123, 0.30186578369140626, 0.30041238403320314, 0.30088330078125, 0.3018667907714844]",tokens/s,851.2042921940066,kWh,8.817051880515135e-06,9.717589426134103e-07,5.867798811882332e-06,1.5656609635010876e-05,tokens/kWh,16350921.813081415,MB,1449.08288,8684.17536,0.0,8266.973184,8120.408064,s,10,24.71263427734375,2.471263427734375,0.00292096157586033,2.471947998046875,2.4749196044921873,2.475168664550781,2.475367912597656,"[2.46732470703125, 
2.467976318359375, 2.466483642578125, 2.47139501953125, 2.47119091796875, 2.472824951171875, 2.4725009765625, 2.47265576171875, 2.475417724609375, 2.4748642578125]",tokens/s,25.49303295349523,kWh,7.265530000698256e-05,8.01449383128984e-06,4.825141115011931e-05,0.00012892120498839176,tokens/kWh,488670.57987607707,,s,630,24.7095817222595,0.03922155828930083,0.00036437899605466735,0.03920817565917969,0.039703884506225584,0.039806880950927735,0.04005883972167969,"[0.03881321716308594, 0.038777313232421874, 0.038645759582519534, 0.03865804672241211, 0.039124256134033204, 0.038887744903564454, 0.03889129638671875, 0.038855072021484374, 0.038717121124267576, 0.03856643295288086, 0.03865599822998047, 0.03866828918457031, 0.038801406860351564, 0.03881788635253906, 0.03878051376342773, 0.03885830307006836, 0.038855422973632814, 0.038795265197753906, 0.03900620651245117, 0.03888332748413086, 0.038973438262939454, 0.03908403015136719, 0.03906054306030273, 0.0388105583190918, 0.03889152145385742, 0.038948863983154294, 0.038885505676269534, 0.03916905593872071, 0.039105377197265624, 0.03909632110595703, 0.03892633438110352, 0.03954051208496094, 0.03961468887329102, 0.03925382232666016, 0.03911648178100586, 0.03908659362792969, 0.039200736999511716, 0.03924153518676758, 0.03912931060791015, 0.039019519805908204, 0.039169025421142575, 0.03928448104858399, 0.03921913528442383, 0.03920518493652344, 0.03921049499511719, 0.03922751998901367, 0.03908563232421875, 0.03937472152709961, 0.039609279632568356, 0.03954278564453125, 0.03947520065307617, 0.03953049468994141, 0.039710430145263674, 0.03959017562866211, 0.03953782272338867, 0.03971772766113281, 0.03973904037475586, 0.03973500823974609, 0.03999756622314453, 0.03967574310302734, 0.03985062408447266, 0.03976217651367187, 0.03954051208496094, 0.039265758514404295, 0.038625823974609376, 0.03847091293334961, 0.038494976043701175, 0.03858454513549805, 0.038758174896240234, 0.038752254486083985, 0.03869286346435547, 0.03901993560791016, 0.038998622894287106, 0.038828033447265625, 0.03895203018188476, 0.038679454803466795, 0.03858160018920898, 0.038719264984130856, 0.03872608184814453, 0.0388935661315918, 0.03888140869140625, 0.038830398559570316, 0.03901801681518555, 0.038943199157714846, 0.03896086502075195, 0.038786945343017576, 0.03882640075683594, 0.039624702453613284, 0.03950982284545899, 0.039317249298095706, 0.03875680160522461, 0.03870515060424805, 0.039051265716552735, 0.03899596786499023, 0.03909427261352539, 0.03927164840698242, 0.039524864196777344, 0.039465248107910154, 0.039290206909179684, 0.03930755233764648, 0.03924825668334961, 0.039165824890136716, 0.03937497711181641, 0.03933180618286133, 0.03947638320922851, 0.03943017578125, 0.03933884811401367, 0.03935846328735351, 0.03925116729736328, 0.039127361297607424, 0.039126846313476564, 0.03927657699584961, 0.03946355056762695, 0.03950150299072266, 0.0394832649230957, 0.03946662521362305, 0.0393135986328125, 0.03932735824584961, 0.03963187026977539, 0.03979375839233398, 0.039768863677978515, 0.039682174682617186, 0.03962595367431641, 0.04024195098876953, 0.0399749755859375, 0.039693984985351566, 0.03864780807495117, 0.03869283294677734, 0.03872361755371094, 0.038577342987060545, 0.0385043830871582, 0.0384582405090332, 0.03851593780517578, 0.0384818229675293, 0.03871833419799805, 0.03873791885375977, 0.038703102111816406, 0.039034881591796876, 0.0390041618347168, 0.03895024108886719, 0.03882870483398437, 0.03882201766967774, 0.038784896850585934, 0.03892412948608399, 0.03884268951416016, 
0.03892726516723633, 0.03886371231079101, 0.03894460678100586, 0.03917776107788086, 0.039178272247314454, 0.03912774276733398, 0.03901785659790039, 0.039026878356933595, 0.03903087997436523, 0.03923081588745117, 0.039218177795410154, 0.03907321548461914, 0.03897727966308594, 0.039000896453857424, 0.03905449676513672, 0.039157726287841796, 0.039029632568359375, 0.03909222412109375, 0.03926595306396485, 0.03923126220703125, 0.03926278305053711, 0.039402782440185545, 0.03952905654907227, 0.03977638244628906, 0.039660575866699216, 0.039384063720703126, 0.03952841567993164, 0.03973062515258789, 0.039604801177978516, 0.03959584045410156, 0.03942828750610351, 0.039454559326171874, 0.03965292739868164, 0.03958230209350586, 0.03947315216064453, 0.03932995223999024, 0.03941487884521484, 0.039548736572265625, 0.03958073425292969, 0.03958367919921875, 0.0395873908996582, 0.03954492950439453, 0.039463230133056644, 0.03950364685058594, 0.03920585632324219, 0.03890156936645508, 0.03884236907958984, 0.03868467330932617, 0.03909225463867187, 0.03910960006713867, 0.038757118225097656, 0.038752288818359376, 0.0387108154296875, 0.038617023468017576, 0.03863628768920899, 0.038768638610839845, 0.03878092956542969, 0.038714622497558596, 0.0386578254699707, 0.03869769668579102, 0.03905279922485352, 0.0390838394165039, 0.039000320434570315, 0.03894547271728516, 0.039049217224121094, 0.03896646499633789, 0.03896387100219727, 0.03881590270996094, 0.03883200073242187, 0.039059070587158205, 0.0391064338684082, 0.0390437126159668, 0.039095966339111325, 0.03928508758544922, 0.039258113861083986, 0.03921273422241211, 0.039100734710693356, 0.03966566467285156, 0.03966287994384766, 0.039336318969726565, 0.039389537811279296, 0.039290721893310544, 0.039077247619628906, 0.03907987213134766, 0.039183006286621094, 0.03913852691650391, 0.039130081176757814, 0.03948255920410156, 0.0395720329284668, 0.03963040161132812, 0.039497440338134765, 0.04005372619628906, 0.04006092834472656, 0.039806880950927735, 0.03956556701660156, 0.03939311981201172, 0.039325439453125, 0.03952592086791992, 0.0397215690612793, 0.039626880645751955, 0.03955507278442383, 0.039577598571777346, 0.03944572830200195, 0.03958563232421875, 0.04006803131103515, 0.0400423698425293, 0.039806880950927735, 0.03918841552734375, 0.038800609588623046, 0.03859711837768555, 0.038711647033691406, 0.03868467330932617, 0.03864371109008789, 0.03871535873413086, 0.038817089080810545, 0.03899603271484375, 0.039070369720458985, 0.03878883361816406, 0.0386317138671875, 0.038647167205810545, 0.03864025497436523, 0.03913459014892578, 0.039160446166992186, 0.03893654251098633, 0.0389529914855957, 0.03892633438110352, 0.03877068710327149, 0.03902873611450195, 0.03896934509277344, 0.038938175201416014, 0.03897110366821289, 0.03917488098144531, 0.03921491241455078, 0.039190399169921876, 0.03937721633911133, 0.039066913604736325, 0.039195358276367186, 0.03909427261352539, 0.03909417724609375, 0.03924921417236328, 0.039273246765136716, 0.03922739028930664, 0.03947708892822266, 0.039319713592529296, 0.039417407989501954, 0.03966918563842774, 0.03935334396362305, 0.039360511779785154, 0.0396266860961914, 0.039478782653808595, 0.03942867279052734, 0.039327423095703126, 0.039356735229492186, 0.03930521774291992, 0.03948252868652344, 0.03947577667236328, 0.03956911849975586, 0.0399222412109375, 0.03957712173461914, 0.03972963333129883, 0.03958499145507813, 0.03955791854858398, 0.03952640151977539, 0.03944144058227539, 0.03948847961425781, 0.03947235107421875, 0.03951273727416992, 
0.03985625457763672, 0.039841793060302735, 0.039825408935546876, 0.03877507019042969, 0.03871686553955078, 0.03868905639648437, 0.03888127899169922, 0.03893673706054687, 0.03896432113647461, 0.03918876647949219, 0.038928352355957034, 0.03887724685668945, 0.03869331359863281, 0.03865919876098633, 0.03881430435180664, 0.03883647918701172, 0.03876630401611328, 0.039043392181396484, 0.03911676788330078, 0.038995998382568356, 0.038934528350830076, 0.03894038391113281, 0.03922358322143555, 0.0392540168762207, 0.03903065490722656, 0.038884769439697264, 0.038855327606201175, 0.03888457489013672, 0.039008255004882815, 0.03899859237670898, 0.03906383895874024, 0.03907993698120117, 0.03901779174804688, 0.03934016036987305, 0.039330368041992185, 0.03951615905761719, 0.03943587112426758, 0.039444000244140624, 0.03931430435180664, 0.03962879943847656, 0.039550975799560545, 0.03957350540161133, 0.03955276870727539, 0.03970816040039062, 0.039598846435546876, 0.03933388900756836, 0.03929702377319336, 0.0393072624206543, 0.03940512084960938, 0.039455169677734374, 0.03931545639038086, 0.03931340789794922, 0.03927068710327149, 0.039541950225830076, 0.03953718566894531, 0.03942604827880859, 0.03949772644042969, 0.03975167846679688, 0.039725055694580076, 0.03962812805175781, 0.03992822265625, 0.039764225006103514, 0.039600128173828124, 0.03961222457885742, 0.0398460807800293, 0.03990323257446289, 0.03934220886230469, 0.03896918487548828, 0.03878313446044922, 0.038610206604003904, 0.03833932876586914, 0.03845497512817383, 0.03860713577270508, 0.038675937652587894, 0.038724128723144534, 0.03884560012817383, 0.03879817581176758, 0.03898777770996094, 0.03901968002319336, 0.03897740936279297, 0.03928329467773437, 0.03923987197875976, 0.03879955291748047, 0.038731712341308594, 0.03899302291870117, 0.0391894416809082, 0.039051265716552735, 0.03898777770996094, 0.038972991943359375, 0.038961601257324216, 0.039387134552001955, 0.03935641479492188, 0.03927040100097656, 0.03911884689331055, 0.03911065673828125, 0.03930476760864258, 0.03930764770507812, 0.03932985687255859, 0.03929859161376953, 0.03962928009033203, 0.0391632308959961, 0.039060127258300784, 0.03905487823486328, 0.039026817321777346, 0.03947760009765625, 0.0394788818359375, 0.03936502456665039, 0.03929836654663086, 0.0393325424194336, 0.03940556716918946, 0.03950899124145508, 0.03941068649291992, 0.03962265777587891, 0.03961180877685547, 0.03953251266479492, 0.040066879272460935, 0.03984656143188477, 0.03959772872924805, 0.03945475387573242, 0.039553504943847656, 0.03969843292236328, 0.03963264083862305, 0.039548351287841794, 0.03955161666870117, 0.03977580642700195, 0.03973731231689453, 0.03955984115600586, 0.03965542221069336, 0.039723007202148435, 0.039141822814941406, 0.03871337509155273, 0.03872079849243164, 0.03879600143432617, 0.03887104034423828, 0.03873721694946289, 0.03876726531982422, 0.0389714241027832, 0.03896912002563477, 0.03891632080078125, 0.03920230484008789, 0.0390557746887207, 0.038776927947998044, 0.03866828918457031, 0.03875577545166015, 0.039072032928466796, 0.039098495483398436, 0.03893468856811524, 0.038788352966308594, 0.038914817810058594, 0.03914700698852539, 0.03945523071289062, 0.03929292678833008, 0.039167232513427734, 0.03908060836791992, 0.038991966247558595, 0.039019775390625, 0.03887590408325195, 0.03892841720581055, 0.038995681762695314, 0.039106815338134766, 0.03916185760498047, 0.0392309455871582, 0.03906000137329101, 0.03912857437133789, 0.03944252777099609, 0.03955094528198242, 0.03946950531005859, 0.03934822463989258, 
0.039390430450439454, 0.03960502243041992, 0.03949977493286133, 0.039485439300537106, 0.039362560272216796, 0.03933184051513672, 0.03943116760253906, 0.03981190490722656, 0.03983174514770508, 0.03966566467285156, 0.039703807830810546, 0.03948588943481445, 0.03936422348022461, 0.03922195053100586, 0.03939897537231445, 0.039635391235351564, 0.03982745742797852, 0.03986022567749024, 0.03966566467285156, 0.03951161575317383, 0.039635391235351564, 0.03973734283447266, 0.03984384155273438, 0.039725055694580076, 0.039215328216552735, 0.03903862380981445, 0.03898185729980469, 0.03918656158447266, 0.03907731246948242, 0.03875897598266602, 0.038596607208251955, 0.03846899032592774, 0.0385399055480957, 0.038833377838134765, 0.038795486450195316, 0.03870483016967773, 0.039168895721435545, 0.03947686386108398, 0.03894480133056641, 0.03914912033081055, 0.039002910614013675, 0.03887308883666992, 0.0389857292175293, 0.03912879943847656, 0.03923791885375977, 0.03910246276855469, 0.03905913543701172, 0.03882835388183594, 0.039241729736328126, 0.0389936637878418, 0.039141120910644533, 0.03928319931030273, 0.03932160186767578, 0.03921075057983398, 0.03939132690429688, 0.0394521598815918, 0.03927107238769531, 0.03918806457519531, 0.0391376953125, 0.03914691162109375, 0.039282398223876955, 0.03930729675292969, 0.039494495391845706, 0.039436286926269534, 0.03933174514770508, 0.03916809463500977, 0.03913318252563477, 0.03929033660888672, 0.039567745208740235, 0.03975183868408203, 0.03964313507080078, 0.03952371215820313, 0.03958348846435547, 0.03976486587524414, 0.039820865631103514, 0.039739776611328125, 0.03964838409423828, 0.03958227157592773, 0.03977664184570313, 0.03975104141235351, 0.03965331268310547, 0.04022342300415039, 0.03990937423706055, 0.0396759033203125, 0.03963187026977539, 0.03965350341796875, 0.03979558563232422, 0.03878675079345703, 0.038596927642822264, 0.03853311920166016, 0.03841804885864258, 0.03860518264770508, 0.038811649322509766, 0.038776832580566405, 0.03880527877807617, 0.039176033020019534, 0.03894515228271484, 0.039090175628662106, 0.039032833099365234, 0.03893657684326172, 0.03920281600952148, 0.03917824172973633, 0.03909836959838867, 0.0390041618347168, 0.03900620651245117, 0.038950912475585936, 0.03885385513305664, 0.03899062347412109, 0.038975486755371096, 0.03890995025634766, 0.03920281600952148, 0.03922108840942383, 0.03910867309570312, 0.039032352447509765, 0.038986305236816406, 0.039294975280761715, 0.0393436164855957, 0.03915622329711914, 0.03919190216064453, 0.039373470306396485, 0.03940761566162109, 0.039378944396972655, 0.03970457458496094, 0.03951542282104492, 0.03933414459228515, 0.03930364990234375, 0.039273727416992185, 0.03926630401611328, 0.039201534271240235, 0.03957145690917969, 0.039534591674804685, 0.03947516632080078, 0.04006095886230469, 0.03987865447998047, 0.03974758529663086, 0.039624126434326175, 0.039586368560791015, 0.03952025604248047, 0.039511585235595705, 0.03970300674438477, 0.039725055694580076, 0.03955712127685547, 0.03952025604248047, 0.039501823425292966, 0.039634944915771485, 0.04006707382202149, 0.03997859191894531, 0.03981548690795898, 0.039712223052978515, 0.039860862731933594]",tokens/s,25.496182294031595,, 
4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,878.026752,662.56896,0.0,260.046848,258.555392,s,1,7.6678076171875,7.6678076171875,0.0,7.6678076171875,7.6678076171875,7.6678076171875,7.6678076171875,[7.6678076171875],,kWh,1.514884169165877e-05,1.6637280518077142e-06,4.54222585599684e-06,2.1354795599463324e-05,,MB,1320.751104,759.037952,0.0,341.835776,317.950464,s,18,0.19700390338897705,0.010944661299387613,0.00017890293056821031,0.010882383823394775,0.011055404758453369,0.011227078580856323,0.011532327489852903,"[0.010776960372924804, 0.01088105583190918, 0.01087551975250244, 0.010871904373168945, 0.010880991935729981, 0.010887968063354491, 0.010862624168395996, 0.010838047981262207, 0.01160863971710205, 0.010870752334594726, 0.01085968017578125, 0.010930591583251954, 0.011010687828063965, 0.010936063766479492, 0.010942815780639648, 0.011159744262695313, 0.01088371181488037, 0.010926143646240234]",tokens/s,23390.399483109082,kWh,4.137287959690757e-07,4.5623656667920546e-08,1.9540619976206332e-07,6.547586523990596e-07,tokens/kWh,390983760.293975,MB,1334.74304,786.300928,0.0,369.098752,317.953024,s,18,10.090893615722656,0.5606052008734809,0.0018227325385220328,0.5606619567871094,0.5621151489257813,0.5640866485595702,0.5654816363525391,"[0.5598701782226563, 0.5607464599609375, 0.5606204833984375, 0.561118896484375, 0.5585742797851563, 0.5607034301757813, 0.5637789306640625, 0.5601032104492187, 0.5585845947265625, 0.561402099609375, 0.5605842895507812, 0.5658303833007813, 0.5583926391601562, 0.5607605590820313, 0.5583001098632813, 0.5611235961914063, 0.5611129150390625, 0.5592865600585938]",tokens/s,112.37855071954289,kWh,2.19564079248649e-05,2.421437808531193e-06,7.939937610095888e-06,3.231778334349198e-05,tokens/kWh,1949391.1240879295,,s,1134,10.08220954036713,0.008890837337184414,0.00014483302994854627,0.008857615947723389,0.008994105529785155,0.009098606204986573,0.009525899362564089,"[0.008640512466430664, 0.008859295845031739, 0.008829279899597168, 0.00890880012512207, 0.008984864234924317, 0.008850303649902344, 0.008854399681091309, 0.00910364818572998, 0.008900256156921386, 0.008843296051025391, 0.008849408149719238, 0.008875167846679688, 0.008831839561462402, 0.008849247932434082, 0.008869695663452148, 0.008918399810791016, 0.009139295578002929, 0.008925151824951173, 0.008859359741210937, 0.009140704154968262, 0.008914143562316894, 0.008895135879516602, 0.008879520416259766, 0.008839615821838379, 0.008916416168212891, 0.00888419246673584, 0.008884511947631835, 0.008845376014709474, 0.008890496253967286, 0.008840448379516601, 0.008983424186706543, 0.008822784423828126, 0.008842944145202637, 0.00883465576171875, 0.008866880416870117, 0.008852288246154786, 0.008829216003417968, 0.008900768280029297, 0.008877568244934082, 0.008827808380126954, 0.008852671623229981, 0.008807231903076172, 0.008814592361450196, 0.008844448089599609, 
0.008843296051025391, 0.008840000152587891, 0.008865535736083984, 0.008863871574401855, 0.00885580825805664, 0.00884928035736084, 0.008857600212097168, 0.008814592361450196, 0.008889535903930663, 0.00887222385406494, 0.008905247688293457, 0.008889984130859375, 0.008945376396179198, 0.008911520004272462, 0.008886048316955567, 0.008835455894470216, 0.00884275245666504, 0.008876543998718261, 0.008945568084716797, 0.008675328254699707, 0.008900896072387695, 0.008879167556762695, 0.008919103622436524, 0.008878879547119141, 0.008889504432678223, 0.008920831680297852, 0.00888105583190918, 0.008897664070129395, 0.008821855545043946, 0.008853280067443847, 0.008804351806640624, 0.008814592361450196, 0.008878080368041993, 0.008816191673278809, 0.008825280189514161, 0.008816127777099609, 0.008836864471435547, 0.008882944107055663, 0.00912384033203125, 0.008929408073425293, 0.008914655685424804, 0.008960415840148925, 0.008937376022338867, 0.008998751640319824, 0.008897791862487793, 0.008907487869262695, 0.008882207870483399, 0.008839008331298828, 0.008917152404785156, 0.008873984336853028, 0.008856608390808105, 0.008870880126953124, 0.008851103782653809, 0.00885366439819336, 0.008787967681884766, 0.00883244800567627, 0.00887174415588379, 0.008893376350402831, 0.008908512115478516, 0.008922687530517578, 0.009007840156555176, 0.008955360412597656, 0.008895008087158204, 0.008919039726257324, 0.008922495841979981, 0.008946304321289062, 0.008912896156311035, 0.008968000411987304, 0.009021632194519043, 0.008951807975769043, 0.008955904006958008, 0.008981632232666016, 0.008946559906005859, 0.008933216094970704, 0.008852895736694335, 0.00885427188873291, 0.008891488075256348, 0.008907648086547851, 0.008802751541137695, 0.008877056121826172, 0.008882783889770507, 0.008855392456054687, 0.008663135528564453, 0.008889920234680175, 0.008943967819213867, 0.009086432456970215, 0.00887065601348877, 0.008895456314086914, 0.008815423965454101, 0.008838399887084961, 0.008850272178649903, 0.008839167594909669, 0.008849344253540038, 0.008892640113830567, 0.008875743865966797, 0.00894979190826416, 0.008924736022949218, 0.008829055786132812, 0.008827199935913086, 0.008908096313476563, 0.00883084774017334, 0.008825152397155762, 0.008807999610900879, 0.008885184288024902, 0.00883670425415039, 0.008857088088989258, 0.008842144012451172, 0.008872991561889648, 0.008842207908630372, 0.008914943695068359, 0.0088242244720459, 0.008831328392028808, 0.00891055965423584, 0.008825535774230958, 0.008808544158935547, 0.008904735565185546, 0.00888419246673584, 0.008930144309997558, 0.008930208206176758, 0.008885855674743653, 0.008861248016357421, 0.009226112365722657, 0.009089695930480956, 0.008953791618347168, 0.008878463745117188, 0.00897433567047119, 0.008927231788635253, 0.008892352104187012, 0.008851519584655762, 0.00885161590576172, 0.008828767776489258, 0.008889887809753418, 0.008904352188110351, 0.0090032958984375, 0.008859199523925781, 0.008870271682739258, 0.008860223770141602, 0.008861727714538574, 0.009320735931396484, 0.008881888389587402, 0.008890368461608887, 0.008831263542175293, 0.008908032417297363, 0.00885097599029541, 0.008850560188293458, 0.008694432258605956, 0.00883743953704834, 0.009008543968200684, 0.008873663902282714, 0.008834752082824707, 0.008847455978393554, 0.008855903625488281, 0.008825311660766602, 0.008845312118530273, 0.008820544242858887, 0.008907232284545898, 0.009044927597045898, 0.008816479682922363, 0.008898719787597657, 0.008847935676574707, 0.008846591949462891, 0.009173888206481934, 
0.008882080078125, 0.009850912094116211, 0.008946975708007813, 0.008887167930603027, 0.008822463989257812, 0.008855232238769531, 0.008891008377075195, 0.008878080368041993, 0.00889241600036621, 0.00889241600036621, 0.008814144134521484, 0.008849856376647949, 0.009095423698425293, 0.008941311836242676, 0.008914912223815918, 0.008925215721130371, 0.00881868839263916, 0.008822784423828126, 0.008820735931396484, 0.008908927917480469, 0.008842687606811523, 0.00881065559387207, 0.008832672119140624, 0.00888486385345459, 0.008842944145202637, 0.008816896438598633, 0.008824128150939942, 0.0088155517578125, 0.008799103736877442, 0.008845952033996582, 0.008818559646606446, 0.008824928283691406, 0.008880288124084472, 0.009271488189697266, 0.009104512214660644, 0.008892319679260253, 0.009046751976013184, 0.008839551925659179, 0.008924127578735352, 0.008889023780822753, 0.008869279861450195, 0.008913920402526856, 0.00885043239593506, 0.008841024398803712, 0.008782624244689941, 0.008890591621398925, 0.00860364818572998, 0.008823616027832031, 0.008828672409057617, 0.008866047859191895, 0.008811903953552245, 0.00889897632598877, 0.008844736099243165, 0.008822719573974609, 0.008803071975708008, 0.008778976440429688, 0.009051008224487305, 0.008869855880737305, 0.008945695877075195, 0.008900799751281739, 0.008888128280639649, 0.00884227180480957, 0.008835136413574219, 0.008810815811157226, 0.008812800407409668, 0.008822463989257812, 0.008817440032958984, 0.00882265567779541, 0.00881049633026123, 0.008845279693603516, 0.008959263801574706, 0.00886627197265625, 0.008951359748840333, 0.008823040008544922, 0.008862175941467286, 0.008839167594909669, 0.008857631683349609, 0.008933664321899414, 0.008885951995849609, 0.008904704093933105, 0.008863743782043456, 0.008857600212097168, 0.008868000030517579, 0.008848416328430175, 0.008973119735717773, 0.00885536003112793, 0.008825023651123047, 0.008830719947814942, 0.00889472007751465, 0.008902655601501466, 0.00892518424987793, 0.008947263717651368, 0.008882111549377441, 0.00888377571105957, 0.008857791900634765, 0.008859968185424805, 0.008847807884216309, 0.008851008415222168, 0.008833472251892089, 0.008848480224609375, 0.008825759887695312, 0.008825023651123047, 0.008863903999328614, 0.008858528137207031, 0.008847935676574707, 0.00883296012878418, 0.008816543579101563, 0.008881792068481446, 0.008855968475341798, 0.008633760452270508, 0.008835871696472168, 0.008824735641479493, 0.008857536315917968, 0.008799967765808105, 0.008853504180908203, 0.008796416282653808, 0.008791328430175781, 0.008839776039123535, 0.008784000396728515, 0.008827103614807128, 0.00882256031036377, 0.00889356803894043, 0.008878975868225098, 0.00885091209411621, 0.008812992095947266, 0.008851103782653809, 0.008973055839538575, 0.008908479690551759, 0.008873567581176758, 0.008878527641296386, 0.008838784217834473, 0.008863615989685058, 0.00898044776916504, 0.008940032005310058, 0.008896512031555176, 0.008933631896972657, 0.00885331153869629, 0.008949664115905762, 0.008926495552062989, 0.00894643211364746, 0.008976479530334473, 0.009099167823791504, 0.008896032333374023, 0.008960479736328126, 0.008956095695495606, 0.008955679893493652, 0.008953887939453125, 0.008928735733032227, 0.008942111968994141, 0.008904831886291504, 0.008871135711669921, 0.008831647872924805, 0.008837120056152344, 0.00881868839263916, 0.008845312118530273, 0.00880851173400879, 0.008766752243041992, 0.010320608139038085, 0.00891203212738037, 0.008901375770568848, 0.00893545627593994, 0.008844544410705566, 0.008839008331298828, 
0.008806976318359375, 0.008794464111328126, 0.008890368461608887, 0.008806112289428712, 0.00878211212158203, 0.008805695533752442, 0.00880025577545166, 0.008852160453796387, 0.008867199897766114, 0.008652799606323243, 0.008830944061279296, 0.008841983795166016, 0.008838303565979004, 0.008858464241027831, 0.008867839813232421, 0.00883894443511963, 0.00884275245666504, 0.008874367713928223, 0.008967743873596192, 0.00902633571624756, 0.008934720039367676, 0.008849247932434082, 0.008921952247619628, 0.00896985626220703, 0.009029215812683105, 0.009053024291992187, 0.00904918384552002, 0.008970144271850587, 0.009209792137145996, 0.008929632186889648, 0.00894223976135254, 0.008910847663879394, 0.008929280281066895, 0.008964287757873534, 0.009112704277038574, 0.00888492774963379, 0.008859328269958496, 0.008827199935913086, 0.008936544418334962, 0.008856224060058595, 0.008842559814453125, 0.008885503768920899, 0.00886956787109375, 0.008885760307312012, 0.008879903793334962, 0.008842207908630372, 0.008916640281677246, 0.008917087554931641, 0.008882176399230958, 0.00911359977722168, 0.008912480354309082, 0.00892092800140381, 0.00933523178100586, 0.009336735725402832, 0.009534751892089844, 0.009305024147033691, 0.009040032386779785, 0.008983519554138184, 0.008931967735290527, 0.008923392295837402, 0.008980480194091797, 0.008862848281860352, 0.008845631599426269, 0.008803104400634766, 0.008859423637390137, 0.008956064224243164, 0.008844799995422363, 0.008934816360473634, 0.008844256401062012, 0.008839136123657226, 0.008816639900207519, 0.008835071563720704, 0.00861961555480957, 0.008827872276306152, 0.008849120140075684, 0.008871904373168945, 0.00886787223815918, 0.008935551643371582, 0.008906559944152832, 0.008841279983520509, 0.008851455688476563, 0.008893983840942383, 0.00894108772277832, 0.008865920066833496, 0.00887388801574707, 0.008841343879699707, 0.008854047775268555, 0.008823040008544922, 0.008873984336853028, 0.0088372802734375, 0.008778656005859375, 0.008992768287658692, 0.009391231536865234, 0.008935232162475586, 0.00886128044128418, 0.0089071683883667, 0.008937503814697266, 0.008814175605773926, 0.008999296188354492, 0.008828703880310058, 0.008798111915588379, 0.008818047523498535, 0.008814528465270996, 0.008813599586486816, 0.00895577621459961, 0.008849791526794434, 0.008846431732177735, 0.008817279815673828, 0.008818528175354005, 0.008851263999938965, 0.008819040298461913, 0.008814528465270996, 0.008986687660217286, 0.008820735931396484, 0.008818528175354005, 0.00881065559387207, 0.008867168426513672, 0.008751328468322755, 0.008819135665893554, 0.008882176399230958, 0.008939071655273438, 0.008860095977783203, 0.008847359657287598, 0.008828927993774414, 0.00881868839263916, 0.00887551975250244, 0.008810912132263184, 0.009105504035949707, 0.00912396812438965, 0.008832544326782226, 0.009050815582275391, 0.009129695892333985, 0.008966079711914062, 0.008929280281066895, 0.008914943695068359, 0.008644351959228516, 0.008866047859191895, 0.008976384162902832, 0.008828927993774414, 0.009041312217712403, 0.00891926383972168, 0.008861663818359376, 0.008871583938598633, 0.008835007667541504, 0.008884544372558595, 0.008897024154663086, 0.009084032058715821, 0.008854399681091309, 0.008822367668151856, 0.008840895652770997, 0.008806559562683105, 0.00881107234954834, 0.008863615989685058, 0.008857343673706055, 0.008799967765808105, 0.008798879623413086, 0.008851455688476563, 0.00881049633026123, 0.008775008201599122, 0.008909472465515137, 0.008850655555725097, 0.008843071937561035, 
0.008788928031921387, 0.008812000274658203, 0.008788703918457031, 0.008813568115234375, 0.008786304473876953, 0.00883296012878418, 0.008802847862243652, 0.008986623764038085, 0.008882176399230958, 0.008847583770751953, 0.00883407974243164, 0.008895232200622558, 0.008919039726257324, 0.008876192092895507, 0.008869248390197754, 0.008874496459960938, 0.008882304191589356, 0.008887328147888184, 0.008860735893249512, 0.00887782382965088, 0.008855392456054687, 0.008882240295410156, 0.00882697582244873, 0.008793919563293458, 0.008784192085266113, 0.008861056327819824, 0.00881510353088379, 0.008863743782043456, 0.00881868839263916, 0.008848896026611328, 0.008886143684387207, 0.008993120193481445, 0.008884832382202149, 0.008945343971252441, 0.008914591789245606, 0.008810976028442382, 0.00867420768737793, 0.008837120056152344, 0.008875167846679688, 0.008847295761108398, 0.008874912261962891, 0.008882176399230958, 0.008857855796813965, 0.008828448295593263, 0.00883670425415039, 0.008864383697509765, 0.008828960418701171, 0.008857888221740723, 0.010130847930908203, 0.010232095718383788, 0.008853407859802246, 0.008926912307739258, 0.00891536045074463, 0.008892127990722656, 0.008880127906799316, 0.008851743698120118, 0.00882307243347168, 0.00882044792175293, 0.008812416076660156, 0.008816736221313477, 0.00882697582244873, 0.008837056159973145, 0.008802623748779296, 0.008844415664672851, 0.008805055618286133, 0.00883894443511963, 0.00884556770324707, 0.008828703880310058, 0.008836928367614747, 0.008835328102111817, 0.008888383865356446, 0.008843199729919433, 0.008833024024963379, 0.008838208198547363, 0.008876992225646972, 0.008839232444763184, 0.008834943771362305, 0.008846943855285644, 0.008800288200378419, 0.009210304260253906, 0.009099167823791504, 0.009027456283569336, 0.00888649559020996, 0.008853280067443847, 0.00884879970550537, 0.008842144012451172, 0.008886176109313965, 0.009179136276245118, 0.008850943565368653, 0.008824864387512206, 0.008827584266662598, 0.008908864021301269, 0.008836671829223633, 0.008820384025573731, 0.008833824157714843, 0.008800127983093261, 0.008810111999511719, 0.00882915210723877, 0.008834879875183105, 0.008590463638305665, 0.008800352096557617, 0.008807104110717773, 0.008830464363098145, 0.008813055992126465, 0.008847104072570802, 0.008822400093078614, 0.008894816398620605, 0.008987263679504394, 0.008824000358581543, 0.009057951927185058, 0.00885647964477539, 0.00892518424987793, 0.008869888305664063, 0.008806400299072266, 0.008865983963012695, 0.008805919647216796, 0.008780063629150391, 0.00880844783782959, 0.00882915210723877, 0.00878767967224121, 0.008835136413574219, 0.008939488410949707, 0.008867744445800782, 0.008849535942077637, 0.008789888381958008, 0.008795968055725098, 0.00888864040374756, 0.00880031967163086, 0.008794367790222168, 0.008803520202636719, 0.008812895774841309, 0.008863424301147461, 0.008840959548950195, 0.008986335754394531, 0.008884960174560547, 0.008962335586547852, 0.008851231575012207, 0.008831199645996093, 0.008835071563720704, 0.008840928077697754, 0.008880000114440918, 0.008954272270202637, 0.00898579216003418, 0.009009984016418457, 0.00902950382232666, 0.00909119987487793, 0.009019392013549805, 0.00899452781677246, 0.009029055595397949, 0.009134943962097169, 0.00907196807861328, 0.009358304023742676, 0.008982208251953125, 0.008996416091918945, 0.00880025577545166, 0.00885820770263672, 0.008837056159973145, 0.008938591957092285, 0.008863903999328614, 0.008864416122436523, 0.008832672119140624, 0.008882816314697266, 0.008711968421936035, 
0.008958208084106445, 0.008836992263793945, 0.008889856338500977, 0.009001567840576171, 0.009032768249511719, 0.008977343559265136, 0.009035872459411622, 0.00913920021057129, 0.009012127876281738, 0.009033727645874023, 0.008914943695068359, 0.008947936058044434, 0.008885184288024902, 0.008936287879943848, 0.008869279861450195, 0.00890294361114502, 0.008995136260986329, 0.008978400230407715, 0.008949952125549316, 0.009106656074523926, 0.009114239692687988, 0.008893792152404784, 0.008915840148925781, 0.00894153594970703, 0.00881999969482422, 0.008818240165710449, 0.00888111972808838, 0.009181216239929199, 0.009116767883300781, 0.008899392127990723, 0.009752832412719726, 0.009035264015197754, 0.00962559986114502, 0.00967024040222168, 0.009016032218933106, 0.008888319969177246, 0.008956159591674805, 0.008850848197937012, 0.008866175651550293, 0.008810015678405761, 0.008803968429565429, 0.008761504173278808, 0.00880502414703369, 0.008830975532531739, 0.008863743782043456, 0.008898207664489746, 0.008791808128356934, 0.0088438720703125, 0.008859647750854491, 0.008914912223815918, 0.008925248146057128, 0.009021568298339843, 0.008972127914428711, 0.009054207801818847, 0.00901961612701416, 0.008965696334838867, 0.008958239555358887, 0.00895411205291748, 0.008945504188537597, 0.008902496337890625, 0.009047679901123047, 0.00902182388305664, 0.008762592315673829, 0.008930591583251954, 0.00889628791809082, 0.008919648170471191, 0.00891603183746338, 0.008862175941467286, 0.008831007957458496, 0.008845760345458984, 0.0088242244720459, 0.008824928283691406, 0.008811008453369141, 0.00880016040802002, 0.008872032165527344, 0.008928511619567871, 0.008818752288818359, 0.00882051181793213, 0.00879094409942627, 0.008783103942871094, 0.008758015632629394, 0.008842304229736328, 0.008831904411315919, 0.008898591995239258, 0.00882688045501709, 0.008835488319396973, 0.008877663612365723, 0.008803968429565429, 0.008812671661376953, 0.008852800369262696, 0.008834015846252442, 0.008782848358154297, 0.008784064292907715, 0.008803135871887207, 0.008900256156921386, 0.008902976036071777, 0.00893545627593994, 0.008867520332336426, 0.008923647880554199, 0.00892630386352539, 0.008878368377685547, 0.008833439826965332, 0.008820735931396484, 0.008916383743286132, 0.00893398380279541, 0.008969951629638671, 0.00889680004119873, 0.008912320137023927, 0.008866368293762207, 0.008850655555725097, 0.008879263877868653, 0.00886240005493164, 0.008896544456481933, 0.008827808380126954, 0.008824831962585449, 0.008813664436340332, 0.008831839561462402, 0.008898176193237305, 0.00882323169708252, 0.008871904373168945, 0.008894495964050293, 0.008830975532531739, 0.008888319969177246, 0.008853504180908203, 0.008810208320617676, 0.008571743965148926, 0.008814304351806641, 0.008839263916015624, 0.008812479972839356, 0.008805919647216796, 0.00883296012878418, 0.008779904365539552, 0.008854175567626953, 0.008914943695068359, 0.008809599876403808, 0.008838144302368164, 0.008992639541625977, 0.008920127868652344, 0.00919980812072754, 0.009195648193359374, 0.009276032447814941, 0.009453472137451171, 0.00897862434387207, 0.009240351676940917, 0.009836671829223632, 0.008896512031555176, 0.008947551727294922, 0.008917247772216797, 0.008812640190124512, 0.00880832004547119, 0.008808575630187988, 0.008828736305236817, 0.008842911720275879, 0.008796511650085449, 0.008951807975769043, 0.008813599586486816, 0.008840319633483886, 0.008810463905334473, 0.008775551795959473, 0.008814592361450196, 0.008806079864501952, 0.008933823585510254, 0.008854944229125977, 
0.008823264122009277, 0.008816991806030273, 0.008861184120178223, 0.008847519874572754, 0.009263104438781738, 0.008910847663879394, 0.009115391731262208, 0.00885756778717041, 0.008771903991699219, 0.008801471710205079, 0.008789888381958008, 0.008811455726623535, 0.008824352264404298, 0.008801759719848633, 0.008763456344604492, 0.008763744354248046, 0.008799936294555664, 0.00880076789855957, 0.008784255981445312, 0.008802304267883301, 0.008802304267883301, 0.008838784217834473, 0.008808064460754394, 0.008794143676757812, 0.008809184074401856, 0.008601568222045898, 0.008816384315490722, 0.008842816352844238, 0.00884601593017578, 0.008804351806640624, 0.008826175689697265, 0.008810272216796875, 0.008878175735473632, 0.008829888343811034, 0.008830975532531739, 0.008880224227905274, 0.008863519668579102, 0.008853504180908203, 0.008855680465698242, 0.008828607559204102, 0.008827168464660645, 0.008888223648071288, 0.00882688045501709, 0.00887622356414795, 0.008852416038513184, 0.008883071899414062, 0.009121055603027345, 0.0089136323928833, 0.008894432067871093, 0.008914976119995118, 0.00897590446472168, 0.00906060791015625, 0.008966367721557617, 0.00893337631225586, 0.008893792152404784, 0.00884224033355713, 0.008803999900817872, 0.008824831962585449, 0.008816191673278809, 0.008855999946594239, 0.008854687690734863, 0.008827136039733887, 0.008807007789611816, 0.008834752082824707, 0.008821056365966798, 0.008951199531555177, 0.008802623748779296, 0.00880668830871582, 0.008750432014465332, 0.008810751914978028, 0.008783295631408691, 0.00879100799560547, 0.008768511772155761, 0.008894975662231446, 0.008843775749206542, 0.008823007583618163, 0.008771136283874511, 0.008832672119140624, 0.008798912048339843, 0.00882636833190918, 0.00882307243347168, 0.008816160202026368, 0.009058367729187012, 0.008946175575256348, 0.008812543869018554, 0.008847359657287598, 0.00880844783782959, 0.008936832427978516, 0.00866646385192871, 0.008882847785949707, 0.00886905574798584, 0.008827712059020996, 0.008832096099853515, 0.008892959594726562, 0.008837311744689942, 0.008849535942077637, 0.008808639526367188, 0.00879964828491211, 0.008828864097595215, 0.008817184448242187, 0.008839391708374023, 0.008823904037475586, 0.008897215843200683, 0.008802016258239747, 0.008923392295837402, 0.008863776206970216, 0.008808608055114747, 0.00888105583190918, 0.008847488403320312, 0.008833632469177247, 0.00881276798248291, 0.00880025577545166, 0.008875743865966797, 0.008878368377685547, 0.00890675163269043, 0.009447711944580078, 0.01053872013092041, 0.009529151916503907, 0.00888646411895752, 0.008841216087341308, 0.008837311744689942, 0.008832832336425782, 0.008906815528869629, 0.008787584304809571, 0.008851136207580566, 0.008790656089782715, 0.00876966381072998, 0.008839232444763184, 0.008793919563293458, 0.008810175895690917, 0.008806719779968261, 0.008765439987182617, 0.008814592361450196, 0.008867615699768067, 0.008795488357543945, 0.008841407775878907, 0.008806912422180176, 0.008873472213745117, 0.008881183624267578, 0.008823552131652833, 0.008829855918884277, 0.008867903709411621, 0.008861791610717774, 0.008875871658325195, 0.008850879669189453, 0.008853376388549805, 0.009073344230651856, 0.008839167594909669, 0.008938655853271485, 0.00920847988128662, 0.009009344100952148, 0.008625727653503418, 0.008834752082824707, 0.008859711647033692, 0.008841919898986816, 0.009086976051330567, 0.008855456352233887, 0.00886086368560791, 0.00885372829437256, 0.008849408149719238, 0.008940511703491211, 0.008912608146667481, 
0.008863519668579102, 0.008855487823486328, 0.008802592277526855, 0.008875712394714356, 0.008821120262145996, 0.008833087921142579, 0.00886070442199707, 0.008794976234436034, 0.008799807548522949, 0.008812447547912598, 0.008804032325744629, 0.008840031623840332, 0.008847359657287598, 0.008939264297485351, 0.008803775787353516, 0.008821567535400391, 0.008769536018371582, 0.00883897590637207, 0.008800576210021973, 0.008927103996276855, 0.008816127777099609, 0.008776191711425782, 0.008828927993774414, 0.008791232109069824, 0.008800607681274414, 0.008780256271362304, 0.008795424461364746, 0.00882096004486084, 0.008788479804992675, 0.008822175979614258, 0.009593440055847167, 0.009172991752624511, 0.009242624282836913, 0.009519295692443848, 0.008933183670043945, 0.00886732769012451, 0.008855551719665527, 0.009095808029174805, 0.009078656196594238, 0.009000767707824707, 0.009035167694091796, 0.008852255821228028, 0.009000960350036622, 0.008912896156311035, 0.008888319969177246, 0.009011199951171875, 0.008922783851623536, 0.008959808349609376, 0.008909536361694335, 0.008854623794555663, 0.008817376136779785, 0.008863743782043456, 0.00862611198425293, 0.008853568077087402, 0.008849184036254882, 0.00886518383026123, 0.008860480308532715, 0.008861696243286133, 0.008822879791259765, 0.008830880165100098, 0.008831040382385254, 0.0088984956741333, 0.009232224464416504, 0.008974495887756348, 0.008841216087341308, 0.00889840030670166, 0.00887168025970459, 0.008886783599853516, 0.008892288208007812, 0.00887782382965088, 0.008861984252929687, 0.008828927993774414, 0.008824831962585449, 0.00883027172088623, 0.009104063987731933, 0.009012319564819337, 0.008903743743896485, 0.008858943939208984, 0.008826399803161621, 0.008876128196716309, 0.008864159584045411, 0.008837056159973145, 0.008840895652770997, 0.00883801555633545, 0.008828895568847656, 0.008850815773010254, 0.008868512153625488, 0.009023296356201171, 0.008894944190979004, 0.008840928077697754, 0.00880835247039795, 0.008924480438232421, 0.008831775665283204, 0.008876031875610351, 0.0089169921875, 0.008819904327392578, 0.008781855583190917, 0.008780415534973145, 0.00880832004547119, 0.008853055953979493, 0.008800224304199218, 0.008819680213928222, 0.008801664352416992, 0.008860063552856446, 0.00909830379486084, 0.008913472175598144, 0.008839551925659179, 0.00882688045501709, 0.008882176399230958, 0.008878080368041993, 0.008851743698120118, 0.008933088302612305, 0.00881049633026123, 0.008859295845031739, 0.008847935676574707]",tokens/s,112.47534535556849,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = 
launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File 
""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,3856.355328,4533.911552,0.0,4131.38944,4012.475392,s,1,10.228845703125,10.228845703125,0.0,10.228845703125,10.228845703125,10.228845703125,10.228845703125,[10.228845703125],,kWh,9.272959164585851e-05,1.0221453507349271e-05,2.8893356448012053e-05,0.00013184440160121983,,MB,2170.232832,4733.140992,0.0,4315.938816,4233.462784,s,10,1.7868261718750003,0.1786826171875,0.000816469095859311,0.17896324920654297,0.17915940399169922,0.17919817886352538,0.17922919876098634,"[0.17897996520996093, 0.17631440734863282, 0.17884124755859376, 0.17843927001953125, 0.1790595245361328, 0.1791507873535156, 0.17904202270507813, 0.17881546020507813, 0.17923695373535156, 0.178946533203125]",tokens/s,1432.707915462013,kWh,5.290591152603952e-06,5.834567145564683e-07,3.5171853534285424e-06,9.391233220588963e-06,tokens/kWh,27259465.71519019,MB,2180.902912,4837.998592,0.0,4420.796416,4335.693312,s,10,22.48654248046875,2.2486542480468747,0.21890661804567998,2.1445224609375,2.5158321044921874,2.674327795410156,2.8011243481445316,"[2.832823486328125, 2.48061083984375, 2.13504541015625, 2.141770751953125, 2.1452802734375, 2.138077880859375, 2.159431396484375, 2.141730712890625, 2.1437646484375, 2.168007080078125]",tokens/s,28.016757158073652,kWh,6.228444842489712e-05,6.869768834255755e-06,4.0414127569370076e-05,0.00010956834482852295,tokens/kWh,574983.5876283108,,s,630,22.48379755020143,0.03568856754000225,0.0039310404126924586,0.033976270675659184,0.044604291152954105,0.044869251060485844,0.045552499732971194,"[0.045531200408935546, 0.045709022521972655, 0.04493747329711914, 0.04469184112548828, 0.044634273529052734, 0.044802177429199216, 0.04447235107421875, 0.044824161529541016, 0.044765567779541014, 0.04716694259643555, 0.05268304061889648, 0.04490195083618164, 0.045639968872070315, 0.0448004150390625, 0.044518718719482424, 0.044439998626708985, 0.04440060806274414, 0.04453200149536133, 0.04450028610229492, 0.044431488037109376, 0.04480374526977539, 0.04451414489746094, 0.04445091247558594, 0.04470713424682617, 0.04480169677734375, 0.044566238403320316, 0.04448080062866211, 0.04455987167358398, 0.04454636764526367, 0.044509567260742185, 0.04555705642700195, 0.045164192199707034, 0.044956512451171875, 0.04464191818237305, 0.045023006439208986, 0.04669910430908203, 0.04497760009765625, 0.0449477767944336, 0.045170944213867185, 0.0450522575378418, 0.044718910217285156, 0.04464931106567383, 0.044507137298583986, 0.044593151092529294, 0.04443545532226562, 0.044652576446533206, 0.044850368499755856, 0.044986175537109374, 0.04466787338256836, 0.04447603225708008, 0.04460095977783203, 0.04450966262817383, 0.044666336059570315, 0.04500271987915039, 0.04457558441162109, 0.044980224609375, 0.0447567024230957, 0.044577056884765626, 0.044449760437011716, 
0.04446825790405273, 0.0446382064819336, 0.04551686477661133, 0.04476102447509766, 0.044832767486572264, 0.04507555389404297, 0.04491356658935547, 0.04522377777099609, 0.044939422607421876, 0.04487372970581055, 0.04453731155395508, 0.044678817749023436, 0.04508703994750977, 0.04465286254882812, 0.04467766571044922, 0.04471603012084961, 0.04463827133178711, 0.04489161682128906, 0.044531902313232424, 0.044349441528320314, 0.04443734359741211, 0.04443257522583008, 0.04442809677124023, 0.04475081634521484, 0.04532627105712891, 0.04554134368896484, 0.045262462615966795, 0.044956321716308596, 0.0446382064819336, 0.04470723342895508, 0.0447757453918457, 0.04486377716064453, 0.044832767486572264, 0.044548095703125, 0.04605952072143555, 0.03423846435546875, 0.03401728057861328, 0.03378790283203125, 0.03411151885986328, 0.03370390319824219, 0.03505152130126953, 0.033957023620605466, 0.03391296005249023, 0.03361407852172851, 0.0337474250793457, 0.033782142639160156, 0.03383871841430664, 0.033740798950195314, 0.03370751953125, 0.03379584121704102, 0.03371241760253906, 0.03378182220458984, 0.033790721893310546, 0.033781406402587894, 0.03432447814941406, 0.03427532958984375, 0.03407257461547852, 0.03395711898803711, 0.03417779159545899, 0.034129920959472655, 0.03406032180786133, 0.03404115295410156, 0.03388623809814453, 0.03485865783691406, 0.03591881561279297, 0.033896446228027344, 0.03450265502929688, 0.03451087951660156, 0.03374095916748047, 0.0336071662902832, 0.03367731094360352, 0.03361795043945313, 0.03366521453857422, 0.03352604675292969, 0.03358512115478516, 0.033574943542480466, 0.033650688171386715, 0.03366003036499023, 0.0336495361328125, 0.033753089904785157, 0.033646175384521484, 0.03352608108520508, 0.033571006774902344, 0.0337017936706543, 0.03412390518188477, 0.033992576599121097, 0.033933502197265625, 0.033799007415771486, 0.034251232147216794, 0.03378960037231445, 0.0339317741394043, 0.0340863037109375, 0.033592510223388675, 0.03345283126831055, 0.033799198150634764, 0.03343382263183594, 0.033584224700927735, 0.033565185546875, 0.033541343688964845, 0.03361196899414062, 0.033896446228027344, 0.03387875366210937, 0.033659137725830075, 0.033726207733154295, 0.03361996841430664, 0.033872032165527345, 0.033415008544921875, 0.0335810546875, 0.0336445426940918, 0.03359151840209961, 0.03365868759155274, 0.03359331130981445, 0.03372854232788086, 0.03383737564086914, 0.03450003051757813, 0.034399551391601564, 0.03552934265136719, 0.03439465713500976, 0.034598655700683593, 0.03418313598632813, 0.03528297424316406, 0.03415353775024414, 0.034044639587402344, 0.03369132614135742, 0.034256832122802734, 0.033980224609375, 0.03463862228393555, 0.03423231887817383, 0.03403084945678711, 0.03400985717773437, 0.03397030258178711, 0.03388089752197266, 0.03380758285522461, 0.03365353775024414, 0.03402060699462891, 0.03376643371582031, 0.03376726531982422, 0.033816097259521484, 0.03379439926147461, 0.03387631988525391, 0.034028545379638675, 0.03380086517333984, 0.03398649597167969, 0.03437152099609375, 0.033830558776855466, 0.033716350555419924, 0.03383331298828125, 0.03427065658569336, 0.03388582229614258, 0.03418172836303711, 0.03391932678222656, 0.03402073669433594, 0.03408755111694336, 0.03428979110717773, 0.03407040023803711, 0.0340316162109375, 0.03391897583007813, 0.033888256072998044, 0.035096031188964844, 0.033892894744873045, 0.033890209197998046, 0.033871070861816406, 0.03391683197021484, 0.03375510406494141, 0.033827838897705076, 0.03389606475830078, 0.033790336608886716, 0.03421952056884765, 
0.034052513122558595, 0.03405804824829101, 0.035025184631347656, 0.034525184631347655, 0.03422639846801758, 0.0341808967590332, 0.0347955207824707, 0.03408031845092773, 0.03404179382324219, 0.03492505645751953, 0.03401932907104492, 0.033716224670410154, 0.03388332748413086, 0.033708030700683594, 0.033797119140625, 0.03363033676147461, 0.03374607849121094, 0.03379878234863281, 0.03397622299194336, 0.03383209609985351, 0.033837471008300785, 0.033630016326904294, 0.03382495880126953, 0.03369004821777344, 0.03388179016113281, 0.03373241424560547, 0.033882144927978516, 0.034054271697998045, 0.033861663818359374, 0.03420774459838867, 0.03804959869384766, 0.03394358444213867, 0.033992862701416014, 0.035676158905029294, 0.03369558334350586, 0.0335398063659668, 0.03364502334594727, 0.03377046585083008, 0.033739776611328126, 0.03349235153198242, 0.033771583557128906, 0.033555007934570315, 0.03379814529418945, 0.03447753524780273, 0.033837600708007814, 0.03379404830932617, 0.03387507247924805, 0.033881057739257814, 0.034074592590332034, 0.034176063537597653, 0.03423100662231445, 0.03418537521362305, 0.03424051284790039, 0.03423136138916016, 0.03444627380371094, 0.03407462310791016, 0.03443705749511719, 0.03467219161987305, 0.03445126342773437, 0.03456480026245117, 0.03446099090576172, 0.03408560180664062, 0.034437088012695315, 0.034589920043945316, 0.03381884765625, 0.03374860763549805, 0.03380844879150391, 0.033676097869873044, 0.03370604705810547, 0.033846847534179686, 0.03376483154296875, 0.03406073760986328, 0.03377369689941406, 0.033992191314697266, 0.03374975967407227, 0.03368137741088867, 0.03389379119873047, 0.033720863342285155, 0.03361939239501953, 0.03364947128295898, 0.03390428924560547, 0.033705665588378904, 0.03384182357788086, 0.034187007904052734, 0.03369599914550781, 0.03358924865722656, 0.034282752990722656, 0.03365964889526367, 0.034241279602050784, 0.03372032165527344, 0.03369779205322266, 0.03388611221313476, 0.033854782104492186, 0.033753536224365235, 0.03393775939941406, 0.03361996841430664, 0.03371171188354492, 0.033674945831298826, 0.033756065368652347, 0.03356409454345703, 0.03361014556884766, 0.03343996810913086, 0.03354188919067383, 0.033805313110351565, 0.034396446228027344, 0.03397091293334961, 0.03369161605834961, 0.03366652679443359, 0.034103519439697264, 0.03358099365234375, 0.03375759887695313, 0.03380223846435547, 0.03377971267700195, 0.03487539291381836, 0.033982463836669925, 0.03375289535522461, 0.03420083236694336, 0.0339752311706543, 0.03387964630126953, 0.034081153869628907, 0.0337490234375, 0.03373577499389648, 0.0336343994140625, 0.03368838500976563, 0.03364255905151367, 0.034280574798583985, 0.03434310531616211, 0.03435532760620117, 0.03447868728637695, 0.0339453125, 0.033941696166992184, 0.03367721557617188, 0.033785247802734376, 0.033628864288330076, 0.0337606086730957, 0.034283424377441404, 0.03405491256713867, 0.033777118682861325, 0.033842849731445315, 0.03658787155151367, 0.03412863922119141, 0.03378575897216797, 0.033814208984375, 0.03432089614868164, 0.034031135559082035, 0.03415382385253906, 0.03393619155883789, 0.033937568664550784, 0.03380003356933594, 0.03371417617797851, 0.033685504913330076, 0.034443614959716796, 0.03384076690673828, 0.03399654388427734, 0.03449913787841797, 0.034611167907714846, 0.034027713775634766, 0.03379404830932617, 0.03388774490356445, 0.03395414352416992, 0.034008895874023434, 0.03444771194458008, 0.033976318359375, 0.03793875122070312, 0.037798336029052734, 0.033896095275878904, 0.03380585479736328, 
0.034246494293212894, 0.034011295318603516, 0.03401398468017578, 0.03442076873779297, 0.033724414825439454, 0.033742847442626955, 0.03371859359741211, 0.03356169509887695, 0.03377417755126953, 0.033650463104248046, 0.03360176086425781, 0.0336640625, 0.03372537612915039, 0.033702945709228514, 0.03419030380249023, 0.03363174438476563, 0.03379801559448242, 0.033796737670898434, 0.033938751220703126, 0.033858238220214845, 0.033880062103271484, 0.03407257461547852, 0.03408838272094727, 0.03411580657958985, 0.03430377578735352, 0.034279647827148436, 0.03445334243774414, 0.03411228942871094, 0.03389923095703125, 0.03392537689208985, 0.03668003082275391, 0.042201503753662106, 0.03442278289794922, 0.03412694549560547, 0.03384230422973633, 0.03372623825073242, 0.03470064163208008, 0.033667713165283206, 0.03370569610595703, 0.033769790649414065, 0.03389440155029297, 0.03386982345581055, 0.03395782470703125, 0.034391616821289064, 0.033756736755371095, 0.03394224166870117, 0.03369161605834961, 0.03480780792236328, 0.035917823791503906, 0.03425251388549805, 0.03522496032714844, 0.034044448852539065, 0.03404179382324219, 0.03361942291259765, 0.03473648071289062, 0.0338985595703125, 0.03435782241821289, 0.03434624099731445, 0.03401740646362305, 0.03392607879638672, 0.0338326416015625, 0.03377891159057617, 0.0336629753112793, 0.03385628890991211, 0.03392425537109375, 0.0339549446105957, 0.03380809783935547, 0.0340316162109375, 0.03365478515625, 0.033713470458984374, 0.03355849456787109, 0.033605918884277344, 0.03368185424804687, 0.03362326431274414, 0.03363510513305664, 0.033702239990234376, 0.0336473274230957, 0.033753246307373044, 0.03389487838745117, 0.034003360748291016, 0.03410729598999023, 0.03403363037109375, 0.03397407913208008, 0.0337891845703125, 0.034122528076171874, 0.0337512321472168, 0.033713409423828125, 0.033934368133544925, 0.033959648132324216, 0.03378969573974609, 0.03371443176269531, 0.03367935943603516, 0.03368703842163086, 0.03374518585205078, 0.03381475067138672, 0.0340766716003418, 0.033998462677001955, 0.03419558334350586, 0.03409110260009766, 0.0342119026184082, 0.03404211044311523, 0.03398025512695312, 0.03378969573974609, 0.033734912872314456, 0.03399423980712891, 0.03404032135009766, 0.03382675170898437, 0.034017215728759764, 0.03402108764648438, 0.035095264434814456, 0.03467961502075195, 0.034223934173583985, 0.03415216064453125, 0.03389487838745117, 0.03371212768554688, 0.033699840545654294, 0.03399660873413086, 0.03383091354370117, 0.03380204772949219, 0.033720703125, 0.03385654449462891, 0.033807327270507816, 0.03358643341064453, 0.033656993865966794, 0.03366764831542969, 0.033753440856933596, 0.03376438522338867, 0.033756126403808595, 0.03360326385498047, 0.03376902389526367, 0.033563201904296874, 0.03366860961914062, 0.03368179321289062, 0.033557727813720704, 0.033601406097412106, 0.03378470230102539, 0.033484832763671875, 0.03769379043579101, 0.03435830307006836, 0.03426572799682617, 0.033693695068359376, 0.03361382293701172, 0.03359936141967773, 0.0343903694152832, 0.033920799255371094, 0.03367427062988281, 0.03359843063354492, 0.033742847442626955, 0.033705215454101566, 0.03363507080078125, 0.03376134490966797, 0.033886142730712894, 0.03387801742553711, 0.0341313591003418, 0.03459980773925781, 0.03461465454101562, 0.03515836715698242, 0.034514110565185545, 0.03443385696411133, 0.03433881759643555, 0.03465545654296875, 0.03447654342651367, 0.034062625885009766, 0.033984577178955075, 0.034455486297607425, 0.03424160003662109, 0.03388444900512695, 0.03446031951904297, 
0.034557952880859374, 0.03396793746948242, 0.033857921600341796, 0.03370124816894531, 0.03365523147583008, 0.03405369567871094, 0.03399638366699219, 0.033694175720214846, 0.03383456039428711, 0.03405305480957031, 0.03393312072753906, 0.03380550384521484, 0.03389059066772461, 0.03385996627807617, 0.033915103912353514, 0.033891521453857425, 0.03381532669067383, 0.033842849731445315, 0.03380057525634766, 0.03402665710449219, 0.03359625625610352, 0.03381001663208008, 0.033640735626220705, 0.03385712051391602, 0.034043617248535156, 0.03363103866577148, 0.03366864013671875, 0.033673057556152346, 0.033559169769287106, 0.03384848022460937, 0.03376828765869141, 0.034025344848632816, 0.03394572830200195, 0.034072097778320314, 0.034056545257568356, 0.03474214553833008, 0.03438934326171875, 0.034075553894042966, 0.03490611267089844, 0.035100929260253905, 0.035366657257080075, 0.03525151824951172, 0.0353939208984375, 0.035541313171386715, 0.03528908920288086, 0.03555894470214844, 0.0353056640625, 0.035503681182861326, 0.03520995330810547, 0.03505340957641601, 0.03531792068481445, 0.035376991271972656, 0.03469308853149414, 0.03545708847045898, 0.03504140853881836, 0.034945022583007815, 0.03463782501220703, 0.03450470352172851, 0.03440982437133789, 0.034705184936523435, 0.03457247924804688, 0.03459740829467774, 0.03455401611328125, 0.03445555114746094, 0.034500606536865236, 0.034830337524414064, 0.03430950546264649, 0.03448076629638672]",tokens/s,28.02017757869183,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1584.2304,1604.190208,0.0,1201.668096,1189.151232,s,1,8.4257275390625,8.4257275390625,0.0,8.4257275390625,8.4257275390625,8.4257275390625,8.4257275390625,[8.4257275390625],,kWh,3.6876531970824546e-05,4.060446244156687e-06,1.1971120687986891e-05,5.290809890296812e-05,,MB,1705.672704,1801.322496,0.0,1384.12032,1351.367168,s,10,0.41631919860839844,0.04163191986083985,0.00021260578362603222,0.04160505676269531,0.04197338829040528,0.04202442150115967,0.04206524806976318,"[0.04207545471191406, 0.04160111999511719, 0.04152048110961914, 0.04160899353027344, 0.04165644836425781, 0.041610431671142575, 0.04143766403198242, 0.04136604690551758, 0.0419620475769043, 0.04148051071166992]",tokens/s,6149.127901276558,kWh,1.2611619478269826e-06,1.3908403303927074e-07,8.339374487585984e-07,2.234183429624852e-06,tokens/kWh,114583250.68814322,MB,1711.104,1843.265536,0.0,1426.06336,1407.548416,s,10,14.392046630859374,1.4392046630859376,0.008099490559269654,1.4383462524414061,1.4511020385742186,1.4513244567871093,1.4515023913574219,"[1.4335716552734374, 1.429119384765625, 1.4510526123046874, 1.4428548583984375, 1.451546875, 1.446393798828125, 1.4404183349609374, 1.436274169921875, 1.4305079345703124, 
1.4303070068359376]",tokens/s,43.7741772354431,kWh,4.1650047833010315e-05,4.593608544096187e-06,1.9363662042641944e-05,6.560731841974845e-05,tokens/kWh,960258.7259691501,,s,630,14.387594770431514,0.022837452016557968,0.00041325239567124335,0.022740240097045897,0.023255177307128908,0.023431049442291258,0.024598996791839602,"[0.022407136917114257, 0.022738687515258788, 0.022823680877685548, 0.022595552444458007, 0.022511104583740234, 0.022600223541259765, 0.022747135162353514, 0.02258470344543457, 0.022624895095825194, 0.022541664123535157, 0.02248678398132324, 0.022606752395629884, 0.02269935989379883, 0.022724767684936524, 0.02263609504699707, 0.022932416915893556, 0.022819967269897462, 0.022563327789306642, 0.022473087310791017, 0.022530176162719726, 0.02250124740600586, 0.02255462455749512, 0.02263859176635742, 0.0227061767578125, 0.0229171199798584, 0.02314963150024414, 0.02295097541809082, 0.02312588882446289, 0.023349248886108398, 0.02303385543823242, 0.022706016540527344, 0.02263055992126465, 0.022732288360595702, 0.022540128707885743, 0.022592159271240236, 0.022673280715942382, 0.02251750373840332, 0.02258729553222656, 0.02250595283508301, 0.022536191940307617, 0.02269388771057129, 0.022703487396240233, 0.022530559539794923, 0.02243561553955078, 0.022535680770874023, 0.02372489547729492, 0.024386655807495116, 0.022791007995605468, 0.022672544479370116, 0.022757408142089843, 0.022670207977294923, 0.02261974334716797, 0.02258367919921875, 0.022464351654052736, 0.0241276798248291, 0.022640640258789063, 0.022769664764404295, 0.022656383514404296, 0.022807167053222655, 0.02272368049621582, 0.02286089515686035, 0.022795551300048827, 0.02272483253479004, 0.022463455200195312, 0.022514944076538087, 0.022600128173828126, 0.022587711334228516, 0.022475872039794922, 0.02249616050720215, 0.022583295822143554, 0.022593055725097656, 0.02244985580444336, 0.022547231674194337, 0.022452224731445314, 0.0229881591796875, 0.02247929573059082, 0.022503616333007813, 0.022603776931762694, 0.02247270393371582, 0.022441375732421876, 0.022451839447021484, 0.022639583587646485, 0.022459680557250977, 0.022562559127807618, 0.022411359786987304, 0.022594335556030274, 0.022527072906494142, 0.023577184677124025, 0.024560031890869142, 0.02325872039794922, 0.02277827262878418, 0.02287001609802246, 0.022657024383544923, 0.022634368896484375, 0.022679040908813477, 0.022577791213989257, 0.022614015579223632, 0.02411017608642578, 0.022764448165893555, 0.022755199432373047, 0.022726783752441405, 0.022540288925170897, 0.022503103256225586, 0.022458688735961914, 0.022722560882568358, 0.022786048889160155, 0.022536191940307617, 0.022486560821533202, 0.02242198371887207, 0.022611967086791994, 0.02269593620300293, 0.022684768676757814, 0.022575807571411134, 0.022601408004760744, 0.02271286392211914, 0.02268079948425293, 0.02251580810546875, 0.022518463134765625, 0.022513439178466797, 0.022624479293823243, 0.022569984436035157, 0.022772735595703125, 0.02276118469238281, 0.022532384872436525, 0.02282700729370117, 0.02265894317626953, 0.022624799728393555, 0.0227772159576416, 0.02256528091430664, 0.022556032180786133, 0.022993247985839845, 0.02262646484375, 0.02277731132507324, 0.02260799980163574, 0.02254310417175293, 0.0226507511138916, 0.022507648468017578, 0.022562816619873048, 0.02241539192199707, 0.025847776412963867, 0.022953983306884765, 0.022497215270996095, 0.022528064727783202, 0.02244812774658203, 0.022621919631958008, 0.022523967742919922, 0.022403295516967774, 0.022535743713378905, 0.022706623077392577, 
0.022529983520507814, 0.022413375854492188, 0.022488096237182616, 0.02254742431640625, 0.02271820831298828, 0.022708480834960937, 0.022558719635009765, 0.022845439910888672, 0.02299212837219238, 0.022985151290893555, 0.02314681625366211, 0.02274492835998535, 0.022904928207397462, 0.02317932891845703, 0.02335273551940918, 0.02340127944946289, 0.02369308853149414, 0.02330156707763672, 0.023179840087890625, 0.023310176849365233, 0.023406528472900392, 0.02466633605957031, 0.02553241539001465, 0.023427072525024413, 0.02332467269897461, 0.02371583938598633, 0.023381120681762697, 0.02332147216796875, 0.023359487533569336, 0.02327347183227539, 0.02305638313293457, 0.02315673637390137, 0.022980607986450196, 0.023201791763305665, 0.023297632217407226, 0.023073183059692384, 0.022961408615112304, 0.02292108726501465, 0.023008127212524414, 0.023162879943847657, 0.022878911972045897, 0.023035903930664063, 0.023149599075317384, 0.02296112060546875, 0.022750944137573243, 0.022864160537719728, 0.022853631973266602, 0.022785472869873046, 0.022930240631103514, 0.022834943771362304, 0.02286992073059082, 0.02272982406616211, 0.022745567321777342, 0.02273948860168457, 0.022810720443725587, 0.02275017547607422, 0.02269683265686035, 0.022665279388427734, 0.022804128646850587, 0.02288470458984375, 0.022863872528076173, 0.022848831176757813, 0.022831104278564454, 0.022721216201782225, 0.022803936004638672, 0.022800928115844728, 0.022716255187988282, 0.022804224014282226, 0.02270044708251953, 0.022749183654785156, 0.022837247848510742, 0.0229171199798584, 0.02306662368774414, 0.0228351993560791, 0.02291302490234375, 0.022755327224731444, 0.022675455093383787, 0.022771263122558595, 0.022630239486694338, 0.022710880279541015, 0.02588857650756836, 0.023270687103271483, 0.023145248413085937, 0.02287424087524414, 0.023279167175292968, 0.02289504051208496, 0.022962175369262695, 0.022947839736938477, 0.02288025665283203, 0.023117824554443358, 0.022841344833374022, 0.023188703536987303, 0.022886367797851564, 0.02288928031921387, 0.022769760131835938, 0.022800159454345704, 0.022798240661621092, 0.02273695945739746, 0.022755008697509765, 0.022731231689453124, 0.022681472778320312, 0.02276313591003418, 0.022704479217529296, 0.02298307228088379, 0.023236480712890625, 0.02300864028930664, 0.02296486473083496, 0.022880640029907227, 0.023076864242553712, 0.022941503524780273, 0.022769855499267577, 0.022831104278564454, 0.02295804786682129, 0.024036895751953124, 0.02324496078491211, 0.02289084815979004, 0.023017311096191408, 0.02300739288330078, 0.02295964813232422, 0.022729183197021486, 0.022793664932250976, 0.022721023559570314, 0.022695968627929688, 0.022743072509765625, 0.02279209518432617, 0.02290390396118164, 0.022946399688720705, 0.02284979248046875, 0.022835264205932616, 0.02273206329345703, 0.023003360748291016, 0.022998687744140624, 0.02307148742675781, 0.022993087768554688, 0.02323036766052246, 0.02327356719970703, 0.022871679306030273, 0.022890880584716798, 0.022838911056518554, 0.022935935974121094, 0.02304819107055664, 0.022943584442138672, 0.02303932762145996, 0.022964832305908203, 0.023090944290161133, 0.02324937629699707, 0.023379072189331055, 0.023434303283691407, 0.023205696105957033, 0.023136095046997072, 0.023091360092163084, 0.023064031600952148, 0.023112096786499024, 0.023021024703979494, 0.02316464042663574, 0.02315340805053711, 0.023148735046386718, 0.022980607986450196, 0.023238304138183594, 0.022899040222167967, 0.02326643180847168, 0.023016319274902344, 0.023029472351074217, 0.023213472366333008, 
0.023292800903320313, 0.023197696685791015, 0.023254783630371093, 0.023392223358154298, 0.0231461124420166, 0.023152448654174804, 0.023012447357177734, 0.022953727722167968, 0.024614912033081054, 0.02323865509033203, 0.023339008331298827, 0.023226367950439454, 0.02315590476989746, 0.02269660758972168, 0.02261417579650879, 0.022831104278564454, 0.02277168083190918, 0.02261199951171875, 0.02253004837036133, 0.022619359970092772, 0.022501535415649414, 0.022637184143066407, 0.0224451847076416, 0.022616960525512694, 0.02307276725769043, 0.023082271575927734, 0.022919456481933595, 0.02285817527770996, 0.022787200927734376, 0.02287295913696289, 0.022640640258789063, 0.023117824554443358, 0.022733823776245117, 0.022597696304321287, 0.022862783432006838, 0.022623552322387695, 0.022702783584594727, 0.022746368408203124, 0.02274348831176758, 0.023081024169921874, 0.022534400939941406, 0.022618112564086915, 0.022425184249877928, 0.022721120834350586, 0.022442943572998048, 0.022440256118774413, 0.023393888473510743, 0.02252662467956543, 0.022550848007202147, 0.02333695983886719, 0.023077951431274415, 0.022915136337280272, 0.02262841606140137, 0.022571840286254884, 0.022991968154907227, 0.023557024002075197, 0.023440671920776368, 0.02377948760986328, 0.0239968318939209, 0.02384092712402344, 0.023685056686401366, 0.02337183952331543, 0.022970367431640625, 0.022760831832885742, 0.022667903900146485, 0.02261020851135254, 0.022978559494018554, 0.02320115280151367, 0.023087007522583008, 0.022902624130249023, 0.02289955139160156, 0.02304732894897461, 0.02323747253417969, 0.0227587833404541, 0.02275600051879883, 0.022773759841918945, 0.022751232147216797, 0.022937599182128905, 0.022796287536621093, 0.02309868812561035, 0.02379961585998535, 0.023217023849487303, 0.023254655838012697, 0.023240928649902345, 0.023537824630737305, 0.02347001647949219, 0.023275583267211915, 0.023177215576171875, 0.022796287536621093, 0.022724191665649415, 0.02258780860900879, 0.02294937515258789, 0.022656639099121093, 0.022519712448120118, 0.022725183486938475, 0.022487455368041993, 0.022884416580200195, 0.02305836868286133, 0.023044095993041993, 0.0231014404296875, 0.022732799530029296, 0.022773759841918945, 0.02267955207824707, 0.02247475242614746, 0.0224399356842041, 0.022550527572631835, 0.02247065544128418, 0.022495231628417968, 0.022441471099853515, 0.022591007232666015, 0.022679616928100586, 0.023208864212036134, 0.023182464599609376, 0.022620512008666993, 0.022638208389282228, 0.023622560501098632, 0.022513151168823242, 0.022616575241088867, 0.022562816619873048, 0.02278188705444336, 0.022599519729614256, 0.022761695861816405, 0.02271628761291504, 0.0226910400390625, 0.02317366409301758, 0.02264035224914551, 0.02254217529296875, 0.022545215606689453, 0.022417407989501953, 0.022417760848999022, 0.022588607788085937, 0.02262268829345703, 0.022380767822265626, 0.022500736236572266, 0.02242176055908203, 0.02282124710083008, 0.02305001640319824, 0.022845439910888672, 0.02315673637390137, 0.023357440948486328, 0.02319683265686035, 0.022862688064575195, 0.02294988822937012, 0.0226694393157959, 0.02250534439086914, 0.02250547218322754, 0.022498559951782227, 0.022440704345703125, 0.022474079132080077, 0.02252047920227051, 0.022502399444580077, 0.022614879608154295, 0.022539583206176758, 0.02258211135864258, 0.022633920669555663, 0.02272483253479004, 0.02393350410461426, 0.024034143447875977, 0.022805440902709962, 0.02285158348083496, 0.022775808334350587, 0.02288761520385742, 0.022752031326293946, 0.02299295997619629, 
0.022578752517700196, 0.02279052734375, 0.02307075119018555, 0.02318889617919922, 0.02286569595336914, 0.022697792053222657, 0.022932479858398438, 0.022474176406860353, 0.0225118408203125, 0.022571359634399414, 0.022718463897705078, 0.022473791122436523, 0.02267616081237793, 0.02261395263671875, 0.02279846382141113, 0.023103679656982422, 0.022730752944946288, 0.022688959121704103, 0.02278384017944336, 0.023028703689575197, 0.023029760360717775, 0.022718463897705078, 0.022980607986450196, 0.02336467170715332, 0.023022016525268554, 0.022985183715820312, 0.02272662353515625, 0.022693920135498046, 0.02260531234741211, 0.022530208587646483, 0.02261622428894043, 0.02260348892211914, 0.022743999481201174, 0.02265875244140625, 0.023810335159301758, 0.024748064041137694, 0.02292950439453125, 0.022824415206909178, 0.022770111083984374, 0.022875776290893556, 0.022673791885375976, 0.022796287536621093, 0.022609855651855467, 0.022707712173461913, 0.023822463989257813, 0.023067071914672853, 0.02264678382873535, 0.02253379249572754, 0.022455648422241212, 0.022443008422851563, 0.022396160125732423, 0.022386560440063475, 0.02255961608886719, 0.022427648544311524, 0.02257289505004883, 0.022431455612182617, 0.02262633514404297, 0.022487455368041993, 0.0225218563079834, 0.022511615753173828, 0.02255462455749512, 0.02268345642089844, 0.022819007873535156, 0.02263859176635742, 0.022622207641601562, 0.022479103088378905, 0.02255641555786133, 0.022740991592407226, 0.02252783966064453, 0.02250921630859375, 0.022426111221313477, 0.022562240600585936, 0.022599327087402345, 0.02242243194580078, 0.022527999877929687, 0.022598783493041993, 0.02292550468444824, 0.022761503219604493, 0.022528415679931642, 0.022960384368896483, 0.0224849910736084, 0.022642688751220705, 0.022656320571899414, 0.022713024139404295, 0.022680639266967773, 0.022805248260498047, 0.022603296279907228, 0.022523679733276368, 0.02266579246520996, 0.022481216430664062, 0.02253206443786621, 0.022802560806274415, 0.02263654327392578, 0.022552032470703125, 0.022509727478027344, 0.022488639831542968, 0.02245622444152832, 0.02265590476989746, 0.022562816619873048, 0.02260201644897461, 0.02274412727355957, 0.02252217674255371, 0.022538400650024413, 0.02270191955566406, 0.022970464706420897, 0.022735103607177735, 0.022717920303344727, 0.022430240631103517, 0.022487039566040038, 0.022874111175537108, 0.02327142333984375, 0.023277759552001953, 0.02285548782348633, 0.0232992000579834, 0.022801279067993164, 0.023037311553955078, 0.022893184661865233, 0.02309734344482422, 0.024930303573608398, 0.023037952423095705, 0.022843616485595703, 0.022810400009155272, 0.022675455093383787, 0.022595264434814452, 0.02253036880493164, 0.022668928146362306, 0.02262054443359375, 0.022724607467651366, 0.022552576065063477, 0.022366207122802736, 0.023994239807128906, 0.022490304946899416, 0.022397247314453125, 0.022405759811401367, 0.022525056838989258, 0.022395776748657226, 0.02240015983581543, 0.02239164733886719, 0.02243142318725586, 0.022450336456298826, 0.022371999740600584, 0.022433696746826173, 0.022476383209228516, 0.022395904541015626, 0.02242355155944824, 0.02238172721862793, 0.022416223526000978, 0.022614015579223632, 0.022566944122314452, 0.022752704620361327, 0.022630527496337892, 0.022433984756469728, 0.022419679641723634]",tokens/s,43.78772199608627,, 
4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,2210.312192,2271.084544,0.0,1868.562432,1779.722752,s,1,8.8734150390625,8.8734150390625,0.0,8.8734150390625,8.8734150390625,8.8734150390625,8.8734150390625,[8.8734150390625],,kWh,5.555162824166473e-05,6.120300259281184e-06,1.7204735985987263e-05,7.887666448693318e-05,,MB,2291.687424,2378.039296,0.0,1960.83712,1916.447232,s,10,0.9602471694946287,0.09602471694946288,0.0005314548679986565,0.09622380828857421,0.09644146041870116,0.09657058753967285,0.09667388923645019,"[0.09474278259277344, 0.09572444915771484, 0.09566486358642579, 0.09582342529296875, 0.09621011352539062, 0.09623750305175781, 0.09640054321289063, 0.09641276550292968, 0.09669971466064453, 0.09633100891113282]",tokens/s,2665.98026146467,kWh,2.8034591516668823e-06,3.0917322925711184e-07,1.8717634021714865e-06,4.98439578309548e-06,tokens/kWh,51360287.41301423,MB,2296.0128,2545.811456,0.0,2128.60928,2001.572864,s,10,18.213513549804688,1.8213513549804687,0.20169815502324753,1.9585209350585937,1.992731689453125,1.9977705688476561,2.001801672363281,"[1.5250811767578125, 1.5233592529296875, 1.527644775390625, 1.788803466796875, 1.965163818359375, 1.9588001708984375, 1.971997802734375, 1.9916119384765625, 2.0028094482421874, 1.95824169921875]",tokens/s,34.589701667241236,kWh,4.4147953917500264e-05,4.869201887553396e-06,2.447522989762857e-05,7.349238570268222e-05,tokens/kWh,857231.662812937,,s,630,18.210598930358877,0.028905712587871245,0.003399946626746792,0.03098747253417969,0.03175563201904297,0.031974277210235595,0.03289979751586914,"[0.024432640075683593, 0.02431715202331543, 0.024183807373046876, 0.024159423828125, 0.023928672790527343, 0.02410963249206543, 0.02396793556213379, 0.024219968795776366, 0.02398585510253906, 0.023945215225219727, 0.023994367599487306, 0.024037376403808593, 0.024006656646728516, 0.023961599349975587, 0.024004608154296874, 0.02421753692626953, 0.023971168518066407, 0.024091360092163085, 0.024004608154296874, 0.024145919799804686, 0.024253887176513673, 0.02430828857421875, 0.024240127563476564, 0.024252416610717774, 0.024284160614013672, 0.024158720016479493, 0.024111072540283204, 0.02411369514465332, 0.024025087356567384, 0.024183807373046876, 0.024017568588256835, 0.025530303955078125, 0.024197536468505858, 0.024426143646240236, 0.02456611251831055, 0.024473567962646485, 0.024356576919555666, 0.024426816940307617, 0.024223743438720705, 0.024180736541748047, 0.02407993507385254, 0.02425187110900879, 0.024125888824462892, 0.02419868850708008, 0.02412851142883301, 0.02406809616088867, 0.02413363265991211, 0.024045568466186523, 0.0240897274017334, 0.02407823944091797, 0.02402195167541504, 0.024131519317626953, 0.024074335098266602, 0.024215551376342775, 0.024197120666503907, 0.024203264236450195, 0.024325439453125, 0.024324800491333006, 0.024817663192749022, 0.02427903938293457, 0.024367103576660155, 
0.024270784378051757, 0.0243569278717041, 0.02420569610595703, 0.024240480422973634, 0.024311872482299806, 0.02417024040222168, 0.023914751052856446, 0.023992095947265625, 0.02406012725830078, 0.024186880111694335, 0.024018943786621092, 0.02426371192932129, 0.024218208312988283, 0.024062463760375977, 0.02409359931945801, 0.023980703353881836, 0.02397369575500488, 0.024051904678344727, 0.023990463256835938, 0.024225439071655273, 0.024142303466796876, 0.02411110305786133, 0.024387584686279298, 0.024563711166381837, 0.024560863494873048, 0.024257312774658202, 0.02416815948486328, 0.024152095794677735, 0.02412774467468262, 0.02425651168823242, 0.024061952590942383, 0.023980031967163085, 0.023941120147705077, 0.023944799423217773, 0.023886144638061522, 0.02394291114807129, 0.02390998458862305, 0.024319936752319336, 0.02409760093688965, 0.024002559661865236, 0.02392678451538086, 0.024006656646728516, 0.023942367553710937, 0.023942975997924804, 0.023894752502441406, 0.023965951919555663, 0.023857152938842774, 0.023928800582885743, 0.023896095275878906, 0.024236032485961914, 0.024371135711669923, 0.024469024658203126, 0.024418495178222657, 0.024330015182495116, 0.02525651168823242, 0.024612224578857422, 0.024295200347900392, 0.02419798469543457, 0.023953407287597657, 0.023957504272460937, 0.02395136070251465, 0.023918048858642578, 0.023980575561523436, 0.025159519195556642, 0.025847455978393555, 0.024130815505981444, 0.024210176467895507, 0.024017951965332032, 0.024083423614501952, 0.023903392791748048, 0.024056671142578125, 0.023975839614868166, 0.02417807960510254, 0.023931583404541015, 0.023975616455078126, 0.024086175918579103, 0.02433910369873047, 0.024631103515625, 0.024463552474975586, 0.024184383392333985, 0.024381887435913085, 0.024518655776977538, 0.02455548858642578, 0.0246661434173584, 0.024532127380371093, 0.024277599334716796, 0.024125696182250977, 0.02405887985229492, 0.02394576072692871, 0.02397161674499512, 0.024043296813964842, 0.02409062385559082, 0.02425324821472168, 0.02448307228088379, 0.024447839736938478, 0.024551424026489257, 0.024669727325439452, 0.024561759948730468, 0.024570240020751952, 0.024434688568115235, 0.02405705642700195, 0.024171295166015624, 0.024120864868164064, 0.024132064819335938, 0.024334495544433593, 0.024008544921875, 0.02399558448791504, 0.023927616119384765, 0.02390822410583496, 0.02404159927368164, 0.023942495346069338, 0.024076383590698244, 0.023982912063598632, 0.02391609573364258, 0.024676544189453125, 0.027428640365600585, 0.0244564151763916, 0.02408345603942871, 0.02402899169921875, 0.02403055953979492, 0.023999135971069337, 0.02395359992980957, 0.024023040771484375, 0.024133184432983398, 0.02405830383300781, 0.024403968811035157, 0.024129535675048826, 0.024049663543701173, 0.02424403190612793, 0.02414031982421875, 0.024271007537841796, 0.024223743438720705, 0.024172096252441405, 0.02414022445678711, 0.024027135848999022, 0.02410086441040039, 0.02393017578125, 0.023994560241699218, 0.02400921630859375, 0.024123392105102538, 0.023998464584350586, 0.02395136070251465, 0.023971839904785155, 0.023992319107055664, 0.023941120147705077, 0.024025087356567384, 0.02393087959289551, 0.025308479309082033, 0.0241177921295166, 0.023984287261962892, 0.023941120147705077, 0.023923776626586915, 0.028683296203613283, 0.02908355140686035, 0.03116851234436035, 0.03103721618652344, 0.03120560073852539, 0.031318016052246093, 0.03093903923034668, 0.031342144012451174, 0.031701120376586914, 0.030986656188964845, 0.030851072311401367, 0.030899295806884764, 
0.031191104888916014, 0.03123641586303711, 0.031074848175048828, 0.031045120239257814, 0.0309682559967041, 0.031002687454223632, 0.03119308853149414, 0.03122777557373047, 0.031068288803100585, 0.031020160675048827, 0.031011423110961913, 0.031080608367919923, 0.031549568176269534, 0.03162460708618164, 0.031064640045166014, 0.03121513557434082, 0.031209888458251952, 0.03121561622619629, 0.0314102725982666, 0.03139695930480957, 0.03112335968017578, 0.030895103454589845, 0.030953023910522463, 0.03112588882446289, 0.0309105281829834, 0.031080448150634765, 0.03095756721496582, 0.034810367584228515, 0.03118479919433594, 0.031010368347167968, 0.031063007354736327, 0.03121151924133301, 0.0311910400390625, 0.030944480895996093, 0.031009567260742187, 0.030895103454589845, 0.031153152465820313, 0.031180192947387695, 0.030956127166748046, 0.030891679763793947, 0.03095792007446289, 0.03094691276550293, 0.03140854454040527, 0.031021055221557618, 0.031033344268798828, 0.031064064025878906, 0.031045631408691408, 0.03438310241699219, 0.032406272888183596, 0.03131324768066406, 0.03239593505859375, 0.031070207595825194, 0.030947328567504883, 0.03082582473754883, 0.031107744216918944, 0.030935039520263673, 0.03130121612548828, 0.030996671676635744, 0.030859487533569336, 0.030914560317993164, 0.03082444763183594, 0.030898176193237304, 0.03093891143798828, 0.030933216094970704, 0.03099852752685547, 0.030873600006103515, 0.031123455047607423, 0.030871551513671876, 0.03154438400268555, 0.031189952850341797, 0.031148031234741212, 0.030932159423828126, 0.030946111679077147, 0.031039392471313477, 0.03080953598022461, 0.030900896072387694, 0.03114166450500488, 0.030818527221679687, 0.031301631927490234, 0.031163679122924805, 0.03132489585876465, 0.030936159133911133, 0.031359071731567385, 0.030864191055297852, 0.030911680221557616, 0.030937215805053712, 0.030928800582885742, 0.030946016311645508, 0.03090230369567871, 0.030947328567504883, 0.030955488204956055, 0.030947328567504883, 0.030869504928588868, 0.03100828742980957, 0.03087958335876465, 0.03135087966918945, 0.030967456817626953, 0.03136396789550781, 0.03096575927734375, 0.03105936050415039, 0.031208032608032225, 0.030963712692260743, 0.03103539276123047, 0.03151872062683105, 0.03109622383117676, 0.03116873550415039, 0.031080831527709962, 0.03137104034423828, 0.031381727218627926, 0.03128678321838379, 0.031150592803955077, 0.03146096038818359, 0.03106857681274414, 0.03085011291503906, 0.031036256790161133, 0.031077951431274415, 0.030927295684814452, 0.03140208053588867, 0.03163955116271973, 0.031016960144042968, 0.030818239212036132, 0.030924575805664063, 0.030841119766235353, 0.030912511825561522, 0.03079113578796387, 0.030926368713378907, 0.030952064514160157, 0.031008960723876954, 0.030939327239990235, 0.030830591201782227, 0.03090345573425293, 0.031001440048217775, 0.031054048538208007, 0.031099712371826172, 0.03102614402770996, 0.03176038360595703, 0.030875200271606444, 0.030883615493774413, 0.031002464294433593, 0.03105011177062988, 0.03090598487854004, 0.030898176193237304, 0.030882400512695314, 0.03105990409851074, 0.03125609588623047, 0.0310828800201416, 0.030996864318847656, 0.03132928085327148, 0.03169484710693359, 0.031034048080444337, 0.030922079086303712, 0.030968799591064453, 0.03178838348388672, 0.031049983978271484, 0.03105948829650879, 0.0309039363861084, 0.030976512908935546, 0.030832735061645508, 0.030846080780029296, 0.03091094398498535, 0.031199647903442384, 0.031055871963500976, 0.03119513511657715, 0.03138067245483398, 
0.031390527725219726, 0.031348512649536135, 0.031150304794311523, 0.03101865577697754, 0.0312891845703125, 0.030992895126342773, 0.03125657653808594, 0.031040704727172852, 0.03132921600341797, 0.03121958351135254, 0.03095961570739746, 0.031281152725219724, 0.03147987174987793, 0.03124019241333008, 0.03123142433166504, 0.03161039924621582, 0.03127782440185547, 0.031020992279052733, 0.031163936614990236, 0.031200000762939453, 0.031092575073242187, 0.031008832931518553, 0.0312520637512207, 0.03127142333984375, 0.0313507194519043, 0.03128121566772461, 0.031174047470092774, 0.031222368240356447, 0.031344224929809573, 0.03160105514526367, 0.0317704963684082, 0.031557695388793945, 0.03190947151184082, 0.03179529571533203, 0.03164169692993164, 0.03134275245666504, 0.03138291168212891, 0.03182054328918457, 0.03151667213439941, 0.03152246475219726, 0.03170556831359863, 0.03141353607177735, 0.0313286075592041, 0.031025407791137695, 0.03134873580932617, 0.031119359970092773, 0.03115827178955078, 0.03142860794067383, 0.03127705574035645, 0.031520639419555664, 0.03163475227355957, 0.03184067153930664, 0.03191116714477539, 0.03183200073242187, 0.03155753517150879, 0.03135990333557129, 0.031148031234741212, 0.031035200119018554, 0.0326995849609375, 0.033890655517578125, 0.031377344131469725, 0.031523551940917965, 0.03181324768066406, 0.03174233627319336, 0.0313753604888916, 0.03130748748779297, 0.031136383056640626, 0.03166172790527344, 0.03146956825256347, 0.03316940689086914, 0.032129024505615236, 0.031868160247802736, 0.031548160552978516, 0.03225711822509766, 0.03170393562316894, 0.03183996772766113, 0.031412544250488283, 0.03129689598083496, 0.030982784271240234, 0.03110220718383789, 0.03162393569946289, 0.03121971130371094, 0.031236095428466795, 0.03127827262878418, 0.03137801551818847, 0.03195107269287109, 0.031968416213989256, 0.03178780746459961, 0.03180710411071777, 0.03192630386352539, 0.03141059112548828, 0.03178700828552246, 0.031016960144042968, 0.030893695831298827, 0.031000959396362306, 0.031878656387329105, 0.03124393653869629, 0.030984767913818358, 0.03143708801269531, 0.03170857620239258, 0.03292835235595703, 0.031964479446411134, 0.031554239273071286, 0.03143475151062012, 0.031340543746948245, 0.03142204856872559, 0.031365535736083985, 0.031315359115600586, 0.03171711921691894, 0.03126144027709961, 0.03126896095275879, 0.031311071395874024, 0.031828767776489256, 0.03155353546142578, 0.03134668731689453, 0.0317542724609375, 0.031457279205322264, 0.031178655624389647, 0.03150243186950684, 0.03111523246765137, 0.031217695236206055, 0.03122310447692871, 0.031197887420654297, 0.031110687255859373, 0.03139955139160156, 0.031755104064941406, 0.03195462417602539, 0.03214736175537109, 0.03304841613769531, 0.03401529693603516, 0.03281356811523437, 0.03228828811645508, 0.03238479995727539, 0.032191070556640625, 0.032145503997802735, 0.03214950561523437, 0.03199756813049316, 0.03204719924926758, 0.03185795211791992, 0.031924352645874024, 0.031959039688110355, 0.03158233642578125, 0.03155430412292481, 0.03141529655456543, 0.03132943916320801, 0.03148950386047363, 0.03211008071899414, 0.031979711532592776, 0.03153990364074707, 0.031524864196777344, 0.032, 0.03197952079772949, 0.031845727920532224, 0.03162179183959961, 0.03163862419128418, 0.03159942436218262, 0.031893535614013674, 0.03190377616882324, 0.03172969627380371, 0.031526912689208986, 0.032069278717041017, 0.03227888107299805, 0.032271713256835935, 0.03204390335083008, 0.03232745742797852, 0.031959039688110355, 0.03171043205261231, 
0.03165056037902832, 0.03190294456481933, 0.03170182418823242, 0.03156172752380371, 0.03165305519104004, 0.031841087341308597, 0.031255935668945314, 0.031142528533935548, 0.031066112518310547, 0.03095529556274414, 0.03104115104675293, 0.03114089584350586, 0.031029632568359375, 0.030808095932006837, 0.031232576370239257, 0.031098400115966797, 0.031119840621948242, 0.031033344268798828, 0.031148031234741212, 0.03197907257080078, 0.03282988739013672, 0.031344127655029294, 0.0313568000793457, 0.03123664093017578, 0.031204864501953124, 0.030898656845092774, 0.0311092472076416, 0.031036672592163087, 0.03099110412597656, 0.03129958343505859, 0.030988288879394532, 0.03099139213562012, 0.030943519592285158, 0.030890687942504883, 0.03081769561767578, 0.03092131233215332, 0.03092265510559082, 0.030775392532348633, 0.03096780776977539, 0.03123184013366699, 0.031404191970825196, 0.03158569526672363, 0.030968223571777344, 0.03080825614929199, 0.030858400344848633, 0.030851680755615233, 0.030828800201416016, 0.030914560317993164, 0.031127359390258787, 0.031060159683227537, 0.0310435848236084, 0.03079743957519531, 0.03110335922241211, 0.0311910400390625, 0.031162368774414063, 0.03121561622619629, 0.031010368347167968, 0.031000736236572266, 0.030801183700561525, 0.031054624557495115, 0.03097007942199707, 0.030926015853881834, 0.03087443161010742, 0.030869440078735353, 0.03088595199584961, 0.03084492874145508, 0.0309935359954834, 0.030753664016723633, 0.031047679901123046, 0.031143583297729493, 0.030865760803222658, 0.031006719589233397, 0.03159823989868164, 0.031050079345703124]",tokens/s,34.595237773850876,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 1015, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 840, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 467, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 397, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 252, in _flash_attention_forward attn_output_unpad = flash_attn_varlen_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 1124, in flash_attn_varlen_func return FlashAttnVarlenFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 620, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_varlen_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 90, in _flash_attn_varlen_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.varlen_fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1584.709632,1604.190208,0.0,1201.668096,1189.151232,s,1,8.7103603515625,8.7103603515625,0.0,8.7103603515625,8.7103603515625,8.7103603515625,8.7103603515625,[8.7103603515625],,kWh,3.802293766247355e-05,4.1867660681864155e-06,1.1953620673993415e-05,5.416332440465338e-05,,MB,1604.210688,1801.322496,0.0,1384.12032,1351.367168,s,10,0.4132649230957031,0.04132649230957031,0.00020603983807786284,0.041266910552978514,0.04145219039916992,0.04167006301879883,0.04184436111450195,"[0.041887935638427735, 0.041223136901855466, 0.04118329620361328, 0.04140377426147461, 0.041241630554199216, 0.041363391876220706, 0.04117472076416016, 0.04113151931762695, 0.04129219055175781, 
0.041363327026367185]",tokens/s,6194.573642552189,kWh,1.2591889369791742e-06,1.388665947227741e-07,8.346187232500409e-07,2.2326742549519895e-06,tokens/kWh,114660703.15998915,MB,1611.264,1843.265536,0.0,1426.06336,1407.548416,s,10,10.733422241210937,1.0733422241210937,0.0048307207519187245,1.0739862060546874,1.0776815185546875,1.0794766845703125,1.0809128173828126,"[1.0616793212890625, 1.07112255859375, 1.0735338134765624, 1.0744385986328124, 1.0812718505859376, 1.0722564697265624, 1.07509765625, 1.0772825927734375, 1.0754315185546874, 1.071307861328125]",tokens/s,58.695165981742264,kWh,3.126345105843362e-05,3.4478815412109768e-06,1.590875578254956e-05,5.0620088382194155e-05,tokens/kWh,1244565.1916751796,,s,630,10.729833663940438,0.01703148200625465,0.0003127795947548861,0.016963775634765622,0.017254445075988768,0.01743060960769653,0.0182112064743042,"[0.017252735137939453, 0.01695510482788086, 0.01686899185180664, 0.016828895568847656, 0.01681817626953125, 0.016928768157958983, 0.01683046340942383, 0.01672150421142578, 0.01678316879272461, 0.016896608352661133, 0.016786687850952147, 0.016853759765625, 0.016922399520874022, 0.017352863311767577, 0.01684486389160156, 0.01675619125366211, 0.0168371524810791, 0.01679769515991211, 0.016898080825805663, 0.0168570556640625, 0.016867328643798828, 0.01681203269958496, 0.016749664306640624, 0.016808319091796874, 0.016775711059570313, 0.01677926445007324, 0.016782783508300782, 0.016773279190063477, 0.01687196731567383, 0.016904064178466797, 0.016936927795410155, 0.01684230422973633, 0.01676131248474121, 0.01683417510986328, 0.016761215209960937, 0.016766016006469726, 0.01681292724609375, 0.016769088745117188, 0.0167642879486084, 0.016806528091430663, 0.016785408020019533, 0.016786495208740235, 0.01683990478515625, 0.016803552627563476, 0.01674345588684082, 0.016850080490112305, 0.016930303573608398, 0.01683488082885742, 0.016932735443115233, 0.017023103713989258, 0.01681376075744629, 0.017058111190795897, 0.016691200256347655, 0.016781312942504883, 0.0168143367767334, 0.01682419204711914, 0.016805599212646485, 0.01682579231262207, 0.016793472290039062, 0.01687049674987793, 0.016875328063964842, 0.016971616744995116, 0.016830528259277344, 0.017288896560668947, 0.01726131248474121, 0.017272607803344726, 0.016871936798095705, 0.016836448669433592, 0.016969120025634766, 0.01723423957824707, 0.017096351623535157, 0.01707241630554199, 0.016941280364990235, 0.016910207748413085, 0.01680726432800293, 0.01693516731262207, 0.016874015808105467, 0.017059104919433594, 0.017238752365112305, 0.017297407150268555, 0.017168384552001953, 0.017199424743652342, 0.017033119201660157, 0.01703023910522461, 0.016990911483764647, 0.017098751068115235, 0.01704355239868164, 0.017055456161499023, 0.01692486381530762, 0.016973215103149412, 0.016857376098632814, 0.016894271850585937, 0.016898048400878905, 0.016953344345092772, 0.0169881591796875, 0.016957183837890626, 0.016867584228515625, 0.01698406410217285, 0.016914304733276368, 0.01718284797668457, 0.016977216720581053, 0.01695020866394043, 0.01690559959411621, 0.016836383819580077, 0.016996320724487306, 0.017138303756713866, 0.01684889602661133, 0.01721478462219238, 0.016955072402954102, 0.016837631225585938, 0.016910367965698243, 0.01683046340942383, 0.016787424087524414, 0.01680143928527832, 0.016867679595947267, 0.01679155158996582, 0.01702409553527832, 0.0168375358581543, 0.017043327331542967, 0.01717056083679199, 0.017188863754272463, 0.01703891181945801, 0.01698182487487793, 0.016935104370117186, 0.01701865577697754, 
0.017014560699462892, 0.017869024276733397, 0.017099872589111328, 0.017021503448486328, 0.016914783477783205, 0.016906368255615235, 0.01683580780029297, 0.016935583114624023, 0.016940256118774415, 0.016841503143310548, 0.017022975921630858, 0.016893503189086913, 0.017139871597290038, 0.01686147117614746, 0.016838048934936522, 0.016863391876220702, 0.0167890567779541, 0.01688172721862793, 0.017081151962280272, 0.017149696350097655, 0.017020959854125977, 0.016949472427368165, 0.017089855194091796, 0.017064479827880858, 0.01707206344604492, 0.016918752670288088, 0.01690835189819336, 0.016904159545898436, 0.0168734073638916, 0.01684592056274414, 0.016876480102539063, 0.01683216094970703, 0.016925024032592773, 0.016877439498901366, 0.016840255737304688, 0.016841567993164063, 0.016983776092529296, 0.017051904678344727, 0.01703094482421875, 0.017259647369384765, 0.017220447540283203, 0.016930816650390625, 0.016947200775146484, 0.016835903167724608, 0.016831167221069337, 0.016955263137817384, 0.01762112045288086, 0.019949567794799804, 0.017156095504760743, 0.0170614070892334, 0.016925247192382812, 0.016885663986206053, 0.016948415756225587, 0.01694598388671875, 0.01690403175354004, 0.0169268798828125, 0.016900352478027344, 0.016968767166137697, 0.016957855224609374, 0.017131807327270508, 0.01708598327636719, 0.01718320083618164, 0.01698793601989746, 0.01699247932434082, 0.01698486328125, 0.017047552108764647, 0.016879552841186522, 0.01701593589782715, 0.016978879928588868, 0.01700044822692871, 0.01691414451599121, 0.016988447189331055, 0.01711471939086914, 0.01710326385498047, 0.01722265625, 0.017385951995849608, 0.01728268814086914, 0.017189855575561522, 0.017078208923339843, 0.016924768447875976, 0.016881471633911134, 0.01688947105407715, 0.016994783401489258, 0.01694063949584961, 0.016984479904174805, 0.016979007720947265, 0.017017824172973633, 0.016861152648925782, 0.01702400016784668, 0.01696870422363281, 0.016898048400878905, 0.01720924758911133, 0.01699446487426758, 0.017042943954467774, 0.016935007095336914, 0.017079999923706055, 0.016970048904418944, 0.017047679901123047, 0.016864864349365235, 0.017067935943603514, 0.017009056091308594, 0.016998880386352538, 0.01725129508972168, 0.01769923210144043, 0.018577888488769533, 0.017089536666870117, 0.01699500846862793, 0.016859455108642576, 0.01687071990966797, 0.01690880012512207, 0.016906431198120117, 0.016857311248779296, 0.016922592163085937, 0.017147008895874023, 0.016937664031982422, 0.017141759872436522, 0.018198528289794923, 0.01701478385925293, 0.01693062400817871, 0.016899648666381835, 0.01684543991088867, 0.016902143478393555, 0.01684889602661133, 0.01683660888671875, 0.016848480224609375, 0.016875936508178712, 0.016854719161987306, 0.016920576095581053, 0.01694246482849121, 0.01688368034362793, 0.01699292755126953, 0.017096704483032226, 0.01707827186584473, 0.01709014320373535, 0.017166751861572266, 0.017156095504760743, 0.017100799560546876, 0.016937152862548828, 0.016852895736694337, 0.016934816360473632, 0.016860191345214843, 0.016935903549194335, 0.01743436813354492, 0.017770015716552734, 0.017769184112548828, 0.017283071517944337, 0.017043455123901367, 0.016891904830932617, 0.016900096893310547, 0.017041023254394532, 0.016946592330932618, 0.016998783111572265, 0.016964191436767577, 0.01704550361633301, 0.017218784332275392, 0.017136768341064455, 0.01708201599121094, 0.01699648094177246, 0.016891775131225587, 0.016874879837036134, 0.017060031890869142, 0.016890304565429688, 0.01674809646606445, 0.017113536834716798, 
0.017152000427246093, 0.017104192733764647, 0.01726323127746582, 0.0171397762298584, 0.017069311141967775, 0.01704217529296875, 0.019865791320800782, 0.01952339172363281, 0.01786684799194336, 0.01718400001525879, 0.017435327529907226, 0.01704751968383789, 0.01701593589782715, 0.016913280487060547, 0.01711257553100586, 0.01703196716308594, 0.01711471939086914, 0.017053184509277345, 0.01708518409729004, 0.016977792739868165, 0.016988000869750976, 0.017035680770874022, 0.01711692810058594, 0.016910335540771485, 0.016978015899658205, 0.01695359992980957, 0.017382495880126952, 0.017292160034179688, 0.017051679611206055, 0.017160192489624023, 0.017006303787231444, 0.0170534725189209, 0.016947103500366212, 0.016945760726928712, 0.016990207672119142, 0.016920576095581053, 0.016971168518066407, 0.017027904510498047, 0.01682329559326172, 0.016849695205688478, 0.016869375228881836, 0.017102304458618163, 0.016908832550048828, 0.01724006462097168, 0.016936960220336913, 0.016904191970825197, 0.017127424240112304, 0.018247264862060547, 0.018216384887695312, 0.01727574348449707, 0.017003904342651366, 0.017003263473510742, 0.017426015853881836, 0.01691484832763672, 0.016791679382324218, 0.016864896774291992, 0.017019136428833008, 0.01685443115234375, 0.0169007682800293, 0.01692665672302246, 0.01683046340942383, 0.01682204818725586, 0.01682454490661621, 0.016910560607910158, 0.018183967590332032, 0.016949247360229493, 0.01682841682434082, 0.01681407928466797, 0.01681817626953125, 0.01691596794128418, 0.017084672927856447, 0.016840959548950197, 0.01688528060913086, 0.016883935928344727, 0.016842111587524414, 0.01687612724304199, 0.01691062355041504, 0.01697171211242676, 0.01690630340576172, 0.016867328643798828, 0.016816415786743165, 0.01685798454284668, 0.016816160202026368, 0.016835391998291014, 0.01698739242553711, 0.016849824905395508, 0.016848703384399415, 0.017206367492675782, 0.0168703670501709, 0.017008256912231446, 0.016848512649536133, 0.016941055297851563, 0.017017824172973633, 0.016888992309570312, 0.017787519454956054, 0.01701683235168457, 0.016891904830932617, 0.0169881591796875, 0.017139711380004884, 0.01710665512084961, 0.01730793571472168, 0.017063936233520507, 0.01705727958679199, 0.017259008407592775, 0.01706991958618164, 0.01707638359069824, 0.017209344863891602, 0.017102848052978514, 0.017108160018920897, 0.017056480407714843, 0.01699440002441406, 0.017102176666259766, 0.01705232048034668, 0.017082368850708008, 0.017258176803588866, 0.017180992126464845, 0.017172479629516603, 0.01718272018432617, 0.016982015609741212, 0.01700454330444336, 0.017133567810058595, 0.017276159286499025, 0.016917024612426758, 0.017144031524658203, 0.016893951416015626, 0.01746678352355957, 0.017193567276000975, 0.017090112686157226, 0.016892127990722657, 0.016989599227905272, 0.0170579833984375, 0.017478271484375, 0.016932096481323242, 0.016916576385498046, 0.017014495849609373, 0.01705379295349121, 0.016904415130615233, 0.017201791763305663, 0.016990207672119142, 0.01704960060119629, 0.016893951416015626, 0.016900224685668944, 0.01678451156616211, 0.016845151901245116, 0.01696335983276367, 0.0168088321685791, 0.01679641532897949, 0.016976415634155275, 0.0168720645904541, 0.016991968154907226, 0.01701696014404297, 0.01705289649963379, 0.01717308807373047, 0.017047552108764647, 0.016967456817626955, 0.017158048629760742, 0.017403999328613282, 0.017170656204223634, 0.01700454330444336, 0.01702409553527832, 0.01692560005187988, 0.01702217674255371, 0.016925567626953124, 0.016888832092285155, 0.01693788719177246, 
0.017151327133178712, 0.0170482234954834, 0.017036928176879882, 0.016922048568725586, 0.01689491271972656, 0.016903999328613282, 0.01687571144104004, 0.01698771286010742, 0.016914176940917968, 0.01694380760192871, 0.01681612777709961, 0.016939008712768554, 0.01723391914367676, 0.0170446720123291, 0.01702524757385254, 0.016988000869750976, 0.01685068893432617, 0.016870208740234375, 0.01688355255126953, 0.016838464736938476, 0.01682076835632324, 0.016901567459106447, 0.01694572830200195, 0.016904191970825197, 0.01698406410217285, 0.017031167984008787, 0.01705779266357422, 0.016996000289916994, 0.0169039363861084, 0.016965568542480467, 0.016929439544677734, 0.017306623458862306, 0.01991983985900879, 0.01752252769470215, 0.017381568908691407, 0.017254400253295898, 0.01721340751647949, 0.017159296035766602, 0.017089056015014648, 0.01714009666442871, 0.017118240356445314, 0.01722390365600586, 0.017424671173095704, 0.017756639480590822, 0.017092607498168946, 0.01700044822692871, 0.01693903923034668, 0.01703727912902832, 0.01700044822692871, 0.017117183685302736, 0.01729097557067871, 0.017171295166015624, 0.01781545639038086, 0.017098880767822264, 0.017005823135375978, 0.017039360046386717, 0.017135423660278322, 0.01708742332458496, 0.017534975051879884, 0.01782374382019043, 0.017473535537719728, 0.017509376525878906, 0.017314720153808593, 0.017483583450317385, 0.017171743392944337, 0.017271808624267578, 0.017176160812377928, 0.017122880935668945, 0.016992639541625977, 0.016986591339111328, 0.016982080459594727, 0.017157632827758788, 0.017111488342285156, 0.017022016525268555, 0.01694611167907715, 0.017277088165283203, 0.017059295654296876, 0.016963264465332032, 0.017027456283569335, 0.016916160583496095, 0.017156736373901367, 0.0168603515625, 0.016928863525390626, 0.016927040100097657, 0.016776832580566406, 0.016902015686035155, 0.016845056533813477, 0.01695939254760742, 0.018026399612426757, 0.016835071563720702, 0.01680624008178711, 0.016906240463256835, 0.016910335540771485, 0.01689299201965332, 0.016831552505493164, 0.017008512496948243, 0.01682022476196289, 0.016879615783691407, 0.016846656799316406, 0.01688595199584961, 0.016936128616333007, 0.016851776123046874, 0.01688175964355469, 0.016861087799072267, 0.016832639694213867, 0.016794527053833008, 0.017283103942871095, 0.016837568283081056, 0.016895103454589843, 0.017465568542480468, 0.016915103912353516, 0.01681817626953125, 0.016861183166503906, 0.016893951416015626, 0.01686720085144043, 0.016897727966308593, 0.01725484848022461, 0.01692803192138672, 0.01684489631652832, 0.016912927627563478, 0.016918079376220703, 0.016882207870483397, 0.017084415435791016, 0.01693280029296875, 0.01770502471923828, 0.01699951934814453, 0.01694358444213867, 0.01688175964355469, 0.01681852722167969, 0.016865280151367186, 0.01692780876159668, 0.01674131202697754, 0.01686297607421875, 0.016808448791503908, 0.01683328056335449, 0.016896896362304688, 0.016969856262207032, 0.016924448013305664, 0.017074432373046875, 0.016945119857788085, 0.016989408493041994, 0.01683126449584961, 0.017006752014160156, 0.01693065643310547, 0.01717452812194824, 0.017059839248657227, 0.016906400680541993, 0.016761728286743164, 0.016843328475952147, 0.0167346248626709, 0.01683456039428711, 0.016861183166503906, 0.016968767166137697, 0.01711609649658203, 0.01732918357849121, 0.017197439193725586, 0.017230432510375978, 0.01740595245361328, 0.017324031829833983, 0.017115135192871094, 0.017020736694335938, 0.01689414405822754, 0.0172227840423584, 0.0170382080078125, 
0.016900224685668944, 0.016942975997924804, 0.016963264465332032, 0.016939327239990233, 0.017031167984008787, 0.016969728469848632, 0.016865280151367186, 0.01749622344970703, 0.01710883140563965, 0.016858720779418947, 0.016988576889038084, 0.01705369567871094]",tokens/s,58.714796494677294,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,4294.79936,4886.233088,0.0,4483.710976,4465.672704,s,1,11.3411953125,11.3411953125,0.0,11.3411953125,11.3411953125,11.3411953125,11.3411953125,[11.3411953125],,kWh,0.00010723141636664573,1.1816384344187595e-05,3.3593082430005694e-05,0.00015264088314083903,,MB,2153.345024,5309.857792,0.0,4892.655616,4837.669376,s,10,1.829616714477539,0.1829616714477539,0.0006672803014414367,0.18289334106445312,0.18363573455810547,0.18390239334106445,0.18411572036743162,"[0.1817277069091797, 0.18255628967285156, 0.18336026000976563, 0.1824678039550781, 0.1824938507080078, 0.18301922607421875, 0.1827674560546875, 0.18416905212402343, 0.1834785919189453, 0.18357647705078126]",tokens/s,1399.2001601991417,kWh,5.329781003485523e-06,5.875646131138243e-07,3.535977576254255e-06,9.453323192853603e-06,tokens/kWh,27080423.971278954,MB,2157.24032,5477.629952,0.0,5060.427776,5014.227968,s,10,19.27095361328125,1.9270953613281254,0.003907943556371394,1.928769775390625,1.9308436767578125,1.9309702392578125,1.9310714892578125,"[1.926625732421875, 1.930058349609375, 1.928083251953125, 1.929456298828125, 1.9205299072265625, 1.930412353515625, 1.921873779296875, 1.9308155517578125, 1.9310968017578125, 1.9220015869140625]",tokens/s,32.691687844955084,kWh,4.6519011892763826e-05,5.129247808477372e-06,3.103332785694766e-05,8.268158755818885e-05,tokens/kWh,761959.2446221774,,s,630,19.268220180511484,0.030584476477002343,0.0004521787380282983,0.030477583885192873,0.030909994125366214,0.03117836618423462,0.03220870994567871,"[0.03178332710266113, 0.030823551177978515, 0.030554399490356446, 0.030832767486572266, 0.030376415252685546, 0.030687007904052734, 0.030613727569580078, 0.03100876808166504, 0.030917856216430666, 0.030863391876220704, 0.0306429443359375, 0.03046553611755371, 0.030544384002685547, 0.030478336334228515, 0.03041279983520508, 0.03054547119140625, 0.030280128479003906, 0.030507007598876954, 0.03041862487792969, 0.030425407409667968, 0.03042508888244629, 0.03041689682006836, 0.03056844711303711, 0.030317920684814453, 0.030479007720947266, 0.030402111053466796, 0.030429567337036133, 0.03023468780517578, 0.030436767578125, 0.030446176528930665, 0.030449663162231445, 0.030332927703857423, 0.03035238456726074, 0.030490720748901367, 0.030417823791503908, 0.030390272140502928, 0.03043667221069336, 0.030563007354736327, 0.03031622314453125, 0.030388032913208008, 0.030594911575317383, 0.030390272140502928, 0.030548479080200194, 0.03095363235473633, 0.03139993667602539, 0.030994272232055663, 0.03077340888977051, 0.031098880767822266, 
0.03096780776977539, 0.030717952728271485, 0.030692895889282226, 0.03075695991516113, 0.030508447647094726, 0.03047270393371582, 0.03069366455078125, 0.03039660835266113, 0.030283039093017577, 0.030328704833984376, 0.03027571105957031, 0.030210784912109375, 0.031229951858520507, 0.03023027229309082, 0.03036310386657715, 0.03065667152404785, 0.03055001640319824, 0.030482431411743165, 0.03181158447265625, 0.03096940803527832, 0.036151744842529296, 0.030889024734497072, 0.030708192825317383, 0.03054982376098633, 0.030511775970458985, 0.03042416000366211, 0.030413728713989258, 0.030385663986206055, 0.030341119766235353, 0.03030886459350586, 0.03112073516845703, 0.030284448623657225, 0.030259199142456054, 0.030369792938232422, 0.030291967391967774, 0.03034707260131836, 0.03025446319580078, 0.03033087921142578, 0.03035615921020508, 0.030281471252441405, 0.030363935470581055, 0.03055001640319824, 0.030453727722167968, 0.03039039993286133, 0.030502912521362304, 0.03093049621582031, 0.030452159881591796, 0.030900224685668946, 0.03080169677734375, 0.030709983825683594, 0.03057459259033203, 0.030498271942138673, 0.030476831436157228, 0.030492671966552733, 0.030457279205322266, 0.030789567947387696, 0.031066751480102538, 0.030711584091186524, 0.030628063201904296, 0.030449663162231445, 0.030521343231201172, 0.030619359970092772, 0.030427263259887694, 0.03042729568481445, 0.030423040390014647, 0.030498464584350585, 0.030511455535888674, 0.03042460823059082, 0.03039468765258789, 0.030601375579833983, 0.030373888015747072, 0.030537311553955077, 0.030253471374511717, 0.030369792938232422, 0.03056390380859375, 0.030584575653076172, 0.030509183883666992, 0.03050553512573242, 0.031164384841918944, 0.030699871063232423, 0.03047216033935547, 0.030322399139404297, 0.03031881523132324, 0.030632192611694337, 0.030729984283447264, 0.030624479293823243, 0.03047145652770996, 0.03056915283203125, 0.030844959259033203, 0.030691328048706053, 0.03095756721496582, 0.030676671981811524, 0.030474559783935547, 0.030428768157958985, 0.030368160247802735, 0.03036319923400879, 0.030529983520507814, 0.030334911346435546, 0.030281791687011717, 0.030623743057250977, 0.030312448501586913, 0.03078348731994629, 0.030363616943359376, 0.032116767883300784, 0.03069536018371582, 0.030678176879882814, 0.03053865623474121, 0.03040870475769043, 0.030724096298217773, 0.03056844711303711, 0.030543615341186523, 0.03065228843688965, 0.030585216522216796, 0.03040460777282715, 0.03046348762512207, 0.03042870330810547, 0.030475231170654298, 0.03078963279724121, 0.0308403205871582, 0.030789503097534178, 0.030960256576538087, 0.03039174461364746, 0.03033657646179199, 0.030307327270507813, 0.030418272018432616, 0.030632608413696288, 0.03039823913574219, 0.03037552070617676, 0.030322784423828124, 0.030443647384643554, 0.030339487075805666, 0.030319744110107422, 0.03031462478637695, 0.030349119186401367, 0.030345823287963865, 0.03129923248291016, 0.031548095703125, 0.0309616641998291, 0.03060736083984375, 0.030625343322753906, 0.03077974319458008, 0.03080784034729004, 0.030635871887207032, 0.030568416595458985, 0.03084476852416992, 0.030620384216308593, 0.030476287841796876, 0.030644479751586913, 0.030552064895629883, 0.030445568084716795, 0.030522432327270508, 0.030327520370483398, 0.030482656478881837, 0.030373888015747072, 0.030443519592285157, 0.030316543579101563, 0.030330495834350588, 0.03027801513671875, 0.03028326416015625, 0.030660831451416015, 0.03060940742492676, 0.030619935989379884, 0.030352832794189454, 0.03031622314453125, 
0.03020889663696289, 0.030552064895629883, 0.031336448669433595, 0.03035955238342285, 0.030475391387939452, 0.03063897514343262, 0.030504959106445313, 0.03042099189758301, 0.030486528396606444, 0.03032678413391113, 0.03040870475769043, 0.0303636474609375, 0.03037593650817871, 0.030418176651000977, 0.033501216888427734, 0.030785247802734374, 0.031013599395751955, 0.03035276794433594, 0.030575328826904297, 0.030345407485961914, 0.03042508888244629, 0.030765056610107422, 0.030887935638427736, 0.030330751419067385, 0.030393535614013673, 0.03049158477783203, 0.030360864639282226, 0.030315231323242188, 0.03052524757385254, 0.032210464477539065, 0.031596256256103517, 0.030880704879760742, 0.031108320236206053, 0.030481184005737304, 0.03056025505065918, 0.03051468849182129, 0.030429632186889648, 0.030684736251831053, 0.03067750358581543, 0.030582208633422852, 0.030943744659423827, 0.030469215393066407, 0.030445632934570314, 0.03079782485961914, 0.030397151947021483, 0.030500768661499023, 0.030427040100097655, 0.030478656768798826, 0.03048796844482422, 0.03038470458984375, 0.030611488342285158, 0.03037593650817871, 0.030449663162231445, 0.03056435203552246, 0.03049193572998047, 0.030425823211669922, 0.030666751861572264, 0.03035545539855957, 0.030648319244384766, 0.03043292808532715, 0.030390207290649413, 0.030517663955688477, 0.03035136032104492, 0.030513120651245118, 0.030253087997436524, 0.030310400009155275, 0.03084492874145508, 0.030242816925048828, 0.030373888015747072, 0.030332927703857423, 0.03038822364807129, 0.030491743087768555, 0.030507936477661132, 0.030619647979736327, 0.030369792938232422, 0.030382080078125, 0.03057049560546875, 0.03042099189758301, 0.03037798309326172, 0.030368896484375, 0.03059596824645996, 0.030459903717041017, 0.030213632583618165, 0.030345184326171875, 0.03026383972167969, 0.03029350471496582, 0.030466560363769532, 0.030619647979736327, 0.030602848052978516, 0.030533151626586916, 0.030613536834716796, 0.03066111946105957, 0.030476383209228516, 0.03052774429321289, 0.03054591941833496, 0.030420736312866212, 0.03088755226135254, 0.030374528884887696, 0.03086089515686035, 0.03055449676513672, 0.030309919357299805, 0.030348991394042967, 0.0302989444732666, 0.030779903411865234, 0.030801599502563476, 0.030909120559692384, 0.030625120162963867, 0.03056912040710449, 0.030365695953369142, 0.030528799057006836, 0.030350048065185545, 0.030410751342773438, 0.03043436813354492, 0.030591936111450196, 0.030466047286987305, 0.030642175674438478, 0.030569696426391603, 0.030433376312255858, 0.030538335800170898, 0.030345312118530275, 0.03041279983520508, 0.030519296646118164, 0.03058687973022461, 0.03058892822265625, 0.030588287353515625, 0.03050150489807129, 0.03033193588256836, 0.03045475196838379, 0.03072204780578613, 0.03100467109680176, 0.030650367736816408, 0.030865407943725585, 0.031186431884765626, 0.0312642879486084, 0.031257568359375, 0.03135078430175781, 0.031464479446411134, 0.031175392150878906, 0.03125683212280273, 0.031061376571655273, 0.031174720764160155, 0.03139641571044922, 0.030590431213378906, 0.03043791961669922, 0.03057254409790039, 0.03170918464660644, 0.030420127868652343, 0.030780256271362303, 0.03056435203552246, 0.03038822364807129, 0.030633983612060548, 0.030377632141113282, 0.03032428741455078, 0.03032143974304199, 0.030328832626342773, 0.03031449508666992, 0.030298080444335938, 0.03055414390563965, 0.030371328353881837, 0.030273727416992188, 0.03021798324584961, 0.030369375228881838, 0.0302458553314209, 0.03032406425476074, 0.030273920059204103, 
0.03027756881713867, 0.031180799484252928, 0.03078758430480957, 0.030455808639526367, 0.03050444793701172, 0.030330720901489257, 0.030309024810791015, 0.030410751342773438, 0.03040870475769043, 0.03040460777282715, 0.03037183952331543, 0.030328832626342773, 0.030500864028930662, 0.03095961570739746, 0.030590816497802733, 0.030242271423339843, 0.030359935760498048, 0.030275680541992187, 0.030777183532714844, 0.030386560440063475, 0.030185152053833007, 0.030408287048339845, 0.03025584030151367, 0.030267391204833984, 0.03062700843811035, 0.030404767990112304, 0.030312768936157225, 0.030481855392456056, 0.03049139213562012, 0.030471935272216796, 0.030538143157958983, 0.03039948844909668, 0.030286848068237306, 0.030846975326538087, 0.030393407821655272, 0.030788543701171876, 0.03061759948730469, 0.030502912521362304, 0.03036774444580078, 0.030439424514770507, 0.030308351516723633, 0.03039232063293457, 0.030507007598876954, 0.030346303939819335, 0.030473152160644532, 0.031123455047607423, 0.0304005126953125, 0.030484479904174806, 0.030322559356689455, 0.030294048309326173, 0.03061155128479004, 0.030648319244384766, 0.031055328369140624, 0.030437759399414063, 0.030400672912597657, 0.03029337692260742, 0.030304895401000977, 0.030449663162231445, 0.030203903198242187, 0.030410751342773438, 0.030346944808959962, 0.030652639389038085, 0.031215711593627928, 0.031186559677124023, 0.031336799621582034, 0.030621696472167968, 0.030310400009155275, 0.030640127182006836, 0.030504959106445313, 0.030484479904174806, 0.030582399368286134, 0.03449689483642578, 0.030684288024902345, 0.03075542449951172, 0.030463775634765624, 0.030388608932495117, 0.03043084716796875, 0.03047270393371582, 0.030590879440307618, 0.03061564826965332, 0.03057459259033203, 0.030717792510986327, 0.03046006393432617, 0.030328832626342773, 0.030321792602539064, 0.030354303359985353, 0.030287839889526366, 0.030283584594726562, 0.03056857681274414, 0.030530912399291992, 0.030571264266967775, 0.030457183837890624, 0.03047248077392578, 0.030524831771850586, 0.030783552169799805, 0.030317087173461914, 0.030680639266967773, 0.03088467216491699, 0.03085843276977539, 0.030816287994384767, 0.03039516830444336, 0.03038412857055664, 0.03053932762145996, 0.031526464462280274, 0.03445235061645508, 0.03072204780578613, 0.03055615997314453, 0.030416799545288087, 0.03041904067993164, 0.030250816345214843, 0.030353248596191405, 0.030832256317138672, 0.03067363166809082, 0.030341056823730467, 0.030312320709228516, 0.030349504470825194, 0.030482431411743165, 0.0303176326751709, 0.03031545639038086, 0.03029145622253418, 0.030280191421508788, 0.030365695953369142, 0.03058073616027832, 0.03027507209777832, 0.030370271682739258, 0.030248992919921874, 0.030328832626342773, 0.03230534362792969, 0.03115007972717285, 0.030762975692749023, 0.030697504043579103, 0.030434879302978515, 0.03034976005554199, 0.030398464202880858, 0.030445568084716795, 0.03033625602722168, 0.030395135879516602, 0.030486528396606444, 0.03038822364807129, 0.03205855941772461, 0.03105670356750488, 0.030350751876831054, 0.030316703796386717, 0.030288320541381836, 0.030259199142456054, 0.031102720260620116, 0.03032089614868164, 0.03262428665161133, 0.03077292823791504, 0.03046668815612793, 0.030527360916137697, 0.030353567123413087, 0.03038617515563965, 0.03079100799560547, 0.030503551483154298, 0.030527519226074218, 0.03041279983520508, 0.0303636474609375, 0.030506847381591796, 0.030955680847167967, 0.031098880767822266, 0.03220441436767578, 0.03072198486328125, 0.030635648727416993, 
0.03054038429260254, 0.030611679077148436, 0.030283775329589844, 0.030411903381347655, 0.03059391975402832, 0.03059097671508789, 0.030766496658325194, 0.03069603157043457, 0.030465280532836914, 0.03060383987426758, 0.03046214485168457, 0.030317983627319335, 0.03025884819030762, 0.030412832260131837, 0.030501792907714844, 0.030332927703857423, 0.030443519592285157, 0.030672895431518556, 0.030508031845092775, 0.03059391975402832, 0.03027120018005371, 0.030245183944702148, 0.030681184768676758, 0.030762880325317384, 0.03054755210876465, 0.030480928421020508, 0.031389152526855466, 0.030761503219604493, 0.030294015884399415, 0.03037932777404785, 0.030298816680908204, 0.0304005126953125, 0.030522687911987305, 0.030353567123413087, 0.030609344482421873, 0.030359872817993162, 0.030300447463989258, 0.03037593650817871, 0.030472192764282226, 0.03032803153991699, 0.03030505561828613, 0.030891231536865234, 0.030264095306396486, 0.03032678413391113, 0.030502399444580077, 0.030831104278564454, 0.03042451286315918, 0.03035385513305664, 0.030217599868774415, 0.03046665573120117, 0.030535839080810548, 0.03026857566833496, 0.03044028854370117, 0.0303853759765625, 0.03056719970703125, 0.030848608016967774, 0.030979488372802736, 0.030667743682861327, 0.030539712905883788, 0.030685279846191408, 0.03080806350708008, 0.030462976455688476, 0.030327199935913086, 0.030374496459960938, 0.03061721611022949, 0.030622079849243165, 0.030956575393676758, 0.03057094383239746, 0.030329183578491212, 0.030314687728881837, 0.030464000701904297, 0.030320640563964843, 0.03036774444580078, 0.030717952728271485, 0.03052672004699707, 0.030322656631469727, 0.030511903762817382, 0.030443519592285157, 0.030515167236328126, 0.030459455490112305, 0.030312639236450195, 0.030601503372192383, 0.03058278465270996, 0.03034726333618164, 0.030445568084716795, 0.030298112869262695, 0.03040460777282715, 0.0308503360748291, 0.030515552520751953]",tokens/s,32.69632556084257,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run 
self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: 
FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1047.318528,912.130048,0.0,509.607936,491.434496,s,1,7.8931640625,7.8931640625,0.0,7.8931640625,7.8931640625,7.8931640625,7.8931640625,[7.8931640625],,kWh,2.3784416454138106e-05,2.615132943439747e-06,7.312783628005137e-06,3.3712333025582986e-05,,MB,1374.629888,1025.376256,0.0,608.17408,592.24832,s,10,0.20221363067626955,0.020221363067626955,0.0009067938122947693,0.019965807914733888,0.020348537445068357,0.021637165069580078,0.022668067169189454,"[0.020027040481567383, 0.019763359069824217, 0.02000592041015625, 0.019802431106567382, 0.019925695419311523, 0.0200064640045166, 0.0229257926940918, 0.019828832626342774, 0.020062175750732422, 0.01986591911315918]",tokens/s,12659.87852272129,kWh,5.841911631499443e-07,6.442588114472575e-08,3.678780720799968e-07,1.016495116374667e-06,tokens/kWh,251845774.63886377,MB,1388.085248,1040.05632,0.0,622.854144,605.085696,s,10,10.521125610351561,1.052112561035156,0.002799083005156702,1.0531063232421873,1.0548631958007813,1.0554110290527343,1.0558492956542969,"[1.050134033203125, 1.0559588623046876, 1.054447265625, 1.0477164306640625, 1.0531734619140625, 1.0530391845703124, 1.0489066162109375, 1.0542381591796874, 1.054741455078125, 1.0487701416015625]",tokens/s,59.8795246185592,kWh,3.0891509911847134e-05,3.40682836533891e-06,1.2174020850319656e-05,4.647235912750569e-05,tokens/kWh,1355644.541890968,,s,630,10.515767957687368,0.016691695170932348,0.0002803327339042266,0.0166222562789917,0.01691523494720459,0.017135252571105954,0.01784000793457032,"[0.016380767822265625, 0.016780960083007813, 0.016632160186767577, 0.016550912857055664, 0.016516096115112306, 0.016520320892333986, 0.016935808181762695, 0.017348608016967772, 0.01657241630554199, 0.016450719833374025, 0.016581151962280275, 0.016547712326049804, 0.016815872192382814, 0.016505279541015626, 0.01681657600402832, 0.016695072174072265, 0.016619327545166016, 0.016633056640625, 0.016667135238647462, 0.01722777557373047, 0.016757247924804687, 0.01666771125793457, 0.016565183639526367, 0.016578559875488282, 0.01658470344543457, 0.01663599967956543, 0.01656812858581543, 0.01670479965209961, 0.01682009506225586, 0.01673116874694824, 0.016596895217895508, 0.016469856262207032, 0.01659427261352539, 0.01664031982421875, 0.016595327377319335, 0.016590911865234374, 0.016670112609863282, 0.0166910400390625, 0.016765247344970702, 0.01670604705810547, 0.016588703155517578, 0.01672764778137207, 0.016765439987182617, 0.016823520660400392, 0.016906463623046875, 0.016689727783203125, 0.016713951110839845, 0.01663369560241699, 0.016564159393310546, 0.01663759994506836, 0.016555519104003907, 0.016612192153930665, 0.01653753662109375, 0.016539520263671875, 0.016572608947753906, 
0.016613632202148437, 0.016850656509399414, 0.01660316848754883, 0.016557376861572267, 0.01661612892150879, 0.01664543914794922, 0.01658131217956543, 0.016547136306762696, 0.016222240447998047, 0.016564224243164064, 0.01654278373718262, 0.016564992904663085, 0.016546079635620117, 0.016523231506347658, 0.016410144805908203, 0.01658323287963867, 0.016647359848022462, 0.01657923126220703, 0.016886783599853517, 0.01700931167602539, 0.017004512786865236, 0.017025279998779296, 0.01695961570739746, 0.016698591232299803, 0.016708383560180663, 0.016563711166381837, 0.016656736373901367, 0.01690025520324707, 0.017283296585083006, 0.017351648330688477, 0.01760700798034668, 0.017610303878784178, 0.017590208053588866, 0.017629919052124025, 0.017310047149658205, 0.01720924758911133, 0.01727289581298828, 0.01675993537902832, 0.016640832901000976, 0.016578399658203125, 0.016508159637451173, 0.016516000747680663, 0.016444704055786134, 0.016535999298095704, 0.016480287551879882, 0.01655625534057617, 0.016525344848632814, 0.016750591278076172, 0.016693248748779296, 0.016713983535766603, 0.01667580795288086, 0.016538175582885742, 0.016547679901123047, 0.016498655319213868, 0.016572128295898436, 0.016489152908325196, 0.016856447219848632, 0.01702521514892578, 0.016562400817871095, 0.01682454490661621, 0.016649215698242188, 0.01662054443359375, 0.01656595230102539, 0.016557376861572267, 0.016700672149658202, 0.016664480209350584, 0.01655792045593262, 0.01658880043029785, 0.01650864028930664, 0.016519519805908205, 0.01675052833557129, 0.01627136039733887, 0.016736255645751954, 0.016662431716918946, 0.016621664047241212, 0.01669055938720703, 0.01667750358581543, 0.016664575576782227, 0.01651055908203125, 0.016549823760986328, 0.016500192642211912, 0.016898624420166014, 0.01659539222717285, 0.017922048568725587, 0.017612255096435547, 0.017145952224731444, 0.018630815505981446, 0.01679782485961914, 0.016779008865356444, 0.016991968154907226, 0.01692742347717285, 0.016750591278076172, 0.016626880645751952, 0.016743104934692384, 0.01663724708557129, 0.016620351791381837, 0.016605215072631838, 0.016625631332397462, 0.0165928955078125, 0.016577760696411134, 0.016681760787963868, 0.016533504486083983, 0.01654902458190918, 0.016565088272094727, 0.0165928955078125, 0.0164881591796875, 0.016615711212158202, 0.016664575576782227, 0.01676288032531738, 0.016613439559936525, 0.016793535232543944, 0.01666598320007324, 0.01664064025878906, 0.016652288436889647, 0.016649791717529297, 0.01651737594604492, 0.016566463470458984, 0.016748544692993163, 0.016621408462524415, 0.016545951843261717, 0.01659436798095703, 0.01671174430847168, 0.01696512031555176, 0.016642559051513673, 0.01675315284729004, 0.016523263931274415, 0.016562175750732423, 0.0167607364654541, 0.016500831604003906, 0.016861183166503906, 0.01680201530456543, 0.01668611145019531, 0.016538368225097657, 0.016545791625976563, 0.016180992126464844, 0.016689504623413086, 0.016539648056030275, 0.01652128028869629, 0.016531391143798826, 0.016570367813110352, 0.016547040939331056, 0.016486719131469728, 0.016476640701293944, 0.016490400314331053, 0.016531072616577148, 0.01653536033630371, 0.0165316162109375, 0.016767711639404298, 0.016912160873413087, 0.01742464065551758, 0.0169736328125, 0.016852928161621095, 0.016648096084594728, 0.016670143127441406, 0.016687231063842775, 0.016773664474487304, 0.0165164794921875, 0.01651980781555176, 0.016523296356201173, 0.016539615631103517, 0.016592863082885743, 0.01650716781616211, 0.016520959854125977, 0.016477695465087892, 
0.016529504776000976, 0.016488895416259766, 0.01649580764770508, 0.016540224075317383, 0.01656768035888672, 0.016528224945068358, 0.016563615798950194, 0.01669590377807617, 0.016639999389648438, 0.01663795280456543, 0.016558080673217773, 0.016590143203735353, 0.016659135818481444, 0.016517120361328123, 0.016494592666625976, 0.01643283271789551, 0.016500608444213867, 0.017413951873779296, 0.01670412826538086, 0.016693248748779296, 0.016736255645751954, 0.016633087158203125, 0.016578559875488282, 0.016638368606567384, 0.01658915138244629, 0.01676438331604004, 0.016725919723510743, 0.016585344314575194, 0.016596031188964844, 0.01659343910217285, 0.016588672637939453, 0.016796192169189452, 0.01661337661743164, 0.01679952049255371, 0.016644927978515626, 0.016920448303222656, 0.016486080169677734, 0.01651696014404297, 0.016499008178710937, 0.01651091194152832, 0.016544992446899415, 0.016492448806762695, 0.01654015922546387, 0.0166014404296875, 0.016789087295532225, 0.016804384231567382, 0.016621023178100585, 0.016675327301025392, 0.016852767944335937, 0.01664364814758301, 0.01656857681274414, 0.016591264724731446, 0.016861183166503906, 0.016965791702270507, 0.01683030319213867, 0.01679155158996582, 0.016842752456665038, 0.01676723289489746, 0.01682377624511719, 0.016734495162963867, 0.01653555107116699, 0.016519168853759765, 0.016661792755126952, 0.016523263931274415, 0.01654425621032715, 0.01659516716003418, 0.01658399963378906, 0.016644800186157226, 0.016633056640625, 0.016605983734130858, 0.016517120361328123, 0.016536767959594727, 0.016696128845214844, 0.016695232391357423, 0.016587936401367187, 0.016673471450805662, 0.016482336044311523, 0.016644287109375, 0.01660723114013672, 0.016547840118408205, 0.016631135940551756, 0.019049280166625975, 0.016885599136352538, 0.016899391174316405, 0.01668764877319336, 0.0167425594329834, 0.016616800308227538, 0.016705888748168946, 0.016666112899780275, 0.016874303817749025, 0.01678870391845703, 0.016519968032836913, 0.016748544692993163, 0.016576255798339844, 0.016914655685424804, 0.016746559143066407, 0.016268447875976564, 0.016722911834716796, 0.016729984283447265, 0.016623552322387696, 0.016535615921020506, 0.016568288803100586, 0.016566303253173827, 0.0165533447265625, 0.017025663375854493, 0.016661535263061522, 0.016589088439941405, 0.016658239364624024, 0.016634624481201173, 0.01658687973022461, 0.016559295654296875, 0.016547744750976562, 0.016524192810058593, 0.016484352111816408, 0.01644451141357422, 0.016477088928222656, 0.016480384826660158, 0.01660323143005371, 0.016538784027099608, 0.01646793556213379, 0.016564416885375976, 0.016816095352172853, 0.016447999954223632, 0.0164715518951416, 0.01647648048400879, 0.01652140808105469, 0.016461151123046875, 0.01650534439086914, 0.01650806427001953, 0.01661644744873047, 0.016745920181274413, 0.016451679229736327, 0.01653193664550781, 0.016558080673217773, 0.0165295352935791, 0.016508800506591797, 0.01657241630554199, 0.0164454402923584, 0.016509056091308594, 0.017893024444580078, 0.017585664749145507, 0.018968351364135744, 0.017113855361938476, 0.016701696395874023, 0.01692857551574707, 0.016830591201782225, 0.016672767639160157, 0.016719839096069337, 0.01666870307922363, 0.016755840301513673, 0.01661427116394043, 0.016773120880126953, 0.016592096328735352, 0.017094655990600584, 0.016626752853393555, 0.01658412742614746, 0.01647439956665039, 0.016558080673217773, 0.018143232345581056, 0.0163507194519043, 0.016630079269409178, 0.016632095336914062, 0.01664761543273926, 0.017122175216674803, 
0.016600896835327148, 0.016734464645385742, 0.016621503829956055, 0.016571775436401367, 0.016485055923461913, 0.01660927963256836, 0.01656012725830078, 0.01658291244506836, 0.016496383666992187, 0.016517120361328123, 0.016586751937866212, 0.016563295364379883, 0.01662447929382324, 0.016612512588500977, 0.01654876708984375, 0.016749568939208984, 0.016546815872192384, 0.016575551986694335, 0.016524063110351563, 0.016596832275390626, 0.01666489601135254, 0.0166582088470459, 0.016590303421020507, 0.01661574363708496, 0.016953792572021485, 0.0167587833404541, 0.016695072174072265, 0.016725887298583986, 0.01709414482116699, 0.016826976776123048, 0.01697407913208008, 0.016690208435058595, 0.0165994873046875, 0.01665446472167969, 0.016775583267211912, 0.016666175842285157, 0.016550336837768555, 0.016774528503417967, 0.016588991165161132, 0.016726463317871094, 0.017295360565185547, 0.016641664505004882, 0.016609664916992187, 0.016721920013427736, 0.01683875274658203, 0.016551839828491212, 0.016516864776611326, 0.016494848251342772, 0.016549888610839843, 0.01647369575500488, 0.016484384536743165, 0.01651545524597168, 0.016586271286010743, 0.016431583404541015, 0.016411775588989257, 0.016503679275512696, 0.016545791625976563, 0.01653660774230957, 0.016521215438842773, 0.016814271926879884, 0.01663385581970215, 0.016539648056030275, 0.016465919494628906, 0.016539648056030275, 0.016650144577026366, 0.01654115104675293, 0.016535871505737303, 0.016922943115234373, 0.016558080673217773, 0.016581888198852538, 0.01652934455871582, 0.01654457664489746, 0.016514623641967773, 0.01644384002685547, 0.016545791625976563, 0.016475488662719726, 0.016507551193237303, 0.016480255126953124, 0.016422815322875976, 0.01650022315979004, 0.01656483268737793, 0.016512895584106447, 0.016723487854003908, 0.01662345504760742, 0.016526079177856444, 0.01652716827392578, 0.01669548797607422, 0.01659267234802246, 0.016476383209228517, 0.01765900802612305, 0.01855718421936035, 0.017171072006225585, 0.016971359252929686, 0.016822399139404295, 0.0170185604095459, 0.016681856155395507, 0.016829856872558592, 0.01690412712097168, 0.016630144119262696, 0.01668681526184082, 0.016619808197021486, 0.01665433692932129, 0.01667683219909668, 0.01681001663208008, 0.01664169692993164, 0.0166014404296875, 0.016701440811157226, 0.016719871520996094, 0.01664614486694336, 0.016922624588012695, 0.016878623962402344, 0.017014944076538085, 0.016785408020019533, 0.016775360107421877, 0.016750688552856444, 0.016790048599243164, 0.016953344345092772, 0.01669334411621094, 0.01661667251586914, 0.017219648361206055, 0.01679033660888672, 0.01632271957397461, 0.01665862464904785, 0.016569791793823244, 0.01664064025878906, 0.016619455337524413, 0.016562175750732423, 0.01661952018737793, 0.016583776473999022, 0.01661382484436035, 0.016599519729614258, 0.016658336639404296, 0.01661142349243164, 0.0168222713470459, 0.017225631713867186, 0.016697439193725586, 0.01681407928466797, 0.017326080322265625, 0.017088064193725584, 0.016933311462402345, 0.016707487106323242, 0.016676191329956055, 0.016865087509155274, 0.016597951889038086, 0.016584384918212892, 0.016675264358520507, 0.016819679260253906, 0.016693151473999024, 0.01669375991821289, 0.01657347106933594, 0.016640031814575195, 0.016600000381469728, 0.01680384063720703, 0.017514495849609374, 0.017710208892822266, 0.017230655670166014, 0.017035327911376952, 0.01682841682434082, 0.016721088409423827, 0.016757568359375, 0.016611328125, 0.016622848510742187, 0.0166278076171875, 0.016599712371826173, 
0.01663327980041504, 0.016728639602661133, 0.016719423294067382, 0.016738752365112304, 0.016596704483032226, 0.016531232833862305, 0.016483936309814453, 0.016652320861816405, 0.016567167282104493, 0.016614463806152342, 0.016575456619262696, 0.016639968872070313, 0.016616960525512696, 0.01653536033630371, 0.016949951171875, 0.01683251190185547, 0.016652191162109375, 0.016705631256103515, 0.01669228744506836, 0.016615999221801757, 0.016253120422363283, 0.016533248901367186, 0.016529855728149415, 0.016482112884521484, 0.016518495559692384, 0.016530368804931642, 0.016568031311035156, 0.016593120574951173, 0.016647968292236328, 0.016569568634033204, 0.01664080047607422, 0.016547903060913086, 0.016668607711791992, 0.016543712615966797, 0.016547872543334962, 0.016527360916137695, 0.016910335540771485, 0.016633823394775392, 0.016740383148193358, 0.01664614486694336, 0.016768287658691407, 0.016892351150512696, 0.016847135543823243, 0.01689174461364746, 0.016660543441772462, 0.016613536834716797, 0.016618431091308592, 0.01701580810546875, 0.016574464797973632, 0.016744447708129884, 0.01665654373168945, 0.016627552032470704, 0.01661510467529297, 0.016554304122924805, 0.01665023994445801, 0.016869344711303712, 0.016758687973022462, 0.016692960739135742, 0.01670729637145996, 0.016629600524902345, 0.016837472915649413, 0.016780799865722656, 0.01662544059753418, 0.0165383358001709, 0.016723167419433593, 0.01660393524169922, 0.01665843200683594, 0.01653718376159668, 0.016742176055908203, 0.016527040481567383, 0.016628608703613282, 0.016588863372802735, 0.016619775772094728, 0.016693119049072266, 0.0168590087890625, 0.016645343780517578, 0.016599391937255858, 0.016529855728149415, 0.016482303619384766, 0.016496639251708984, 0.01661891174316406, 0.016487007141113282, 0.016424959182739257]",tokens/s,59.910032489776356,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,2206.498816,2271.084544,0.0,1868.562432,1779.722752,s,1,9.1956044921875,9.1956044921875,0.0,9.1956044921875,9.1956044921875,9.1956044921875,9.1956044921875,[9.1956044921875],,kWh,5.702162377914798e-05,6.2827142298745825e-06,1.756251404999576e-05,8.086685205901832e-05,,MB,2297.995264,2378.039296,0.0,1960.83712,1916.447232,s,10,0.9634465255737305,0.09634465255737304,0.0005154187172517159,0.096394287109375,0.09664055786132812,0.09703864746093749,0.09735711914062499,"[0.09519411468505859, 0.09609849548339844, 0.09743673706054687, 0.09623023986816406, 0.09655209350585937, 0.09641203308105468, 0.09637654113769531, 0.09643942260742187, 0.09628720092773438, 
0.09641964721679687]",tokens/s,2657.127232334483,kWh,2.8048670713885964e-06,3.0932694706088443e-07,1.8566311149334095e-06,4.9708251333828906e-06,tokens/kWh,51500504.06737592,MB,2298.5728,2545.811456,0.0,2128.60928,2001.572864,s,10,19.789150390625004,1.9789150390625,0.009842989886318898,1.9752076416015625,1.99432197265625,1.9960130981445312,1.9973659985351562,"[1.981271240234375, 1.9939461669921874, 1.9746187744140624, 1.9757965087890625, 1.9847132568359376, 1.974387451171875, 1.9685323486328126, 1.9977042236328124, 1.9698544921875, 1.968325927734375]",tokens/s,31.835626470273276,kWh,4.38184827756926e-05,4.8322452112811885e-06,2.4665112324666934e-05,7.331584031164072e-05,tokens/kWh,859295.8865670558,,s,630,19.785949651718152,0.03140626928844149,0.0006218747355141671,0.031244416236877442,0.03193429794311524,0.03217912921905518,0.034664887962341316,"[0.03116828727722168, 0.030988800048828126, 0.03141257667541504, 0.031997983932495117, 0.031946399688720706, 0.03176227188110352, 0.0316011848449707, 0.03130524826049805, 0.031012832641601564, 0.031008384704589845, 0.031052608489990235, 0.03103468894958496, 0.030841119766235353, 0.03106038475036621, 0.03524991989135742, 0.031189247131347655, 0.031147680282592773, 0.03092835235595703, 0.03089820861816406, 0.030991199493408204, 0.03098988723754883, 0.031052223205566408, 0.03423231887817383, 0.031213567733764647, 0.031055871963500976, 0.031051647186279296, 0.031549856185913085, 0.031676191329956055, 0.03144806480407715, 0.03168908882141113, 0.03178886413574219, 0.031832544326782224, 0.031129087448120117, 0.030933792114257813, 0.030992191314697267, 0.031174848556518555, 0.03097113609313965, 0.03117919921875, 0.031065439224243162, 0.030972448348999024, 0.03108294486999512, 0.031694623947143556, 0.031500255584716794, 0.03161235237121582, 0.03132643127441406, 0.031021663665771484, 0.031170303344726563, 0.03493305587768555, 0.031106239318847657, 0.03097654342651367, 0.031047903060913085, 0.03113814353942871, 0.030864351272583006, 0.03117535972595215, 0.031264768600463864, 0.03220889663696289, 0.03322675323486328, 0.031329887390136715, 0.031154592514038085, 0.031096160888671873, 0.03126543998718262, 0.03118489646911621, 0.031027551651000976, 0.031526432037353516, 0.03139955139160156, 0.031033567428588867, 0.031154815673828124, 0.031119359970092773, 0.031072256088256835, 0.03095487976074219, 0.030993024826049806, 0.03084492874145508, 0.03095756721496582, 0.030899328231811525, 0.031099775314331054, 0.030934080123901368, 0.031042495727539064, 0.03497574234008789, 0.03115727996826172, 0.030933984756469725, 0.03103299140930176, 0.03081046485900879, 0.031080543518066408, 0.030968864440917968, 0.03193740844726563, 0.032540672302246096, 0.031459327697753905, 0.031188991546630858, 0.031029247283935548, 0.031104352951049803, 0.03143657684326172, 0.031059040069580077, 0.03163030433654785, 0.031186975479125977, 0.03143964767456055, 0.03137513542175293, 0.03154761505126953, 0.031602815628051756, 0.03179251289367676, 0.031656448364257815, 0.03151795196533203, 0.03155401611328125, 0.03176038360595703, 0.03168217658996582, 0.03177347183227539, 0.0315731201171875, 0.03174297523498535, 0.03179475212097168, 0.0319489917755127, 0.034582527160644534, 0.034328575134277346, 0.032204158782958986, 0.03200665664672851, 0.03179327964782715, 0.031929855346679685, 0.03166783905029297, 0.03191910362243652, 0.031885183334350586, 0.031934240341186526, 0.03180268859863281, 0.0317664966583252, 0.032117214202880856, 0.03176668739318848, 0.03155999946594238, 0.031586143493652345, 
0.0332432975769043, 0.032161792755126956, 0.03158220863342285, 0.031096832275390625, 0.031254528045654296, 0.031145280838012695, 0.030880704879760742, 0.030893695831298827, 0.030906496047973634, 0.030930047988891603, 0.031024255752563477, 0.03122559928894043, 0.031237695693969728, 0.031186975479125977, 0.03514191818237305, 0.03116556739807129, 0.030978080749511718, 0.03157814407348633, 0.032404129028320315, 0.031237632751464843, 0.030938079833984375, 0.031007551193237306, 0.03110105514526367, 0.031102943420410156, 0.031091903686523436, 0.031054880142211912, 0.030984767913818358, 0.031030975341796874, 0.031003072738647462, 0.030838783264160157, 0.03090630340576172, 0.030922208786010742, 0.030986015319824218, 0.031127456665039063, 0.031189727783203124, 0.03120351982116699, 0.030904319763183592, 0.03105958366394043, 0.03091075134277344, 0.03106211280822754, 0.031008480072021484, 0.03096835136413574, 0.03102899169921875, 0.03088719940185547, 0.031090944290161134, 0.030931072235107424, 0.0313143367767334, 0.03546444702148437, 0.03151123237609863, 0.0315424633026123, 0.031271743774414065, 0.031225568771362306, 0.031189279556274416, 0.03135487937927246, 0.03134409523010254, 0.03110966491699219, 0.03140784072875977, 0.03146956825256347, 0.03123843193054199, 0.03143270492553711, 0.03185830307006836, 0.03171471977233887, 0.03163785552978516, 0.03175625610351562, 0.031858720779418946, 0.03194083213806152, 0.0317540168762207, 0.03174399948120117, 0.031625024795532225, 0.03167660713195801, 0.03158835220336914, 0.03162028884887695, 0.03219331359863281, 0.03125251197814941, 0.03136102485656738, 0.03145270347595215, 0.03220896148681641, 0.03504966354370117, 0.03187241554260254, 0.031628095626831054, 0.03165340805053711, 0.031623231887817384, 0.03161743927001953, 0.031410175323486327, 0.03140812873840332, 0.031389696121215824, 0.03130268859863281, 0.031341087341308596, 0.031136192321777344, 0.031090431213378907, 0.03079145622253418, 0.030951295852661133, 0.030894527435302733, 0.030990495681762695, 0.031153215408325195, 0.031202239990234373, 0.031088640213012695, 0.03103539276123047, 0.030922752380371094, 0.03090015983581543, 0.031070272445678712, 0.031067712783813477, 0.03183990478515625, 0.03083907127380371, 0.03089664077758789, 0.03100876808166504, 0.03077516746520996, 0.030863040924072264, 0.030935487747192382, 0.031069887161254882, 0.03145136070251465, 0.031442495346069334, 0.031099424362182618, 0.0308953914642334, 0.030845663070678712, 0.030887935638427736, 0.03080806350708008, 0.03097769546508789, 0.03094972801208496, 0.030828832626342773, 0.031092063903808594, 0.031250816345214844, 0.032128734588623045, 0.03128486442565918, 0.03186140823364258, 0.03114396858215332, 0.031385568618774416, 0.03197520065307617, 0.031979743957519534, 0.03213734436035156, 0.03215081787109375, 0.03193036842346191, 0.03199852752685547, 0.03161308860778809, 0.03181711959838867, 0.031674976348876956, 0.03191772842407226, 0.031934816360473635, 0.031821823120117186, 0.03184828758239746, 0.03212713623046875, 0.03208748626708984, 0.03170976066589355, 0.03148361587524414, 0.03147772789001465, 0.0314531192779541, 0.031645696640014646, 0.03163523292541504, 0.03169545555114746, 0.03131731224060059, 0.031138496398925783, 0.03120947265625, 0.03110838317871094, 0.030888351440429687, 0.031031551361083983, 0.030976064682006837, 0.03143680000305176, 0.031180639266967774, 0.031072416305541993, 0.030835968017578125, 0.03112556838989258, 0.0311013126373291, 0.03169926452636719, 0.031676416397094724, 0.0315228157043457, 0.031676416397094724, 
0.03162931251525879, 0.03149964714050293, 0.031316608428955076, 0.031115488052368166, 0.031490976333618165, 0.03146022415161133, 0.031933792114257814, 0.03266831970214844, 0.0316939525604248, 0.031052671432495117, 0.03097020721435547, 0.03135043144226074, 0.030895231246948242, 0.031038143157958983, 0.03088505554199219, 0.031065088272094726, 0.031014528274536133, 0.03148608016967774, 0.031662336349487306, 0.03162675285339355, 0.03138924789428711, 0.031496320724487305, 0.03140867233276367, 0.031174623489379882, 0.031221471786499023, 0.030953760147094726, 0.030879743576049806, 0.030963136672973634, 0.030867807388305663, 0.03086511993408203, 0.03084339141845703, 0.031164415359497072, 0.03078758430480957, 0.030948671340942382, 0.033630142211914064, 0.03195788764953613, 0.031522687911987306, 0.031618175506591795, 0.03187980842590332, 0.031443199157714846, 0.031528959274291994, 0.03165510368347168, 0.03170115280151367, 0.03147638320922851, 0.031053823471069338, 0.03092255973815918, 0.030910655975341796, 0.03096780776977539, 0.03119308853149414, 0.031006719589233397, 0.03128121566772461, 0.03112953567504883, 0.03155353546142578, 0.03166559982299805, 0.031641183853149416, 0.031986656188964846, 0.031039487838745116, 0.031106271743774415, 0.030832992553710938, 0.031029695510864257, 0.03083263969421387, 0.03095327949523926, 0.030959552764892578, 0.031131359100341798, 0.03137763214111328, 0.031014720916748048, 0.031064159393310548, 0.03387433624267578, 0.031696895599365234, 0.031323328018188476, 0.03139369583129883, 0.031152063369750977, 0.033185024261474606, 0.03214131164550781, 0.03104739189147949, 0.03147980880737305, 0.031152128219604492, 0.03097360038757324, 0.030888288497924805, 0.03099385643005371, 0.031005247116088867, 0.031047264099121095, 0.031178367614746093, 0.031211679458618163, 0.03136102485656738, 0.0312644157409668, 0.031167455673217773, 0.031779071807861325, 0.03188531112670898, 0.03166364860534668, 0.03148134422302246, 0.03139251136779785, 0.03135510444641113, 0.031254528045654296, 0.03124608039855957, 0.031158527374267577, 0.034698528289794923, 0.03244105529785156, 0.032565185546875, 0.03169849586486816, 0.03171788787841797, 0.03109996795654297, 0.03111747169494629, 0.031503135681152344, 0.031327455520629884, 0.030931520462036132, 0.030947551727294922, 0.03093891143798828, 0.030826143264770508, 0.030889728546142577, 0.030984991073608397, 0.0309268798828125, 0.030991359710693358, 0.030978719711303712, 0.031094112396240235, 0.030939584732055665, 0.031093311309814454, 0.030938880920410156, 0.031049856185913084, 0.030799488067626953, 0.030906879425048828, 0.030848608016967774, 0.030826400756835938, 0.030849536895751952, 0.030846399307250978, 0.03095609664916992, 0.03174604797363281, 0.030963712692260743, 0.031002336502075196, 0.031137056350708008, 0.031224639892578124, 0.031000768661499024, 0.030980096817016602, 0.03128278350830078, 0.03126723289489746, 0.03093008041381836, 0.03082713508605957, 0.03081443214416504, 0.030888320922851563, 0.0308504638671875, 0.031097055435180664, 0.03100467109680176, 0.031275007247924806, 0.03122380828857422, 0.03129955291748047, 0.03147068786621094, 0.03135791969299316, 0.03143008041381836, 0.0311506233215332, 0.031080448150634765, 0.031690528869628906, 0.031440895080566404, 0.031217599868774416, 0.03126803207397461, 0.031300479888916016, 0.03417497634887695, 0.032129024505615236, 0.03222118377685547, 0.0317255687713623, 0.03155558395385742, 0.03190336036682129, 0.031884927749633786, 0.03195276832580567, 0.03228147125244141, 0.0323328971862793, 
0.031939487457275394, 0.031959039688110355, 0.03187113571166992, 0.03195273590087891, 0.031932416915893554, 0.03188719940185547, 0.03158236885070801, 0.03168003273010254, 0.03150076866149902, 0.03173990440368652, 0.031796735763549806, 0.031608415603637696, 0.03151948738098145, 0.03138918495178223, 0.03144976043701172, 0.03138355255126953, 0.03143270492553711, 0.031291391372680666, 0.031422464370727536, 0.03139743995666504, 0.031432640075683596, 0.03173017692565918, 0.03186793518066406, 0.03173398399353027, 0.031657855987548826, 0.031503231048583984, 0.0315863037109375, 0.03153462409973144, 0.03167004776000976, 0.03160134315490723, 0.031911327362060544, 0.032016193389892575, 0.03162150382995606, 0.03156806373596192, 0.031740127563476564, 0.03176777648925781, 0.03165059280395508, 0.03180371284484863, 0.032253440856933595, 0.03166431999206543, 0.03176959991455078, 0.03189248085021973, 0.03219660949707031, 0.03153424072265625, 0.031105888366699218, 0.03097804832458496, 0.030874784469604493, 0.03096006393432617, 0.03158038330078125, 0.03145747184753418, 0.03141587257385254, 0.031285984039306644, 0.031090272903442382, 0.031090368270874025, 0.03168943977355957, 0.031850496292114255, 0.031870399475097656, 0.03195379257202149, 0.03196240043640137, 0.03168297576904297, 0.03175046348571777, 0.03130745506286621, 0.03105526351928711, 0.03112611198425293, 0.030943231582641603, 0.031031328201293944, 0.031065759658813478, 0.031089023590087892, 0.03099612808227539, 0.03122524833679199, 0.03125139236450195, 0.03136300849914551, 0.030954944610595704, 0.030853248596191405, 0.03127340888977051, 0.030892032623291016, 0.030832096099853514, 0.031123231887817383, 0.030948448181152343, 0.030979904174804687, 0.03091164779663086, 0.030935007095336912, 0.031013599395751955, 0.030904319763183592, 0.03098784065246582, 0.030838272094726563, 0.03099251174926758, 0.031406560897827146, 0.03148604774475098, 0.031839744567871094, 0.0317263355255127, 0.03193638420104981, 0.03149955177307129, 0.03150057601928711, 0.03234259033203125, 0.031227519989013672, 0.030982528686523438, 0.03092848014831543, 0.031166656494140625, 0.03141632080078125, 0.031248863220214845, 0.030969184875488283, 0.030997983932495116, 0.031242752075195314, 0.031021503448486327, 0.03098419189453125, 0.031250591278076174, 0.03157180786132813, 0.03112550354003906, 0.030980096817016602, 0.03115827178955078, 0.03154944038391113, 0.03142598342895508, 0.031203903198242188, 0.03099238395690918, 0.030950559616088866, 0.030991199493408204, 0.03101286315917969, 0.03122380828857422, 0.031229663848876953, 0.03138768005371094, 0.031019264221191407, 0.031005792617797852, 0.031127647399902345, 0.03296953582763672, 0.03131977653503418, 0.031140127182006837, 0.03118838310241699, 0.030959711074829102, 0.03099251174926758, 0.03106559944152832, 0.031001472473144533, 0.031285247802734374, 0.031024831771850586, 0.03132991981506348, 0.031828384399414066, 0.031262304306030275, 0.031316671371459964, 0.03143683242797852, 0.03138531112670898, 0.031080703735351562, 0.030905408859252928, 0.03162553596496582, 0.030923263549804687, 0.030830047607421876, 0.031075103759765625, 0.031226911544799806, 0.030899040222167967, 0.03111280059814453, 0.03110553550720215, 0.03160463905334473, 0.03093708801269531, 0.031440895080566404, 0.031133024215698243, 0.03197599983215332, 0.03165769577026367, 0.03190726470947266, 0.03203571319580078, 0.03161094474792481, 0.03177190399169922, 0.03145305633544922, 0.030981216430664062, 0.030972896575927736, 0.03086739158630371, 0.03131427192687988, 
0.03119772720336914, 0.03101487922668457, 0.03097760009765625, 0.030814687728881837, 0.030989696502685547, 0.031058559417724608, 0.030918111801147462, 0.03081679916381836, 0.031197183609008788]",tokens/s,31.840776464590522,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,7438.864384,8048.738304,0.0,7646.216192,7627.584,s,1,13.0644892578125,13.0644892578125,0.0,13.0644892578125,13.0644892578125,13.0644892578125,13.0644892578125,[13.0644892578125],,kWh,0.00017518869824164842,1.9317285628127295e-05,5.484393276400379e-05,0.0002493499166337795,,MB,1775.828992,8694.66112,0.0,8277.458944,8199.8592,s,10,3.582872100830078,0.3582872100830078,0.0009407618133292165,0.3583049621582031,0.3590489440917969,0.3596042755126953,0.36004854064941405,"[0.357796875, 0.3574187622070312, 0.35661016845703125, 0.35762802124023435, 0.35818572998046877, 0.358925537109375, 0.3588048095703125, 0.35891839599609376, 0.3601596069335937, 0.3584241943359375]",tokens/s,714.5105736280401,kWh,1.0503318217708586e-05,1.157523856455291e-06,6.951463894499779e-06,1.8612305968663655e-05,tokens/kWh,13754340.834016524,MB,1783.578624,9009.23392,0.0,8592.031744,8476.849152,s,10,29.597494873046877,2.9597494873046877,0.005798273546547342,2.961339599609375,2.965486962890625,2.966429150390625,2.967182900390625,"[2.949198974609375, 2.949369384765625, 2.958435791015625, 2.962334228515625, 2.963216796875, 2.962507568359375, 2.960344970703125, 2.965277587890625, 2.967371337890625, 2.959438232421875]",tokens/s,21.285585239638404,kWh,9.097741691895747e-05,1.0035818115743189e-05,6.064620129470096e-05,0.00016165943632940162,tokens/kWh,389708.15085380786,,s,630,29.590878746032715,0.04696964880322653,0.00042575473763651814,0.046929279327392576,0.04745906562805176,0.04756553688049316,0.04830731567382813,"[0.04735295867919922, 0.04676883316040039, 0.046278846740722655, 0.04642559814453125, 0.04624374389648438, 0.04616624069213867, 0.046163711547851566, 0.04617689514160156, 0.04638719940185547, 0.046460289001464844, 0.0464791374206543, 0.046449504852294925, 0.04642611312866211, 0.04645478439331055, 0.046412799835205076, 0.04653894424438477, 0.04673446273803711, 0.04670767974853516, 0.04686681747436523, 0.04665603256225586, 0.046703712463378906, 0.046770912170410156, 0.046607776641845705, 0.04654550552368164, 0.04661043167114258, 0.04644607925415039, 0.04655769729614258, 0.046655006408691406, 0.0466844482421875, 0.0465401611328125, 0.04675667190551758, 0.04690288162231445, 0.046908992767333985, 0.04690825653076172, 0.04665302276611328, 0.04680335998535156, 0.0469153938293457, 0.046978687286376955, 0.046909343719482424, 0.047063713073730466, 0.047267200469970704, 0.047056705474853515, 0.04704134368896484, 0.04727155303955078, 0.04705926513671875, 0.047044670104980466, 0.04700364685058594, 0.04705062484741211, 0.04701356887817383, 0.04687286376953125, 0.04670431900024414, 
0.046948543548583986, 0.0470041618347168, 0.046787841796875, 0.047196704864501955, 0.04719174575805664, 0.047144447326660156, 0.047354686737060545, 0.047503360748291014, 0.04740256118774414, 0.047416927337646485, 0.0471703987121582, 0.04717772674560547, 0.04690534210205078, 0.04663907241821289, 0.04636880111694336, 0.046309375762939455, 0.046274559020996094, 0.046301185607910154, 0.046276607513427735, 0.046516223907470705, 0.04635238265991211, 0.04632950210571289, 0.046389598846435544, 0.04639955139160156, 0.04657145690917969, 0.0465715217590332, 0.046472991943359375, 0.04658812713623047, 0.04647222518920899, 0.04665193557739258, 0.0465863037109375, 0.046698497772216796, 0.046645633697509764, 0.04665462493896484, 0.04661033630371094, 0.04647135925292969, 0.04650393676757812, 0.04673798370361328, 0.04692720031738281, 0.04672521591186524, 0.046827777862548825, 0.046895233154296875, 0.04679884719848633, 0.046802017211914064, 0.04661724853515625, 0.046833919525146483, 0.046895103454589845, 0.04690943908691406, 0.04696473693847656, 0.04692105484008789, 0.046833694458007814, 0.04714089584350586, 0.046938720703125, 0.047017982482910156, 0.047025215148925784, 0.04692473602294922, 0.04684563064575195, 0.04681727981567383, 0.04715552139282227, 0.04704460906982422, 0.047023551940917965, 0.04693868637084961, 0.04712063980102539, 0.04714640045166016, 0.04696451187133789, 0.04708819198608399, 0.04719001770019531, 0.04715520095825195, 0.047241214752197266, 0.04717567825317383, 0.047176959991455075, 0.04731292724609375, 0.04734230422973633, 0.04722876739501953, 0.04746579360961914, 0.04718307113647461, 0.04652521514892578, 0.046327423095703125, 0.04614771270751953, 0.0462215690612793, 0.04663267135620117, 0.046362911224365234, 0.04654819107055664, 0.04735635375976562, 0.04639507293701172, 0.046543521881103514, 0.04756217575073242, 0.046815105438232425, 0.04661727905273438, 0.04660224151611328, 0.04660553741455078, 0.046430496215820315, 0.04665164947509766, 0.0464714241027832, 0.0465918083190918, 0.04681273651123047, 0.046782657623291014, 0.04679875183105469, 0.0466822395324707, 0.046696128845214846, 0.04668899154663086, 0.04723839950561524, 0.047231327056884764, 0.046757503509521486, 0.0474686393737793, 0.04716790390014648, 0.04696092987060547, 0.0472407341003418, 0.046977504730224606, 0.04725279998779297, 0.046940673828125, 0.04676012802124024, 0.046637054443359374, 0.04671263885498047, 0.046860286712646484, 0.046687999725341794, 0.04689555358886719, 0.04693999862670899, 0.04696899032592773, 0.04701388931274414, 0.047247360229492184, 0.046988800048828126, 0.0471860466003418, 0.047231361389160155, 0.047116287231445314, 0.04719001770019531, 0.04829004669189453, 0.04732227325439453, 0.047352447509765624, 0.047390689849853514, 0.047429630279541016, 0.04731430435180664, 0.04727391815185547, 0.047225536346435545, 0.04731276702880859, 0.0474002571105957, 0.04733993530273437, 0.047415615081787106, 0.0488458251953125, 0.04676095962524414, 0.046510143280029295, 0.046462913513183594, 0.04636262512207031, 0.04633910369873047, 0.047025249481201174, 0.04637273788452148, 0.046430206298828124, 0.04649369430541992, 0.04657897567749023, 0.0463961296081543, 0.04643139266967773, 0.046567649841308595, 0.0465035514831543, 0.04661471939086914, 0.04648204803466797, 0.04690918350219726, 0.046782752990722654, 0.046910785675048826, 0.04712739181518555, 0.04689715194702149, 0.04679398345947266, 0.04663750457763672, 0.04655750274658203, 0.046497791290283204, 0.046645023345947265, 0.046700992584228516, 0.04659782409667969, 
0.046731361389160155, 0.04675503921508789, 0.046682910919189455, 0.046734432220458984, 0.046686241149902344, 0.04697776031494141, 0.04705462265014648, 0.04664972686767578, 0.04669235229492188, 0.047101951599121096, 0.04717977523803711, 0.04741120147705078, 0.047476001739501957, 0.04744784164428711, 0.047346622467041015, 0.04732723236083984, 0.047230976104736325, 0.04732217788696289, 0.0476640625, 0.047505409240722656, 0.04730646514892578, 0.04740444946289062, 0.04748303985595703, 0.047299297332763675, 0.047083518981933595, 0.04748822402954102, 0.04759836959838867, 0.047416702270507816, 0.047435489654541016, 0.04774115371704102, 0.04753478240966797, 0.04763395309448242, 0.048027393341064456, 0.04793539047241211, 0.04759555053710938, 0.046742366790771483, 0.0464793586730957, 0.04696473693847656, 0.046310783386230465, 0.04646976089477539, 0.046559425354003904, 0.04644217681884766, 0.04657984161376953, 0.046876415252685544, 0.0467193603515625, 0.046641025543212894, 0.047344993591308594, 0.046940673828125, 0.04671241760253906, 0.046544830322265626, 0.04674991989135742, 0.04686454391479492, 0.04663897705078125, 0.05076031875610352, 0.046704383850097654, 0.04679065704345703, 0.047017982482910156, 0.046787872314453124, 0.04652051162719727, 0.04657209777832031, 0.04667087936401367, 0.04663363265991211, 0.04665167999267578, 0.04692089462280274, 0.04685260772705078, 0.04687654495239258, 0.04702444839477539, 0.04707315063476562, 0.046844158172607425, 0.04738032150268555, 0.04696614456176758, 0.047175647735595704, 0.04708953475952148, 0.047153823852539065, 0.047021888732910154, 0.04698380661010742, 0.047120223999023436, 0.046977024078369144, 0.047017982482910156, 0.047187774658203126, 0.04708310317993164, 0.04726559829711914, 0.04704499053955078, 0.04692323303222656, 0.047339553833007815, 0.047288257598876955, 0.04697187042236328, 0.04701180648803711, 0.04736822509765625, 0.047445663452148436, 0.04742614364624023, 0.04734735870361328, 0.04749321746826172, 0.04738457489013672, 0.04732656097412109, 0.047403518676757815, 0.04749123382568359, 0.04724425506591797, 0.04656329727172852, 0.0461223030090332, 0.046400222778320316, 0.04610012817382812, 0.04645308685302734, 0.046655487060546875, 0.04695859146118164, 0.04715315246582031, 0.04772857666015625, 0.04669996643066406, 0.04657436752319336, 0.046730880737304685, 0.047061214447021486, 0.04664115142822266, 0.04675174331665039, 0.046811134338378906, 0.046607486724853514, 0.046680416107177734, 0.046666271209716795, 0.04662879943847656, 0.04690678405761719, 0.04680361557006836, 0.04690943908691406, 0.0466736946105957, 0.04660630416870117, 0.046801151275634764, 0.046829311370849606, 0.04675404739379883, 0.04705625534057617, 0.04838054275512695, 0.04690534210205078, 0.04676620864868164, 0.047003520965576175, 0.046875648498535157, 0.04714188766479492, 0.04705279922485352, 0.04677987289428711, 0.046887039184570316, 0.047204288482666015, 0.046954238891601566, 0.046833473205566405, 0.0472479362487793, 0.04715760040283203, 0.04716857528686524, 0.04694316864013672, 0.047169246673583985, 0.047022369384765624, 0.04703987121582031, 0.04723500823974609, 0.04771500778198242, 0.047065086364746093, 0.04728137588500977, 0.048397087097167966, 0.04746854400634765, 0.04781363296508789, 0.04740832138061524, 0.047274112701416016, 0.047298240661621097, 0.04763417434692383, 0.04733977508544922, 0.04728179168701172, 0.04750553512573242, 0.04831436920166016, 0.046988990783691405, 0.04631788635253906, 0.04640742492675781, 0.04643353652954101, 0.04638937759399414, 0.04643641662597656, 
0.04651481628417969, 0.04642604827880859, 0.04653286361694336, 0.04666163253784179, 0.04647731018066406, 0.046430206298828124, 0.046403167724609375, 0.046363040924072264, 0.04628611373901367, 0.04686070251464844, 0.046843456268310546, 0.046733856201171875, 0.04662704086303711, 0.046997344970703125, 0.04702838516235352, 0.04678451156616211, 0.04660838317871094, 0.04663507080078125, 0.046931903839111326, 0.046952449798583984, 0.04691353607177735, 0.046739456176757815, 0.04689715194702149, 0.046811038970947266, 0.04698940658569336, 0.04715520095825195, 0.04701740646362305, 0.04685881423950195, 0.04687411117553711, 0.04700630569458008, 0.04688217544555664, 0.04711388778686523, 0.0470077133178711, 0.04706387329101563, 0.04699350357055664, 0.047164993286132814, 0.04748323059082031, 0.04719830322265625, 0.04713676834106445, 0.04720230484008789, 0.04728044891357422, 0.04734892654418945, 0.047270526885986326, 0.0471912956237793, 0.047489566802978514, 0.04746044921875, 0.04741120147705078, 0.04746614456176758, 0.04742083358764648, 0.0472606086730957, 0.04713983917236328, 0.04740793609619141, 0.04787628936767578, 0.04758323287963867, 0.047467872619628905, 0.04766582489013672, 0.0471545295715332, 0.0466393928527832, 0.04649417495727539, 0.046579616546630856, 0.04707440185546875, 0.046656192779541014, 0.046706912994384765, 0.04655270385742188, 0.04653913497924805, 0.046448638916015625, 0.046845951080322266, 0.04677750396728515, 0.04665190505981445, 0.04651168060302734, 0.04683446502685547, 0.04708163070678711, 0.04679663848876953, 0.04676982498168945, 0.04656777572631836, 0.0466247673034668, 0.04669235229492188, 0.04695449447631836, 0.04667295837402344, 0.046951454162597654, 0.046921310424804685, 0.046653377532958985, 0.04668044662475586, 0.04687843322753906, 0.04670883178710938, 0.046723262786865234, 0.046548992156982424, 0.04670054244995117, 0.04707328033447265, 0.04717567825317383, 0.04688880157470703, 0.046921886444091794, 0.04722383880615234, 0.0473097915649414, 0.04762358474731445, 0.04748553466796875, 0.047368255615234375, 0.04743161773681641, 0.04719945526123047, 0.047473438262939455, 0.04764057540893555, 0.04719772720336914, 0.04734409713745117, 0.047390689849853514, 0.04728358459472656, 0.047263423919677736, 0.04760470581054688, 0.047421440124511716, 0.047399009704589844, 0.047445919036865236, 0.04747673416137695, 0.04754841613769531, 0.04738041687011719, 0.04747398376464844, 0.047517726898193356, 0.047910751342773436, 0.04748102569580078, 0.047591838836669925, 0.047610145568847656, 0.04718057632446289, 0.04664934539794922, 0.04639539337158203, 0.046884735107421874, 0.04667552185058594, 0.04674617767333984, 0.04668809509277344, 0.046852256774902346, 0.04681932830810547, 0.04779827117919922, 0.04693135833740234, 0.04686105728149414, 0.04670038223266602, 0.0467182731628418, 0.04705523300170898, 0.046779903411865234, 0.04667679977416992, 0.04676812744140625, 0.046742752075195314, 0.04702288055419922, 0.047146465301513674, 0.046940353393554686, 0.04679919815063477, 0.046747615814208984, 0.04690537643432617, 0.04680908966064453, 0.046688255310058595, 0.0466954231262207, 0.046617599487304685, 0.046919456481933595, 0.047157470703125, 0.047881439208984376, 0.046879520416259764, 0.046893054962158204, 0.046943519592285154, 0.04730569458007813, 0.04707302474975586, 0.04688617706298828, 0.04697724914550781, 0.047329792022705076, 0.047292289733886717, 0.04701196670532227, 0.04706633758544922, 0.04704044723510742, 0.04725228881835938, 0.047263774871826175, 0.047171520233154296, 0.04706719970703125, 
0.04717158508300781, 0.04725132751464844, 0.04750899124145508, 0.04743436813354492, 0.04701513671875, 0.047119136810302734, 0.04907212829589844, 0.048502784729003906, 0.047373600006103515, 0.04726201629638672, 0.04739932632446289, 0.04754022216796875, 0.047502880096435544, 0.04738505554199219, 0.04744003295898438, 0.04719555282592774, 0.0477130241394043, 0.04638131332397461, 0.046494720458984375, 0.046365440368652346, 0.046429439544677736, 0.04642483139038086, 0.046534656524658206, 0.046499839782714845, 0.04645273590087891, 0.046662017822265624, 0.04682918548583984, 0.04681894302368164, 0.04673116683959961, 0.046631614685058595, 0.04678844833374023, 0.04648748779296875, 0.046698497772216796, 0.04687446212768555, 0.04698128128051758, 0.04684799957275391, 0.04678656005859375, 0.04707942581176758, 0.04674505615234375, 0.046623104095458986, 0.046588062286376956, 0.04666777420043945, 0.0469381103515625, 0.04682137680053711, 0.04671023941040039, 0.04666537475585938, 0.04676803207397461, 0.046774303436279294, 0.04698012924194336, 0.0468287353515625, 0.0470759048461914, 0.04678876876831055, 0.04698726272583008, 0.047067134857177735, 0.04730812835693359, 0.047069854736328125, 0.04733302307128906, 0.04731343841552734, 0.04738844680786133, 0.047263774871826175, 0.04740915298461914, 0.04709580612182617, 0.047042560577392575, 0.04706057739257812, 0.04707900619506836, 0.047497215270996096, 0.04738051223754883, 0.047102752685546874, 0.04703343963623047, 0.04708211135864258, 0.047398303985595705, 0.047518463134765626, 0.04734291076660156, 0.047387454986572264, 0.04756828689575195, 0.04745891189575195, 0.04739276885986328, 0.04746198272705078]",tokens/s,21.290344413461018,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in 
decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,876.371968,662.56896,0.0,260.046848,258.555392,s,1,7.6930947265625,7.6930947265625,0.0,7.6930947265625,7.6930947265625,7.6930947265625,7.6930947265625,[7.6930947265625],,kWh,1.5043263316647427e-05,1.652119122551898e-06,4.5366702959986904e-06,2.1232052735198017e-05,,MB,1329.893376,759.037952,0.0,341.835776,317.950464,s,14,0.19584982299804687,0.013989273071289063,0.0003924698149841576,0.013858880043029785,0.01405989751815796,0.014529316711425782,0.015200724182128906,"[0.013813887596130371, 0.01387116813659668, 0.013836671829223633, 0.015368576049804688, 0.013822400093078613, 0.013848992347717285, 0.013828767776489257, 0.01373084831237793, 0.013868767738342285, 0.013844736099243164, 0.013954431533813476, 0.014019040107727051, 0.014077407836914062, 0.01396412754058838]",tokens/s,18299.735711457557,kWh,4.0486558713593135e-07,4.463948137609203e-08,2.160912500804278e-07,6.655963185924512e-07,tokens/kWh,384617511.9197894,MB,1370.025984,786.300928,0.0,369.098752,317.953024,s,14,10.2091162109375,0.7292225864955357,0.008958890656735363,0.7251283569335938,0.7411813537597657,0.7439443481445313,0.7479024926757812,"[0.7261859741210938, 0.7228895874023438, 0.724860595703125, 0.7224734497070312, 0.72203466796875, 0.7253961181640625, 0.7238218994140625, 0.720133056640625, 0.7216100463867188, 0.7291727294921875, 0.7394151611328125, 0.74095068359375, 0.7488920288085937, 0.7412802124023438]",tokens/s,86.39337448770272,kWh,2.103134934619995e-05,2.3193967290330682e-06,7.711831758919987e-06,3.1062577834153e-05,tokens/kWh,2028163.9320588557,,s,882,10.201798299789454,0.01156666473899028,0.0002727617723053447,0.0114759521484375,0.011841481208801269,0.01195679063796997,0.012620402498245236,"[0.011280672073364258, 0.01143609619140625, 0.011467647552490235, 0.011575360298156739, 0.011414688110351562, 0.011553312301635742, 0.011497471809387207, 0.01141055965423584, 0.011464672088623047, 0.01140329647064209, 0.011447263717651366, 0.011435296058654785, 0.011434080123901368, 0.011496000289916992, 0.011486528396606445, 0.011486016273498535, 0.011487071990966796, 0.011435040473937988, 0.011405952453613281, 0.01141590404510498, 0.011891008377075196, 0.012071871757507324, 0.011557632446289063, 0.01324448013305664, 0.011922816276550292, 0.011495136260986328, 0.011463808059692384, 0.0114234561920166, 0.011446080207824706, 0.011587455749511718, 0.01139958381652832, 0.011430144309997559, 0.011369279861450195, 0.011319808006286621, 0.01153264045715332, 0.011394847869873046, 0.011343647956848144, 0.01135251235961914, 0.01145036792755127, 0.011347968101501465, 0.011367456436157226, 0.011602911949157715, 0.01140940761566162, 0.011353631973266602, 0.011377056121826172, 0.011391039848327636, 0.011345248222351074, 0.011444895744323731, 
0.011388544082641602, 0.01136473560333252, 0.011734751701354981, 0.011892255783081054, 0.011707072257995605, 0.011627903938293457, 0.011672256469726562, 0.011705696105957031, 0.011473055839538573, 0.011424320220947266, 0.011365951538085938, 0.011412063598632812, 0.011431776046752929, 0.011532575607299804, 0.011590368270874023, 0.011230655670166016, 0.011460895538330079, 0.011530048370361328, 0.011467583656311035, 0.011482751846313477, 0.011600255966186523, 0.01155388832092285, 0.011529120445251465, 0.01153609561920166, 0.011424032211303711, 0.011488863945007324, 0.01143235206604004, 0.011464703559875488, 0.011581695556640625, 0.011636480331420899, 0.011620351791381836, 0.01155459213256836, 0.011434207916259766, 0.011389984130859374, 0.011430879592895508, 0.011433823585510254, 0.011470656394958496, 0.011444543838500977, 0.011459872245788575, 0.011603008270263672, 0.011431167602539063, 0.011397567749023438, 0.011462464332580566, 0.01150166416168213, 0.011547039985656739, 0.011461536407470703, 0.011407551765441894, 0.011420096397399902, 0.011425215721130371, 0.011423583984375, 0.011437952041625976, 0.011453760147094727, 0.011449695587158203, 0.011470623970031738, 0.011385408401489258, 0.011411680221557616, 0.011413311958312989, 0.011433952331542969, 0.011501567840576172, 0.011554335594177246, 0.011477375984191894, 0.011409472465515137, 0.011413503646850585, 0.011410688400268555, 0.011483296394348145, 0.01152064037322998, 0.011644576072692871, 0.011560416221618653, 0.011514752388000489, 0.011439519882202149, 0.011387680053710937, 0.011384160041809081, 0.011530048370361328, 0.011416223526000977, 0.011446271896362305, 0.011372384071350097, 0.011432448387145995, 0.01131811237335205, 0.011204031944274903, 0.011391584396362304, 0.011388287544250489, 0.011385439872741699, 0.011404895782470703, 0.01139743995666504, 0.011375967979431151, 0.011392064094543457, 0.011361696243286134, 0.012749471664428712, 0.014009920120239258, 0.011528287887573242, 0.011454431533813477, 0.011466976165771484, 0.011646623611450196, 0.011444704055786133, 0.011347647666931152, 0.011435456275939941, 0.011399744033813476, 0.011507712364196777, 0.01137174415588379, 0.01135807991027832, 0.011332575798034667, 0.011433919906616212, 0.011407679557800292, 0.011383999824523925, 0.011422207832336426, 0.0114171199798584, 0.011350015640258788, 0.011354911804199218, 0.011351072311401367, 0.011443103790283203, 0.011448287963867187, 0.011453632354736329, 0.011332192420959473, 0.011374048233032227, 0.011354816436767579, 0.011390815734863282, 0.011396191596984863, 0.01137712001800537, 0.011374367713928222, 0.011362463951110839, 0.011343935966491699, 0.011434399604797363, 0.011458016395568848, 0.011499327659606933, 0.01142630386352539, 0.011550592422485352, 0.011491392135620117, 0.011476927757263184, 0.01139955234527588, 0.011421471595764161, 0.011448575973510742, 0.011449567794799805, 0.011501952171325684, 0.011411104202270508, 0.011406047821044922, 0.011436032295227052, 0.01146675205230713, 0.011406911849975585, 0.011486783981323242, 0.012123007774353028, 0.011894432067871094, 0.011440416336059571, 0.011511360168457031, 0.011653727531433105, 0.011460127830505372, 0.011614399909973145, 0.011397407531738282, 0.011444512367248536, 0.011411168098449708, 0.01140940761566162, 0.011380767822265625, 0.011544384002685548, 0.011464863777160645, 0.011553952217102051, 0.011493760108947754, 0.011480959892272949, 0.01158198356628418, 0.011759679794311523, 0.011478591918945312, 0.01139087963104248, 0.01143171215057373, 0.01141222381591797, 
0.011536383628845214, 0.011612288475036621, 0.01147270393371582, 0.011407232284545899, 0.011385024070739746, 0.011390975952148438, 0.011359807968139648, 0.011577792167663574, 0.011413503646850585, 0.011419072151184081, 0.011385087966918945, 0.011366496086120606, 0.011543935775756835, 0.011405856132507323, 0.011431872367858887, 0.011389311790466308, 0.011671360015869141, 0.011407551765441894, 0.011421695709228515, 0.011954175949096679, 0.01141318416595459, 0.011413824081420899, 0.01137657642364502, 0.01140937614440918, 0.011419743537902831, 0.011413503646850585, 0.011470848083496094, 0.01139417552947998, 0.011335552215576172, 0.011482399940490723, 0.011486623764038086, 0.011462559700012207, 0.011372960090637207, 0.011372384071350097, 0.011397088050842285, 0.011474559783935547, 0.011462783813476563, 0.011450207710266114, 0.011367008209228516, 0.011384832382202148, 0.011385120391845704, 0.011357503890991211, 0.011169792175292969, 0.011491328239440919, 0.01152790355682373, 0.011446656227111817, 0.011398847579956056, 0.011423199653625489, 0.011426560401916504, 0.011403455734252929, 0.011454048156738282, 0.011421952247619629, 0.011360223770141602, 0.011440192222595215, 0.011433792114257813, 0.011409536361694336, 0.011411456108093262, 0.011458847999572754, 0.011404512405395507, 0.011397472381591797, 0.01146281623840332, 0.011385919570922852, 0.011373151779174804, 0.011366656303405762, 0.011574624061584472, 0.011387007713317871, 0.011357088088989258, 0.01138640022277832, 0.011394463539123535, 0.011485504150390626, 0.011461088180541992, 0.011463744163513184, 0.011469120025634766, 0.011395711898803711, 0.011499008178710938, 0.011552576065063477, 0.011461248397827148, 0.011448224067687989, 0.01149289608001709, 0.011428480148315429, 0.011415391921997071, 0.011450431823730468, 0.011450400352478027, 0.01137241554260254, 0.01144051170349121, 0.011461695671081544, 0.011409631729125977, 0.01148572826385498, 0.01154047966003418, 0.011376704216003418, 0.011448575973510742, 0.011425472259521485, 0.011483136177062989, 0.011437567710876465, 0.01141811180114746, 0.011478495597839356, 0.011442720413208008, 0.011446271896362305, 0.011619935989379883, 0.011437855720520019, 0.011473535537719726, 0.011438079833984375, 0.011718655586242676, 0.011423744201660157, 0.012093600273132324, 0.01122537612915039, 0.011497471809387207, 0.011438303947448731, 0.011398591995239259, 0.011434304237365722, 0.01140944004058838, 0.011405311584472656, 0.012071200370788574, 0.011437631607055665, 0.011394399642944336, 0.011360608100891113, 0.011360575675964355, 0.011399328231811523, 0.011463839530944825, 0.011438591957092285, 0.011481439590454102, 0.011382911682128906, 0.011454336166381836, 0.01134166431427002, 0.011450528144836426, 0.011429887771606445, 0.011447711944580078, 0.01148579216003418, 0.011884544372558594, 0.011476767539978028, 0.011409312248229981, 0.011364288330078125, 0.011432319641113282, 0.011411775588989257, 0.011449024200439454, 0.011369343757629394, 0.011384575843811036, 0.011416031837463378, 0.011466464042663575, 0.011368736267089844, 0.011376511573791505, 0.011434080123901368, 0.011406720161437988, 0.01142246437072754, 0.01147283172607422, 0.011421919822692871, 0.011412704467773437, 0.011454336166381836, 0.011430432319641113, 0.01143164825439453, 0.011381279945373536, 0.011398943901062011, 0.011549920082092284, 0.014902079582214355, 0.011423711776733398, 0.011986944198608398, 0.011581439971923829, 0.01144422435760498, 0.011448351860046387, 0.01132464027404785, 0.011428640365600587, 0.01140937614440918, 
0.011421695709228515, 0.011384832382202148, 0.011505887985229493, 0.01152790355682373, 0.011466815948486328, 0.011421695709228515, 0.011296544075012207, 0.0120513916015625, 0.011484800338745117, 0.011505311965942382, 0.01169001579284668, 0.01147980785369873, 0.011468735694885254, 0.011420703887939454, 0.011381919860839845, 0.01138156795501709, 0.01145139217376709, 0.011390975952148438, 0.01140332794189453, 0.0113721923828125, 0.011410816192626953, 0.01167852783203125, 0.011392767906188966, 0.011423263549804687, 0.01147593593597412, 0.011507616043090821, 0.01145139217376709, 0.011500639915466309, 0.01132323169708252, 0.011370559692382813, 0.011409343719482422, 0.011332991600036622, 0.011385472297668457, 0.01135647964477539, 0.011378560066223144, 0.011341631889343262, 0.011546784400939942, 0.01153164768218994, 0.011429887771606445, 0.011422176361083985, 0.01145036792755127, 0.01152019214630127, 0.011511615753173828, 0.011633983612060547, 0.012548959732055665, 0.011485024452209473, 0.011396767616271973, 0.011364704132080078, 0.011490431785583496, 0.011499967575073242, 0.011526592254638672, 0.011436032295227052, 0.01141759967803955, 0.011491328239440919, 0.011481087684631347, 0.011569151878356934, 0.011497376441955566, 0.011503711700439453, 0.011469023704528809, 0.011443967819213868, 0.011606240272521972, 0.011437664031982422, 0.011581664085388183, 0.011406911849975585, 0.011415200233459473, 0.011417759895324706, 0.011448960304260253, 0.01140121555328369, 0.01142579174041748, 0.011166272163391113, 0.01140351963043213, 0.011370688438415528, 0.011411295890808106, 0.011405599594116212, 0.01144803237915039, 0.011428959846496582, 0.011375455856323243, 0.011408896446228027, 0.011442720413208008, 0.01144422435760498, 0.011374591827392578, 0.011460607528686523, 0.011405407905578613, 0.011433024406433106, 0.011461055755615235, 0.01140163230895996, 0.011372544288635255, 0.01143331241607666, 0.011375264167785644, 0.011553088188171386, 0.011400896072387695, 0.011419360160827636, 0.011374879837036132, 0.011362303733825683, 0.011309056282043458, 0.011390975952148438, 0.011421152114868164, 0.011411392211914062, 0.011398752212524415, 0.011349151611328125, 0.011410431861877441, 0.011385855674743652, 0.011485024452209473, 0.011396320343017578, 0.011417984008789062, 0.011415200233459473, 0.011395680427551269, 0.011411616325378418, 0.012310336112976075, 0.011358624458312988, 0.011478816032409668, 0.011616512298583985, 0.011453696250915527, 0.011536895751953125, 0.011441887855529785, 0.01143836784362793, 0.011329631805419921, 0.01135974407196045, 0.011380415916442871, 0.011492032051086426, 0.011421216011047364, 0.011360223770141602, 0.011456607818603515, 0.011395071983337402, 0.011428288459777832, 0.011386112213134765, 0.01136451244354248, 0.011326047897338867, 0.011358207702636718, 0.011356160163879395, 0.011370240211486816, 0.011495679855346679, 0.011184415817260741, 0.01139094352722168, 0.01224723243713379, 0.012143839836120605, 0.011464703559875488, 0.011524864196777344, 0.011449343681335449, 0.011480128288269043, 0.011503711700439453, 0.011398048400878906, 0.011416735649108887, 0.011394847869873046, 0.011400320053100587, 0.011430784225463867, 0.011472576141357422, 0.011401408195495606, 0.011382335662841797, 0.01132806396484375, 0.01142137622833252, 0.011378751754760743, 0.011362751960754395, 0.01138054370880127, 0.011390303611755371, 0.011376895904541016, 0.011456831932067871, 0.011472864151000977, 0.011430015563964844, 0.011408831596374512, 0.011416128158569336, 0.011374400138854981, 0.011360447883605957, 
0.011372544288635255, 0.011776255607604981, 0.011474592208862305, 0.011455936431884766, 0.011393823623657227, 0.011417535781860352, 0.011364192008972167, 0.011515999794006348, 0.011376383781433106, 0.011366656303405762, 0.011361632347106933, 0.011368191719055176, 0.011391712188720702, 0.011425984382629395, 0.011374591827392578, 0.011386591911315918, 0.011378047943115234, 0.011386848449707032, 0.011348608016967774, 0.011574687957763672, 0.011410207748413085, 0.011380255699157715, 0.011391584396362304, 0.011448672294616699, 0.011368096351623535, 0.011458720207214355, 0.011421536445617676, 0.011455583572387695, 0.011442079544067382, 0.011475968360900878, 0.01161196804046631, 0.01150499153137207, 0.011217984199523925, 0.011481887817382812, 0.011534687995910644, 0.01152787208557129, 0.01157705593109131, 0.011540127754211425, 0.011476991653442382, 0.011497088432312012, 0.01151689624786377, 0.011413824081420899, 0.01151353645324707, 0.011429823875427247, 0.01142950439453125, 0.011483424186706543, 0.011428000450134278, 0.011416607856750489, 0.011412544250488281, 0.011445311546325684, 0.011426655769348145, 0.011403264045715332, 0.011425408363342285, 0.011401280403137207, 0.01141107177734375, 0.01143068790435791, 0.011397024154663087, 0.011511327743530274, 0.011508288383483887, 0.011478303909301758, 0.011481727600097656, 0.011504863739013672, 0.011442879676818847, 0.011505760192871094, 0.011593728065490723, 0.011466719627380372, 0.011425824165344238, 0.011429856300354004, 0.011437888145446778, 0.01156937599182129, 0.011603648185729981, 0.011594047546386719, 0.011525407791137695, 0.011521087646484376, 0.01162399959564209, 0.011687328338623047, 0.011928256034851075, 0.011509504318237305, 0.011528512001037598, 0.011648287773132324, 0.011729567527770997, 0.011790335655212402, 0.01175551986694336, 0.011841792106628418, 0.011788031578063966, 0.011799679756164552, 0.011742079734802246, 0.011803872108459472, 0.01205894374847412, 0.011772224426269531, 0.011770175933837891, 0.011752320289611816, 0.011747648239135742, 0.011856032371520995, 0.01163929557800293, 0.011681119918823242, 0.011799200057983399, 0.01177567958831787, 0.011716927528381348, 0.011702272415161133, 0.011720831871032716, 0.011759296417236328, 0.011654656410217285, 0.011956928253173828, 0.013447168350219727, 0.011862208366394043, 0.011871264457702637, 0.011850432395935058, 0.011982111930847169, 0.01188486385345459, 0.011792896270751953, 0.01179593563079834, 0.011817503929138183, 0.011806943893432616, 0.012005503654479981, 0.011785663604736329, 0.011714112281799317, 0.011648927688598633, 0.011772607803344727, 0.011740320205688477, 0.011744159698486328, 0.011697440147399903, 0.011762399673461914, 0.01163987159729004, 0.011666687965393066, 0.01170736026763916, 0.01175823974609375, 0.011782143592834473, 0.01170867156982422, 0.0116528959274292, 0.011612031936645508, 0.011655327796936035, 0.011620351791381836, 0.011646016120910645, 0.011598719596862792, 0.011577407836914062, 0.011607872009277344, 0.011615872383117676, 0.011668095588684082, 0.011873567581176758, 0.0116594877243042, 0.011767295837402344, 0.011596735954284667, 0.011575296401977539, 0.011535712242126465, 0.011543007850646972, 0.01150380802154541, 0.01153983974456787, 0.011532320022583007, 0.011561663627624511, 0.011653023719787598, 0.011688096046447754, 0.011695967674255372, 0.011603167533874512, 0.011545375823974609, 0.011603967666625976, 0.011589920043945313, 0.011568384170532226, 0.01142198371887207, 0.011561920166015625, 0.011589664459228516, 0.011582367897033692, 
0.011589216232299806, 0.011681183815002442, 0.011756352424621582, 0.011714591979980468, 0.011728896141052245, 0.011718655586242676, 0.01176371192932129, 0.011889663696289063, 0.01170467185974121, 0.011771776199340821, 0.011891488075256348, 0.011886336326599122, 0.011810144424438477, 0.011815839767456055, 0.01173692798614502, 0.0117161283493042, 0.011837311744689941, 0.011741951942443847, 0.011673600196838378, 0.011766816139221191, 0.011729887962341309, 0.01166438388824463, 0.011670528411865234, 0.01174937629699707, 0.011677696228027343, 0.01198857593536377, 0.011759967803955078, 0.011734944343566894, 0.011780256271362305, 0.011650464057922364, 0.011713120460510254, 0.011814911842346192, 0.011681280136108398, 0.01175500774383545, 0.011613183975219727, 0.011630080223083495, 0.011818559646606445, 0.011701184272766113, 0.011673312187194824, 0.011630144119262695, 0.01167024040222168, 0.011866144180297852, 0.011825311660766601, 0.011851455688476563, 0.011851743698120117, 0.011794848442077637, 0.011675423622131347, 0.011628512382507325, 0.011655167579650879, 0.011655200004577636, 0.011712608337402343, 0.011691904067993164, 0.011784192085266113, 0.011902048110961913, 0.012000288009643555, 0.012063648223876953, 0.012049375534057617, 0.012038463592529297, 0.011877984046936034, 0.011529248237609863, 0.011745823860168456, 0.011690431594848633, 0.011673503875732422, 0.011702303886413573, 0.011712287902832031, 0.011565407752990723, 0.011609248161315918, 0.011615008354187011, 0.011720704078674317, 0.011853216171264648, 0.011808575630187989, 0.011900704383850097, 0.011817376136779785, 0.011797439575195313, 0.011912863731384277, 0.012006912231445312, 0.012069375991821289, 0.011903200149536133, 0.011986271858215332, 0.012253631591796875, 0.011899968147277833, 0.01196127986907959, 0.01196345615386963, 0.01184659194946289, 0.011800576210021972, 0.01182431983947754, 0.01184447956085205, 0.011840991973876953, 0.01234716796875, 0.011993791580200195, 0.012664544105529784, 0.011915552139282226, 0.011964320182800293, 0.011914688110351563, 0.011912192344665527, 0.011817824363708497, 0.01180345630645752, 0.011914976119995117, 0.011818367958068848, 0.01186297607421875, 0.011826399803161622, 0.011857664108276367, 0.012794143676757812, 0.01353212833404541, 0.011915295600891114, 0.012015328407287598, 0.011821120262145997, 0.011751359939575195, 0.011841535568237305, 0.01177190399169922, 0.01175152015686035, 0.011716511726379395, 0.011683839797973633, 0.01164310359954834, 0.011763456344604493, 0.011628000259399415, 0.011869791984558106, 0.011735584259033203, 0.011606464385986328, 0.011710463523864746, 0.011661312103271485, 0.0116428804397583, 0.011378687858581543, 0.011638784408569336, 0.01157487964630127, 0.01162281608581543, 0.011809951782226562, 0.011669983863830566, 0.011690367698669433, 0.011708415985107423, 0.011613696098327637, 0.011624959945678711, 0.011583456039428711, 0.011534367561340332, 0.011579744338989258, 0.011768832206726074, 0.011690655708312988, 0.01163263988494873, 0.01160534381866455, 0.01170860767364502, 0.011672032356262208, 0.01156719970703125, 0.011683744430541992, 0.011636608123779297, 0.011633952140808106, 0.011686752319335938, 0.01168553638458252, 0.011629216194152832, 0.011641759872436524, 0.011762207984924316, 0.011719167709350586, 0.011904447555541993, 0.011854207992553711, 0.011773887634277344, 0.011786239624023438, 0.011850048065185546, 0.011808032035827637, 0.011715200424194336, 0.011873056411743164, 0.011717535972595216, 0.011789983749389648, 0.011866847991943359, 0.011854880332946778, 
0.011802399635314941, 0.01170729637145996, 0.011666912078857422, 0.011625184059143066, 0.011591423988342285, 0.011601984024047851, 0.011603615760803222, 0.011585887908935546, 0.011605664253234864, 0.012327263832092285, 0.01274675178527832, 0.011984288215637207, 0.012610048294067382, 0.011717023849487305, 0.011705727577209472, 0.011675647735595703, 0.011624128341674804, 0.011754112243652344, 0.011982848167419433, 0.012105119705200196, 0.012055135726928711, 0.012025407791137695]",tokens/s,86.4553458205702,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1511.657472,1383.989248,0.0,981.467136,978.805248,s,1,8.3412880859375,8.3412880859375,0.0,8.3412880859375,8.3412880859375,8.3412880859375,8.3412880859375,[8.3412880859375],,kWh,3.797005896249932e-05,4.180768217604495e-06,1.196973179801164e-05,5.412055897811546e-05,,MB,1607.778304,1526.595584,0.0,1109.393408,1064.45312,s,10,0.5321865921020508,0.05321865921020508,0.0004651977802656137,0.05325665664672852,0.05349269599914551,0.0538780122756958,0.054186265296936036,"[0.052375648498535154, 0.053075393676757815, 0.05310598373413086, 0.0532872314453125, 0.05272351837158203, 0.05322608184814453, 0.05339878463745117, 0.054263328552246096, 0.05332355117797852, 0.05340707015991211]",tokens/s,4810.342909783606,kWh,1.5634709008021178e-06,1.724222064375655e-07,1.0337530551657167e-06,2.7696461624054e-06,tokens/kWh,92430579.57182065,MB,1615.23712,1610.481664,0.0,1193.279488,1117.180928,s,10,11.678140869140625,1.1678140869140625,0.0069589869400435095,1.1681026000976562,1.1769537963867187,1.1782032165527343,1.1792027526855469,"[1.156515625, 
1.17945263671875, 1.172553955078125, 1.17076025390625, 1.16179296875, 1.16678369140625, 1.1766761474609375, 1.1694215087890625, 1.1603458251953125, 1.1638382568359376]",tokens/s,53.94694301597003,kWh,3.373011068586522e-05,3.7199887348930535e-06,1.6577372144833972e-05,5.402747156559223e-05,tokens/kWh,1166073.4469780738,,s,630,11.675702033996577,0.018532860371423147,0.0003262209306607475,0.018440768241882323,0.018919833564758303,0.01911366491317749,0.019826448116302492,"[0.018441856384277342, 0.018208864212036133, 0.018340415954589843, 0.018198528289794923, 0.018394847869873047, 0.018226783752441408, 0.018240192413330077, 0.018374591827392577, 0.018359392166137696, 0.01826300811767578, 0.018231039047241212, 0.018202335357666015, 0.01826665687561035, 0.01824563217163086, 0.018261184692382814, 0.018301759719848633, 0.01842790412902832, 0.018337791442871093, 0.01823539161682129, 0.01830633544921875, 0.018266847610473633, 0.018556928634643553, 0.018323295593261717, 0.018418943405151367, 0.018303903579711914, 0.018251264572143554, 0.018295295715332033, 0.01813443183898926, 0.018293344497680664, 0.018950143814086915, 0.018548736572265623, 0.01849932861328125, 0.018442495346069336, 0.01827631950378418, 0.01820879936218262, 0.018153280258178712, 0.01820857620239258, 0.018350400924682618, 0.01827155113220215, 0.018178815841674804, 0.018318912506103516, 0.02049667167663574, 0.018788511276245118, 0.018319360733032225, 0.018552831649780274, 0.018388992309570314, 0.0182392635345459, 0.01825923156738281, 0.018224063873291015, 0.018294048309326173, 0.018339584350585938, 0.01826915168762207, 0.018315263748168945, 0.018339136123657226, 0.018387584686279296, 0.01813920021057129, 0.018277664184570313, 0.018146015167236327, 0.018233184814453126, 0.018247840881347656, 0.01823855972290039, 0.018361248016357423, 0.018316736221313478, 0.018247968673706056, 0.018533920288085936, 0.0182194881439209, 0.018195648193359375, 0.018198911666870116, 0.018267808914184572, 0.01823619270324707, 0.01826201629638672, 0.018298879623413086, 0.018394496917724608, 0.01825404739379883, 0.01824764823913574, 0.018343744277954103, 0.01824729537963867, 0.01985353660583496, 0.020100095748901366, 0.018572479248046874, 0.018655519485473632, 0.018948415756225585, 0.018391168594360352, 0.018406272888183594, 0.018332672119140626, 0.01839619255065918, 0.018404064178466798, 0.01817795181274414, 0.018436479568481445, 0.018294752120971678, 0.01829644775390625, 0.019708288192749022, 0.01881920051574707, 0.019156864166259766, 0.018585599899291993, 0.019682880401611327, 0.018807231903076174, 0.018696191787719727, 0.018601375579833983, 0.018491392135620118, 0.018436704635620117, 0.01850761604309082, 0.01853455924987793, 0.018618368148803712, 0.018948095321655273, 0.019390464782714844, 0.019417087554931642, 0.019527679443359376, 0.01942323112487793, 0.01923891258239746, 0.0192491512298584, 0.019168832778930663, 0.019421312332153322, 0.019174848556518555, 0.019121023178100587, 0.018919424057006837, 0.018876319885253907, 0.01892092704772949, 0.018879104614257812, 0.01877382469177246, 0.0187127685546875, 0.018526208877563476, 0.01831920051574707, 0.018388448715209962, 0.018483903884887694, 0.01845792007446289, 0.01866268730163574, 0.018422271728515623, 0.01843631935119629, 0.018509824752807616, 0.01843814468383789, 0.018579456329345705, 0.01841971206665039, 0.018276351928710938, 0.018350080490112306, 0.01826950454711914, 0.018296575546264647, 0.01832851219177246, 0.018231296539306642, 0.01846793556213379, 0.018310047149658202, 0.01842585563659668, 
0.01899519920349121, 0.01842508888244629, 0.018380863189697266, 0.018322111129760742, 0.01850927925109863, 0.01843459129333496, 0.01846601676940918, 0.018418527603149413, 0.01828646469116211, 0.01828256034851074, 0.018226367950439453, 0.018228031158447264, 0.01824358367919922, 0.018257919311523436, 0.01821696090698242, 0.018257408142089843, 0.018393600463867187, 0.018769920349121092, 0.018700288772583007, 0.018664608001708983, 0.01921455955505371, 0.018997215270996095, 0.018946720123291017, 0.018822656631469727, 0.018847360610961914, 0.018824064254760742, 0.018892736434936525, 0.01893382453918457, 0.018873664855957033, 0.018889408111572265, 0.01879859161376953, 0.018873664855957033, 0.01904096031188965, 0.01902796745300293, 0.018860031127929687, 0.018988479614257814, 0.018870847702026367, 0.0190380802154541, 0.01913868713378906, 0.01894105529785156, 0.018824064254760742, 0.0188272647857666, 0.018771968841552734, 0.018777952194213868, 0.01855459213256836, 0.01846112060546875, 0.0183767032623291, 0.018501632690429686, 0.018454143524169922, 0.01885830307006836, 0.018364479064941406, 0.01819385528564453, 0.018682432174682618, 0.018380800247192384, 0.018222431182861328, 0.01854108810424805, 0.018086015701293947, 0.01842790412902832, 0.01874665641784668, 0.018866912841796875, 0.018759679794311524, 0.018445472717285156, 0.018287456512451172, 0.018257183074951173, 0.01831395149230957, 0.01860121536254883, 0.018622335433959962, 0.01870732879638672, 0.0186429443359375, 0.018518016815185546, 0.018370559692382812, 0.018493440628051756, 0.01832899284362793, 0.018633087158203124, 0.01843222427368164, 0.01850707244873047, 0.01840608024597168, 0.018700288772583007, 0.01828771209716797, 0.018236095428466798, 0.01825814437866211, 0.01879654312133789, 0.0183175048828125, 0.018596799850463867, 0.018657728195190428, 0.019962303161621092, 0.01893708801269531, 0.018798431396484374, 0.018919712066650392, 0.018992767333984376, 0.01882828712463379, 0.018890495300292968, 0.018713888168334962, 0.018475999832153322, 0.01843132781982422, 0.018243871688842773, 0.018257408142089843, 0.018383264541625977, 0.018558752059936522, 0.01861292839050293, 0.018969951629638673, 0.018518688201904297, 0.01863804817199707, 0.018524959564208986, 0.018404767990112304, 0.01837936019897461, 0.01843987274169922, 0.020199199676513672, 0.018424352645874022, 0.018511871337890624, 0.019564735412597657, 0.018532032012939452, 0.018603519439697267, 0.018488128662109374, 0.018323455810546875, 0.018362367630004883, 0.0182762565612793, 0.018263935089111328, 0.01823356819152832, 0.018239488601684572, 0.018236799240112303, 0.018178688049316407, 0.01831488037109375, 0.018278783798217773, 0.018519872665405272, 0.01846905517578125, 0.018577407836914063, 0.01858902359008789, 0.01987446403503418, 0.018405376434326173, 0.01842790412902832, 0.018390399932861328, 0.018416255950927735, 0.018366464614868162, 0.0182476806640625, 0.018325504302978517, 0.01841152000427246, 0.01838204765319824, 0.018303680419921874, 0.018341472625732422, 0.018248191833496095, 0.01835212707519531, 0.018255872726440428, 0.0186429443359375, 0.01820467185974121, 0.018992256164550782, 0.018555776596069336, 0.01826201629638672, 0.01845248031616211, 0.01817318344116211, 0.01839923286437988, 0.01828326416015625, 0.018184032440185547, 0.018485408782958984, 0.018275360107421874, 0.018198495864868165, 0.01815449523925781, 0.018503423690795898, 0.01824995231628418, 0.018184192657470705, 0.018597919464111327, 0.01821900749206543, 0.018192384719848635, 0.01838489532470703, 0.018358272552490236, 
0.02067865562438965, 0.018562463760375975, 0.018333568572998046, 0.018301631927490233, 0.018230464935302733, 0.018284767150878907, 0.018236032485961916, 0.0181790714263916, 0.018367776870727538, 0.018260543823242187, 0.018247520446777344, 0.018187456130981446, 0.018201568603515624, 0.018305023193359374, 0.01823289680480957, 0.01825836753845215, 0.01829478454589844, 0.01839308738708496, 0.018234783172607422, 0.01830521583557129, 0.018235328674316407, 0.018282976150512695, 0.018415615081787108, 0.018238975524902345, 0.018207231521606446, 0.018298879623413086, 0.018583263397216797, 0.01860416030883789, 0.01851398468017578, 0.018690143585205078, 0.019058143615722656, 0.018557472229003905, 0.01838800048828125, 0.018806751251220704, 0.01860710334777832, 0.018988479614257814, 0.018475584030151367, 0.018505599975585936, 0.018459999084472656, 0.01884182357788086, 0.018497215270996094, 0.01843699264526367, 0.018449600219726563, 0.018420320510864258, 0.018462879180908203, 0.018437471389770508, 0.018382591247558595, 0.0184616641998291, 0.018610015869140625, 0.018691328048706053, 0.018881311416625978, 0.01880099105834961, 0.0186977596282959, 0.01876780891418457, 0.018782432556152345, 0.018864160537719728, 0.018888383865356444, 0.01882691192626953, 0.018655967712402344, 0.018522111892700196, 0.018520063400268554, 0.018464767456054687, 0.018485248565673826, 0.018597888946533202, 0.018524160385131837, 0.01853753662109375, 0.01853536033630371, 0.018541759490966796, 0.01859667205810547, 0.018685951232910156, 0.018435840606689454, 0.018817024230957033, 0.018861663818359374, 0.018587263107299804, 0.01858745574951172, 0.018434976577758787, 0.018482271194458007, 0.018500576019287108, 0.018497312545776367, 0.01852796745300293, 0.01864137649536133, 0.018638879776000976, 0.018554880142211915, 0.018601984024047852, 0.018651136398315428, 0.018386688232421875, 0.018501119613647463, 0.018563327789306642, 0.018473472595214844, 0.01864499282836914, 0.018780160903930664, 0.01844166374206543, 0.018844127655029297, 0.018826623916625977, 0.018983648300170897, 0.0188023681640625, 0.018550207138061523, 0.018678495407104492, 0.018471071243286133, 0.01847420883178711, 0.0184102725982666, 0.018433855056762694, 0.018457952499389647, 0.018587583541870116, 0.019025888442993164, 0.01869919967651367, 0.018595199584960937, 0.018686592102050783, 0.018747392654418944, 0.018973791122436523, 0.019104671478271485, 0.019001344680786132, 0.01882111930847168, 0.019021823883056642, 0.019013504028320312, 0.01893507194519043, 0.018825279235839844, 0.018659551620483397, 0.018708320617675783, 0.018615007400512695, 0.01850092887878418, 0.018635103225708008, 0.018512224197387694, 0.018585599899291993, 0.01866547203063965, 0.018503679275512695, 0.018618368148803712, 0.018949567794799806, 0.018911487579345704, 0.018907039642333985, 0.01856550407409668, 0.018587488174438477, 0.018722623825073243, 0.01861631965637207, 0.01917241668701172, 0.01901628875732422, 0.01884102439880371, 0.01921116828918457, 0.0186998405456543, 0.018583999633789063, 0.018868223190307617, 0.018538496017456055, 0.018503488540649413, 0.01837238311767578, 0.01845903968811035, 0.018547775268554688, 0.018616832733154298, 0.018520288467407227, 0.018810239791870118, 0.018360288619995117, 0.018305791854858398, 0.01839516830444336, 0.018374303817749023, 0.018323776245117187, 0.018362495422363283, 0.01839923286437988, 0.018880832672119142, 0.018446016311645507, 0.018311168670654295, 0.018317312240600587, 0.01843814468383789, 0.01850137519836426, 0.018527999877929687, 0.018417407989501953, 
0.019593984603881835, 0.018415615081787108, 0.018413568496704103, 0.01848262405395508, 0.018376447677612304, 0.018893632888793945, 0.018487104415893553, 0.018350048065185545, 0.01863702392578125, 0.018416799545288087, 0.018365055084228515, 0.018274303436279296, 0.018292192459106446, 0.01829305648803711, 0.018479007720947266, 0.01864143943786621, 0.018274303436279296, 0.01841548728942871, 0.018356096267700194, 0.018419967651367188, 0.01859993553161621, 0.018509824752807616, 0.019441440582275392, 0.019169504165649415, 0.019035263061523436, 0.018635648727416992, 0.018529792785644532, 0.018678272247314453, 0.018284543991088868, 0.018212480545043944, 0.01852009582519531, 0.018286943435668945, 0.018281919479370117, 0.018984960556030273, 0.01825484848022461, 0.019760128021240234, 0.018395135879516602, 0.018348031997680665, 0.018233343124389647, 0.018306144714355467, 0.018273056030273436, 0.018298656463623046, 0.018178144454956056, 0.018311424255371092, 0.01824358367919922, 0.01826201629638672, 0.01830297660827637, 0.018271392822265625, 0.018273120880126954, 0.018306432723999025, 0.018463359832763673, 0.018472192764282226, 0.01834060859680176, 0.018429088592529296, 0.018348896026611328, 0.018322656631469727, 0.018362655639648437, 0.01832806396484375, 0.018354175567626953, 0.018354175567626953, 0.018304191589355468, 0.01831177520751953, 0.018300832748413084, 0.01830944061279297, 0.018319360733032225, 0.01868182373046875, 0.0182968635559082, 0.01843404769897461, 0.018423040390014647, 0.01842799949645996, 0.018452192306518556, 0.01834899139404297, 0.01843814468383789, 0.018909503936767578, 0.01849228858947754, 0.018508480072021483, 0.018466943740844725, 0.018511871337890624, 0.01845996856689453, 0.018453184127807616, 0.018327360153198243, 0.018343551635742188, 0.018588224411010743, 0.01847091293334961, 0.018505439758300782, 0.0182741756439209, 0.0183536319732666, 0.018299840927124025, 0.018335615158081055, 0.018356224060058594, 0.018603872299194336, 0.01838870429992676, 0.01841619110107422, 0.01841766357421875, 0.01839027214050293, 0.018404096603393556, 0.01863596725463867, 0.018556800842285157, 0.01838585662841797, 0.01843404769897461, 0.018392608642578124, 0.018436576843261717, 0.018480735778808592, 0.018562719345092772, 0.018627328872680662, 0.018659328460693358, 0.01836851119995117, 0.01846067237854004, 0.018524160385131837, 0.018472959518432617, 0.018395135879516602, 0.018296831130981444, 0.018311168670654295, 0.018350080490112306, 0.018290687561035156, 0.018526208877563476, 0.01831110382080078, 0.01830672073364258, 0.018269695281982423, 0.018330432891845702, 0.018202720642089845, 0.019333023071289063, 0.01930863952636719, 0.018664543151855468, 0.019249568939208983, 0.018667999267578124, 0.018732799530029296, 0.01870262336730957, 0.01861952018737793, 0.01867830467224121, 0.018692319869995117, 0.018562240600585936, 0.01848579216003418, 0.01848361587524414, 0.018478080749511717, 0.01844326400756836, 0.018372608184814454, 0.018647039413452148, 0.018476160049438476, 0.018350976943969727, 0.018371904373168945, 0.018379199981689454, 0.018286848068237306, 0.01836604881286621, 0.01828700828552246, 0.018318912506103516, 0.018353599548339844, 0.018346303939819335, 0.01833603286743164, 0.018243135452270506, 0.01834480094909668, 0.018292512893676758, 0.01826838493347168, 0.018300928115844727, 0.0182857608795166, 0.01829151916503906, 0.01829478454589844, 0.018309024810791014, 0.018380479812622072]",tokens/s,53.95821152043836,, 
4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load 
self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most 
recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,2174.373888,2194.604032,0.0,1816.133632,1727.29344,s,1,8.94433203125,8.94433203125,0.0,8.94433203125,8.94433203125,8.94433203125,8.94433203125,[8.94433203125],,kWh,5.511606295416034e-05,6.0725403847365365e-06,1.7106958129980976e-05,7.829556146887786e-05,,MB,2227.195904,2406.416384,0.0,1998.585856,1980.448768,s,10,3.2092273559570312,0.3209227355957031,0.0008386638202555853,0.3211023712158203,0.32185628967285157,0.32205032196044925,0.32220554779052735,"[0.3199826354980469, 0.32017123413085935, 0.31957742309570314, 0.32088507080078127, 0.32027764892578126, 0.32133026123046876, 0.3218131713867188, 0.3213196716308594, 0.32224435424804687, 0.32162588500976563]",tokens/s,797.6997937675178,kWh,9.385472596354085e-06,1.0347539388508268e-06,6.223433798187121e-06,1.6643660333392034e-05,tokens/kWh,15381231.944897924,MB,2235.76064,2597.257216,0.0,2189.426688,2078.022144,s,10,177.43118554687499,17.7431185546875,0.033526370132177866,17.758843749999997,17.7714302734375,17.77307646484375,17.77439341796875,"[17.675099609375, 17.694265625, 17.717541015625, 17.745, 17.7532890625, 17.7643984375, 17.769435546875, 17.766369140625, 17.771064453125, 
17.77472265625]",tokens/s,3.5506723243618428,kWh,0.0005187076917844781,5.721722755934871e-05,0.0003448937915398119,0.0009208187108836387,tokens/kWh,68417.37603218744,,s,630,177.42690451049805,0.2816300071595207,0.0006598392608408472,0.2817473449707031,0.28242669372558593,0.28255672454833985,0.2828116159057617,"[0.2800423889160156, 0.2798489685058594, 0.27987677001953126, 0.28069677734375, 0.2809182739257812, 0.2799151611328125, 0.2804708557128906, 0.2804883117675781, 0.280727294921875, 0.28029803466796877, 0.2804562683105469, 0.2802956237792969, 0.28037606811523436, 0.2803502502441406, 0.2799678955078125, 0.28061727905273437, 0.27993701171875, 0.28083200073242187, 0.28064358520507815, 0.27996954345703123, 0.2806561279296875, 0.28070816040039065, 0.28012542724609374, 0.2807080383300781, 0.2807132263183594, 0.28060049438476564, 0.28059661865234375, 0.2804834289550781, 0.2809849548339844, 0.28008346557617186, 0.28019439697265625, 0.28069955444335937, 0.28046044921875, 0.2801589660644531, 0.28077825927734373, 0.2805921020507812, 0.28040835571289063, 0.28039385986328125, 0.28098391723632815, 0.280465087890625, 0.28005825805664064, 0.28069232177734377, 0.2807298278808594, 0.2803466796875, 0.280330322265625, 0.2809014892578125, 0.2810062255859375, 0.2803957824707031, 0.2808667602539062, 0.28111810302734375, 0.2807504272460937, 0.28062753295898435, 0.28100579833984374, 0.28070892333984376, 0.2809819946289063, 0.28046047973632815, 0.2810714111328125, 0.2802329711914063, 0.2805138854980469, 0.28088592529296874, 0.28113006591796874, 0.2816296081542969, 0.28068658447265626, 0.281005859375, 0.28015133666992187, 0.2804008178710938, 0.28073983764648436, 0.28049612426757814, 0.28073321533203127, 0.28063790893554685, 0.28020941162109375, 0.2803833618164063, 0.2812335510253906, 0.28066201782226563, 0.2800002136230469, 0.28072726440429685, 0.2811516418457031, 0.2803818664550781, 0.2807459716796875, 0.28107571411132815, 0.2807296142578125, 0.280090087890625, 0.28065847778320313, 0.280848388671875, 0.2805125122070313, 0.2809405517578125, 0.2810262451171875, 0.28134841918945314, 0.28045413208007813, 0.28098153686523436, 0.2812651062011719, 0.28072549438476563, 0.28061270141601563, 0.2810942993164062, 0.28120046997070314, 0.2811516418457031, 0.28120883178710937, 0.28136788940429686, 0.28082965087890627, 0.2804236755371094, 0.28094329833984377, 0.2808046264648438, 0.2805627746582031, 0.28110296630859377, 0.28122930908203125, 0.2805247802734375, 0.2808934631347656, 0.28068658447265626, 0.2811058349609375, 0.28089788818359374, 0.2809177551269531, 0.2811558837890625, 0.2807339172363281, 0.28060467529296873, 0.28165310668945315, 0.2810533142089844, 0.280748046875, 0.2808401794433594, 0.28158770751953127, 0.28121279907226565, 0.2812069091796875, 0.2809117431640625, 0.28113461303710935, 0.2811109008789062, 0.2807441711425781, 0.2812673645019531, 0.2806640625, 0.2807945556640625, 0.28124627685546877, 0.28088262939453124, 0.281145751953125, 0.28130291748046876, 0.2816937255859375, 0.2803898620605469, 0.28093280029296874, 0.2809283447265625, 0.28078048706054687, 0.280913330078125, 0.2810458984375, 0.28148532104492185, 0.280922119140625, 0.2811125793457031, 0.28112240600585936, 0.2808047790527344, 0.2807490539550781, 0.28160614013671875, 0.281249755859375, 0.28072171020507813, 0.2812303771972656, 0.2810395812988281, 0.28151602172851564, 0.2808606872558594, 0.28136856079101563, 0.2807767028808594, 0.2813706359863281, 0.28103884887695313, 0.2810142822265625, 0.28121701049804687, 0.28157131958007814, 0.28107366943359374, 
0.28137210083007813, 0.2811848449707031, 0.2810491027832031, 0.281143310546875, 0.28145895385742187, 0.28184756469726563, 0.28124978637695314, 0.2812653503417969, 0.28144723510742187, 0.2813807678222656, 0.2816123962402344, 0.28132147216796877, 0.2814075012207031, 0.28182464599609375, 0.28112857055664064, 0.2807162780761719, 0.2815958251953125, 0.2814337158203125, 0.28157586669921875, 0.2812951049804687, 0.28182913208007815, 0.2811023254394531, 0.2814849853515625, 0.2818131103515625, 0.2814261169433594, 0.2813286437988281, 0.28186517333984373, 0.2815753173828125, 0.2808505554199219, 0.2822403564453125, 0.2807236938476563, 0.2805497741699219, 0.2815038146972656, 0.28116583251953126, 0.28076220703125, 0.2811209411621094, 0.2815037536621094, 0.28119210815429685, 0.28136688232421875, 0.28179364013671876, 0.28147393798828124, 0.2810163269042969, 0.2815672302246094, 0.281280517578125, 0.28149090576171876, 0.28075677490234374, 0.28159796142578125, 0.28178399658203124, 0.2808909912109375, 0.2817575073242187, 0.2825286865234375, 0.28195806884765623, 0.28188088989257815, 0.28218746948242185, 0.28228436279296876, 0.28108184814453124, 0.28176177978515626, 0.282838134765625, 0.2814873352050781, 0.2818338012695312, 0.2822354736328125, 0.28211404418945313, 0.28158770751953127, 0.2818495788574219, 0.28214300537109377, 0.2822717590332031, 0.28159591674804685, 0.2825904541015625, 0.2814175720214844, 0.2816391296386719, 0.2814712219238281, 0.2818414611816406, 0.2815576171875, 0.28091595458984375, 0.28136032104492187, 0.2823326416015625, 0.28136099243164064, 0.28139706420898436, 0.28150320434570314, 0.28204721069335936, 0.28181292724609375, 0.28150790405273435, 0.28198297119140625, 0.2825850830078125, 0.2817843322753906, 0.28202392578125, 0.28227609252929686, 0.28162753295898435, 0.28117041015625, 0.2816596374511719, 0.2821185607910156, 0.28147891235351563, 0.28207513427734376, 0.28206472778320313, 0.2810799560546875, 0.2822287292480469, 0.281380859375, 0.28096307373046875, 0.28201495361328127, 0.28191543579101563, 0.2808220825195312, 0.2812441101074219, 0.2820792236328125, 0.28129074096679685, 0.2807245483398437, 0.2816378173828125, 0.28133172607421875, 0.2814259338378906, 0.2809405517578125, 0.2820807189941406, 0.2818770446777344, 0.2812231750488281, 0.28140945434570314, 0.2818765869140625, 0.281585205078125, 0.2813772277832031, 0.28210791015625, 0.2815528869628906, 0.28134954833984377, 0.281118408203125, 0.28153668212890626, 0.2811440124511719, 0.281315185546875, 0.28166574096679686, 0.28222409057617187, 0.2818480529785156, 0.28228182983398437, 0.2820284729003906, 0.28187362670898436, 0.2818568115234375, 0.28186767578125, 0.28210015869140626, 0.2811656494140625, 0.2817580261230469, 0.2824253540039062, 0.2816795654296875, 0.28214822387695315, 0.282633056640625, 0.28246231079101564, 0.28224920654296876, 0.28219390869140626, 0.28176177978515626, 0.28234295654296876, 0.281440673828125, 0.28233282470703125, 0.28235589599609373, 0.28176202392578126, 0.28263742065429687, 0.2827757263183594, 0.28194482421875, 0.2814791564941406, 0.28246823120117187, 0.28238861083984373, 0.28188467407226564, 0.2819947509765625, 0.2818296813964844, 0.28161227416992185, 0.28164312744140624, 0.2824798583984375, 0.28089599609375, 0.2816155395507812, 0.282393310546875, 0.28163201904296875, 0.28096578979492187, 0.2822208251953125, 0.2825495910644531, 0.28141635131835935, 0.2813686218261719, 0.28233929443359373, 0.28234951782226564, 0.2814049377441406, 0.2820491027832031, 0.2822655944824219, 0.28181671142578124, 0.28181951904296876, 
0.282474365234375, 0.2828248291015625, 0.28128375244140624, 0.2819653930664062, 0.28253378295898435, 0.28245318603515623, 0.28302224731445313, 0.2817261962890625, 0.2820779418945312, 0.28135592651367186, 0.28173553466796875, 0.2820218811035156, 0.2824163818359375, 0.281559814453125, 0.2818511962890625, 0.281985595703125, 0.28193911743164063, 0.2824345397949219, 0.2821079406738281, 0.28175128173828123, 0.28165960693359376, 0.28184490966796877, 0.2818196411132812, 0.2816590576171875, 0.28129541015625, 0.2822452087402344, 0.282148681640625, 0.28179647827148435, 0.2818431701660156, 0.281916259765625, 0.282071044921875, 0.28207308959960936, 0.28224102783203125, 0.28240692138671875, 0.28170367431640625, 0.28196322631835935, 0.2822586975097656, 0.2821107482910156, 0.2816573486328125, 0.28200115966796874, 0.28276107788085936, 0.28166796875, 0.28264389038085935, 0.28165985107421876, 0.2816119079589844, 0.2818493041992188, 0.28241342163085936, 0.28160269165039065, 0.2821709289550781, 0.28233157348632815, 0.281638916015625, 0.2820690002441406, 0.2820444030761719, 0.2819522705078125, 0.2817003479003906, 0.28188671875, 0.2821754760742187, 0.28198440551757814, 0.28190985107421873, 0.28162771606445314, 0.28219027709960937, 0.2819363708496094, 0.28173867797851565, 0.2817145080566406, 0.2827763061523437, 0.2823818664550781, 0.282061279296875, 0.2822955322265625, 0.28185458374023437, 0.2820845947265625, 0.28183645629882814, 0.2820997009277344, 0.28219384765625, 0.2820178527832031, 0.28301669311523436, 0.2816857604980469, 0.2826753845214844, 0.28165997314453123, 0.2818326110839844, 0.28254217529296877, 0.28154898071289064, 0.2824791564941406, 0.28230215454101565, 0.282519287109375, 0.282200439453125, 0.2817508544921875, 0.28248358154296876, 0.2819420166015625, 0.2820316467285156, 0.2820674133300781, 0.2824532775878906, 0.2819747619628906, 0.281317626953125, 0.2821370849609375, 0.28193997192382814, 0.28199856567382814, 0.282194580078125, 0.28177011108398436, 0.2821119995117187, 0.2824540100097656, 0.2815442199707031, 0.28249545288085937, 0.2822952880859375, 0.281781005859375, 0.28232345581054685, 0.2816731872558594, 0.2817425842285156, 0.2815733032226562, 0.282046875, 0.28263876342773436, 0.2819246826171875, 0.280864990234375, 0.2823441467285156, 0.2817774047851562, 0.28158438110351564, 0.281775390625, 0.2825755920410156, 0.281280517578125, 0.28094049072265626, 0.28242132568359374, 0.2824335327148437, 0.2814668884277344, 0.28139007568359375, 0.28251776123046873, 0.28208615112304686, 0.2814786376953125, 0.28204290771484375, 0.28198822021484377, 0.28178521728515626, 0.28226739501953124, 0.2823375244140625, 0.2817307739257813, 0.28193624877929685, 0.28203347778320315, 0.2822744140625, 0.2821096496582031, 0.2826099548339844, 0.2821754760742187, 0.28191094970703123, 0.2817907104492188, 0.2819154968261719, 0.282218505859375, 0.28213824462890624, 0.2821349182128906, 0.282040283203125, 0.28201541137695313, 0.2820816345214844, 0.28174261474609374, 0.2815003967285156, 0.2821997375488281, 0.28205908203125, 0.2816632385253906, 0.282668701171875, 0.28182794189453125, 0.2821672973632813, 0.2818334655761719, 0.28256256103515626, 0.28206491088867186, 0.2816860046386719, 0.2823475341796875, 0.2824228515625, 0.28201397705078124, 0.2820582275390625, 0.2820000305175781, 0.2821888427734375, 0.28197781372070313, 0.28243557739257813, 0.28195431518554687, 0.28218572998046876, 0.28233468627929686, 0.28162115478515626, 0.2822348937988281, 0.2820526123046875, 0.2816813659667969, 0.28182583618164064, 0.2823434143066406, 
0.28173226928710937, 0.2815312194824219, 0.2821590270996094, 0.28229574584960937, 0.2820245666503906, 0.28214068603515624, 0.282492919921875, 0.28185305786132814, 0.28190194702148436, 0.2820603332519531, 0.28267156982421876, 0.28136856079101563, 0.282112060546875, 0.2824962463378906, 0.2821064453125, 0.2815693359375, 0.28216445922851563, 0.28250607299804686, 0.281864013671875, 0.28174533081054687, 0.2819934692382812, 0.281740478515625, 0.28147732543945314, 0.2822243957519531, 0.28174221801757815, 0.2820280456542969, 0.28201895141601563, 0.2824937744140625, 0.2817577514648438, 0.28169625854492186, 0.28246426391601565, 0.282492919921875, 0.2821160888671875, 0.282071044921875, 0.2819317626953125, 0.281697998046875, 0.281837890625, 0.2820362243652344, 0.2827120666503906, 0.28151602172851564, 0.28276840209960935, 0.28217239379882814, 0.2814392395019531, 0.2822522888183594, 0.2821119995117187, 0.2827202453613281, 0.28199502563476564, 0.28225689697265627, 0.2823175354003906, 0.2820803833007812, 0.28190399169921876, 0.28221826171875, 0.2828515625, 0.2820765380859375, 0.2819029235839844, 0.2825469970703125, 0.281478759765625, 0.28158453369140624, 0.2819154052734375, 0.2817404479980469, 0.2814493103027344, 0.2822504272460937, 0.2821331787109375, 0.2818151550292969, 0.281635986328125, 0.2826691589355469, 0.28135467529296876, 0.28190548706054686, 0.28174935913085936, 0.2820091247558594, 0.2818586730957031, 0.28164913940429687, 0.2826322021484375, 0.2821663818359375, 0.2818118591308594, 0.2821775207519531, 0.28203826904296875, 0.2821048278808594, 0.2819526672363281, 0.28202249145507813, 0.28223480224609376, 0.2818842163085937, 0.28232101440429686, 0.28194351196289064, 0.28260369873046876, 0.2824259338378906, 0.28248236083984374, 0.28222836303710935, 0.2819715576171875, 0.2824027404785156, 0.2820341796875, 0.281385009765625, 0.28212225341796876, 0.2824920349121094, 0.28204327392578127, 0.28189816284179686, 0.282149658203125, 0.28204217529296877, 0.28256689453125, 0.2821114807128906, 0.2821842041015625, 0.2820157470703125, 0.2827670288085937, 0.28207037353515624, 0.28220645141601564, 0.28214492797851565, 0.28206887817382814, 0.28248272705078126, 0.28248028564453126, 0.2826987609863281, 0.2822668762207031, 0.282213134765625, 0.28295150756835935, 0.282881591796875, 0.2827792663574219, 0.2821653747558594, 0.2824528503417969, 0.2820559387207031, 0.281964599609375]",tokens/s,3.550757996585146,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise 
ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,3163.000832,4423.876608,0.0,4028.628992,3944.723968,s,1,10.2890380859375,10.2890380859375,0.0,10.2890380859375,10.2890380859375,10.2890380859375,10.2890380859375,[10.2890380859375],,kWh,9.265283422500184e-05,1.0210715612788045e-05,2.773502218800933e-05,0.00013059857202579923,,MB,3183.652864,4763.615232,0.0,4353.687552,4305.05728,s,10,1.145394432067871,0.11453944320678713,0.0002339463254169406,0.11449860763549805,0.11482673568725586,0.11494184837341308,0.11503393852233887,"[0.1150569610595703, 0.11436332702636719, 
0.1142347183227539, 0.11432816314697265, 0.11465875244140625, 0.1143918685913086, 0.11456227111816407, 0.11446249389648437, 0.11453472137451172, 0.11480115509033204]",tokens/s,2235.037929578747,kWh,3.4183840612885793e-06,3.7688180050823733e-07,2.2660515544417967e-06,6.061317416238614e-06,tokens/kWh,42235042.71763783,MB,3187.810304,4763.615232,0.0,4353.687552,4305.05984,s,10,25.420324462890623,2.5420324462890624,0.00740630854366198,2.5402193603515624,2.552800537109375,2.5546341552734373,2.5561010498046874,"[2.5564677734375, 2.55239306640625, 2.533532958984375, 2.5334658203125, 2.536947998046875, 2.540232666015625, 2.536437744140625, 2.546279052734375, 2.544361328125, 2.5402060546875]",tokens/s,24.78331859688469,kWh,7.489263759996022e-05,8.260680932844667e-06,3.806064737355796e-05,0.00012121396590636286,tokens/kWh,519742.0901867625,,s,630,25.41778401947023,0.04034568891979399,0.0005551954603061547,0.04022352027893067,0.04074762496948242,0.04107856597900391,0.04241985176086426,"[0.040673408508300785, 0.040661502838134765, 0.04061715316772461, 0.04047705459594726, 0.04062995147705078, 0.040651519775390624, 0.04033033752441406, 0.04043254470825195, 0.04060160064697266, 0.04055654525756836, 0.040473697662353515, 0.04043244934082031, 0.04102944183349609, 0.04362678527832031, 0.0407276496887207, 0.04527302551269531, 0.04102409744262695, 0.040957504272460935, 0.04071916961669922, 0.04134707260131836, 0.04053811264038086, 0.04056195068359375, 0.04037295913696289, 0.040683231353759765, 0.040735008239746094, 0.04055270385742187, 0.04081638336181641, 0.04091494369506836, 0.04069539260864258, 0.04033782577514648, 0.04096409606933594, 0.040339359283447264, 0.04035356903076172, 0.040245567321777344, 0.040204288482666016, 0.04024115371704102, 0.04128768157958984, 0.04006911849975586, 0.04072652816772461, 0.040045982360839845, 0.04247817611694336, 0.040529918670654294, 0.040114177703857425, 0.039972862243652346, 0.0405805778503418, 0.040454689025878905, 0.04016537475585937, 0.0399268798828125, 0.03990220642089844, 0.039812576293945315, 0.03988115310668945, 0.040103935241699216, 0.0401541748046875, 0.040274879455566404, 0.03986355209350586, 0.04002838516235351, 0.03991606521606445, 0.039932926177978514, 0.03998646545410156, 0.0398759994506836, 0.03993017578125, 0.04008550262451172, 0.04032716751098633, 0.04026208114624023, 0.04047257614135742, 0.04038383865356445, 0.0402786865234375, 0.04030054473876953, 0.04016332626342774, 0.0401162223815918, 0.04010720062255859, 0.03997779083251953, 0.04005068969726563, 0.04050124740600586, 0.04012236785888672, 0.04036608123779297, 0.04062617492675781, 0.04044800186157226, 0.040141918182373046, 0.04490742492675781, 0.04033052825927735, 0.04028387069702148, 0.040049568176269534, 0.04110345458984375, 0.040691585540771485, 0.040382591247558594, 0.04055199813842773, 0.04029894256591797, 0.04053104019165039, 0.04022544097900391, 0.04019356918334961, 0.04038524627685547, 0.04019830322265625, 0.04029993438720703, 0.03998764801025391, 0.040088897705078126, 0.040336063385009766, 0.04074655914306641, 0.04026959991455078, 0.040269824981689455, 0.041433761596679684, 0.04206768035888672, 0.04018739318847656, 0.04014889526367187, 0.04120249557495117, 0.04270844650268555, 0.04035644912719726, 0.040323070526123043, 0.04175462341308594, 0.04074454498291016, 0.04064092636108398, 0.04037392044067383, 0.04061014556884766, 0.04024115371704102, 0.04011606216430664, 0.04053798294067383, 0.04017523193359375, 0.040433792114257815, 0.04063900756835938, 0.040089569091796874, 0.040124446868896484, 
0.04019404983520508, 0.04001587295532227, 0.040153087615966795, 0.040188129425048826, 0.04022643280029297, 0.040514049530029295, 0.040224704742431644, 0.040048702239990235, 0.040003360748291014, 0.04034377670288086, 0.04004044723510742, 0.04036608123779297, 0.040224704742431644, 0.04014400100708008, 0.04009056091308594, 0.04003609466552734, 0.0401082878112793, 0.04018175888061523, 0.0402630729675293, 0.04031948852539063, 0.04014416122436523, 0.04046828842163086, 0.040330238342285156, 0.04040703964233398, 0.040417278289794925, 0.04022819137573242, 0.04018780899047852, 0.04050543975830078, 0.04001244735717773, 0.04021420669555664, 0.04015955352783203, 0.04038800048828125, 0.04018982315063477, 0.03995107269287109, 0.04071331024169922, 0.04025436782836914, 0.04013840103149414, 0.03995888137817383, 0.040017822265625, 0.040169567108154294, 0.04029439926147461, 0.04022886276245117, 0.04031094360351563, 0.040253536224365234, 0.04001152038574219, 0.040187904357910156, 0.040136703491210936, 0.039858177185058595, 0.039925121307373045, 0.040059040069580075, 0.04015292739868164, 0.04033561706542969, 0.04018739318847656, 0.04012940979003906, 0.0401162223815918, 0.04013075256347656, 0.04014470291137695, 0.04026761627197266, 0.040406944274902344, 0.04071209716796875, 0.04042736053466797, 0.04023689651489258, 0.040100032806396485, 0.04019247817993164, 0.04014080047607422, 0.04024934387207031, 0.04027801513671875, 0.040030174255371094, 0.04064604949951172, 0.04039535903930664, 0.04016332626342774, 0.04013382339477539, 0.04006732940673828, 0.040030017852783206, 0.039994110107421876, 0.04017356872558594, 0.04005068969726563, 0.040101886749267575, 0.03993798446655274, 0.03999068832397461, 0.04009408187866211, 0.04048076629638672, 0.040100128173828124, 0.039997089385986326, 0.03999574279785156, 0.03989503860473633, 0.04000678253173828, 0.03997148895263672, 0.039921440124511716, 0.04002041625976562, 0.04038246536254883, 0.040252735137939456, 0.04032083129882812, 0.040341793060302736, 0.040251998901367186, 0.04047052764892578, 0.040433345794677736, 0.04023689651489258, 0.04021295928955078, 0.040213665008544924, 0.0400362548828125, 0.040239776611328125, 0.04009571075439453, 0.04030905532836914, 0.04023910522460938, 0.0401627197265625, 0.04020080184936523, 0.03999948883056641, 0.03997625732421875, 0.04009235382080078, 0.04017897415161133, 0.040276702880859376, 0.04032921600341797, 0.03991548919677734, 0.040459999084472655, 0.04180204772949219, 0.04045366287231445, 0.04073107147216797, 0.04010601425170898, 0.040065120697021485, 0.04018166351318359, 0.040275966644287106, 0.040123775482177736, 0.04074467086791992, 0.040323135375976565, 0.039996257781982424, 0.04014604949951172, 0.04030271911621094, 0.04013747024536133, 0.039962623596191404, 0.04003430557250977, 0.0414048957824707, 0.040562305450439456, 0.040652702331542966, 0.0404376335144043, 0.040241569519042966, 0.04036012649536133, 0.04032921600341797, 0.04022224044799805, 0.04085398483276367, 0.0407347183227539, 0.040869888305664064, 0.040525825500488284, 0.04035379028320313, 0.04042931365966797, 0.04032742309570313, 0.040325119018554685, 0.040277503967285154, 0.04033116912841797, 0.04029241561889649, 0.04001795196533203, 0.04015478515625, 0.04026556777954102, 0.04018617630004883, 0.03998585510253906, 0.039979007720947264, 0.03994131088256836, 0.04010067367553711, 0.040665088653564455, 0.04081459045410156, 0.04035583877563476, 0.04062825775146484, 0.040482017517089845, 0.04018662261962891, 0.040486751556396486, 0.04027612686157227, 0.04023910522460938, 
0.04007526397705078, 0.04020633697509766, 0.04003635025024414, 0.04003859329223633, 0.04046623992919922, 0.040374271392822264, 0.04017295837402344, 0.04002588653564453, 0.04011004638671875, 0.0399532470703125, 0.04012236785888672, 0.039962623596191404, 0.03996876907348633, 0.03993360137939453, 0.03991587066650391, 0.04015919876098633, 0.040081409454345705, 0.04003036880493164, 0.0400010871887207, 0.040092063903808595, 0.04044524765014648, 0.03995868682861328, 0.0402334098815918, 0.040007038116455076, 0.04006361770629883, 0.04013993453979492, 0.03982342529296875, 0.04083977508544922, 0.04112319946289063, 0.04227705764770508, 0.04024911880493164, 0.04004556655883789, 0.040351486206054686, 0.04028416061401367, 0.040191295623779294, 0.04012716674804687, 0.042264575958251956, 0.040191070556640625, 0.03984476852416992, 0.03983769607543945, 0.04014284896850586, 0.03990729522705078, 0.039796768188476564, 0.04008550262451172, 0.0402083854675293, 0.03998720169067383, 0.03996041488647461, 0.03985833740234375, 0.039989246368408206, 0.040103935241699216, 0.04172009658813477, 0.040768863677978516, 0.040175071716308595, 0.04117504119873047, 0.040632320404052735, 0.04018220901489258, 0.04023664093017578, 0.04145379257202148, 0.04054697418212891, 0.04098252868652344, 0.04027391815185547, 0.040268863677978516, 0.04030915069580078, 0.04033795166015625, 0.040322399139404295, 0.04011484909057617, 0.04009369659423828, 0.04028006362915039, 0.04063641738891602, 0.03999334335327148, 0.03996006393432617, 0.04019395065307617, 0.04017203140258789, 0.04038016128540039, 0.03989923095703125, 0.04029695892333984, 0.040295711517333986, 0.03996105575561523, 0.0400992317199707, 0.04010863876342773, 0.039874561309814455, 0.0399458236694336, 0.04024566268920898, 0.040030208587646485, 0.03997081756591797, 0.04004006576538086, 0.040301982879638674, 0.04001020812988281, 0.04008729553222656, 0.039944480895996094, 0.04158246231079102, 0.04054243087768555, 0.04012713623046875, 0.04033536148071289, 0.040153087615966795, 0.04028416061401367, 0.04007526397705078, 0.0405129280090332, 0.04043836975097656, 0.04019993591308594, 0.040184062957763673, 0.04018175888061523, 0.039944000244140625, 0.04013657760620117, 0.04049132919311523, 0.04012403106689453, 0.04009817504882812, 0.0400357437133789, 0.04006768035888672, 0.03999334335327148, 0.0400175666809082, 0.040019519805908205, 0.0399815673828125, 0.03993423843383789, 0.040022014617919925, 0.040785919189453124, 0.03997238540649414, 0.039963104248046874, 0.040000545501708985, 0.040010719299316405, 0.04025360107421875, 0.04022233581542969, 0.04042160034179688, 0.04030054473876953, 0.04032921600341797, 0.04089846420288086, 0.04012656021118164, 0.04041523361206055, 0.040527393341064456, 0.04056889724731445, 0.040583873748779295, 0.04040265655517578, 0.04030575942993164, 0.040643489837646485, 0.04021193695068359, 0.04011859130859375, 0.04009334564208984, 0.04005849456787109, 0.040104511260986325, 0.04017776107788086, 0.04076531219482422, 0.040089534759521483, 0.04030860900878906, 0.04021299362182617, 0.04000982284545898, 0.0401899528503418, 0.04003168106079102, 0.04055843353271484, 0.040143585205078124, 0.040304447174072264, 0.040398143768310545, 0.040182655334472656, 0.03999961471557617, 0.04027913665771484, 0.04006358337402344, 0.04000297546386719, 0.04006755065917969, 0.040134464263916016, 0.040011680603027344, 0.03995641708374023, 0.04002608108520508, 0.039887584686279294, 0.040217823028564456, 0.040065216064453124, 0.04009235382080078, 0.0401448974609375, 0.04006092834472656, 
0.0400928955078125, 0.04001984024047851, 0.040369056701660154, 0.040535903930664065, 0.04041129684448242, 0.04048691177368164, 0.040304641723632816, 0.04017766571044922, 0.040304641723632816, 0.04008345413208008, 0.04021452713012695, 0.040286209106445314, 0.04020547103881836, 0.040547168731689454, 0.04023091125488281, 0.040089599609375, 0.040312416076660154, 0.041940673828125, 0.04206252670288086, 0.04040256118774414, 0.04070646286010742, 0.04046233749389649, 0.040343551635742186, 0.04044800186157226, 0.04010598373413086, 0.040052734375, 0.04001574325561524, 0.04039443206787109, 0.0401228141784668, 0.04057702255249023, 0.040804126739501956, 0.04062844848632813, 0.04014694213867188, 0.0416255989074707, 0.041981952667236325, 0.040809921264648434, 0.04065951919555664, 0.04046976089477539, 0.040628990173339846, 0.0404213752746582, 0.04047872161865235, 0.0403394546508789, 0.04017926406860352, 0.040411487579345706, 0.04009910583496094, 0.04095699310302734, 0.04088332748413086, 0.0407674560546875, 0.04040975952148437, 0.040565567016601564, 0.04009897613525391, 0.040147647857666016, 0.04036214447021484, 0.04004044723510742, 0.04002732849121094, 0.039995296478271485, 0.039973438262939455, 0.0400096321105957, 0.03998764801025391, 0.03985609436035156, 0.0398306884765625, 0.039826465606689454, 0.03992057418823242, 0.0401005744934082, 0.040175552368164065, 0.040382720947265624, 0.04038361740112305, 0.040868736267089846, 0.04086150360107422, 0.040605438232421874, 0.0405016975402832, 0.0405667839050293, 0.04046601486206055, 0.04063273620605469, 0.04089772796630859, 0.04026451110839844, 0.040357887268066404, 0.040353889465332034, 0.0401585922241211, 0.040210559844970704, 0.04062422561645508, 0.04020406341552735, 0.040305183410644534, 0.039923713684082034, 0.03987836837768555, 0.040296192169189456, 0.0401229133605957, 0.04019516754150391, 0.04019398498535156, 0.04060176086425781, 0.04104060745239258, 0.04039075088500976, 0.04023616027832031, 0.04041747283935547, 0.04044790267944336, 0.04032985687255859, 0.040341663360595706, 0.04013071823120117, 0.040381534576416016, 0.040385311126708984, 0.04006038284301758, 0.04011468887329102, 0.04356022262573242, 0.04087472152709961, 0.04102963256835938, 0.04092313766479492, 0.04057292938232422, 0.040581119537353515, 0.04032924652099609, 0.04075721740722656, 0.04040419387817383, 0.04004521560668945, 0.041010238647460937, 0.04047999954223633, 0.04008310317993164, 0.04002361679077148, 0.040239551544189456, 0.04022476959228516, 0.04053923034667969, 0.04000656127929687, 0.04101500701904297, 0.04006940841674805, 0.03994214248657227, 0.04186111831665039, 0.040570079803466795, 0.04043993759155273, 0.04119414520263672, 0.041095169067382815, 0.040664352416992185, 0.04058323287963867, 0.040376991271972654, 0.04065622329711914, 0.04014352035522461, 0.040013824462890625, 0.04005414581298828, 0.041353855133056644, 0.04001177597045898, 0.039916671752929685, 0.03987955093383789, 0.03977830505371094, 0.040097118377685544, 0.04015785598754883, 0.040350784301757814, 0.039832511901855466, 0.03970816040039062, 0.040108097076416015, 0.03975936126708984, 0.03975980758666992, 0.03990425491333008, 0.040011585235595705, 0.0397305908203125, 0.04022665786743164, 0.03992243194580078, 0.03984918212890625, 0.03978931045532227, 0.03980310440063477, 0.04050281524658203, 0.039842273712158205, 0.039809024810791016, 0.04008755111694336, 0.04005068969726563, 0.04004355239868164, 0.04001276779174805, 0.0398699836730957, 0.03996902465820312, 0.045717727661132815, 0.04005068969726563, 0.040130561828613284, 
0.03986636734008789, 0.03994214248657227, 0.04015625762939453, 0.03981609725952148, 0.04132044982910156, 0.04105827331542969, 0.04047248077392578]",tokens/s,24.785795627085953,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,4372.512768,4566.482944,0.0,4188.012544,4187.049984,s,1,10.332669921875,10.332669921875,0.0,10.332669921875,10.332669921875,10.332669921875,10.332669921875,[10.332669921875],,kWh,0.00010267034029583305,1.1317863835509072e-05,3.084280245199992e-05,0.00014483100658334205,,MB,4080.443392,4962.844672,0.0,4555.014144,4514.269184,s,10,7.865827026367188,0.7865827026367188,0.002886280729514126,0.7850776062011718,0.7900420410156249,0.7905072570800781,0.7908794299316406,"[0.782828369140625, 0.7894422607421875, 0.7844773559570313, 0.7850126953125, 0.7840709228515625, 0.7897021484375, 0.7842396240234375, 0.7899386596679687, 0.7851425170898437, 0.7909724731445312]",tokens/s,325.4584662767914,kWh,2.2918281938461133e-05,2.5274666805281998e-06,1.5180375392153848e-05,4.062612401114318e-05,tokens/kWh,6301364.115606568,MB,4093.394944,4979.621888,0.0,4571.79136,4514.271744,s,10,467.798828125,46.7798828125,0.03317416613981883,46.7930546875,46.8095484375,46.81011796875,46.81057359375,"[46.7034296875, 46.74363671875, 46.7610078125, 46.77698046875, 46.78384375, 46.80287109375, 46.802265625, 46.809421875, 46.80468359375, 46.8106875]",tokens/s,1.3467327451954592,kWh,0.0013654926656507058,0.000150622730472546,0.0009080636687580463,0.002424179064881298,tokens/kWh,25988.179220203292,,s,630,467.7878660888669,0.7425204223632813,0.0006844989959605742,0.7426236572265625,0.7431574462890624,0.7433347930908203,0.7435878228759765,"[0.7409111938476562, 0.740742431640625, 0.7414298706054687, 0.74062060546875, 0.7404442138671875, 0.7408536987304688, 0.7409601440429687, 0.7411483764648438, 0.74078662109375, 0.7409418334960938, 0.7408561401367187, 0.7414207153320312, 0.7407407836914063, 0.7410364379882812, 0.7412715454101563, 0.7415802612304687, 0.7406494750976562, 0.7412059936523437, 0.7412817993164063, 0.741195068359375, 0.7406961669921875, 0.7416878662109375, 0.740632080078125, 0.7417431030273437, 0.7411015625, 0.741113525390625, 0.740952392578125, 0.7412178955078125, 0.7411934204101562, 0.7409158935546875, 0.741591064453125, 0.7417518920898437, 0.740760498046875, 0.7420047607421875, 0.7417855834960938, 0.741658203125, 0.7413723754882813, 0.7413206787109375, 0.7414326171875, 0.74105517578125, 0.7418880004882813, 0.7414899291992187, 0.7414234619140625, 0.7416295166015625, 0.7411597290039063, 0.7413309326171875, 0.741670166015625, 0.7417576904296875, 0.7414681396484375, 0.7412572021484375, 0.7414517822265625, 0.7421171264648437, 0.7413455200195312, 0.741306396484375, 0.74177490234375, 0.7413662109375, 0.7415738525390625, 0.7421973266601563, 0.741670654296875, 0.7415838623046875, 0.742012939453125, 0.7419064331054688, 0.7414863891601563, 0.7411098022460938, 0.7416702880859375, 0.7419783935546875, 0.7411612548828125, 0.7413350219726562, 0.7418200073242187, 0.741941650390625, 0.7415685424804688, 0.7422476196289063, 0.7416410522460938, 0.7412612915039063, 0.7419465942382812, 0.7418907470703126, 0.7419454345703125, 0.7413104858398437, 0.7418941650390625, 0.741507080078125, 0.7417914428710938, 0.7418411865234374, 0.74210302734375, 0.7418203735351563, 0.7427645874023437, 0.7417232055664063, 0.7411324462890625, 0.7418697509765625, 0.7416817016601562, 0.742033203125, 0.7417816772460938, 0.7421992797851562, 0.7420682373046875, 0.7422382202148438, 0.7418982543945313, 0.742023193359375, 0.74232763671875, 0.7421692504882812, 0.7418585205078125, 0.7418699951171875, 0.74219482421875, 0.7417301025390625, 0.741923583984375, 0.7422916259765625, 0.74202734375, 0.7420128784179687, 
0.7420436401367188, 0.74224853515625, 0.7418878173828125, 0.7422157592773437, 0.7422811889648437, 0.7418238525390625, 0.741875732421875, 0.7418477172851563, 0.7424307250976563, 0.7423569946289063, 0.7418634033203125, 0.7422015991210937, 0.7421802978515625, 0.741806396484375, 0.742371337890625, 0.742463623046875, 0.7422913208007812, 0.742451171875, 0.7423262329101562, 0.7423321533203125, 0.7417332763671876, 0.7420638427734375, 0.7420910034179687, 0.7422750854492187, 0.7414292602539062, 0.7421375732421875, 0.742060302734375, 0.7421511840820313, 0.7419559936523438, 0.7420770263671875, 0.7415540771484375, 0.741965087890625, 0.7424330444335937, 0.7415834350585937, 0.74231396484375, 0.741759033203125, 0.7421541748046875, 0.742, 0.7415894165039062, 0.7419058227539063, 0.7423639526367187, 0.7421171875, 0.7419309692382813, 0.7422015380859375, 0.742751953125, 0.7422569580078126, 0.7420476684570313, 0.7418409423828125, 0.742561767578125, 0.7424368896484375, 0.742372802734375, 0.7426729736328125, 0.7423775024414062, 0.7418040161132813, 0.742319580078125, 0.7422195434570312, 0.7423568725585937, 0.7422083740234375, 0.7421634521484375, 0.742289794921875, 0.7424149169921875, 0.7425884399414062, 0.7427276611328125, 0.74241845703125, 0.7423897705078125, 0.7428956298828125, 0.7419310302734375, 0.742454345703125, 0.7419319458007813, 0.7426245727539063, 0.7416572875976563, 0.742635498046875, 0.7424710693359375, 0.7423289184570312, 0.742068115234375, 0.7431066284179687, 0.7425264892578125, 0.7424691162109375, 0.7425054931640624, 0.7422279663085938, 0.7425269775390625, 0.7424140625, 0.7426787719726563, 0.7426966552734375, 0.7415352172851563, 0.7422392578125, 0.7424102172851562, 0.742135009765625, 0.7422636108398437, 0.7420353393554687, 0.74172021484375, 0.74231103515625, 0.7420568237304688, 0.7420620727539062, 0.7426764526367188, 0.742227783203125, 0.7420909423828125, 0.7422996215820312, 0.742192626953125, 0.7425576171875, 0.7425132446289062, 0.7418695678710937, 0.7424423217773437, 0.7422299194335937, 0.7424633178710938, 0.7417919311523438, 0.742465576171875, 0.7422327270507812, 0.7426171264648438, 0.742260009765625, 0.7421241455078125, 0.7425425415039062, 0.7420343017578125, 0.742509765625, 0.7425892944335938, 0.7424696044921875, 0.7419692993164062, 0.7424620971679687, 0.742371337890625, 0.7421785888671875, 0.7423941040039063, 0.7422993774414063, 0.7428602905273437, 0.7424945068359375, 0.7421895141601562, 0.7425863647460937, 0.7418142700195313, 0.7424737548828125, 0.7430901489257813, 0.7421828002929688, 0.7430094604492188, 0.7428085327148437, 0.7429366455078125, 0.7423128662109375, 0.7424125366210937, 0.7427161254882813, 0.7429427490234375, 0.7427215576171875, 0.7429300537109375, 0.7427828369140625, 0.7426687622070313, 0.7426682739257813, 0.7428546752929688, 0.7472858276367188, 0.7425925903320313, 0.7426705932617188, 0.7421051025390625, 0.7418914184570312, 0.7425516967773438, 0.7425357055664062, 0.7420498046875, 0.7424694213867188, 0.7420654907226563, 0.7424951171875, 0.7422190551757812, 0.7421734008789063, 0.7426744384765624, 0.7417849731445313, 0.7421753540039062, 0.7425023803710937, 0.7420511474609375, 0.7422849731445312, 0.7427368774414063, 0.7424122924804688, 0.7424389038085938, 0.7426638793945313, 0.7428917846679688, 0.742703125, 0.7427176513671875, 0.7426803588867188, 0.7424163818359375, 0.742846435546875, 0.7427088623046875, 0.7428919067382812, 0.7428956298828125, 0.7427333984375, 0.7425970458984374, 0.7429732666015625, 0.7428092041015625, 0.7422715454101563, 0.7426232299804687, 0.7429365234375, 
0.7422946166992187, 0.7425728759765625, 0.7430309448242187, 0.7424307250976563, 0.7419351196289062, 0.7428231201171875, 0.7425114135742188, 0.74288330078125, 0.742803466796875, 0.7427543334960938, 0.7422457885742187, 0.7433970947265625, 0.7424357299804687, 0.7428424072265625, 0.7427125244140625, 0.74261572265625, 0.7431414184570313, 0.7426329345703125, 0.7430205078125, 0.7426730346679687, 0.7428487548828125, 0.7423252563476562, 0.7427470703125, 0.7431675415039063, 0.742809814453125, 0.7430718383789062, 0.742619140625, 0.7426431274414063, 0.7423594970703125, 0.7428670654296875, 0.7424796752929688, 0.7430535278320313, 0.7420407104492187, 0.7421405639648437, 0.7426275634765624, 0.7426573486328125, 0.7423167114257813, 0.7425001831054687, 0.7434381713867187, 0.7422440185546875, 0.7427672119140625, 0.74290087890625, 0.742628173828125, 0.742381591796875, 0.742920166015625, 0.7431004028320313, 0.742475341796875, 0.7425416870117187, 0.7425712890625, 0.7420874633789063, 0.743067626953125, 0.7426334838867188, 0.7430707397460937, 0.7431751708984375, 0.7429895629882812, 0.7426439819335937, 0.74296728515625, 0.7430427856445313, 0.7428402099609375, 0.74272802734375, 0.7427555541992188, 0.7429782104492187, 0.7431640014648437, 0.7428445434570312, 0.7430631713867187, 0.7427684326171875, 0.7425909423828125, 0.742961181640625, 0.7423259887695313, 0.74312060546875, 0.7428482055664063, 0.7428569946289062, 0.7432578125, 0.7426240844726563, 0.743041015625, 0.742920166015625, 0.7431248779296875, 0.7429837646484375, 0.7431264038085937, 0.7429137573242187, 0.743111572265625, 0.7432763061523437, 0.7424658203125, 0.7430245971679688, 0.7433277587890625, 0.7429918823242188, 0.7425571899414063, 0.7433282470703125, 0.7428646850585937, 0.743037109375, 0.7425703125, 0.7429478759765625, 0.7422984619140625, 0.742576171875, 0.7425693359375, 0.7424683227539063, 0.7430922241210938, 0.742842529296875, 0.7428029174804688, 0.7423410034179687, 0.7428628540039063, 0.7429490356445313, 0.742640625, 0.7428678588867188, 0.7427801513671874, 0.7429978637695313, 0.7429356689453125, 0.7426107788085937, 0.7426436767578125, 0.742823974609375, 0.7427727661132812, 0.7442432250976563, 0.7429447631835937, 0.7430430908203125, 0.7428211059570312, 0.7427387084960938, 0.7429916381835937, 0.742849853515625, 0.7425873413085937, 0.742582275390625, 0.7429816284179688, 0.743125, 0.742664306640625, 0.7427764892578125, 0.7429058837890625, 0.743031005859375, 0.7429959716796875, 0.742522705078125, 0.7427412719726563, 0.7431339111328125, 0.7428846435546875, 0.7428405151367188, 0.7426342163085937, 0.7431577758789063, 0.742472900390625, 0.7430396728515625, 0.742861083984375, 0.7432180786132813, 0.7428720703125, 0.7430491943359375, 0.7427246704101562, 0.743373779296875, 0.7430021362304687, 0.7431200561523438, 0.7430538330078125, 0.7428633422851563, 0.7426433715820312, 0.7431026611328125, 0.742940673828125, 0.7435852661132812, 0.7429833984375, 0.7430778198242187, 0.7431577758789063, 0.7431532592773438, 0.7425741577148437, 0.7428815307617187, 0.7427513427734375, 0.7430576171875, 0.7426541137695313, 0.7425327758789062, 0.7432507934570313, 0.7426170654296875, 0.7427522583007813, 0.7428915405273437, 0.7424327392578125, 0.7430390014648437, 0.7426314086914062, 0.7429222412109375, 0.7431489868164063, 0.7427958374023438, 0.7436157836914062, 0.7425665893554687, 0.7428231201171875, 0.742515380859375, 0.7431558227539062, 0.7427317504882812, 0.7427861938476562, 0.7427708740234376, 0.7428307495117188, 0.7424716796875, 0.7424383544921875, 0.7432681274414062, 0.7425994262695312, 
0.7430263671875, 0.7423818359375, 0.7434302368164063, 0.7426638793945313, 0.7433649291992187, 0.7426434326171875, 0.7430082397460938, 0.7428876953125, 0.7429345092773437, 0.7427215576171875, 0.7425567016601563, 0.7435888671875, 0.742903564453125, 0.7430648193359375, 0.7429068603515625, 0.7430791015625, 0.7429815673828125, 0.7432242431640625, 0.743103759765625, 0.7426015014648437, 0.7428159790039063, 0.7433822631835938, 0.7434183959960937, 0.7428648681640625, 0.7430839233398437, 0.7431104736328125, 0.7426787719726563, 0.7426023559570313, 0.7431417846679688, 0.743210693359375, 0.7427914428710938, 0.7485798950195313, 0.7432222290039062, 0.7423714599609375, 0.7427328491210937, 0.7425728759765625, 0.742863037109375, 0.742371337890625, 0.742516845703125, 0.7428583984375, 0.7426624145507813, 0.7424384155273438, 0.7427845458984375, 0.7433097534179688, 0.74272412109375, 0.7423936157226563, 0.7428265380859375, 0.7431574096679687, 0.7425597534179688, 0.7424810180664062, 0.7430985107421875, 0.7432814331054688, 0.7429212646484376, 0.742916259765625, 0.7430966796875, 0.7423143310546875, 0.7430452270507812, 0.7427455444335938, 0.7430186157226563, 0.742625732421875, 0.7432007446289063, 0.743462890625, 0.742307861328125, 0.742781005859375, 0.7429815063476563, 0.7430794067382812, 0.7428836669921876, 0.74315185546875, 0.7430922241210938, 0.7428587646484375, 0.7430697021484375, 0.7426969604492187, 0.7434301147460938, 0.74296630859375, 0.743320556640625, 0.7429058837890625, 0.7432684326171874, 0.7429072265625, 0.7427815551757813, 0.7426837768554687, 0.7428739624023437, 0.7429467163085938, 0.7428485717773438, 0.7434170532226563, 0.7433401489257813, 0.7428226928710937, 0.7428097534179687, 0.7429998168945312, 0.743096435546875, 0.7431119384765625, 0.7429306640625, 0.7432291259765625, 0.7430718383789062, 0.7435374145507813, 0.7433564453125, 0.7432493896484375, 0.7426903686523437, 0.7428428344726562, 0.7428362426757813, 0.7431109008789063, 0.7427963256835938, 0.7429578247070312, 0.7426099243164063, 0.7430643920898438, 0.7425025634765625, 0.742628662109375, 0.7431317138671875, 0.74294287109375, 0.7432335205078126, 0.742649658203125, 0.7429142456054687, 0.7428546752929688, 0.7432407836914062, 0.7422105102539063, 0.7429706420898438, 0.7433489990234375, 0.7431597900390625, 0.742063720703125, 0.7429671020507812, 0.7425288696289063, 0.7424314575195312, 0.7430123291015625, 0.7431261596679688, 0.743260009765625, 0.743404541015625, 0.7429522705078125, 0.7430451049804687, 0.7432089233398438, 0.7428690795898437, 0.7429454956054687, 0.74342822265625, 0.74301123046875, 0.7425867919921875, 0.7429883422851562, 0.7435775756835937, 0.742518798828125, 0.7427189331054688, 0.7433569946289063, 0.7431270141601563, 0.7429991455078125, 0.7428924560546875, 0.7433113403320313, 0.743530517578125, 0.7433584594726562, 0.7430082397460938, 0.743664794921875, 0.7432050170898438, 0.7432547607421875, 0.7431248779296875, 0.7426888427734375, 0.7428217163085937, 0.7434033203125, 0.7434325561523437, 0.7432396850585937, 0.743583740234375, 0.743044921875, 0.7427442626953125, 0.743669189453125, 0.7433466796875]",tokens/s,1.3467643042291242,, 
4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent 
call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,2050.514944,2155.741184,0.0,1753.219072,1633.407488,s,1,9.422015625,9.422015625,0.0,9.422015625,9.422015625,9.422015625,9.422015625,[9.422015625],,kWh,5.378010735415727e-05,5.925084828566777e-06,1.5450290137997147e-05,7.51554823207212e-05,,MB,1946.54208,2241.724416,0.0,1824.52224,1762.836992,s,10,0.8206128005981446,0.08206128005981446,0.000406875530544768,0.08227532958984375,0.08245298385620117,0.08247095680236817,0.08248533515930176,"[0.08142982482910156, 0.08195062255859376, 0.08238175964355468, 0.08130518341064454, 0.08228915405273438, 0.0822955551147461, 0.08244898986816407, 0.08226150512695313, 0.08176127624511718, 0.08248892974853515]",tokens/s,3119.6198720444236,kWh,2.4314441579889094e-06,2.681156259530379e-07,1.6134103816363855e-06,4.312970165578333e-06,tokens/kWh,59355847.6344509,MB,1953.910784,2430.468096,0.0,2013.26592,1853.210112,s,10,13.407829223632813,1.3407829223632812,0.015119034435602554,1.3336165161132811,1.3607123657226563,1.3631648986816407,1.3651269250488283,"[1.3356656494140624, 1.3309630126953125, 1.3246400146484374, 1.32935205078125, 1.3315673828125, 1.32207275390625, 1.3532647705078125, 1.365617431640625, 1.3601673583984375, 1.354518798828125]",tokens/s,46.987471983127136,kWh,3.919810745076143e-05,4.323188169288976e-06,2.1141996711563042e-05,6.466329233161345e-05,tokens/kWh,974277.6423587656,,s,630,13.404014467239405,0.021276213440062506,0.0004500678012159599,0.021182144165039062,0.021725183486938478,0.02193448667526245,0.023002092094421386,"[0.021499135971069335, 0.021433151245117188, 0.021253631591796874, 0.02114009666442871, 0.021184192657470704, 0.021190975189208986, 0.021065664291381837, 0.02102070426940918, 0.020989984512329102, 0.020955135345458984, 0.02101862335205078, 
0.020785152435302736, 0.020824064254760744, 0.020858879089355468, 0.02087321662902832, 0.02089132881164551, 0.020846912384033203, 0.02083839988708496, 0.020955135345458984, 0.020971168518066408, 0.02087356758117676, 0.020952415466308594, 0.02094761657714844, 0.021409887313842774, 0.021294656753540038, 0.021606752395629883, 0.021501951217651367, 0.021466495513916016, 0.02143631935119629, 0.02140847969055176, 0.021337343215942384, 0.02128767967224121, 0.021153791427612305, 0.02125391960144043, 0.021323999404907226, 0.021206464767456055, 0.021282655715942383, 0.021295520782470705, 0.02114796829223633, 0.021120416641235353, 0.021092960357666016, 0.020954912185668945, 0.02151759910583496, 0.021121183395385743, 0.02130534362792969, 0.021105567932128907, 0.02112006378173828, 0.021010847091674806, 0.020999807357788086, 0.02105833625793457, 0.020935712814331056, 0.02097587203979492, 0.021252832412719726, 0.02154275131225586, 0.021239200592041017, 0.021506175994873047, 0.023293983459472655, 0.021336671829223632, 0.021161983489990235, 0.021147232055664062, 0.020911808013916015, 0.021686784744262694, 0.021154016494750977, 0.021424064636230467, 0.021057600021362304, 0.020703231811523438, 0.020911808013916015, 0.020796831130981446, 0.020908960342407225, 0.020855968475341796, 0.020888256072998046, 0.02079759979248047, 0.02110873603820801, 0.02089779281616211, 0.020916223526000977, 0.02096691131591797, 0.021058048248291016, 0.02099932861328125, 0.02096553611755371, 0.020988607406616212, 0.020976736068725587, 0.021089183807373048, 0.021327072143554688, 0.02123036766052246, 0.021391359329223633, 0.021216863632202147, 0.021277088165283203, 0.021747711181640626, 0.0240064640045166, 0.021260480880737304, 0.021126623153686522, 0.021137247085571288, 0.021026687622070314, 0.020892255783081053, 0.021024896621704103, 0.020883295059204103, 0.02100067138671875, 0.021082944869995117, 0.020831199645996095, 0.021493696212768556, 0.021263744354248045, 0.021872831344604493, 0.021048992156982423, 0.021070304870605468, 0.021036479949951174, 0.021166656494140627, 0.02098806381225586, 0.02083468818664551, 0.02096726417541504, 0.020920320510864256, 0.020998144149780275, 0.020964448928833007, 0.02106051254272461, 0.020954336166381836, 0.020982559204101563, 0.02103091239929199, 0.020989952087402345, 0.020944896697998046, 0.02109414482116699, 0.021055551528930665, 0.022040800094604494, 0.021241823196411134, 0.021028863906860353, 0.020983327865600587, 0.02101193618774414, 0.020871936798095705, 0.021119552612304686, 0.021024511337280272, 0.020919872283935548, 0.021043487548828125, 0.0209800968170166, 0.021169792175292967, 0.020895679473876952, 0.02093894386291504, 0.020846879959106446, 0.020981760025024415, 0.02096895980834961, 0.020916000366210937, 0.02092310333251953, 0.021015615463256837, 0.021003040313720703, 0.02097577667236328, 0.020963327407836914, 0.020924415588378906, 0.02086502456665039, 0.020856416702270508, 0.02091868782043457, 0.020899839401245117, 0.020829599380493165, 0.020679040908813475, 0.02066966438293457, 0.020706304550170897, 0.020758527755737305, 0.020739328384399413, 0.020918495178222658, 0.02096348762512207, 0.021097856521606444, 0.02125721549987793, 0.021078016281127928, 0.020955135345458984, 0.020926464080810548, 0.020968448638916014, 0.021107648849487303, 0.02126422309875488, 0.021944543838500977, 0.021279808044433593, 0.021413888931274414, 0.02119980812072754, 0.02114905548095703, 0.02112575912475586, 0.021714912414550782, 0.02158095932006836, 0.02115862464904785, 0.02096463966369629, 
0.021152320861816405, 0.021000543594360353, 0.020903839111328124, 0.020869375228881836, 0.02094060707092285, 0.021274240493774414, 0.021012800216674805, 0.0209715518951416, 0.020840192794799806, 0.020836639404296874, 0.020770816802978515, 0.020824064254760744, 0.020746240615844725, 0.020918272018432618, 0.021097600936889647, 0.021372928619384765, 0.02120460891723633, 0.02105792045593262, 0.021124095916748048, 0.02105855941772461, 0.021157119750976564, 0.021021440505981447, 0.02096291160583496, 0.020930976867675782, 0.021050432205200195, 0.021089439392089845, 0.020934431076049805, 0.021198848724365234, 0.021038240432739257, 0.021123647689819336, 0.020967039108276367, 0.02114633560180664, 0.02105881690979004, 0.021872768402099608, 0.021211711883544922, 0.021223424911499023, 0.02127187156677246, 0.021391263961791994, 0.021151615142822267, 0.021113759994506837, 0.022236223220825194, 0.02276211166381836, 0.02127027130126953, 0.02128656005859375, 0.021195871353149414, 0.021156671524047852, 0.021275648117065428, 0.02102617645263672, 0.02115836715698242, 0.021016735076904297, 0.02088755226135254, 0.020817920684814452, 0.020878463745117187, 0.02076915168762207, 0.02102684783935547, 0.020961759567260742, 0.0210402889251709, 0.021160032272338865, 0.021388032913208007, 0.0212541446685791, 0.02118182373046875, 0.021086816787719728, 0.020944927215576174, 0.020846559524536134, 0.021043231964111328, 0.020798912048339845, 0.020823808670043947, 0.020935264587402344, 0.020793344497680662, 0.020799711227416993, 0.02082371139526367, 0.020719263076782228, 0.02074608039855957, 0.02077187156677246, 0.02080748748779297, 0.02083020782470703, 0.020996095657348633, 0.02080073547363281, 0.02128700828552246, 0.021192287445068358, 0.020924928665161133, 0.020938655853271485, 0.020969568252563478, 0.02084828758239746, 0.02086128044128418, 0.02083020782470703, 0.020682752609252928, 0.020791296005249024, 0.0208035831451416, 0.020801279067993166, 0.02089971160888672, 0.02118646430969238, 0.021532447814941406, 0.02120159912109375, 0.021098495483398438, 0.02100239944458008, 0.02096931266784668, 0.02089583969116211, 0.020852447509765625, 0.020799072265625, 0.020785760879516602, 0.02077916717529297, 0.0212739200592041, 0.02172979164123535, 0.020927999496459963, 0.020885440826416017, 0.020935264587402344, 0.020789087295532225, 0.02100649642944336, 0.02299875259399414, 0.0211376953125, 0.021073919296264648, 0.02112291145324707, 0.020948831558227538, 0.02099964714050293, 0.021013343811035156, 0.021012479782104493, 0.021399551391601563, 0.02143027114868164, 0.021618688583374023, 0.021391679763793945, 0.021129247665405273, 0.020958879470825195, 0.02149990463256836, 0.02113859176635742, 0.021010400772094727, 0.021053760528564454, 0.022131071090698243, 0.021042816162109373, 0.020988479614257812, 0.020983808517456053, 0.021137407302856445, 0.021094400405883788, 0.021192703247070312, 0.02115497589111328, 0.021275487899780274, 0.021381120681762695, 0.021290143966674804, 0.021242719650268554, 0.021656991958618164, 0.02124777603149414, 0.02121343994140625, 0.021260671615600586, 0.020983808517456053, 0.02104230308532715, 0.02103500747680664, 0.02114758491516113, 0.021045631408691406, 0.020935232162475587, 0.0209300479888916, 0.021082304000854493, 0.021493631362915037, 0.02088751983642578, 0.020867551803588867, 0.02095238494873047, 0.020900543212890626, 0.020981088638305664, 0.020961664199829103, 0.021046848297119142, 0.02090671920776367, 0.020914176940917968, 0.020707328796386718, 0.020971519470214844, 0.02091334342956543, 
0.02111296081542969, 0.0211231689453125, 0.020961088180541994, 0.021403648376464843, 0.020980159759521486, 0.021090175628662108, 0.020957664489746095, 0.020797279357910155, 0.02078940773010254, 0.02073401641845703, 0.02078745651245117, 0.020713151931762694, 0.02083785629272461, 0.020802080154418947, 0.020813823699951172, 0.020674016952514647, 0.021391584396362306, 0.020990272521972657, 0.020924287796020506, 0.02123081588745117, 0.02102774429321289, 0.020979040145874022, 0.02093244743347168, 0.02099078369140625, 0.02083184051513672, 0.020803552627563476, 0.02085523223876953, 0.020754432678222655, 0.020770816802978515, 0.02115590476989746, 0.021733312606811522, 0.021147232055664062, 0.02098627281188965, 0.02087936019897461, 0.020959232330322267, 0.02102272033691406, 0.020907039642333983, 0.02089241600036621, 0.02103273582458496, 0.020836767196655274, 0.02126473617553711, 0.02163462448120117, 0.02091004753112793, 0.020986656188964843, 0.020987903594970703, 0.02099404716491699, 0.021000192642211913, 0.021437728881835937, 0.020944639205932616, 0.021012832641601562, 0.020991743087768553, 0.020866016387939453, 0.02085660743713379, 0.020987424850463867, 0.020803455352783204, 0.020877248764038087, 0.021259040832519532, 0.021428159713745117, 0.0227574405670166, 0.021765344619750975, 0.021213024139404298, 0.021436416625976562, 0.02135545539855957, 0.022165504455566407, 0.021725183486938478, 0.021646976470947266, 0.021406080245971678, 0.021450143814086914, 0.021445119857788086, 0.021297119140625, 0.02131315231323242, 0.021198720932006837, 0.021645952224731445, 0.02147327995300293, 0.02152448081970215, 0.021379072189331053, 0.021368959426879882, 0.021626752853393556, 0.02146099281311035, 0.021559488296508788, 0.021491519927978514, 0.021559295654296876, 0.02176527976989746, 0.021635936737060546, 0.021746784210205077, 0.02161347198486328, 0.021667072296142578, 0.021606975555419922, 0.021653696060180663, 0.021725183486938478, 0.021534015655517578, 0.02160095977783203, 0.021559295654296876, 0.021778432846069336, 0.022031583786010743, 0.021988128662109373, 0.021716543197631836, 0.021720640182495116, 0.021922687530517578, 0.021558399200439452, 0.021879680633544923, 0.021843967437744142, 0.021831680297851562, 0.021932479858398437, 0.02301919937133789, 0.022435712814331054, 0.02196268844604492, 0.021699071884155274, 0.0216944637298584, 0.021632896423339844, 0.021528703689575195, 0.021512128829956054, 0.021635135650634765, 0.02164531135559082, 0.021936128616333008, 0.021793983459472657, 0.02185651206970215, 0.02173561668395996, 0.023003456115722656, 0.021611583709716796, 0.021529600143432616, 0.02147123146057129, 0.02152448081970215, 0.02147327995300293, 0.0216760311126709, 0.021623104095458985, 0.02142790412902832, 0.021547008514404296, 0.02154287910461426, 0.021401632308959962, 0.021456895828247072, 0.021489664077758788, 0.021560447692871094, 0.021587135314941407, 0.02123129653930664, 0.021346303939819337, 0.02146713638305664, 0.02195039939880371, 0.022417472839355468, 0.021579616546630858, 0.021717151641845702, 0.021960704803466798, 0.021495391845703125, 0.021285120010375976, 0.021399391174316405, 0.021411903381347658, 0.02128102493286133, 0.021236831665039063, 0.021400447845458984, 0.02141801643371582, 0.021820608139038085, 0.021482303619384767, 0.021425952911376955, 0.021280511856079102, 0.021449184417724608, 0.02145280075073242, 0.02141798400878906, 0.021356544494628905, 0.02141798400878906, 0.02135024070739746, 0.021399168014526366, 0.02154550361633301, 0.02296339225769043, 0.022694623947143555, 
0.021915327072143553, 0.02174198341369629, 0.021668415069580078, 0.021722944259643554, 0.02154719924926758, 0.021624607086181642, 0.021571807861328125, 0.021671199798583986, 0.021797599792480468, 0.021581216812133788, 0.021498464584350587, 0.021552160263061525, 0.02159715270996094, 0.025251007080078124, 0.022124832153320312, 0.021666336059570312, 0.022163455963134765, 0.021661792755126953, 0.021749664306640625, 0.021708351135253906, 0.021504383087158203, 0.02156662368774414, 0.02167491149902344, 0.02166169548034668, 0.021595455169677733, 0.021506752014160156, 0.021604352951049805, 0.02161177635192871, 0.021422367095947265, 0.021569887161254884, 0.023205535888671875, 0.024051328659057618, 0.021455968856811523, 0.02147635269165039, 0.021354591369628906, 0.02135862350463867, 0.021444896697998046, 0.021383743286132812, 0.02134182357788086, 0.021298688888549806, 0.021344928741455077, 0.02118560028076172, 0.021019071578979493, 0.02098601531982422, 0.021227872848510743, 0.021182464599609374, 0.021093536376953125, 0.021027679443359374, 0.0210882568359375, 0.021065376281738282, 0.02111712074279785, 0.02113961601257324, 0.020969472885131835, 0.021130912780761717, 0.02146544075012207, 0.02145894432067871, 0.02142416000366211, 0.021493728637695313, 0.021257375717163084, 0.021294015884399414, 0.02149295997619629, 0.021309759140014647, 0.021386720657348632, 0.022012832641601563, 0.021407487869262696, 0.021578367233276368, 0.02151318359375, 0.02142438316345215, 0.02145916748046875, 0.021399871826171875, 0.021302879333496092, 0.021698976516723634, 0.02139664077758789, 0.021431264877319337, 0.02170662307739258, 0.021024768829345702, 0.020841728210449217, 0.020938783645629882, 0.02116067123413086, 0.020863231658935548, 0.02101740837097168, 0.020843456268310547, 0.020733951568603515, 0.020826175689697267, 0.021208736419677736, 0.021358879089355468, 0.02149692726135254, 0.021494688034057616, 0.02149737548828125, 0.021520864486694335, 0.02176736068725586, 0.02144748878479004, 0.021381120681762695, 0.021436416625976562, 0.021403648376464843, 0.021360639572143555, 0.021381120681762695, 0.02145280075073242, 0.02177145576477051, 0.021725439071655275, 0.021659679412841797, 0.02170319938659668, 0.021876735687255858, 0.021942272186279296, 0.021538816452026367, 0.021585920333862304, 0.021972768783569335, 0.021806432723999025, 0.02173632049560547, 0.021661184310913087, 0.02157119941711426, 0.02160220718383789, 0.021584863662719726, 0.021595327377319336, 0.021477888107299805, 0.02136000061035156, 0.021515199661254883, 0.021590015411376954, 0.021595359802246094, 0.022023040771484374, 0.02171446418762207, 0.02212451171875, 0.02175347137451172, 0.02158883285522461, 0.02156332778930664, 0.02167807960510254, 0.02186444854736328, 0.021609760284423827]",tokens/s,47.000844526076634,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File 
""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File 
""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,3728.146432,4375.642112,0.0,3997.171712,3878.257152,s,1,10.0629853515625,10.0629853515625,0.0,10.0629853515625,10.0629853515625,10.0629853515625,10.0629853515625,[10.0629853515625],,kWh,9.067295107500967e-05,9.994629891378651e-06,2.7987522389999464e-05,0.00012865510335638777,,MB,1964.519424,4862.181376,0.0,4454.350848,4371.844096,s,10,6.671113952636719,0.6671113952636719,0.0009954266792010686,0.6670467224121094,0.6679490539550781,0.6684646697998047,0.6688771624755859,"[0.66518603515625, 0.6659949951171875, 0.66783447265625, 0.6668709716796875, 0.6672155151367187, 0.6689802856445313, 0.6677422485351563, 0.6668779296875, 0.666758056640625, 0.6676534423828125]",tokens/s,383.7440070991704,kWh,1.9423147584895633e-05,2.1412516791791457e-06,1.2930131871874938e-05,3.4494531135949714e-05,tokens/kWh,7421466.2895707665,MB,1959.440384,5063.507968,0.0,4655.67744,4530.328576,s,10,392.1630078125,39.21630078125,0.020344922816255443,39.211791015624996,39.23774375,39.25023125,39.26022125,"[39.19016015625, 39.1965625, 39.20012890625, 39.20765234375, 39.26271875, 39.23496875, 39.22264453125, 39.2159296875, 39.2257734375, 39.20646875]",tokens/s,1.606474826664972,kWh,0.0011421220202313548,0.00012598509379877328,0.0007595156805287254,0.0020276227945588534,tokens/kWh,31070.86789962174,,s,630,392.15904455566374,0.6224746738978795,0.0008655115987336269,0.6223991088867188,0.6236659790039062,0.6239581451416015,0.6245922937011719,"[0.6200103759765625, 0.6197701416015625, 0.6201177368164063, 0.6205819091796875, 0.6210646362304687, 0.6203760986328125, 0.6204375610351562, 0.6206837158203125, 0.6204436645507813, 0.6202265014648437, 0.6222623901367188, 0.621433837890625, 0.6215280151367187, 0.62188134765625, 0.62137060546875, 0.6221484985351563, 0.62211083984375, 0.621301513671875, 0.621649658203125, 0.6213512573242187, 0.62057861328125, 0.6210986328125, 0.6208408813476562, 0.6229445190429688, 0.6221722412109375, 0.6218251953125, 0.6227607421875, 0.623111572265625, 0.6230137939453125, 0.6239930419921875, 0.6230023193359375, 0.6225838012695313, 0.622202880859375, 0.6215720825195312, 0.6214387817382813, 0.6213839111328125, 0.6214364013671875, 0.6218626708984375, 0.622228271484375, 0.62367333984375, 0.6235811767578125, 0.6236504516601562, 0.6236121215820313, 0.6237943115234375, 0.6235645141601562, 0.623310791015625, 0.6226824340820313, 0.6235648193359375, 0.6227107543945313, 0.6225997924804687, 0.6235303955078125, 0.622992919921875, 
0.622903564453125, 0.6229484252929688, 0.6223258056640625, 0.623763427734375, 0.6220636596679687, 0.6224998168945313, 0.6232493896484375, 0.6211295776367187, 0.6223375244140625, 0.621364990234375, 0.6211470336914062, 0.6223689575195313, 0.622508056640625, 0.6228316040039062, 0.6213529663085937, 0.6221004638671875, 0.6214983520507813, 0.6214381713867188, 0.6226903686523437, 0.6209523315429688, 0.6207808227539062, 0.6214124145507812, 0.620971923828125, 0.6218842163085937, 0.6220738525390626, 0.621828125, 0.621791259765625, 0.6213467407226563, 0.6219163818359374, 0.6220556030273438, 0.6224869384765624, 0.6239113159179688, 0.6213734130859375, 0.6217617797851562, 0.6224534912109375, 0.6221947021484375, 0.6215208740234375, 0.6229728393554688, 0.622939697265625, 0.6218276977539062, 0.6214949340820313, 0.6219102783203125, 0.6232936401367187, 0.6222221069335937, 0.6208645629882813, 0.6215435791015625, 0.6210117797851562, 0.6210989379882812, 0.6213345336914062, 0.6213837890625, 0.6225961303710937, 0.62291552734375, 0.6242017211914063, 0.6238248901367187, 0.6230337524414062, 0.6229817504882813, 0.6226165771484375, 0.62198291015625, 0.6223634643554687, 0.6218589477539063, 0.6218522338867187, 0.621098876953125, 0.622270263671875, 0.6222772216796875, 0.6219161376953125, 0.621995849609375, 0.621876953125, 0.6222908935546875, 0.6215070190429688, 0.623447265625, 0.6230023803710938, 0.6239747314453125, 0.6237037353515625, 0.6231859130859375, 0.6228176879882813, 0.6215496826171875, 0.6214759521484375, 0.6214410400390625, 0.6216576538085937, 0.6214496459960938, 0.6215905151367187, 0.6229995727539063, 0.6222479248046875, 0.621897705078125, 0.6223482666015625, 0.62142578125, 0.6226810302734375, 0.6223095092773437, 0.6218217163085937, 0.6223750610351563, 0.6216907958984375, 0.62226220703125, 0.6223396606445313, 0.6222217407226562, 0.6226906127929688, 0.6216229858398438, 0.6219797973632812, 0.622447998046875, 0.6220122680664063, 0.6219999389648437, 0.6224759521484375, 0.6215211181640625, 0.6214738159179688, 0.6216309204101562, 0.6225701293945313, 0.6222386474609375, 0.6228568115234375, 0.6218812866210938, 0.6217998657226562, 0.6232882080078125, 0.6228809204101563, 0.6232711181640626, 0.6232659301757812, 0.6227479248046875, 0.6222993774414063, 0.6229536743164062, 0.622607666015625, 0.6231890869140625, 0.623139404296875, 0.6220667114257813, 0.6217567749023437, 0.6218615112304687, 0.6213767700195313, 0.6219066162109375, 0.621654052734375, 0.6217686767578126, 0.6228472900390625, 0.62200439453125, 0.6217765502929687, 0.6235244750976563, 0.622743896484375, 0.621717529296875, 0.62225, 0.6226636962890625, 0.6220015258789062, 0.6222670288085937, 0.62209228515625, 0.62174755859375, 0.621290283203125, 0.6210610961914063, 0.6216672973632813, 0.6211151123046875, 0.62069580078125, 0.6213017578125, 0.6216365966796875, 0.6210303955078125, 0.6210828247070312, 0.6210349731445313, 0.6217708129882813, 0.6215928955078125, 0.62160693359375, 0.6215065307617188, 0.621294921875, 0.6211319580078125, 0.6208429565429687, 0.6212675170898437, 0.62142626953125, 0.6217486572265625, 0.6214430541992187, 0.6210413818359375, 0.6219266967773438, 0.622487548828125, 0.6214102783203125, 0.6214717407226562, 0.622182373046875, 0.6214000854492188, 0.6219857788085937, 0.6214390258789062, 0.6237880249023438, 0.6231531372070312, 0.6232815551757812, 0.6228436279296875, 0.62311083984375, 0.6232412719726562, 0.6218281860351562, 0.62165576171875, 0.62274609375, 0.6230274658203125, 0.6233749389648438, 0.6227387084960937, 0.62357373046875, 0.6229483642578125, 
0.6236651611328125, 0.6231387939453125, 0.6232576293945312, 0.6238064575195312, 0.6240092163085937, 0.623517822265625, 0.6245886840820313, 0.6245232543945313, 0.6244086303710937, 0.6249613647460938, 0.623146484375, 0.6227442626953125, 0.6224465942382813, 0.6225262451171875, 0.6226920776367187, 0.6234403686523438, 0.6223196411132812, 0.62211279296875, 0.621302978515625, 0.6227747802734375, 0.6231701049804688, 0.6235216064453125, 0.6242981567382813, 0.6236590576171875, 0.6236607055664063, 0.624884033203125, 0.6239848022460938, 0.62408642578125, 0.6232537841796875, 0.6225061645507812, 0.6219195556640625, 0.6220023803710938, 0.622193115234375, 0.6218832397460937, 0.622002197265625, 0.6220484619140625, 0.621796630859375, 0.6219608154296875, 0.6227579345703125, 0.6230382690429688, 0.6230971069335938, 0.6237848510742188, 0.6233681640625, 0.6225735473632813, 0.6229154052734375, 0.6248265380859375, 0.6229381103515625, 0.6229155883789063, 0.622581787109375, 0.6224097290039062, 0.6226590576171875, 0.6229775390625, 0.6237630004882813, 0.6239482421875, 0.6238617553710938, 0.6235765991210938, 0.6235120849609375, 0.623795654296875, 0.6234913330078125, 0.622713134765625, 0.6221495971679688, 0.6220308227539062, 0.6224120483398438, 0.6234224853515625, 0.6228380126953125, 0.6244541625976563, 0.6237429809570313, 0.6242688598632813, 0.6238717651367187, 0.623831787109375, 0.6240173950195312, 0.6237614135742188, 0.6245928955078125, 0.6245908203125, 0.62287255859375, 0.6223831176757812, 0.6224219970703125, 0.6238592529296875, 0.623743408203125, 0.624645263671875, 0.6239662475585938, 0.622827392578125, 0.6229811401367188, 0.6220188598632812, 0.6238758544921875, 0.6241565551757813, 0.6236318969726562, 0.6231119995117187, 0.6219984741210938, 0.622020751953125, 0.6231390991210938, 0.6225299682617188, 0.6230388793945313, 0.6222376708984375, 0.6226309204101562, 0.6222622680664063, 0.6220226440429687, 0.6226022338867188, 0.6219960327148437, 0.6217298583984375, 0.6219807739257812, 0.6221729736328125, 0.6236580200195313, 0.6226025390625, 0.6238562622070313, 0.6229373779296875, 0.6232030639648437, 0.6226739501953125, 0.6230437622070313, 0.6224904174804687, 0.621959228515625, 0.6215430908203124, 0.6218031005859375, 0.62206005859375, 0.6217870483398438, 0.6218916015625, 0.6230121459960938, 0.6222722778320312, 0.6226165161132813, 0.6225267944335937, 0.6229401245117188, 0.6235381469726563, 0.623805908203125, 0.6235285034179687, 0.6229237060546875, 0.6232515258789062, 0.623276123046875, 0.6229988403320312, 0.6240543212890625, 0.6237988891601562, 0.6229699096679687, 0.622371826171875, 0.62280908203125, 0.6232158203125, 0.6228017578125, 0.6229827270507813, 0.62249560546875, 0.6228301391601563, 0.6217666625976562, 0.6224403076171875, 0.6227353515625, 0.6230809326171876, 0.6235040893554687, 0.623515625, 0.623261962890625, 0.6221266479492188, 0.6223284912109375, 0.6214202880859375, 0.6217576904296875, 0.6218575439453125, 0.6223890991210937, 0.6238516235351562, 0.623969482421875, 0.6214234008789062, 0.6214876098632812, 0.621227783203125, 0.6215134887695313, 0.6225276489257813, 0.6228345336914063, 0.6229075317382813, 0.6235851440429687, 0.6217216186523438, 0.6213507080078124, 0.6221498413085937, 0.6238248901367187, 0.623427734375, 0.6234868774414063, 0.6225546264648437, 0.6222705688476563, 0.6223138427734375, 0.6221107177734375, 0.623091796875, 0.62264111328125, 0.6221414184570313, 0.6228643798828125, 0.6230693969726563, 0.6235441284179688, 0.622635009765625, 0.6219710083007812, 0.62380078125, 0.62267626953125, 0.6227508544921875, 
0.6230059814453125, 0.621990234375, 0.624089111328125, 0.6222274780273438, 0.6224034423828125, 0.6229784545898438, 0.6222505493164062, 0.622751953125, 0.62209228515625, 0.6220062866210937, 0.6222725219726563, 0.6221803588867187, 0.6223031005859375, 0.6222578125, 0.6225883178710937, 0.6229033813476562, 0.6239375610351563, 0.6232567138671875, 0.6232687377929688, 0.6230304565429687, 0.62258154296875, 0.6221129760742188, 0.6221259765625, 0.6225110473632812, 0.6222695922851562, 0.6221068115234375, 0.6221582641601563, 0.6215231323242187, 0.6219796752929687, 0.62226025390625, 0.62123828125, 0.6218137817382813, 0.6220238037109375, 0.622521240234375, 0.623515625, 0.6222418212890625, 0.6231427001953125, 0.622386474609375, 0.6224884643554688, 0.6225634765625, 0.6221185913085937, 0.6217791137695312, 0.621676513671875, 0.6228825073242188, 0.6224960327148438, 0.6222042846679687, 0.6219224853515625, 0.622136962890625, 0.6224452514648438, 0.6224302368164063, 0.622475341796875, 0.6222970581054688, 0.6220874633789063, 0.6237088623046875, 0.6230947875976562, 0.62299462890625, 0.622970703125, 0.6222347412109375, 0.6225335083007812, 0.62200830078125, 0.6229933471679687, 0.6223155517578125, 0.6216693725585938, 0.6226721801757813, 0.6223756713867188, 0.622839599609375, 0.6227332763671874, 0.6231636962890625, 0.623339111328125, 0.6221480102539062, 0.6229912719726562, 0.6224384765625, 0.6230159301757813, 0.622149658203125, 0.6223973999023438, 0.6233170776367187, 0.6224008178710938, 0.6225232543945313, 0.623578857421875, 0.6218077392578125, 0.6247218627929687, 0.6235191650390625, 0.6220989990234375, 0.6221145629882813, 0.6218303833007812, 0.6222553100585938, 0.622135498046875, 0.6215460815429688, 0.6213087158203126, 0.622620849609375, 0.6242772827148437, 0.6232037963867187, 0.6247526245117188, 0.6224877319335937, 0.6222564697265625, 0.6226760864257812, 0.6223031616210938, 0.6220322265625, 0.6223919067382813, 0.6222521362304687, 0.6217685546875, 0.6215347900390625, 0.6217870483398438, 0.6225313720703125, 0.621974853515625, 0.6225928344726562, 0.6223268432617187, 0.6222604370117187, 0.6231044921875, 0.6236356201171875, 0.6232913208007812, 0.6224424438476562, 0.622376953125, 0.6224302368164063, 0.6221639404296875, 0.622761962890625, 0.6221732788085937, 0.6236161499023437, 0.6223853759765625, 0.6218573608398438, 0.62199169921875, 0.6220431518554688, 0.621332763671875, 0.622243896484375, 0.62230322265625, 0.6227742919921875, 0.6223658447265625, 0.6220337524414062, 0.6244816284179687, 0.6226843872070312, 0.6229847412109375, 0.6227628784179687, 0.62247900390625, 0.6219923095703125, 0.6221475830078125, 0.6218950805664063, 0.622811279296875, 0.6216702880859375, 0.6225107421875, 0.6226431274414063, 0.6224752807617188, 0.6241111450195312, 0.6234976806640625, 0.6229844970703124, 0.6231921997070312, 0.6222235107421875, 0.6225997924804687, 0.623913818359375, 0.62384912109375, 0.6238252563476563, 0.6229421997070312, 0.621756591796875, 0.6219512939453125, 0.6219854736328125, 0.6218219604492188, 0.622545166015625, 0.621743896484375, 0.6217643432617187, 0.6226516723632812, 0.622095947265625, 0.622987060546875, 0.6225020141601563, 0.622263916015625, 0.6228427734375, 0.6223524169921875, 0.6226841430664063, 0.623431884765625, 0.6231898193359375, 0.622972900390625, 0.62239111328125, 0.621619384765625, 0.6229232788085938, 0.6225082397460937, 0.6230407104492187, 0.6244061889648438, 0.622408447265625, 0.6222244873046875, 0.6240897216796875, 0.6229627075195312, 0.6230916748046875, 0.6225797119140625, 0.6231428833007813, 0.6217769165039062, 
0.6236446533203125, 0.6240706787109375, 0.6222821044921875, 0.6225885009765625, 0.6225654296875, 0.6230643920898438, 0.62136767578125, 0.62153369140625, 0.6229584350585937, 0.6218322143554688, 0.6215249633789063, 0.6218395385742187, 0.6217871704101563, 0.6217344970703125, 0.6213716430664062, 0.621669677734375, 0.6217459716796875, 0.621306640625, 0.6215516357421875, 0.6213387451171875, 0.6225891723632813, 0.6224488525390625, 0.6220723876953125, 0.6219570922851563, 0.6220472412109375, 0.6218035278320313, 0.6217536010742187, 0.6216425170898437, 0.6220791625976563, 0.6216027221679687, 0.6216016845703125]",tokens/s,1.6064910620991075,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU 
@ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) 
File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,8216.236032,11251.089408,0.0,10848.567296,10616.027648,s,1,14.9294140625,14.9294140625,0.0,14.9294140625,14.9294140625,14.9294140625,14.9294140625,[14.9294140625],,kWh,0.00021945647610833514,2.420022217123049e-05,6.470477398599558e-05,0.00030836147226556124,,MB,3917.08672,11683.10272,0.0,11265.900544,11070.470656,s,10,3.799405975341797,0.3799405975341797,0.0016273646893260915,0.38001599121093754,0.38233244323730464,0.3824092788696289,0.3824707473754883,"[0.3776495361328125, 0.37773464965820314, 0.3810429992675781, 0.3798990173339844, 0.3788070983886719, 0.3805530090332031, 0.37878521728515624, 0.38013296508789063, 0.38248611450195313, 0.3823153686523437]",tokens/s,673.7895388422398,kWh,1.11395795583333e-05,1.2280768663164228e-06,7.375643760592596e-06,1.974330018524232e-05,tokens/kWh,12966423.931058615,MB,3922.272256,11685.199872,0.0,11267.997696,11070.473216,s,10,28.983058593750002,2.8983058593749993,0.005214278858235374,2.900692138671875,2.903626416015625,2.9037448486328126,2.9038395947265627,"[2.89153125, 2.88903857421875, 2.89418017578125, 2.899248046875, 2.894547607421875, 2.90258251953125, 2.90213623046875, 2.90386328125, 2.902330810546875, 2.90360009765625]",tokens/s,21.736836295664645,kWh,8.494653748583237e-05,9.370225894567959e-06,5.6564768502807394e-05,0.00015088153188320773,tokens/kWh,417546.13181397284,,s,630,28.979988956451415,0.0459999824705578,0.00048814968224423415,0.04596897506713867,0.04642744064331055,0.046601284599304196,0.048061772651672365,"[0.04785343933105469, 0.04586918258666992, 0.04530790328979492, 0.04526662445068359, 0.04538195037841797, 0.04546559906005859, 0.0453201904296875, 0.04543078231811523, 0.045445438385009765, 0.04564140701293945, 0.045364479064941406, 0.04577151870727539, 0.04592761611938476, 0.045605056762695315, 0.04528601455688477, 0.04538729476928711, 0.04601084899902344, 0.04572979354858398, 0.04542780685424805, 0.04585481643676758, 0.04599481582641601, 0.0457891845703125, 0.045770782470703125, 0.04586288070678711, 0.04560281753540039, 0.04576812744140625, 0.04571603012084961, 0.04566790390014648, 0.04555001449584961, 0.045672351837158204, 0.04593878555297851, 0.04576425552368164, 0.04566806411743164, 0.045875839233398434, 0.04655513763427734, 0.04591820907592774, 
0.04551628875732422, 0.04580387115478515, 0.04604275131225586, 0.04608371353149414, 0.045916641235351566, 0.046072254180908205, 0.04649574279785156, 0.04579328155517578, 0.045897823333740234, 0.046043041229248044, 0.045973121643066404, 0.04561139297485352, 0.045943904876708984, 0.04607068634033203, 0.045860000610351566, 0.047997791290283205, 0.04561539077758789, 0.045931934356689456, 0.04591238403320313, 0.04625151824951172, 0.04632160186767578, 0.04627449417114258, 0.04609478378295898, 0.04664748764038086, 0.04637843322753906, 0.04612768173217773, 0.046102527618408204, 0.04823849487304688, 0.0458614387512207, 0.045415809631347656, 0.04548668670654297, 0.04539177703857422, 0.04557017517089844, 0.04532368087768555, 0.045206111907958986, 0.045800960540771485, 0.04556380844116211, 0.045502559661865234, 0.04571529769897461, 0.045712032318115235, 0.04552035140991211, 0.04561155319213867, 0.04552083206176758, 0.046023937225341795, 0.04572038269042969, 0.04549203109741211, 0.045605022430419924, 0.045809505462646484, 0.04592019271850586, 0.04609868621826172, 0.04592025756835937, 0.04561305618286133, 0.04532368087768555, 0.04575907135009766, 0.045741470336914065, 0.04596796798706055, 0.04558607864379883, 0.04577423858642578, 0.04587411117553711, 0.045817119598388675, 0.04567859268188477, 0.04597760009765625, 0.045927135467529294, 0.04583833694458008, 0.045838016510009766, 0.045744449615478515, 0.04606911849975586, 0.04567308807373047, 0.0456440315246582, 0.04596854400634766, 0.046165790557861325, 0.04592246246337891, 0.04608060836791992, 0.046198848724365235, 0.045764606475830076, 0.04585203170776367, 0.046142078399658205, 0.04597350311279297, 0.0456616325378418, 0.04642816162109375, 0.045972030639648435, 0.046034942626953124, 0.045876609802246095, 0.0461759033203125, 0.0461300163269043, 0.046069889068603515, 0.046043071746826175, 0.04615993499755859, 0.04614044952392578, 0.04610351943969727, 0.048285694122314454, 0.04602265548706055, 0.04548540878295899, 0.04540892791748047, 0.04548764801025391, 0.04538735961914062, 0.04524531173706055, 0.04523206329345703, 0.04566227340698242, 0.04586195373535156, 0.045408382415771484, 0.045408897399902344, 0.045381824493408204, 0.04564112091064453, 0.04545516967773437, 0.045778942108154294, 0.04602140808105469, 0.045881343841552735, 0.04551270294189453, 0.04559667205810547, 0.046061569213867185, 0.04599587249755859, 0.04586073684692383, 0.04574031829833984, 0.04593657684326172, 0.04566636657714844, 0.045649921417236325, 0.04569283294677735, 0.04589372634887695, 0.0458342399597168, 0.046025856018066406, 0.04591296005249024, 0.04560281753540039, 0.0455491828918457, 0.04597971343994141, 0.04588972854614258, 0.045887615203857424, 0.04604844665527344, 0.046285633087158204, 0.045991294860839846, 0.045787776947021484, 0.04643635177612305, 0.046353759765625, 0.04625388717651367, 0.04611772918701172, 0.04623155212402344, 0.04600419235229492, 0.04569196701049805, 0.045980640411376957, 0.04604108810424805, 0.04599603271484375, 0.046072864532470705, 0.04645782470703125, 0.04621311950683594, 0.04590959930419922, 0.046085918426513675, 0.04609471893310547, 0.04634854507446289, 0.04627865600585938, 0.046655487060546875, 0.0465428466796875, 0.04619468688964844, 0.04646060943603516, 0.0476767692565918, 0.04568153762817383, 0.04547516632080078, 0.04524099349975586, 0.0453072624206543, 0.045304065704345704, 0.04543280029296875, 0.04537180709838867, 0.045664031982421874, 0.04534908676147461, 0.045581630706787106, 0.04587795257568359, 0.045608959197998046, 0.04568668746948242, 
0.04577494430541992, 0.047642623901367184, 0.045568000793457034, 0.04557619094848633, 0.04613119888305664, 0.04613324737548828, 0.04584185409545898, 0.04587168121337891, 0.04617420959472656, 0.04571136093139649, 0.04538777542114258, 0.04552499389648437, 0.04577689743041992, 0.04558028793334961, 0.045451168060302735, 0.04591215896606445, 0.04614144134521484, 0.04566825485229492, 0.0457872314453125, 0.045699073791503904, 0.04627817535400391, 0.04604156875610352, 0.04606771087646484, 0.046331489562988284, 0.046133663177490236, 0.045791233062744144, 0.04615318298339844, 0.046172702789306644, 0.04607590484619141, 0.046166015625, 0.04624310302734375, 0.04594278335571289, 0.04573052978515625, 0.046080001831054686, 0.045930496215820314, 0.04579446411132813, 0.04612956619262695, 0.04642214584350586, 0.046055744171142575, 0.045881343841552735, 0.04616191864013672, 0.046129150390625, 0.051031551361083984, 0.04583065414428711, 0.046454113006591795, 0.046027263641357424, 0.04628086471557617, 0.046679584503173825, 0.04631804656982422, 0.04760985565185547, 0.045843711853027345, 0.04542473602294922, 0.04535337448120117, 0.04533478546142578, 0.045641727447509765, 0.045528190612792965, 0.04550662231445313, 0.045345600128173826, 0.045553665161132816, 0.04584444808959961, 0.04569705581665039, 0.0453570556640625, 0.04576825714111328, 0.04570771026611328, 0.04542771148681641, 0.04547052764892578, 0.045793472290039064, 0.04560892868041992, 0.04575030517578125, 0.04624588775634766, 0.046274559020996094, 0.04583446502685547, 0.045905696868896485, 0.046075393676757816, 0.046027263641357424, 0.045586143493652344, 0.045739551544189454, 0.045781761169433596, 0.045758464813232425, 0.04587833786010742, 0.04599494552612305, 0.04619260787963867, 0.045953056335449216, 0.045632961273193356, 0.04582457733154297, 0.04581689453125, 0.04571139144897461, 0.04582633590698242, 0.046322303771972655, 0.04639744186401367, 0.04642406463623047, 0.04619462585449219, 0.04623308944702149, 0.0462457275390625, 0.046215713500976564, 0.045973377227783205, 0.046451007843017575, 0.04592636871337891, 0.04613488006591797, 0.04595260620117188, 0.04587187194824219, 0.04607171249389649, 0.04655737686157226, 0.046243137359619144, 0.04572639846801758, 0.046104480743408206, 0.04639139175415039, 0.04607078552246094, 0.04604969787597656, 0.046559009552001956, 0.04642636871337891, 0.046088768005371095, 0.04826726531982422, 0.046018558502197264, 0.04553097534179688, 0.04558454513549805, 0.04548774337768555, 0.045502368927001956, 0.04547836685180664, 0.04535849761962891, 0.045361759185791016, 0.0458403205871582, 0.046053375244140625, 0.04550867080688477, 0.0454076156616211, 0.04561967849731445, 0.045613216400146483, 0.045620384216308596, 0.04608041763305664, 0.04619308853149414, 0.04588544082641602, 0.04571468734741211, 0.04590464019775391, 0.04595507049560547, 0.045795135498046875, 0.04605152130126953, 0.04608319854736328, 0.04577507019042969, 0.045974174499511716, 0.04591820907592774, 0.04609638214111328, 0.0458994255065918, 0.04618668746948242, 0.045959327697753904, 0.046635009765625, 0.04586905670166016, 0.046172000885009765, 0.046100639343261716, 0.045991966247558594, 0.04642736053466797, 0.046398208618164065, 0.04608201599121094, 0.04621315383911133, 0.046737407684326174, 0.04645657730102539, 0.04623715209960937, 0.046813983917236325, 0.04640937423706055, 0.046031200408935546, 0.04600822448730469, 0.04632175827026367, 0.045948928833007815, 0.04604240036010742, 0.0462322883605957, 0.04636604690551758, 0.04597407913208008, 0.04606777572631836, 
0.04650601577758789, 0.04634764862060547, 0.04635033416748047, 0.046305919647216795, 0.04639334487915039, 0.04592575836181641, 0.04651264190673828, 0.04666518402099609, 0.04754022216796875, 0.04563455963134765, 0.04548505783081055, 0.0454917106628418, 0.04547945785522461, 0.04577788925170898, 0.04575616073608398, 0.045547775268554684, 0.04549222564697265, 0.0455211181640625, 0.04570627212524414, 0.045818622589111326, 0.045467647552490234, 0.045854400634765625, 0.046047809600830075, 0.04601625442504883, 0.04553318405151367, 0.045608959197998046, 0.04586700820922852, 0.04674969482421875, 0.045879295349121094, 0.046386688232421876, 0.04607436752319336, 0.04580352020263672, 0.045804862976074216, 0.04604108810424805, 0.04584310531616211, 0.04568476867675781, 0.0458520622253418, 0.045994239807128905, 0.04581411361694336, 0.04586038589477539, 0.04588592147827148, 0.04591971206665039, 0.046271041870117186, 0.04609334564208984, 0.045996288299560546, 0.04597155380249023, 0.04593241500854492, 0.046379104614257816, 0.046174846649169925, 0.04615292739868164, 0.04612982559204101, 0.046399616241455076, 0.04600384140014648, 0.0462790412902832, 0.04637251281738281, 0.04617574310302734, 0.04578335952758789, 0.046208831787109376, 0.046868480682373044, 0.04607049560546875, 0.0462213134765625, 0.04656550216674805, 0.04621094512939453, 0.04628070449829102, 0.046630912780761716, 0.046491649627685545, 0.04604108810424805, 0.046402721405029296, 0.04808790588378906, 0.0461677131652832, 0.04619094467163086, 0.048326560974121094, 0.04615350341796875, 0.04540700912475586, 0.045502464294433595, 0.04554956817626953, 0.04542364883422852, 0.04556284713745117, 0.045674495697021485, 0.04561507034301758, 0.045639713287353514, 0.04572732925415039, 0.045782913208007814, 0.04564022445678711, 0.04587724685668945, 0.0458158073425293, 0.04605715179443359, 0.04581148910522461, 0.04557206344604492, 0.04582457733154297, 0.046219264984130856, 0.0462432975769043, 0.046273056030273436, 0.04621516799926758, 0.045995903015136716, 0.045609088897705076, 0.045758464813232425, 0.04605731201171875, 0.046086273193359374, 0.04591823959350586, 0.046102527618408204, 0.04602848052978516, 0.04578700637817383, 0.04577030563354492, 0.046050174713134766, 0.046186496734619144, 0.04598723220825195, 0.04610518264770508, 0.04619468688964844, 0.04607385635375977, 0.0457704963684082, 0.046429569244384766, 0.04631961441040039, 0.04608499145507813, 0.046516223907470705, 0.046450687408447267, 0.04611072158813476, 0.04606723022460937, 0.046623199462890626, 0.04630886459350586, 0.04600467300415039, 0.04629100799560547, 0.04641340637207031, 0.04620719909667969, 0.04605324935913086, 0.046586177825927735, 0.046177471160888675, 0.04606854248046875, 0.04635644912719727, 0.04649526214599609, 0.04709215927124023, 0.04630323028564453, 0.04666716766357422, 0.046500511169433593, 0.048664161682128906, 0.04616268920898438, 0.04554342269897461, 0.045622848510742185, 0.04557564926147461, 0.04555465698242187, 0.045469696044921876, 0.04561920166015625, 0.045744129180908207, 0.04565599822998047, 0.04550380706787109, 0.04556057739257813, 0.04578012847900391, 0.04592316818237305, 0.04564582443237305, 0.04572774505615235, 0.04601212692260742, 0.0462022705078125, 0.0456835823059082, 0.04575151824951172, 0.0462050895690918, 0.046080097198486325, 0.04606211090087891, 0.04590182495117188, 0.04605747222900391, 0.045794879913330075, 0.045883617401123046, 0.04578531265258789, 0.04580147171020508, 0.04584447860717773, 0.04599356842041016, 0.04579369735717773, 0.04575641632080078, 
0.04589363098144531, 0.04630527877807617, 0.04642828750610352, 0.045766399383544924, 0.04640719985961914, 0.046184574127197266, 0.04620540618896484, 0.046403392791748044, 0.046452449798583983, 0.04625388717651367, 0.04616873550415039, 0.04640153503417969, 0.046408897399902345, 0.04625654220581055, 0.04629955291748047, 0.04640752029418945, 0.04627062225341797, 0.045969406127929685, 0.04622073745727539, 0.046031425476074216, 0.04606083297729492, 0.046193374633789065, 0.04642201614379883, 0.04619468688964844, 0.04597350311279297, 0.04652646255493164, 0.046421630859375, 0.046174591064453124, 0.04635647964477539, 0.04659366226196289, 0.04784729766845703, 0.04560108947753906, 0.04543315124511719, 0.04547353744506836, 0.045494529724121095, 0.04536284637451172, 0.04541680145263672, 0.04559667205810547, 0.04592639923095703, 0.04547292709350586, 0.04556067276000977, 0.04597715377807617, 0.04582649612426758, 0.045484031677246094, 0.04579043197631836, 0.04594348907470703, 0.04592403030395508, 0.046098846435546875, 0.046067073822021486, 0.04592089462280274, 0.046607521057128905, 0.04652937698364258, 0.046862335205078126, 0.04570111846923828, 0.04571468734741211, 0.04587324905395508, 0.04573865509033203, 0.04565107345581055, 0.04623401641845703, 0.04623203277587891, 0.046059680938720704, 0.045817440032958984, 0.04623545455932617, 0.046029247283935544, 0.04571446228027344, 0.046000190734863285, 0.046359455108642575, 0.04614553451538086, 0.04608201599121094, 0.046241825103759765, 0.04652617645263672, 0.046400833129882815, 0.04637993621826172, 0.04629715347290039, 0.04615507125854492, 0.04620544052124023, 0.04630268859863281, 0.045988574981689456, 0.0458158073425293, 0.04615734481811523, 0.04625603103637695, 0.04604150390625, 0.04617763137817383, 0.046475616455078125, 0.04630166244506836, 0.046002174377441404, 0.046516223907470705, 0.046417278289794923, 0.04642675018310547, 0.04668415832519531, 0.046635009765625, 0.0464600944519043, 0.0466376953125]",tokens/s,21.73913871902121,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run 
self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 2.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 2.12 MiB is free. Process 57153 has 14.74 GiB memory in use. Of the allocated memory 14.26 GiB is allocated by PyTorch, and 386.46 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,837.746688,556.72832,0.0,178.25792,176.52224,s,1,7.51967626953125,7.51967626953125,0.0,7.51967626953125,7.51967626953125,7.51967626953125,7.51967626953125,[7.51967626953125],,kWh,1.982594503750761e-05,2.179734761547903e-06,5.404726546004435e-06,2.741040634505995e-05,,MB,1194.688512,669.974528,0.0,262.144,221.118976,s,10,0.23773827171325684,0.023773827171325684,0.00024283595479606624,0.02371673583984375,0.024118610572814943,0.02418594560623169,0.02423981363296509,"[0.02391481590270996, 0.023476768493652343, 0.024103647232055665, 0.023773056030273437, 0.02366041564941406, 0.024253280639648437, 0.023616479873657226, 0.023656160354614257, 0.02347360038757324, 0.023810047149658203]",tokens/s,10768.1442350506,kWh,6.932403663103739e-07,7.645196220950019e-08,4.059873926176255e-07,1.1756797211374995e-06,tokens/kWh,217746377.17856833,MB,1228.357632,684.654592,0.0,276.824064,221.271552,s,10,13.6162353515625,1.36162353515625,0.004976171477569547,1.3627197875976562,1.3672108764648436,1.369263397216797,1.3709054138183594,"[1.362764404296875, 1.36269970703125, 1.37131591796875, 1.3627398681640626, 1.36317236328125, 1.3562159423828124, 1.355752197265625, 1.3607286376953125, 1.354091552734375, 1.3667547607421875]",tokens/s,46.2682954380416,kWh,3.9575149517440624e-05,4.3647022323524135e-06,2.0418029266581237e-05,6.435788101637427e-05,tokens/kWh,978901.0919108914,,s,630,13.611357091903685,0.021605328717307436,0.0004793603854581813,0.02150827217102051,0.021850230407714846,0.022027892112731935,0.024121036033630375,"[0.02136659240722656, 0.0215164794921875, 0.021698240280151368, 0.021655712127685547, 0.02155523109436035, 0.02168614387512207, 0.02161235237121582, 0.021825983047485353, 0.021884639739990233, 0.02169878387451172, 0.02162518310546875, 0.021489376068115233, 0.021884767532348633, 0.021545440673828124, 0.021544639587402343, 0.021530336380004882, 0.021551551818847655, 0.021600095748901368, 0.021439872741699218, 0.021392000198364257, 0.02143833541870117, 0.021472639083862304, 0.024135967254638672, 0.02312588882446289, 0.021810911178588868, 0.02173776054382324, 0.021477535247802736, 0.02143440055847168, 0.02140985679626465, 0.02182588768005371, 0.021575679779052736, 0.02159187126159668, 0.02151030349731445, 0.021499935150146483, 0.021395423889160155, 0.02136476707458496, 0.021339935302734377, 0.021369056701660158, 0.021441631317138672, 0.02128700828552246, 0.021468032836914064, 0.021498016357421875, 0.021790655136108398, 0.021624223709106445, 0.02154902458190918, 0.021496288299560545, 0.021591232299804686, 0.021506591796875, 0.021518943786621093, 0.021501056671142577, 0.02148726463317871, 
0.021643680572509767, 0.02153558349609375, 0.021646175384521484, 0.021754688262939453, 0.02164735984802246, 0.021940448760986327, 0.021501056671142577, 0.021501663208007813, 0.021425088882446288, 0.021379072189331053, 0.021536991119384764, 0.021421056747436523, 0.02101702308654785, 0.021399999618530275, 0.021557247161865235, 0.021405696868896484, 0.021420320510864257, 0.02151340866088867, 0.02134003257751465, 0.021434240341186524, 0.021394208908081056, 0.02137276840209961, 0.02144476890563965, 0.021656831741333007, 0.02145254325866699, 0.021547552108764648, 0.021483327865600584, 0.021494495391845704, 0.02148086357116699, 0.021369279861450194, 0.021417760848999025, 0.02136300849914551, 0.021350400924682617, 0.021356800079345702, 0.021284704208374024, 0.021188512802124023, 0.021666944503784178, 0.02145552062988281, 0.021422367095947265, 0.021528703689575195, 0.021296384811401368, 0.021418432235717773, 0.02142972755432129, 0.021324447631835938, 0.021401952743530274, 0.021565088272094725, 0.02145020866394043, 0.0215283203125, 0.02165830421447754, 0.021699871063232422, 0.021638336181640624, 0.021610111236572267, 0.02158835220336914, 0.021537471771240234, 0.02157459259033203, 0.021479328155517577, 0.02157708740234375, 0.021626976013183592, 0.021566240310668946, 0.021578655242919922, 0.021658559799194337, 0.02170579147338867, 0.021830528259277344, 0.021774368286132814, 0.022078975677490235, 0.021792512893676758, 0.02187104034423828, 0.021737823486328123, 0.02186240005493164, 0.021937952041625977, 0.021948991775512697, 0.02188047981262207, 0.022009855270385743, 0.024481439590454103, 0.023181695938110352, 0.02150982475280762, 0.02202150344848633, 0.022033119201660158, 0.021868223190307616, 0.02185004806518555, 0.021687904357910157, 0.021854944229125976, 0.022159263610839842, 0.022081920623779297, 0.02169068717956543, 0.02184694480895996, 0.021613311767578126, 0.021553184509277342, 0.02147737693786621, 0.021483776092529296, 0.021738815307617187, 0.02158140754699707, 0.021500736236572265, 0.02176527976989746, 0.02169862365722656, 0.026327936172485352, 0.02262182426452637, 0.021944063186645508, 0.021870687484741212, 0.0217607364654541, 0.021548992156982423, 0.02157548713684082, 0.02147123146057129, 0.02151628875732422, 0.021580127716064452, 0.021536415100097656, 0.02148476791381836, 0.021438880920410155, 0.021467519760131837, 0.021615999221801758, 0.021488256454467773, 0.021593151092529298, 0.021485759735107423, 0.021507135391235353, 0.02160310363769531, 0.02150275230407715, 0.021628480911254883, 0.021714656829833985, 0.02171913528442383, 0.021863487243652342, 0.021599264144897462, 0.021681983947753905, 0.02163603210449219, 0.021661600112915038, 0.021764095306396485, 0.0217391357421875, 0.021729375839233397, 0.02162076759338379, 0.021944000244140626, 0.021716575622558593, 0.021654367446899414, 0.02160985565185547, 0.021559680938720703, 0.02159654426574707, 0.021668895721435547, 0.021605375289916993, 0.021594432830810546, 0.021518335342407227, 0.02150399971008301, 0.021709152221679687, 0.02188902473449707, 0.0218656005859375, 0.021591936111450195, 0.02156835174560547, 0.021468416213989257, 0.021535680770874022, 0.02148054313659668, 0.021408639907836913, 0.02152448081970215, 0.02137843132019043, 0.021545600891113282, 0.021481472015380858, 0.021436447143554686, 0.021485536575317384, 0.02149344062805176, 0.02163539123535156, 0.021420383453369142, 0.021362335205078124, 0.02138047981262207, 0.021594751358032228, 0.02137660789489746, 0.021499391555786132, 0.02147011184692383, 0.022449600219726563, 
0.02164588737487793, 0.02155958366394043, 0.02158393669128418, 0.02158729553222656, 0.021549152374267577, 0.021548671722412108, 0.02177699279785156, 0.021866207122802735, 0.021884191513061525, 0.02242252731323242, 0.021835519790649415, 0.022227264404296874, 0.02184185600280762, 0.021837215423583984, 0.021719776153564452, 0.02168409538269043, 0.021589824676513672, 0.021589599609375, 0.02159881591796875, 0.02153494453430176, 0.02167532730102539, 0.021672351837158203, 0.02173139190673828, 0.021700159072875976, 0.021606847763061522, 0.021892927169799806, 0.021809152603149414, 0.021710304260253905, 0.021492576599121092, 0.021417856216430664, 0.021518335342407227, 0.021376224517822267, 0.021529151916503907, 0.02148111915588379, 0.021357120513916014, 0.02145631980895996, 0.02141417694091797, 0.021029344558715822, 0.021476415634155272, 0.021285823822021484, 0.02128281593322754, 0.021509632110595703, 0.021264352798461915, 0.02138175964355469, 0.02148566436767578, 0.021392351150512696, 0.02136150360107422, 0.021458879470825195, 0.021374912261962892, 0.021343360900878905, 0.02145417594909668, 0.021397151947021485, 0.021501951217651367, 0.02157535934448242, 0.021520544052124023, 0.02142838478088379, 0.02166988754272461, 0.021341663360595703, 0.021469728469848633, 0.021317632675170898, 0.021250272750854494, 0.021408544540405274, 0.021305856704711915, 0.02129929542541504, 0.021445024490356446, 0.021794815063476563, 0.0216494083404541, 0.021599552154541016, 0.021498559951782226, 0.024669408798217773, 0.027470624923706055, 0.021816768646240235, 0.021571647644042968, 0.021488128662109376, 0.021507999420166016, 0.021315744400024414, 0.021516223907470704, 0.021454240798950194, 0.021448448181152345, 0.0213832950592041, 0.02142313575744629, 0.02150592041015625, 0.021527679443359374, 0.021436927795410156, 0.021508544921875, 0.02133286476135254, 0.021478271484375, 0.021581823348999024, 0.021728256225585937, 0.021507200241088868, 0.021416896820068358, 0.021629823684692382, 0.022895904541015626, 0.021436256408691408, 0.0215031681060791, 0.02138083267211914, 0.021462944030761717, 0.0218789119720459, 0.021342496871948242, 0.021495231628417967, 0.021202688217163087, 0.021759647369384766, 0.021487871170043946, 0.021495168685913085, 0.022068191528320312, 0.021354496002197267, 0.021526239395141603, 0.021440799713134766, 0.02154195213317871, 0.021497119903564454, 0.021533632278442384, 0.02234822463989258, 0.021639455795288087, 0.02178691291809082, 0.02157334327697754, 0.021510143280029297, 0.021716991424560548, 0.021518239974975584, 0.021790271759033204, 0.02147724723815918, 0.021378944396972657, 0.02156982421875, 0.02136729621887207, 0.021319583892822267, 0.021343360900878905, 0.02132476806640625, 0.021348352432250976, 0.02162713623046875, 0.02153011131286621, 0.02192745590209961, 0.022114368438720704, 0.02157814407348633, 0.021609920501708985, 0.021959327697753907, 0.02147164726257324, 0.021321216583251954, 0.021495071411132813, 0.021558528900146486, 0.021425888061523436, 0.02142348861694336, 0.02158451271057129, 0.021415775299072265, 0.021557407379150392, 0.0214052791595459, 0.021579296112060546, 0.021452959060668946, 0.02134294319152832, 0.02145894432067871, 0.02146918487548828, 0.02140166473388672, 0.02139743995666504, 0.02135206413269043, 0.021328256607055663, 0.02135990333557129, 0.021315296173095702, 0.021390335083007812, 0.02136649513244629, 0.021463264465332033, 0.021280096054077147, 0.02136899185180664, 0.021541439056396484, 0.021438144683837892, 0.021502016067504882, 0.021125503540039062, 
0.021542911529541017, 0.02150716781616211, 0.021465856552124022, 0.021705984115600586, 0.02132467269897461, 0.021297536849975585, 0.021431615829467773, 0.021381471633911135, 0.021415327072143556, 0.021660255432128905, 0.021360639572143555, 0.023858240127563476, 0.02167683219909668, 0.021663904190063477, 0.021458623886108398, 0.021592639923095704, 0.021266176223754884, 0.021278976440429687, 0.021657344818115234, 0.02128691291809082, 0.021370880126953123, 0.0214932804107666, 0.021445087432861328, 0.02157382392883301, 0.02152364730834961, 0.021480064392089843, 0.021593791961669922, 0.021950784683227538, 0.021729280471801758, 0.021604352951049805, 0.021302911758422853, 0.021516063690185546, 0.02138332748413086, 0.021313472747802733, 0.021348543167114258, 0.021307712554931642, 0.021313472747802733, 0.021487327575683595, 0.02147158432006836, 0.021497856140136717, 0.021367040634155274, 0.0216428165435791, 0.021883327484130858, 0.02157360076904297, 0.021518112182617188, 0.0214866886138916, 0.021397600173950194, 0.021535615921020507, 0.021375200271606446, 0.0216343994140625, 0.021422464370727538, 0.02157155227661133, 0.021391519546508787, 0.02129724884033203, 0.021313119888305664, 0.021440704345703124, 0.02131065559387207, 0.02140243148803711, 0.021535072326660156, 0.021449472427368162, 0.021588895797729494, 0.021469024658203124, 0.02101139259338379, 0.02142790412902832, 0.021323455810546874, 0.021323360443115235, 0.021256927490234376, 0.02135590362548828, 0.02130803108215332, 0.021661088943481444, 0.02154966354370117, 0.021672191619873046, 0.022133663177490236, 0.021733631134033204, 0.021709535598754885, 0.021573087692260743, 0.021472799301147462, 0.021500543594360353, 0.021590272903442384, 0.021389312744140625, 0.021393312454223632, 0.021434783935546875, 0.02144611167907715, 0.021376480102539064, 0.02150271987915039, 0.02147929573059082, 0.021456192016601563, 0.02157859230041504, 0.021972959518432617, 0.02150399971008301, 0.021497856140136717, 0.02151628875732422, 0.021491519927978514, 0.021552608489990233, 0.02160099220275879, 0.0216944637298584, 0.02154521560668945, 0.02156723213195801, 0.02408448028564453, 0.022454208374023437, 0.021556480407714844, 0.021659711837768554, 0.02144767951965332, 0.02155084800720215, 0.021359615325927735, 0.021895647048950195, 0.021415775299072265, 0.02160268783569336, 0.021319999694824218, 0.021606559753417968, 0.021476831436157227, 0.02144732856750488, 0.021390047073364258, 0.021418495178222655, 0.02194175910949707, 0.021502880096435546, 0.02160643196105957, 0.021497983932495118, 0.02175993537902832, 0.021633056640625, 0.021353471755981446, 0.022502368927001953, 0.021356544494628905, 0.021319679260253906, 0.02150297546386719, 0.020975616455078124, 0.021389312744140625, 0.021456895828247072, 0.02142646408081055, 0.021393375396728517, 0.021432096481323243, 0.02130940818786621, 0.021851871490478517, 0.021533088684082033, 0.021405088424682618, 0.021502111434936525, 0.021416255950927734, 0.021691936492919922, 0.021596160888671875, 0.02161097526550293, 0.021569440841674805, 0.021600351333618165, 0.021448703765869142, 0.021496095657348634, 0.02140105628967285, 0.021399808883666993, 0.021622783660888673, 0.02129532814025879, 0.02148534393310547, 0.021506240844726562, 0.021433759689331054, 0.021508512496948243, 0.021340063095092773, 0.02126857566833496, 0.02141900825500488, 0.02148044776916504, 0.021336063385009766, 0.021426368713378906, 0.02143008041381836, 0.021508031845092774, 0.021411455154418946, 0.021389759063720704, 0.021316736221313477, 0.02183772850036621, 
0.02150499153137207, 0.02183286476135254, 0.021707136154174803, 0.02158665657043457, 0.021503744125366212, 0.021475008010864258, 0.021441919326782227, 0.021293760299682617, 0.021324031829833983, 0.021388736724853516, 0.02142880058288574, 0.021581024169921876, 0.02160310363769531, 0.021423360824584962, 0.021594175338745115, 0.021502784729003906, 0.021476703643798827, 0.021461536407470703, 0.021444480895996095, 0.021692768096923828, 0.02169219207763672, 0.021501056671142577, 0.02172198486328125, 0.02150320053100586, 0.0217126407623291, 0.021554624557495117, 0.02150662422180176, 0.02162124824523926, 0.02140310478210449, 0.021394880294799804, 0.02128700828552246, 0.02142220878601074, 0.021569856643676756, 0.021457216262817384, 0.021395807266235353, 0.02435158348083496, 0.02265769577026367, 0.02180009651184082, 0.021558368682861328, 0.02162063980102539, 0.02207539176940918, 0.02153696060180664, 0.021947391510009767, 0.021596128463745118, 0.021631103515625, 0.022167583465576172, 0.02152230453491211, 0.02148249626159668, 0.021597343444824217, 0.021512128829956054, 0.02139155197143555, 0.021371423721313478, 0.02148761558532715, 0.02141209602355957, 0.021384960174560548, 0.021474720001220703, 0.021643871307373046, 0.021642751693725586, 0.02240153694152832, 0.024633472442626952, 0.02183359909057617, 0.02170787239074707, 0.021553056716918945, 0.021473344802856446, 0.02142300796508789, 0.02148464012145996, 0.021558399200439452, 0.022250335693359376, 0.021557600021362304, 0.02152822494506836, 0.021601408004760743, 0.021382335662841798, 0.021621376037597655, 0.021519968032836914, 0.021360960006713867, 0.02138912010192871, 0.021373247146606444, 0.0214835205078125, 0.021461183547973633, 0.021361536026000976, 0.02125257682800293, 0.021729759216308594, 0.022515615463256835, 0.021492000579833984, 0.021348159790039064, 0.0212739200592041, 0.021522495269775392]",tokens/s,46.28487782270711,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in 
run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File 
""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in 
benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1474.31424,1326.383104,0.0,947.912704,945.250304,s,1,8.1235908203125,8.1235908203125,0.0,8.1235908203125,8.1235908203125,8.1235908203125,8.1235908203125,[8.1235908203125],,kWh,3.681505969166968e-05,4.053782199671776e-06,1.08444531200097e-05,5.1713295011351154e-05,,MB,1517.285376,1519.321088,0.0,1111.49056,1098.82368,s,10,1.6054995269775392,0.1605499526977539,0.000562217233122397,0.16068774414062498,0.16113861389160156,0.1612920928955078,0.1614148760986328,"[0.15935037231445312, 0.16074298095703124, 0.16031491088867186, 0.16026255798339845, 0.16086659240722656, 0.16110450744628907, 0.16076307678222657, 0.16063250732421874, 0.16001644897460937, 0.16144557189941405]",tokens/s,1594.5193112696659,kWh,4.704023410647971e-06,5.185785808379412e-07,3.1308005646033637e-06,8.353402556089277e-06,tokens/kWh,30646194.563362308,MB,1527.250944,1653.538816,0.0,1245.708288,1164.242432,s,10,88.924287109375,8.8924287109375,0.01463182725578284,8.894529296875,8.90892509765625,8.910288720703125,8.911379619140625,"[8.863833984375, 8.874208984375, 8.8817548828125, 8.88940234375, 8.8925712890625, 8.8964873046875, 8.901982421875, 8.903771484375, 8.9086220703125, 
8.91165234375]",tokens/s,7.084678668552195,kWh,0.00026004556043518647,2.8684570792508123e-05,0.00017283075687119642,0.00046156088809889105,tokens/kWh,136493.36766702388,,s,630,88.92091477966304,0.1411443091740684,0.0003321701887789103,0.14115912628173827,0.14155792083740235,0.1416606658935547,0.14184947067260742,"[0.14091314697265625, 0.1400463409423828, 0.14019378662109375, 0.1403412780761719, 0.14071157836914064, 0.14059756469726561, 0.140287841796875, 0.1404704284667969, 0.14042112731933593, 0.14061305236816407, 0.14044627380371094, 0.14091673278808595, 0.14092445373535156, 0.14058134460449218, 0.14066204833984375, 0.140548828125, 0.14014402770996093, 0.14071049499511717, 0.14068080139160155, 0.14068368530273437, 0.14069728088378905, 0.14054237365722655, 0.14061270141601562, 0.14057350158691406, 0.14058090209960938, 0.1405625, 0.1407724151611328, 0.1408070068359375, 0.14080995178222655, 0.14059344482421876, 0.14050918579101562, 0.1408125762939453, 0.14064405822753906, 0.14094857788085938, 0.1414276123046875, 0.14073036193847657, 0.14074406433105469, 0.14078016662597656, 0.14097613525390626, 0.14056646728515626, 0.14067132568359375, 0.14057647705078125, 0.140930908203125, 0.1405853729248047, 0.14074649047851562, 0.14074266052246093, 0.1403574981689453, 0.14065061950683594, 0.14066259765625, 0.1412671356201172, 0.14080819702148437, 0.1402882537841797, 0.14103030395507812, 0.1404550018310547, 0.1405806121826172, 0.1409269714355469, 0.14149609375, 0.14093927001953124, 0.14108905029296875, 0.14070346069335937, 0.14065397644042968, 0.14053817749023437, 0.14121200561523437, 0.14041920471191408, 0.14058505249023437, 0.14048573303222656, 0.14045890808105468, 0.1407631378173828, 0.14065078735351563, 0.14117654418945313, 0.1406402587890625, 0.1406543731689453, 0.14084451293945313, 0.14039251708984374, 0.14085804748535155, 0.14065565490722656, 0.1406164093017578, 0.14096954345703125, 0.14056108093261718, 0.14060304260253906, 0.14066943359375, 0.14078140258789062, 0.14096771240234374, 0.14086781311035157, 0.14094950866699218, 0.14097613525390626, 0.14097613525390626, 0.14083065795898436, 0.14094090270996093, 0.14127740478515624, 0.14087564086914062, 0.14126527404785155, 0.14073849487304688, 0.1407611541748047, 0.1407836151123047, 0.14070950317382813, 0.14091065979003906, 0.1409438018798828, 0.14117808532714843, 0.14086611938476562, 0.14110723876953124, 0.14052120971679688, 0.14061395263671875, 0.14123526000976563, 0.14069036865234374, 0.14108262634277344, 0.1409551696777344, 0.14087590026855468, 0.140957275390625, 0.14098713684082032, 0.14085673522949219, 0.140778076171875, 0.14141439819335938, 0.14121369934082031, 0.14089631652832033, 0.14098220825195312, 0.1410248565673828, 0.1407983703613281, 0.14067916870117186, 0.141046875, 0.14133529663085936, 0.14077967834472657, 0.1410885772705078, 0.14058709716796874, 0.1410274200439453, 0.14074879455566405, 0.1414819793701172, 0.1406320343017578, 0.14080975341796875, 0.1405788116455078, 0.14077548217773436, 0.140767578125, 0.14120150756835936, 0.14111439514160157, 0.14093606567382813, 0.14092707824707032, 0.14038761901855468, 0.14095750427246093, 0.1405572204589844, 0.14129151916503907, 0.14103321838378907, 0.14111318969726563, 0.1406754913330078, 0.14069325256347656, 0.1408350067138672, 0.1406519317626953, 0.14105410766601562, 0.14085699462890625, 0.14122685241699218, 0.14077337646484375, 0.14075820922851562, 0.14098915100097656, 0.1408527069091797, 0.14077719116210938, 0.14111036682128905, 0.14086038208007812, 0.14077772521972656, 0.14123190307617187, 
0.1409031982421875, 0.1410068817138672, 0.14092643737792968, 0.14090847778320312, 0.14137834167480468, 0.14119949340820312, 0.14095872497558593, 0.14095021057128906, 0.1410662384033203, 0.14060748291015626, 0.14102857971191407, 0.14121244812011718, 0.14095578002929687, 0.14115213012695313, 0.14081622314453124, 0.14129078674316406, 0.1409135284423828, 0.14096298217773437, 0.1407189483642578, 0.14160211181640625, 0.1412403564453125, 0.14106655883789063, 0.14107887268066407, 0.14084095764160157, 0.14129766845703126, 0.14104098510742188, 0.1412626953125, 0.14095616149902343, 0.14118739318847656, 0.14098588562011719, 0.1411047668457031, 0.14146517944335937, 0.140505126953125, 0.14051954650878906, 0.1408916473388672, 0.14044364929199218, 0.14086749267578125, 0.14098722839355468, 0.14116181945800782, 0.14096646118164063, 0.14073251342773438, 0.1410846710205078, 0.14103453063964844, 0.14096015930175781, 0.14124691772460937, 0.14110508728027343, 0.14089360046386717, 0.1405447998046875, 0.1410396728515625, 0.14141229248046874, 0.14115225219726563, 0.141127685546875, 0.14089010620117187, 0.1409697570800781, 0.14090229797363282, 0.14100691223144532, 0.14112384033203124, 0.140943359375, 0.14119081115722656, 0.14099020385742186, 0.1411037139892578, 0.14079994201660156, 0.14107449340820313, 0.14137962341308594, 0.14098835754394531, 0.14149020385742186, 0.14116455078125, 0.14111485290527342, 0.1410089569091797, 0.141339111328125, 0.14120755004882812, 0.14123622131347657, 0.14131132507324218, 0.141191650390625, 0.14100679016113282, 0.14122575378417968, 0.1411846466064453, 0.14110092163085938, 0.1412843780517578, 0.14137338256835938, 0.14112127685546874, 0.14088943481445312, 0.14117100524902343, 0.14134538269042968, 0.14094744873046874, 0.14173388671875, 0.1417090606689453, 0.1409927978515625, 0.14111946105957032, 0.14147584533691407, 0.14122950744628907, 0.1409974060058594, 0.14135621643066407, 0.14124911499023438, 0.14080812072753907, 0.14106434631347656, 0.14090316772460937, 0.14094137573242188, 0.14078764343261718, 0.14124447631835937, 0.14094540405273437, 0.14107192993164064, 0.14081272888183594, 0.14098629760742187, 0.14104786682128906, 0.14087948608398437, 0.1411075897216797, 0.14107752990722655, 0.14116117858886718, 0.14137779235839842, 0.14069480895996095, 0.14110794067382812, 0.1410867156982422, 0.14105746459960938, 0.1413347473144531, 0.14127040100097657, 0.1412352294921875, 0.14103955078125, 0.14105142211914062, 0.14108694458007812, 0.14104156494140624, 0.1414041290283203, 0.14131033325195314, 0.14109660339355468, 0.14108026123046874, 0.14129379272460937, 0.14114370727539063, 0.14088800048828126, 0.14112380981445313, 0.1410167999267578, 0.14114044189453126, 0.14090614318847655, 0.14132540893554688, 0.14110432434082032, 0.1412960968017578, 0.14134803771972657, 0.14135177612304686, 0.14112477111816407, 0.14128215026855467, 0.14144102478027343, 0.1413396759033203, 0.1413314208984375, 0.14117231750488282, 0.1411895294189453, 0.1410109405517578, 0.1414615020751953, 0.1413253479003906, 0.14125106811523438, 0.1414619903564453, 0.1411438446044922, 0.14117683410644533, 0.14145968627929686, 0.14107606506347656, 0.14098602294921875, 0.14136595153808593, 0.14145738220214843, 0.141125732421875, 0.14125836181640625, 0.14098045349121094, 0.1406304931640625, 0.141155517578125, 0.14097430419921875, 0.1408042297363281, 0.1411931915283203, 0.14117324829101563, 0.14086326599121093, 0.140996826171875, 0.1409129638671875, 0.14099130249023437, 0.14105186462402344, 0.1415013427734375, 0.14101539611816405, 
0.14118159484863282, 0.14115927124023436, 0.1410498504638672, 0.14098591613769532, 0.1414351043701172, 0.14136767578125, 0.14105401611328125, 0.14114297485351562, 0.14099737548828126, 0.14109504699707032, 0.14125637817382813, 0.14104386901855467, 0.14164405822753906, 0.1409231719970703, 0.14128099060058594, 0.14119731140136718, 0.1410287628173828, 0.14150083923339843, 0.14118896484375, 0.14140623474121095, 0.1410780487060547, 0.14124520874023438, 0.140906494140625, 0.14107034301757812, 0.14149337768554687, 0.14125555419921876, 0.141459716796875, 0.14138064575195314, 0.14129020690917968, 0.14112358093261718, 0.1411494140625, 0.14130050659179688, 0.14126838684082033, 0.1414068145751953, 0.14136642456054688, 0.141455810546875, 0.1411219482421875, 0.14143618774414063, 0.14122000122070313, 0.14144940185546875, 0.1413504638671875, 0.14166099548339844, 0.14205746459960938, 0.14088552856445313, 0.14123802185058593, 0.14116732788085937, 0.14148809814453125, 0.14142057800292968, 0.14111001586914063, 0.14105331420898437, 0.14058790588378905, 0.14132199096679687, 0.14124986267089842, 0.1409297637939453, 0.14166026306152343, 0.1415801544189453, 0.1410846710205078, 0.14109933471679686, 0.14105158996582032, 0.14124850463867186, 0.1412460174560547, 0.14157868957519532, 0.1410190734863281, 0.14139808654785158, 0.14098136901855468, 0.1408275146484375, 0.14113154602050781, 0.1414126434326172, 0.14120498657226563, 0.14177325439453126, 0.1409249267578125, 0.1407422332763672, 0.141103515625, 0.14103753662109375, 0.14172163391113282, 0.14125465393066405, 0.1415202178955078, 0.14110992431640626, 0.14101475524902343, 0.14113821411132813, 0.14140406799316407, 0.14150869750976564, 0.14212300109863282, 0.14134259033203125, 0.1410952911376953, 0.1411822052001953, 0.1411012725830078, 0.14132048034667968, 0.14128128051757813, 0.141540771484375, 0.1415870361328125, 0.1413233642578125, 0.14125289916992187, 0.14118115234375, 0.1415970916748047, 0.14127513122558594, 0.1414757080078125, 0.14155789184570314, 0.14129971313476564, 0.1411746520996094, 0.1413448944091797, 0.14137753295898436, 0.14144511413574218, 0.14146560668945313, 0.1413017578125, 0.141461181640625, 0.14157037353515625, 0.14140316772460937, 0.14147273254394532, 0.14152499389648437, 0.1415355224609375, 0.14115440368652343, 0.14105430603027344, 0.1410768585205078, 0.14114201354980468, 0.14118911743164062, 0.14103330993652344, 0.14093959045410157, 0.14133859252929687, 0.14137242126464844, 0.1411961669921875, 0.14122125244140624, 0.14143869018554686, 0.1410592041015625, 0.14174595642089843, 0.14114714050292967, 0.1411287078857422, 0.14108038330078124, 0.14141629028320313, 0.14125823974609375, 0.1409291229248047, 0.14135760498046876, 0.1410474853515625, 0.14117123413085939, 0.14119436645507813, 0.14120640563964842, 0.14140165710449218, 0.1414405059814453, 0.14144607543945312, 0.14124832153320313, 0.14134471130371093, 0.14116685485839844, 0.14116864013671876, 0.14139974975585937, 0.14128477478027343, 0.14142477416992189, 0.14146409606933594, 0.14134707641601563, 0.14145542907714845, 0.14159353637695313, 0.14130476379394533, 0.14162358093261718, 0.14142031860351562, 0.14124237060546874, 0.1410867156982422, 0.1413605499267578, 0.14173654174804687, 0.14172808837890624, 0.14161404418945311, 0.14129740905761717, 0.1413477783203125, 0.1413305206298828, 0.14149746704101562, 0.14171420288085937, 0.14129270935058594, 0.1413824920654297, 0.1411788787841797, 0.14122189331054688, 0.14151065063476562, 0.1415146942138672, 0.14180972290039062, 0.14133485412597657, 
0.14142022705078125, 0.14140211486816406, 0.14115020751953125, 0.14143775939941405, 0.14173721313476562, 0.14108937072753908, 0.14117056274414064, 0.1409701690673828, 0.14117269897460938, 0.14116358947753907, 0.14133544921875, 0.14150253295898438, 0.14108262634277344, 0.1408184356689453, 0.14107852172851562, 0.14129971313476564, 0.14138674926757813, 0.14119778442382813, 0.14169346618652343, 0.14154339599609375, 0.14124797058105468, 0.1411589813232422, 0.14185664367675782, 0.1412752685546875, 0.14132633972167968, 0.1414607391357422, 0.14117964172363281, 0.141127685546875, 0.14095542907714845, 0.14141658020019532, 0.14147596740722657, 0.14144610595703125, 0.14137855529785157, 0.14123519897460937, 0.14108924865722655, 0.14104835510253907, 0.14133436584472656, 0.1416848907470703, 0.14157005310058593, 0.1415720977783203, 0.1416171569824219, 0.14143043518066406, 0.1413410186767578, 0.14120355224609374, 0.14171720886230468, 0.14160914611816405, 0.1415068817138672, 0.14204444885253906, 0.1414127655029297, 0.1418275146484375, 0.14153298950195312, 0.14173846435546875, 0.14175830078125, 0.14171180725097657, 0.14144447326660156, 0.141304443359375, 0.1413570556640625, 0.14150656127929687, 0.14165196228027344, 0.14159461975097656, 0.14157618713378906, 0.1415265350341797, 0.14148483276367188, 0.14153494262695313, 0.1411744384765625, 0.1412696990966797, 0.14068531799316406, 0.14130989074707032, 0.14100486755371094, 0.14111669921875, 0.14115708923339843, 0.14146931457519532, 0.14154118347167968, 0.14108285522460937, 0.14133436584472656, 0.14112409973144532, 0.14095974731445313, 0.14117263793945312, 0.14198179626464844, 0.14138983154296875, 0.14141007995605467, 0.14138316345214844, 0.14127381896972657, 0.14123008728027345, 0.14127740478515624, 0.14158416748046876, 0.14147789001464844, 0.14150825500488282, 0.14162281799316406, 0.14142886352539064, 0.14126559448242187, 0.14148812866210939, 0.14190751647949218, 0.1417527618408203, 0.14164787292480469, 0.14117225646972656, 0.1413586883544922, 0.14116543579101562, 0.14146488952636718, 0.14179420471191406, 0.14158210754394532, 0.1416592254638672, 0.14134701538085936, 0.14158717346191407, 0.14171340942382812, 0.1413324737548828, 0.14132415771484375, 0.14199411010742188, 0.14161509704589845, 0.14164492797851563, 0.1412657928466797, 0.1414632568359375, 0.1413104705810547, 0.14140599060058595, 0.14164178466796876, 0.14155564880371094, 0.14166639709472656, 0.141671875, 0.1415581817626953, 0.14161517333984375, 0.14151609802246093, 0.14178134155273436, 0.14151641845703125, 0.1415933074951172, 0.1413324737548828, 0.1413324737548828, 0.14165536499023437, 0.1418319091796875]",tokens/s,7.084947355310901,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = 
Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File 
""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 98.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 96.12 MiB is free. Process 151091 has 14.64 GiB memory in use. Of the allocated memory 14.24 GiB is allocated by PyTorch, and 312.03 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 
1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1284, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1079, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 816, in forward hidden_states = self.mlp(hidden_states) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 734, in forward final_hidden_states.index_add_(0, top_x, current_hidden_states.to(hidden_states.dtype)) RuntimeError: CUDA error: invalid configuration argument CUDA kernel errors might be asynchronously reported at some other API call, so the stacktrace below might be incorrect. For debugging consider passing CUDA_LAUNCH_BLOCKING=1 Compile with `TORCH_USE_CUDA_DSA` to enable device-side assertions. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1505.906688,1596.915712,0.0,1201.668096,1189.151232,s,1,8.179638671875,8.179638671875,0.0,8.179638671875,8.179638671875,8.179638671875,8.179638671875,[8.179638671875],,kWh,3.4542718758334936e-05,3.8032259343420668e-06,1.0888064266001071e-05,4.9234008958678075e-05,,MB,1524.883456,1791.950848,0.0,1382.023168,1351.367168,s,10,0.4748804168701172,0.04748804168701172,8.626458128418844e-05,0.04748456001281738,0.04758217430114746,0.04760303974151611,0.04761973209381103,"[0.04757753753662109, 0.04741312026977539, 0.047552288055419924, 0.04748134231567383, 0.04741155242919922, 0.04733055877685547, 0.04744131088256836, 0.047623905181884765, 0.04748777770996094, 0.0475610237121582]",tokens/s,5390.830847211323,kWh,1.4249498766260126e-06,1.5714683171962336e-07,9.500861259219454e-07,2.532182834267581e-06,tokens/kWh,101098544.91373903,MB,1528.918016,1833.893888,0.0,1423.966208,1407.328256,s,10,13.786848022460937,1.3786848022460938,0.0034189820094292227,1.3792277221679687,1.3835193969726562,1.3840247131347656,1.384428966064453,"[1.3790538330078126, 1.3803687744140625, 1.375017822265625, 1.3734044189453125, 1.379401611328125, 1.3756253662109375, 1.37638134765625, 1.3834071044921874, 1.37965771484375, 1.384530029296875]",tokens/s,45.695723850268834,kWh,4.049869935587363e-05,4.466583156182899e-06,1.8252484791678163e-05,6.32177673037347e-05,tokens/kWh,996555.2832214333,,s,630,13.784107973098747,0.021879536465236122,0.0002631747546613138,0.021808064460754395,0.022150712776184082,0.02236365451812744,0.022853109550476074,"[0.021778495788574218, 0.02174732780456543, 0.021688703536987305, 0.02183366394042969, 0.022048831939697266, 0.02204876708984375, 0.02185385513305664, 0.021788864135742186, 0.02177427291870117, 0.021960927963256837, 0.02182963180541992, 0.02185625648498535, 0.02177961540222168, 0.022281280517578127, 0.02176153564453125, 0.021694751739501954, 0.021700607299804688, 0.02172313690185547, 0.02201190376281738, 0.02191564750671387, 0.02171494483947754, 0.021778432846069336, 0.021729280471801758, 0.02169856071472168, 0.02166169548034668, 0.02194867134094238, 0.02190105628967285, 0.021749696731567382, 0.021851327896118163, 0.021825439453125, 0.02172003173828125, 0.021716991424560548, 0.021737472534179687, 0.02176924705505371, 0.021820383071899416, 0.021746784210205077, 0.021676895141601562, 0.021897279739379882, 0.021954559326171876, 0.022089824676513672, 0.022149023056030274, 0.022235136032104492, 0.02239190483093262, 0.022292959213256837, 0.022095680236816406, 0.022239328384399414, 0.0223045768737793, 0.02197987174987793, 0.022015071868896483, 0.022520767211914063, 0.02183718490600586, 0.0217872314453125, 0.02183884811401367, 0.02181427192687988, 0.021807104110717773, 0.02188902473449707, 0.021886144638061523, 
0.021817983627319334, 0.021745311737060548, 0.021783136367797853, 0.021722272872924806, 0.02184419250488281, 0.021764671325683594, 0.021803680419921874, 0.021819391250610352, 0.021774240493774414, 0.021673919677734375, 0.021659807205200197, 0.021809152603149414, 0.021731327056884766, 0.021931392669677734, 0.021693056106567382, 0.021699583053588867, 0.021825824737548828, 0.021707487106323243, 0.021755456924438477, 0.022004159927368164, 0.022732223510742187, 0.02278438377380371, 0.021881023406982423, 0.022161407470703123, 0.021897216796875, 0.021882463455200195, 0.02214134407043457, 0.022901920318603514, 0.022288799285888672, 0.02204275131225586, 0.0219736328125, 0.02192918395996094, 0.021956607818603514, 0.02182192039489746, 0.022082624435424806, 0.02186502456665039, 0.021887487411499023, 0.02190652847290039, 0.021796735763549804, 0.02182636833190918, 0.021842016220092773, 0.021910720825195313, 0.02179078483581543, 0.022016767501831055, 0.021749536514282228, 0.021898527145385743, 0.02183468818664551, 0.021695552825927736, 0.021814207077026367, 0.02178656005859375, 0.021741024017333986, 0.021899871826171875, 0.021931264877319338, 0.021879552841186523, 0.021783552169799804, 0.021744640350341796, 0.021725183486938478, 0.02190745544433594, 0.022089471817016603, 0.02180940818786621, 0.021788671493530275, 0.02197283172607422, 0.021821599960327148, 0.021941791534423827, 0.021915935516357423, 0.021915199279785157, 0.021753856658935547, 0.021834239959716797, 0.021700735092163085, 0.02161664009094238, 0.02190745544433594, 0.02192793655395508, 0.021721088409423828, 0.021618015289306642, 0.02179715156555176, 0.02173334312438965, 0.022065568923950195, 0.021669792175292968, 0.021928031921386718, 0.021805055618286134, 0.02188083267211914, 0.022034431457519533, 0.021769535064697264, 0.021689023971557617, 0.02172867202758789, 0.02186240005493164, 0.02177276802062988, 0.02166592025756836, 0.021780384063720702, 0.02191574478149414, 0.021737472534179687, 0.021704704284667968, 0.021772064208984376, 0.02189334487915039, 0.022226943969726562, 0.02191321563720703, 0.021843807220458984, 0.021807199478149415, 0.02202684783935547, 0.021812448501586913, 0.021887615203857423, 0.021664928436279297, 0.02175881576538086, 0.02168320083618164, 0.02171494483947754, 0.0217807674407959, 0.021747936248779298, 0.021801279067993163, 0.021738719940185548, 0.021783424377441407, 0.021702783584594727, 0.021651424407958985, 0.021983232498168945, 0.021819391250610352, 0.02179452705383301, 0.021839391708374022, 0.021889184951782225, 0.022256223678588868, 0.021809152603149414, 0.02174729537963867, 0.021780672073364257, 0.021819616317749025, 0.021831872940063477, 0.021679935455322267, 0.021708799362182618, 0.0216760311126709, 0.022042240142822266, 0.02206528091430664, 0.021762304306030274, 0.021847711563110352, 0.021942655563354493, 0.021951839447021483, 0.022052671432495115, 0.02184796714782715, 0.02192207908630371, 0.0217161922454834, 0.021605152130126953, 0.021589632034301757, 0.0220546875, 0.021807712554931642, 0.021784223556518555, 0.021709152221679687, 0.02183737564086914, 0.022038976669311525, 0.021979135513305666, 0.021700191497802734, 0.021729503631591797, 0.021921503067016603, 0.021718591690063477, 0.02177935981750488, 0.021845407485961914, 0.02186204719543457, 0.021814207077026367, 0.021882015228271483, 0.021729536056518554, 0.02180134391784668, 0.021696735382080078, 0.021753856658935547, 0.021651456832885742, 0.021884735107421876, 0.021780351638793945, 0.021678176879882813, 0.021704927444458007, 0.021675296783447266, 
0.021695104598999024, 0.021702560424804687, 0.021726879119873047, 0.02170854377746582, 0.021965599060058592, 0.02174787139892578, 0.021773439407348633, 0.022024927139282228, 0.021826976776123046, 0.02182819175720215, 0.02189427185058594, 0.0218919677734375, 0.021823488235473632, 0.021762048721313477, 0.021796863555908205, 0.021739519119262696, 0.021708383560180664, 0.021684576034545898, 0.02169196891784668, 0.02158355140686035, 0.02200454330444336, 0.021777952194213867, 0.02182806396484375, 0.021772287368774415, 0.02176425552368164, 0.021878623962402345, 0.021934080123901366, 0.021733375549316408, 0.02169036865234375, 0.02169254493713379, 0.021765888214111326, 0.021672607421875, 0.021807392120361327, 0.02176582336425781, 0.0217127685546875, 0.02174611282348633, 0.021827392578125, 0.022003807067871094, 0.02201113510131836, 0.021797727584838868, 0.022521535873413087, 0.021841279983520506, 0.021838560104370117, 0.021717023849487305, 0.02177039909362793, 0.021802400588989256, 0.02174425506591797, 0.021786624908447266, 0.021927679061889648, 0.021824928283691408, 0.021744480133056642, 0.022013887405395508, 0.02195167922973633, 0.021879167556762696, 0.021920255661010742, 0.02189030456542969, 0.02190822410583496, 0.021834943771362306, 0.021916479110717774, 0.021819391250610352, 0.02181875228881836, 0.021875328063964843, 0.02192153549194336, 0.02211164855957031, 0.021867359161376953, 0.02183123207092285, 0.021916095733642577, 0.021823488235473632, 0.021989280700683594, 0.02185635185241699, 0.021876735687255858, 0.02185420799255371, 0.021743583679199218, 0.02174569511413574, 0.02197020721435547, 0.021906143188476564, 0.021861440658569337, 0.021826240539550783, 0.021868799209594728, 0.02189030456542969, 0.021777280807495115, 0.021839744567871095, 0.021823488235473632, 0.021716928482055663, 0.021748960494995116, 0.02403619194030762, 0.021998624801635742, 0.021828575134277342, 0.021712928771972655, 0.021823455810546875, 0.021803007125854493, 0.021743616104125976, 0.022015775680541992, 0.021736831665039064, 0.0217893123626709, 0.021675519943237305, 0.021699071884155274, 0.021777664184570313, 0.021785343170166015, 0.021893119812011717, 0.02239676856994629, 0.02177155113220215, 0.02190835189819336, 0.02166774368286133, 0.021689855575561523, 0.02163158416748047, 0.021722944259643554, 0.021587392807006837, 0.021757728576660158, 0.021648319244384765, 0.021680160522460936, 0.021710048675537108, 0.022042943954467775, 0.021712352752685547, 0.02182601547241211, 0.021811456680297853, 0.021712736129760744, 0.021797632217407225, 0.021775136947631835, 0.021697248458862305, 0.021653696060180663, 0.02170675277709961, 0.0216760311126709, 0.021687583923339845, 0.021740255355834962, 0.021714559555053713, 0.021675615310668944, 0.021623296737670897, 0.022134559631347656, 0.02285532760620117, 0.02276438331604004, 0.02244607925415039, 0.02189107131958008, 0.0219965763092041, 0.021826528549194337, 0.021700607299804688, 0.021756128311157228, 0.022300447463989258, 0.021731327056884766, 0.02171683120727539, 0.0221812801361084, 0.021960447311401367, 0.021693376541137694, 0.021827648162841797, 0.021798912048339843, 0.021843584060668945, 0.021735807418823243, 0.021719039916992186, 0.021743616104125976, 0.021755903244018555, 0.021770240783691407, 0.021763711929321288, 0.02178291130065918, 0.021743616104125976, 0.021809152603149414, 0.02171452713012695, 0.021782943725585938, 0.021818944931030274, 0.02172153663635254, 0.021780479431152345, 0.021728927612304688, 0.02169215965270996, 0.021665599822998045, 0.021697311401367186, 
0.021811199188232423, 0.021782527923583983, 0.021794815063476563, 0.021702655792236326, 0.021722944259643554, 0.02173766326904297, 0.021687519073486327, 0.021733631134033204, 0.021796640396118165, 0.021865215301513672, 0.021803007125854493, 0.021908863067626953, 0.021854240417480467, 0.021721696853637694, 0.021746847152709962, 0.02170163154602051, 0.021778207778930664, 0.021614431381225586, 0.021782720565795898, 0.021730527877807618, 0.022096607208251955, 0.021760095596313478, 0.021831680297851562, 0.021778079986572267, 0.02183407974243164, 0.021798816680908203, 0.02190985679626465, 0.02186809539794922, 0.0217458553314209, 0.021718271255493166, 0.02168294334411621, 0.021683616638183592, 0.021661951065063478, 0.021676383972167968, 0.02192140769958496, 0.02196019172668457, 0.022227807998657225, 0.0222761287689209, 0.022470304489135742, 0.022237184524536133, 0.022408639907836914, 0.021993663787841795, 0.021846912384033204, 0.02191958427429199, 0.021774335861206053, 0.02182713508605957, 0.021907039642333984, 0.021875551223754883, 0.022380544662475587, 0.021667295455932618, 0.02169705581665039, 0.02167955207824707, 0.021755552291870116, 0.02185513687133789, 0.021820608139038085, 0.022218656539916993, 0.021695552825927736, 0.021689056396484375, 0.02163430404663086, 0.02178553581237793, 0.021682207107543944, 0.021946367263793946, 0.022006944656372072, 0.022039392471313476, 0.021823488235473632, 0.021774335861206053, 0.021710847854614256, 0.021798912048339843, 0.02187468719482422, 0.021796319961547853, 0.022030624389648437, 0.02185241508483887, 0.021772287368774415, 0.021927871704101563, 0.021796384811401368, 0.021819328308105467, 0.02177084732055664, 0.02173855972290039, 0.022092832565307616, 0.02183353614807129, 0.02183900833129883, 0.021838399887084962, 0.021995904922485352, 0.02176963233947754, 0.021727872848510743, 0.021798879623413085, 0.021766271591186524, 0.022150463104248046, 0.021879360198974608, 0.021960704803466798, 0.02185830307006836, 0.02190745544433594, 0.021852224349975587, 0.021851104736328127, 0.022323200225830078, 0.021980127334594726, 0.02202828788757324, 0.022224704742431642, 0.0221529598236084, 0.022525503158569337, 0.022256511688232422, 0.021970943450927736, 0.021934080123901366, 0.02179180717468262, 0.021732383728027344, 0.021900800704956053, 0.02389232063293457, 0.023971904754638673, 0.022113407135009765, 0.021984128952026366, 0.02185215950012207, 0.021929983139038087, 0.021814815521240233, 0.021746143341064453, 0.02168832015991211, 0.021798303604125976, 0.02186835289001465, 0.02178656005859375, 0.021768224716186522, 0.021749343872070313, 0.0217542724609375, 0.021720127105712892, 0.02173014450073242, 0.02173347282409668, 0.021786624908447266, 0.021764095306396485, 0.02182713508605957, 0.02177609634399414, 0.02175049591064453, 0.02165555191040039, 0.021652544021606445, 0.021626976013183592, 0.021627744674682616, 0.021984512329101563, 0.02337420845031738, 0.022847679138183592, 0.022040735244750975, 0.022428800582885742, 0.021906335830688475, 0.021751808166503905, 0.021934080123901366, 0.02266659164428711, 0.02228700828552246, 0.02206105613708496, 0.022120288848876953, 0.022055072784423826, 0.021886816024780275, 0.02174991989135742, 0.02169990348815918, 0.021733184814453126, 0.021746559143066405, 0.021685407638549804, 0.021727775573730467, 0.02178041648864746, 0.02172496032714844, 0.021723583221435548, 0.02173513603210449, 0.021815296173095702, 0.02172140884399414, 0.02177142333984375, 0.02200054359436035, 0.02197715187072754, 0.021808416366577148, 0.021881568908691407, 
0.021975040435791016, 0.022929407119750975, 0.021826751708984377, 0.021756736755371094, 0.021714015960693358, 0.021803680419921874, 0.021803264617919923, 0.021942272186279296, 0.02187059211730957, 0.022054912567138672, 0.021719039916992186, 0.021618688583374023, 0.021731199264526366, 0.021733503341674804, 0.02169206428527832, 0.02189961624145508, 0.021807104110717773, 0.02176598358154297, 0.02173967933654785, 0.021781984329223632, 0.02187913513183594, 0.021919551849365233, 0.021684608459472655, 0.02165555191040039, 0.021800960540771484, 0.021841920852661133, 0.02169343948364258, 0.02167091178894043, 0.021673759460449218, 0.021694719314575197, 0.021692384719848634, 0.02165113639831543, 0.021967168807983398, 0.021766143798828123, 0.021750848770141603, 0.02185100746154785, 0.021690208435058592, 0.021763904571533203, 0.021770656585693358, 0.021813247680664064, 0.02172313690185547, 0.02171494483947754, 0.021624704360961915, 0.021819360733032228, 0.021769792556762695, 0.02178927993774414, 0.02177340888977051, 0.021697439193725587, 0.02176348876953125, 0.02175030326843262, 0.021692480087280273, 0.021794815063476563, 0.021581823348999024, 0.02180019187927246, 0.021822208404541014, 0.021845184326171874, 0.02176083183288574, 0.021874015808105468, 0.021840543746948243, 0.022032384872436524, 0.02237824058532715, 0.02223708724975586, 0.022657567977905274, 0.022343488693237306, 0.022171648025512695, 0.02235935974121094, 0.02226655960083008, 0.022486080169677736, 0.02240812873840332, 0.02245631980895996, 0.022599679946899414, 0.022517759323120116, 0.02231888008117676, 0.022552799224853516, 0.02229043197631836, 0.02222198486328125, 0.022266719818115236, 0.022179840087890625, 0.022580480575561522, 0.02223904037475586, 0.02236716842651367]",tokens/s,45.704807393377656,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1506.066432,1596.915712,0.0,1201.668096,1189.151232,s,1,8.4394453125,8.4394453125,0.0,8.4394453125,8.4394453125,8.4394453125,8.4394453125,[8.4394453125],,kWh,3.494024245833695e-05,3.846210831008193e-06,1.0811675316000496e-05,4.959812860534564e-05,,MB,1522.200576,1791.950848,0.0,1382.023168,1351.367168,s,10,0.4749305305480957,0.04749305305480957,0.00011023556915181127,0.04746963310241699,0.0475606086730957,0.047669617080688474,0.047756823806762695,"[0.04777862548828125, 0.047507423400878906, 0.04743027114868164, 0.047454689025878904, 0.047536384582519534, 0.04740451049804688, 0.04734524917602539, 0.047470401763916016, 0.047534111022949216, 0.04746886444091797]",tokens/s,5390.262017995812,kWh,1.4211896730987327e-06,1.5673211640795952e-07,9.467806926990326e-07,2.524702482205725e-06,tokens/kWh,101398086.23166707,MB,1527.451648,1833.893888,0.0,1423.966208,1407.328256,s,10,13.804923950195311,1.3804923950195311,0.005752157842506621,1.379092041015625,1.3883178833007812,1.3886030334472657,1.3888311535644533,"[1.377676513671875, 1.3759522705078124, 
1.383525146484375, 1.3769593505859374, 1.3882545166015625, 1.3716900634765625, 1.37472216796875, 1.380507568359375, 1.38888818359375, 1.3867481689453125]",tokens/s,45.635890662844744,kWh,4.023062634273427e-05,4.4370357060596444e-06,1.8130345680100675e-05,6.27980077288946e-05,tokens/kWh,1003216.5394796827,,s,630,13.802479480743418,0.021908697588481598,0.00033537922035371343,0.021842127799987794,0.02214827880859375,0.02232315511703491,0.023321606845855717,"[0.02208777618408203, 0.022468511581420898, 0.021942304611206054, 0.02188287925720215, 0.021726303100585938, 0.021657855987548828, 0.021713184356689452, 0.021792896270751955, 0.021780736923217775, 0.021810176849365235, 0.021832704544067383, 0.021899295806884767, 0.021881887435913086, 0.021863359451293946, 0.02205695915222168, 0.02188902473449707, 0.021795936584472656, 0.021681055068969727, 0.02186854362487793, 0.021819391250610352, 0.021790271759033204, 0.021774784088134765, 0.021819263458251952, 0.02200752067565918, 0.02195088005065918, 0.021934080123901366, 0.021901311874389647, 0.02181679916381836, 0.02196329689025879, 0.021825216293334962, 0.021825632095336913, 0.02169264030456543, 0.021663679122924804, 0.021791967391967773, 0.021822303771972657, 0.021876735687255858, 0.0217227840423584, 0.021752511978149414, 0.021717920303344726, 0.021838336944580077, 0.021879039764404296, 0.022120447158813478, 0.021747711181640626, 0.02202828788757324, 0.0220446720123291, 0.02226380729675293, 0.0221529598236084, 0.02221696090698242, 0.02189673614501953, 0.021852863311767577, 0.021908319473266602, 0.021764448165893555, 0.02162505531311035, 0.021723520278930663, 0.021855552673339843, 0.02184262466430664, 0.02181724739074707, 0.0217109432220459, 0.021793920516967772, 0.021940223693847655, 0.021809535980224608, 0.021858495712280275, 0.021655296325683592, 0.021861215591430665, 0.0221265926361084, 0.02202828788757324, 0.021768192291259765, 0.021701856613159178, 0.02181353569030762, 0.021916160583496092, 0.0217969913482666, 0.02179574394226074, 0.02168931198120117, 0.02181065559387207, 0.022006303787231445, 0.021870304107666015, 0.021764127731323243, 0.02162441635131836, 0.021696928024291993, 0.021627071380615235, 0.021692480087280273, 0.021713056564331056, 0.021618816375732423, 0.02194112014770508, 0.02173401641845703, 0.021988639831542967, 0.02163580894470215, 0.0216529598236084, 0.021639904022216796, 0.021700159072875976, 0.021860832214355468, 0.02169856071472168, 0.021800960540771484, 0.02172047996520996, 0.021696128845214845, 0.0216560001373291, 0.021658143997192382, 0.0216944637298584, 0.02165113639831543, 0.021819007873535155, 0.021785280227661134, 0.02177142333984375, 0.021707584381103515, 0.021708768844604494, 0.02165510368347168, 0.021713312149047852, 0.02234582328796387, 0.025286848068237305, 0.021980607986450195, 0.021975488662719728, 0.021821151733398436, 0.021725439071655275, 0.021667999267578127, 0.021706911087036134, 0.021709983825683593, 0.021715456008911133, 0.021805055618286134, 0.02193017578125, 0.021912736892700194, 0.02188355255126953, 0.021760000228881835, 0.02176367950439453, 0.02175632095336914, 0.021768192291259765, 0.021776384353637695, 0.021628480911254883, 0.02236582374572754, 0.0216625919342041, 0.02176406478881836, 0.02185795211791992, 0.021551488876342773, 0.021654720306396483, 0.02156979179382324, 0.021749664306640625, 0.022313087463378907, 0.022829599380493164, 0.023365631103515624, 0.022073280334472655, 0.022161376953125, 0.022216064453125, 0.021853120803833007, 0.021657215118408204, 0.024447296142578127, 
0.021847904205322264, 0.021849952697753906, 0.021932319641113283, 0.02182310485839844, 0.02184832000732422, 0.021929983139038087, 0.021878528594970702, 0.022477216720581054, 0.02195644760131836, 0.021795103073120117, 0.021894208908081053, 0.021889503479003907, 0.02184815979003906, 0.021881088256835938, 0.02190048027038574, 0.021815839767456054, 0.021929119110107424, 0.021950912475585938, 0.021988895416259764, 0.02203526306152344, 0.02194246482849121, 0.02191155242919922, 0.021777664184570313, 0.021861120223999022, 0.02188662338256836, 0.02180291175842285, 0.021757535934448242, 0.021876991271972655, 0.021860895156860353, 0.02191321563720703, 0.02187718391418457, 0.02206934356689453, 0.02184592056274414, 0.021780479431152345, 0.021735424041748046, 0.021750816345214842, 0.021758943557739257, 0.021957887649536132, 0.022002431869506837, 0.021825279235839844, 0.021740095138549805, 0.021677280426025392, 0.021670047760009765, 0.02187107276916504, 0.021782463073730468, 0.021798816680908203, 0.02171219253540039, 0.021844671249389647, 0.021613983154296874, 0.021748319625854492, 0.022103967666625975, 0.021784671783447264, 0.021767295837402344, 0.02174611282348633, 0.02170444869995117, 0.02181190490722656, 0.02183302307128906, 0.021783199310302735, 0.021693599700927733, 0.022080352783203125, 0.02197711944580078, 0.021842016220092773, 0.021736576080322267, 0.021826335906982422, 0.021841920852661133, 0.021825023651123047, 0.021694847106933594, 0.021784095764160155, 0.02175651168823242, 0.02183590316772461, 0.02175164794921875, 0.021927808761596678, 0.021698720932006837, 0.02195609664916992, 0.02197875213623047, 0.022218847274780275, 0.021961503982543946, 0.021954559326171876, 0.02182963180541992, 0.021722431182861327, 0.02176233673095703, 0.022051231384277344, 0.021921567916870117, 0.02176652717590332, 0.021864063262939454, 0.021719263076782225, 0.021855615615844728, 0.02209270477294922, 0.021880544662475587, 0.02178771209716797, 0.021922752380371092, 0.021782527923583983, 0.021762271881103516, 0.021923103332519532, 0.021899776458740236, 0.021854368209838868, 0.021895008087158205, 0.02183600044250488, 0.021774208068847656, 0.021790624618530274, 0.02175702476501465, 0.021885759353637697, 0.0219420166015625, 0.021987903594970704, 0.021851551055908202, 0.022118783950805663, 0.02193561553955078, 0.021934431076049806, 0.021827743530273436, 0.021836736679077148, 0.021729280471801758, 0.02169036865234375, 0.021800575256347657, 0.021739904403686523, 0.021817344665527344, 0.021940288543701173, 0.02198428726196289, 0.021869760513305664, 0.021978847503662108, 0.021869823455810546, 0.02172185516357422, 0.02179043197631836, 0.021817344665527344, 0.023213823318481444, 0.024300064086914062, 0.0221014404296875, 0.02187731170654297, 0.021934080123901366, 0.021737472534179687, 0.02176646423339844, 0.021939903259277343, 0.022360063552856444, 0.021862079620361328, 0.021866111755371093, 0.02308371162414551, 0.0220153923034668, 0.021895456314086913, 0.021962432861328124, 0.021844608306884766, 0.02180735969543457, 0.021839584350585937, 0.0218439998626709, 0.021925344467163085, 0.021821632385253906, 0.021754079818725586, 0.021723264694213866, 0.021786144256591797, 0.021811391830444334, 0.02175116729736328, 0.022369184494018556, 0.021955968856811524, 0.021814176559448242, 0.02185513687133789, 0.021918752670288085, 0.021806880950927733, 0.021917695999145507, 0.022011423110961915, 0.02187516784667969, 0.02182054328918457, 0.021808000564575197, 0.021806848526000976, 0.021857568740844727, 0.024050016403198243, 0.02310736083984375, 
0.022021184921264647, 0.02186422348022461, 0.021796863555908205, 0.0218787841796875, 0.0220446720123291, 0.022828863143920897, 0.022120672225952147, 0.022045888900756837, 0.021937856674194334, 0.02186777687072754, 0.021787391662597657, 0.02161471939086914, 0.021600128173828125, 0.02168556785583496, 0.021665824890136718, 0.02172947120666504, 0.021600223541259764, 0.021586431503295898, 0.02182143974304199, 0.021936479568481444, 0.021799680709838867, 0.02166655921936035, 0.02157583999633789, 0.021661535263061523, 0.021710687637329102, 0.021893152236938475, 0.022117759704589842, 0.02195542335510254, 0.022003423690795897, 0.02239523124694824, 0.022083295822143554, 0.022098207473754884, 0.021929983139038087, 0.021894720077514647, 0.02172972869873047, 0.021757951736450197, 0.02180463981628418, 0.021903455734252928, 0.021848384857177734, 0.021663999557495116, 0.021595903396606445, 0.021675359725952147, 0.02171299171447754, 0.021713056564331056, 0.021596832275390623, 0.021626623153686523, 0.021722591400146485, 0.021776575088500977, 0.02185273551940918, 0.02182032012939453, 0.021738592147827147, 0.021759775161743163, 0.021716543197631836, 0.02167238426208496, 0.021725183486938478, 0.02162892723083496, 0.021689632415771484, 0.021662431716918944, 0.021785728454589842, 0.021618688583374023, 0.0216909122467041, 0.021723487854003905, 0.02169241523742676, 0.021769376754760743, 0.021821952819824218, 0.021748064041137695, 0.021811199188232423, 0.021670207977294922, 0.021669023513793944, 0.021658143997192382, 0.021792512893676758, 0.021791200637817382, 0.021731199264526366, 0.021686656951904297, 0.021786815643310548, 0.021659648895263672, 0.021618688583374023, 0.02180838394165039, 0.021828351974487306, 0.021901311874389647, 0.021702655792236326, 0.021694303512573242, 0.021785888671875, 0.021635711669921873, 0.021704959869384765, 0.021684288024902344, 0.02183558464050293, 0.02200726318359375, 0.022334112167358398, 0.021788768768310547, 0.02180415916442871, 0.021670623779296874, 0.021626752853393556, 0.02190905570983887, 0.021723743438720702, 0.02159619140625, 0.021794815063476563, 0.02186854362487793, 0.02175811195373535, 0.02173526382446289, 0.021790719985961913, 0.021937824249267577, 0.021840223312377928, 0.021713119506835937, 0.021724895477294923, 0.021868320465087892, 0.0218176326751709, 0.022011327743530273, 0.02206572723388672, 0.021842144012451173, 0.02194000053405762, 0.021929536819458008, 0.021920255661010742, 0.021747392654418947, 0.021852096557617186, 0.021974720001220704, 0.021913248062133787, 0.021808095932006836, 0.021724288940429687, 0.021955455780029297, 0.02176540756225586, 0.02180374336242676, 0.021784576416015625, 0.021720447540283204, 0.021792512893676758, 0.021817792892456056, 0.021710784912109374, 0.021664255142211913, 0.02181046485900879, 0.021875423431396486, 0.02223308753967285, 0.02190332794189453, 0.02196272087097168, 0.021780832290649414, 0.021789920806884765, 0.021792640686035158, 0.021898143768310546, 0.021855615615844728, 0.021670528411865234, 0.02163302421569824, 0.021752864837646484, 0.021871007919311524, 0.02182406425476074, 0.02178825569152832, 0.021668256759643553, 0.021816864013671874, 0.02180963134765625, 0.021902912139892577, 0.02167612838745117, 0.02163337516784668, 0.021744895935058593, 0.02172319984436035, 0.021834272384643555, 0.021811359405517577, 0.02180214309692383, 0.021840351104736328, 0.021862783432006837, 0.02191564750671387, 0.021812480926513673, 0.021850879669189454, 0.021948320388793945, 0.02190140724182129, 0.021876735687255858, 0.02195462417602539, 
0.021839103698730468, 0.022005887985229493, 0.021907487869262696, 0.021976640701293945, 0.021804000854492186, 0.02209174346923828, 0.021980863571166992, 0.021970848083496093, 0.021933919906616212, 0.021910112380981447, 0.022138784408569336, 0.022306911468505858, 0.021975040435791016, 0.02230588722229004, 0.022006656646728514, 0.022063039779663087, 0.022029600143432616, 0.021865280151367187, 0.022585119247436523, 0.022034656524658202, 0.021929983139038087, 0.02188902473449707, 0.02185420799255371, 0.021772287368774415, 0.021792192459106446, 0.021955135345458985, 0.021921279907226563, 0.021891584396362306, 0.02186835289001465, 0.021932287216186522, 0.02189267158508301, 0.022294271469116212, 0.022198911666870116, 0.022417407989501953, 0.02215920066833496, 0.02197443199157715, 0.021887744903564453, 0.022714368820190428, 0.02211167907714844, 0.02228384017944336, 0.022207040786743164, 0.02214784049987793, 0.021814783096313475, 0.021842111587524415, 0.021934080123901366, 0.021989280700683594, 0.022206560134887695, 0.022039968490600585, 0.02193276786804199, 0.021874048233032226, 0.022082048416137694, 0.022331392288208008, 0.022171520233154298, 0.022017311096191407, 0.022227807998657225, 0.022124544143676757, 0.022050304412841795, 0.021873151779174805, 0.021927679061889648, 0.021909824371337892, 0.02182975959777832, 0.02187884712219238, 0.021843584060668945, 0.02179033660888672, 0.02174208068847656, 0.02173097610473633, 0.021801055908203124, 0.02180678367614746, 0.021709375381469727, 0.021747711181640626, 0.021784576416015625, 0.02186419105529785, 0.021989152908325194, 0.021915615081787108, 0.021774112701416017, 0.02176063919067383, 0.02172480010986328, 0.022883071899414062, 0.023624895095825195, 0.022219295501708983, 0.021976736068725584, 0.021887584686279295, 0.022031328201293946, 0.021953311920166016, 0.02196873664855957, 0.02239299201965332, 0.02224508857727051, 0.022255680084228516, 0.022353216171264647, 0.022068031311035158, 0.022126848220825196, 0.0221628475189209, 0.021832128524780274, 0.021816383361816405, 0.022019264221191406, 0.02187775993347168, 0.021813087463378907, 0.02181545639038086, 0.02178166389465332, 0.02215990447998047, 0.022110048294067382, 0.021965311050415038, 0.021794336318969727, 0.021868192672729492, 0.022483327865600585, 0.022421056747436524, 0.022393247604370118, 0.021807647705078124, 0.021798303604125976, 0.02170230484008789, 0.02195916748046875, 0.022182239532470702, 0.022289600372314453, 0.02194710350036621, 0.025202144622802736, 0.02198796844482422, 0.022403072357177735, 0.022409023284912108, 0.022170047760009765, 0.021812992095947267, 0.021729280471801758, 0.022263872146606446, 0.02225161552429199, 0.022152223587036134, 0.021957439422607423, 0.022183168411254884, 0.02192665672302246, 0.0218024959564209, 0.021822975158691405, 0.021716064453125, 0.02188035202026367, 0.021989503860473634, 0.02184976005554199, 0.022272319793701173, 0.021989856719970703, 0.022089536666870118, 0.021940223693847655, 0.021835775375366212, 0.02166988754272461, 0.021722400665283203, 0.021801824569702147, 0.021854080200195313, 0.02190336036682129, 0.02184601593017578, 0.021814367294311524, 0.021939104080200195, 0.02185420799255371, 0.021730655670166014, 0.021809375762939454, 0.02193174362182617, 0.021799583435058594, 0.021864511489868163, 0.021795040130615236, 0.021738431930541993, 0.02190598487854004, 0.022024192810058595, 0.02194486427307129, 0.02189695930480957, 0.021736928939819336]",tokens/s,45.64397294550935,, 
4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 16.12 MiB is free. Process 20149 has 14.72 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 187.67 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,6768.16896,7762.542592,0.0,7367.294976,7351.94368,s,1,12.6473876953125,12.6473876953125,0.0,12.6473876953125,12.6473876953125,12.6473876953125,12.6473876953125,[12.6473876953125],,kWh,0.0001608182069083417,1.773204727626298e-05,4.962337303200323e-05,0.0002281736272166079,,MB,1658.08128,8372.813824,0.0,7962.886144,7872.44544,s,10,3.208184783935547,0.3208184783935547,0.0015482526784383604,0.3211701965332031,0.32249743347167964,0.322578581237793,0.3226434994506836,"[0.31727294921875, 0.3193846435546875, 0.3202622375488281, 0.3202374572753906, 0.32068597412109373, 0.3224794006347656, 0.3218287658691406, 0.3216544189453125, 0.32171920776367186, 0.32265972900390627]",tokens/s,797.9590243114346,kWh,9.335483573828066e-06,1.0295335762255684e-06,6.214831360749928e-06,1.6579848510803562e-05,tokens/kWh,15440430.582534475,MB,1666.674688,8624.472064,0.0,8214.544384,8118.577152,s,10,27.089406494140626,2.7089406494140627,0.004729172827181731,2.7089176025390627,2.7149712158203125,2.7152866333007815,2.7155389672851564,"[2.704410888671875, 2.714901123046875, 2.711612060546875, 2.701280029296875, 2.707578369140625, 2.70420947265625, 2.71560205078125, 2.7102568359375, 2.713653076171875, 2.705902587890625]",tokens/s,23.25632346859528,kWh,7.935913866033882e-05,8.753395499857032e-06,5.275871581805027e-05,0.00014087124997824615,tokens/kWh,447216.8736326872,,s,630,27.0860598335266,0.04299374576750256,0.0003475571523909005,0.042998958587646484,0.04339941749572754,0.04347417964935303,0.043852793464660644,"[0.043175136566162106, 0.04260927963256836, 0.042326175689697265, 0.042278911590576174, 0.04229935836791992, 0.04226233673095703, 0.04241388702392578, 0.04229721450805664, 0.04244534301757812, 0.04283391952514649, 0.04293603134155274, 0.04262704086303711, 0.04262508773803711, 0.042666240692138674, 0.04270489501953125, 0.042513664245605466, 0.04242918395996094, 0.042425472259521486, 0.04255424118041992, 0.04276627349853516, 0.043072895050048826, 0.04264400100708008, 0.04272947311401367, 0.04285164642333984, 0.042676544189453124, 0.042777118682861326, 0.04273299026489258, 0.042717758178710936, 
0.0426673583984375, 0.04304553604125977, 0.04272127914428711, 0.04272281646728516, 0.04283030319213867, 0.04328656005859375, 0.04295091247558594, 0.04286233520507812, 0.042995712280273435, 0.04302627182006836, 0.04289539337158203, 0.04282102584838867, 0.04287561416625976, 0.042931617736816405, 0.043772350311279296, 0.04319452667236328, 0.04307891082763672, 0.043223583221435546, 0.04314470291137695, 0.043184864044189454, 0.04308582305908203, 0.04333884811401367, 0.04351084899902344, 0.04338876724243164, 0.04333878326416016, 0.04329776000976562, 0.043237377166748046, 0.043345375061035155, 0.04350620651245117, 0.04333977508544922, 0.04346060943603516, 0.043302913665771485, 0.0434952621459961, 0.043472705841064455, 0.043336032867431644, 0.043364864349365234, 0.042727294921875, 0.042464958190917966, 0.042301822662353515, 0.04230569458007812, 0.042411678314208986, 0.04323148727416992, 0.042772289276123046, 0.042735809326171874, 0.04303811264038086, 0.04293443298339844, 0.04286240005493164, 0.04269680023193359, 0.04265014266967773, 0.0425533447265625, 0.042906688690185546, 0.04272019195556641, 0.042668033599853515, 0.04272947311401367, 0.04288716888427734, 0.04304608154296875, 0.04324560165405274, 0.043031326293945314, 0.0432042236328125, 0.04300611114501953, 0.04313520050048828, 0.04304217529296875, 0.04284409713745117, 0.044810943603515625, 0.042923648834228514, 0.042874591827392575, 0.04308972930908203, 0.04332015991210938, 0.04316364669799805, 0.04313907241821289, 0.043199806213378905, 0.042832576751708984, 0.04298342514038086, 0.04301801681518555, 0.043191520690917966, 0.04295100784301758, 0.042984096527099606, 0.0432803840637207, 0.04321459197998047, 0.043235584259033205, 0.04326326370239258, 0.043324127197265624, 0.043549728393554685, 0.04349216079711914, 0.04337606430053711, 0.043301601409912106, 0.04330873489379883, 0.04346502304077148, 0.04342572784423828, 0.04344015884399414, 0.04330918502807617, 0.04337615966796875, 0.04341142272949219, 0.04343392181396484, 0.04336064147949219, 0.04335113525390625, 0.04332835388183594, 0.043323551177978516, 0.04378412628173828, 0.04307769775390625, 0.04273385620117188, 0.04267740631103516, 0.04379296112060547, 0.04258611297607422, 0.042510337829589843, 0.04253696060180664, 0.04282777786254883, 0.042802783966064455, 0.042844703674316406, 0.04269452667236328, 0.04265286254882812, 0.04262790298461914, 0.042447872161865234, 0.04236185455322266, 0.042792095184326175, 0.04294332885742187, 0.042979358673095706, 0.04304598236083984, 0.04305516815185547, 0.04295148849487305, 0.0432042236328125, 0.043106590270996094, 0.043219039916992184, 0.0431267204284668, 0.04299987030029297, 0.04300182342529297, 0.04302214431762695, 0.04272297668457031, 0.04273849487304687, 0.04298521423339844, 0.04286054229736328, 0.042727294921875, 0.04290070343017578, 0.04313910293579101, 0.04299660873413086, 0.042823680877685545, 0.042790912628173826, 0.04308377456665039, 0.04322742462158203, 0.04312998580932617, 0.0431129264831543, 0.043554943084716795, 0.04348435211181641, 0.043399295806884765, 0.043442878723144535, 0.04337254333496094, 0.04320665740966797, 0.043200511932373044, 0.04338278579711914, 0.04298076629638672, 0.04298179244995117, 0.04310444641113281, 0.043286720275878904, 0.043224895477294925, 0.04326358413696289, 0.0430838737487793, 0.04315343856811524, 0.04418588638305664, 0.04316774368286133, 0.043069438934326174, 0.04311859130859375, 0.04332729721069336, 0.0427760009765625, 0.042334720611572264, 0.04224227142333985, 0.04214505767822266, 0.04234400177001953, 
0.042380287170410154, 0.04238131332397461, 0.042256385803222656, 0.04229529571533203, 0.04232585525512695, 0.042410144805908205, 0.04254265594482422, 0.042756542205810544, 0.04262911987304688, 0.04316998291015625, 0.04266374588012695, 0.042491233825683594, 0.04270662307739258, 0.04284515380859375, 0.04293001556396484, 0.04296720123291015, 0.04312678527832031, 0.043099838256835936, 0.04295507049560547, 0.043017822265625, 0.04270940780639648, 0.04268646240234375, 0.0426618881225586, 0.04274380874633789, 0.04292348861694336, 0.045267486572265626, 0.04263427352905273, 0.04264595031738281, 0.04264400100708008, 0.0425799674987793, 0.04271004867553711, 0.04305193710327149, 0.0428073616027832, 0.04279500961303711, 0.04286054229736328, 0.043222496032714844, 0.04295529556274414, 0.04301433563232422, 0.043036449432373045, 0.043216926574707035, 0.04325510406494141, 0.04335599899291992, 0.043103073120117186, 0.04330873489379883, 0.04307366561889649, 0.043159679412841795, 0.04308486557006836, 0.04294547271728515, 0.04293228912353515, 0.04294569778442383, 0.04314339065551758, 0.04308569717407226, 0.04297999954223633, 0.042998046875, 0.04310201644897461, 0.04306275177001953, 0.04313296127319336, 0.04342041778564453, 0.042923072814941406, 0.04268304061889648, 0.04245695877075195, 0.042523040771484374, 0.042502143859863284, 0.04253081512451172, 0.04278681564331055, 0.042659839630126956, 0.042567550659179686, 0.04259648132324219, 0.04259804916381836, 0.04273958587646484, 0.042856929779052734, 0.042649600982666014, 0.042641407012939454, 0.0430797119140625, 0.04279497528076172, 0.0428721923828125, 0.04324940872192383, 0.04305395126342773, 0.04312063980102539, 0.0432474250793457, 0.04321913528442383, 0.043122337341308596, 0.04296739196777344, 0.04294655990600586, 0.04298886489868164, 0.04289401626586914, 0.043014144897460936, 0.04270284652709961, 0.04286054229736328, 0.0427845458984375, 0.04271491241455078, 0.04285673522949219, 0.042626911163330075, 0.04286291122436523, 0.04267212677001953, 0.042883071899414066, 0.04324262237548828, 0.04294569778442383, 0.04293913650512695, 0.04305372619628906, 0.04340768051147461, 0.04332134246826172, 0.04333939361572266, 0.043321758270263674, 0.043337696075439455, 0.04309196853637695, 0.04382668685913086, 0.04306585693359375, 0.04290764617919922, 0.04312441635131836, 0.043477310180664065, 0.043245567321777346, 0.04310220718383789, 0.04324966430664062, 0.04322009658813476, 0.04309439849853516, 0.04290729522705078, 0.04290224075317383, 0.0431426887512207, 0.04330131149291992, 0.043869632720947266, 0.043270721435546874, 0.042651649475097655, 0.04259635162353516, 0.04260051345825195, 0.0426044807434082, 0.04250787353515625, 0.04245340728759766, 0.0425920639038086, 0.04257574462890625, 0.04250860977172852, 0.042684417724609375, 0.0425984001159668, 0.04245884704589844, 0.042473758697509766, 0.04240588760375977, 0.04229324722290039, 0.04250447845458984, 0.04273347091674805, 0.04280096054077148, 0.04284524917602539, 0.04294547271728515, 0.04292812728881836, 0.042928382873535155, 0.043020030975341794, 0.042979488372802736, 0.04280073547363281, 0.04271129608154297, 0.04288857650756836, 0.042961536407470705, 0.042638561248779294, 0.042549312591552736, 0.04258233642578125, 0.04249436950683594, 0.04265369415283203, 0.042676223754882815, 0.04270451354980469, 0.04268207931518555, 0.04268304061889648, 0.042706367492675784, 0.04297580718994141, 0.04300799942016602, 0.043044864654541014, 0.043243518829345705, 0.04332921600341797, 0.04344579315185547, 0.04337334442138672, 0.04329676818847656, 
0.04333772659301758, 0.04314908981323242, 0.043308704376220704, 0.04311507034301758, 0.04339859390258789, 0.0432213134765625, 0.043151649475097656, 0.04324476623535156, 0.04338137435913086, 0.04332284927368164, 0.043380577087402346, 0.0433111686706543, 0.043299583435058596, 0.04343603134155274, 0.04347420883178711, 0.04339913558959961, 0.04303260803222656, 0.04261068725585938, 0.042665985107421874, 0.042510112762451174, 0.042610912322998046, 0.04250009536743164, 0.04269430541992188, 0.042690910339355466, 0.0426794548034668, 0.042686431884765626, 0.04272422409057617, 0.042659103393554686, 0.04273353576660156, 0.04263616180419922, 0.04261452865600586, 0.04288703918457031, 0.04281983947753906, 0.04295465469360352, 0.04302963256835937, 0.04312092971801758, 0.043067935943603516, 0.04314457702636719, 0.04344911956787109, 0.0433889274597168, 0.04321062469482422, 0.04319004821777344, 0.04307145690917969, 0.04307622528076172, 0.042833663940429687, 0.04305100631713867, 0.04298342514038086, 0.0429317741394043, 0.04286028671264648, 0.04282233428955078, 0.04310131072998047, 0.04322739028930664, 0.04371212768554687, 0.04313008117675781, 0.04309328079223633, 0.04327679824829102, 0.04336844635009766, 0.04324966430664062, 0.043227134704589845, 0.0434628791809082, 0.04340918350219727, 0.04336460876464844, 0.04359302520751953, 0.04339471817016602, 0.04343452835083008, 0.04332284927368164, 0.043485729217529294, 0.043612415313720704, 0.04323328018188476, 0.043361854553222653, 0.043153854370117185, 0.04337254333496094, 0.04334796905517578, 0.04329471969604492, 0.04329676818847656, 0.043466751098632815, 0.0434741439819336, 0.04342454528808594, 0.044015777587890624, 0.043071327209472654, 0.04274176025390625, 0.04261273574829102, 0.042592254638671875, 0.04266569519042969, 0.042514720916748044, 0.04254316711425781, 0.04259564971923828, 0.0426943359375, 0.04262499237060547, 0.042585247039794924, 0.04269622421264648, 0.04262659072875977, 0.042509056091308596, 0.043292671203613284, 0.04283596801757812, 0.04307558441162109, 0.042925216674804687, 0.04285116958618164, 0.04293632125854492, 0.04298150253295899, 0.04311846542358398, 0.04318767929077148, 0.04304540634155273, 0.04296499252319336, 0.0429439697265625, 0.04309657669067383, 0.0428787841796875, 0.04274812698364258, 0.04267827224731445, 0.04266921615600586, 0.04284288024902344, 0.042853759765625, 0.0429288330078125, 0.04280732727050781, 0.042731201171875, 0.042799488067626956, 0.04263724899291992, 0.042774528503417966, 0.043022335052490236, 0.04317388916015625, 0.04325580978393555, 0.04331433486938477, 0.04313993453979492, 0.04331699371337891, 0.04341785430908203, 0.043493377685546876, 0.04351795196533203, 0.043425792694091796, 0.043380062103271486, 0.043374591827392575, 0.044028575897216794, 0.043262046813964845, 0.04314720153808594, 0.04341574478149414, 0.04328015899658203, 0.04346265411376953, 0.04312998580932617, 0.0431396484375, 0.04314963150024414, 0.04316159820556641, 0.04323667144775391, 0.04386345672607422, 0.04311513519287109, 0.04253692626953125, 0.04272332763671875, 0.042768222808837894, 0.0427848014831543, 0.042829376220703125, 0.04252102279663086, 0.04254937744140625, 0.04265475082397461, 0.04252156829833984, 0.04261999893188476, 0.04249078369140625, 0.0427047348022461, 0.04272483062744141, 0.04268716812133789, 0.04311203384399414, 0.043108158111572266, 0.042885921478271485, 0.04316140747070313, 0.04310812759399414, 0.04309833526611328, 0.04340675354003906, 0.04357590484619141, 0.04315468978881836, 0.04315350341796875, 0.0431025276184082, 
0.042889217376708984, 0.04286294555664062, 0.04276428985595703, 0.042773696899414064, 0.04284627151489258, 0.042910465240478514, 0.042882591247558596, 0.042931968688964844, 0.04295753479003906, 0.04287078475952148, 0.04290124893188477, 0.043014400482177736, 0.04313638305664062, 0.04307567977905274, 0.04341420745849609, 0.043764896392822265, 0.04334457778930664, 0.043402400970458985, 0.04345328140258789, 0.043361568450927736, 0.04321254348754883, 0.04325680160522461, 0.04315468978881836, 0.04310940933227539, 0.043173599243164065, 0.043200096130371096, 0.04331875228881836, 0.043138080596923825, 0.04319753646850586, 0.04346559906005859, 0.0434005126953125, 0.04327088165283203, 0.04349737548828125, 0.04348096084594726, 0.043432064056396484, 0.043490623474121096, 0.04354662322998047, 0.04286159896850586, 0.04266902542114258, 0.04239321517944336, 0.042377376556396486, 0.0426539192199707, 0.042573471069335934, 0.04252447891235352, 0.04252726364135742, 0.04244406509399414, 0.04257839965820313, 0.042493438720703124, 0.042558208465576175, 0.042543102264404296, 0.04253081512451172, 0.04278499221801758, 0.04280889511108398, 0.04281708908081055, 0.0427977294921875, 0.04288943862915039, 0.04315881729125977, 0.04314742279052734, 0.04312303924560547, 0.043089759826660155, 0.04301635360717773, 0.04305676651000977, 0.043032958984375, 0.04296089553833008, 0.04294620895385742, 0.04287932968139648, 0.04281327819824219, 0.042948257446289065, 0.04295731353759766, 0.04287078475952148, 0.042831871032714845, 0.0428361930847168, 0.04291766357421875, 0.04284633636474609, 0.0431962890625, 0.04320367813110351, 0.043170719146728515, 0.04303987121582031, 0.04301433563232422, 0.0432803840637207, 0.04325446319580078, 0.043128833770751954, 0.043096065521240234, 0.043093345642089845, 0.04324121475219726, 0.04332646560668945, 0.04329564666748047, 0.04312326431274414, 0.042932193756103514, 0.042936798095703124, 0.04297836685180664, 0.043021247863769534, 0.04305660629272461, 0.043409950256347654, 0.04320460891723633, 0.04338614273071289, 0.04315119934082031, 0.04305395126342773, 0.043186176300048826]",tokens/s,23.25919694012482,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = 
scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,8222.408704,11243.814912,0.0,10848.567296,10616.027648,s,1,14.2419736328125,14.2419736328125,0.0,14.2419736328125,14.2419736328125,14.2419736328125,14.2419736328125,[14.2419736328125],,kWh,0.00021355691618333593,2.35493461762268e-05,6.55511635520023e-05,0.00030265742591156504,,MB,4069.998592,11675.828224,0.0,11265.900544,11070.470656,s,10,3.8241392517089845,0.3824139251708984,0.0012889247604856932,0.3826279144287109,0.38372066345214845,0.38415348358154294,0.3844997396850586,"[0.38028997802734377, 0.38029132080078126, 0.3819477233886719, 0.38262469482421874, 0.3820273132324219, 0.38263113403320315, 0.3833066711425781, 0.3836244812011719, 0.3828096313476563, 0.3845863037109375]",tokens/s,669.4316894595174,kWh,1.1202163411574049e-05,1.2354040652425912e-06,7.444728177999863e-06,1.9882295654816503e-05,tokens/kWh,12875776.743516224,MB,4074.22976,11677.925376,0.0,11267.997696,11070.473216,s,10,29.081416992187503,2.90814169921875,0.007295716749810582,2.9061455078125,2.91719306640625,2.9194087890625,2.9211813671875,"[2.89908349609375, 2.898954833984375, 2.9021630859375, 2.904309814453125, 2.90538232421875, 2.90690869140625, 2.91390625, 2.91670068359375, 2.91238330078125, 2.92162451171875]",tokens/s,21.663318543564937,kWh,8.528220892301002e-05,9.406210770988544e-06,5.671276759239928e-05,0.00015140118728639785,tokens/kWh,416112.9851698331,,s,630,29.07887252426148,0.046156940514700755,0.0006409928978163092,0.04608177566528321,0.046541160202026366,0.04677731609344483,0.04871881748199463,"[0.048164958953857424, 0.04631155014038086, 0.045741825103759765, 0.04528819274902344, 0.04536524963378906, 0.045560958862304685, 0.04535491180419922, 0.045319137573242185, 0.04544307327270508, 0.04581785583496094, 0.045707263946533204, 0.04564377593994141, 0.045555519104003905, 0.0458856315612793, 0.04575027084350586, 0.04581894302368164, 0.0458240966796875, 0.045709312438964846, 0.04558921432495117, 0.04567772674560547, 0.04589209747314453, 0.046155967712402345, 0.045975841522216794, 0.04598169708251953, 0.04588339233398438, 0.04588457489013672, 0.045689697265625, 0.04586700820922852, 0.04571731185913086, 0.04568492889404297, 0.04603289413452148, 0.04934041595458984, 0.04544633483886719, 0.04575129699707031, 0.045991744995117184, 0.04591734313964844, 0.0456668815612793, 0.046156063079833984, 0.04584041595458985, 0.04580758285522461, 0.045764606475830076, 0.046639102935791016, 0.04634124755859375, 0.046091136932373045, 0.04625408172607422, 0.046419456481933595, 0.04605206298828125, 0.045895454406738284, 0.04620083236694336, 0.046066944122314456, 0.04586572647094726, 0.04624991989135742, 0.046303329467773435, 0.046088062286376956, 0.04606886291503906, 0.04619548797607422, 0.04612428665161133, 0.04603116989135742, 0.04631324768066406, 0.046482273101806644, 0.046430206298828124, 0.046235649108886716, 0.046524417877197265, 
0.04808476638793945, 0.046153953552246094, 0.04573344039916992, 0.04545171356201172, 0.04529507064819336, 0.04539446258544922, 0.045725311279296875, 0.04552096176147461, 0.04553254318237305, 0.045460193634033204, 0.04581398391723633, 0.04593791961669922, 0.045542144775390626, 0.04563763046264648, 0.0458851203918457, 0.04563180923461914, 0.045676544189453126, 0.04588742446899414, 0.04588959884643555, 0.045729694366455076, 0.045846622467041014, 0.046132225036621094, 0.04610886383056641, 0.04602671813964844, 0.04599689483642578, 0.04591753768920898, 0.045761184692382814, 0.04547788619995117, 0.04575641632080078, 0.04597145462036133, 0.045916160583496096, 0.045768543243408205, 0.04661468887329102, 0.04588544082641602, 0.04621868896484375, 0.0459986572265625, 0.046034942626953124, 0.04585062408447266, 0.045860576629638675, 0.046068000793457034, 0.046314815521240234, 0.046171966552734374, 0.04618675231933594, 0.04638800048828125, 0.04631702423095703, 0.04626675033569336, 0.04692768096923828, 0.046050880432128904, 0.04583692932128906, 0.04597760009765625, 0.04613529586791992, 0.046120960235595705, 0.046301185607910154, 0.046456832885742184, 0.04632726287841797, 0.04596380615234375, 0.04633599853515625, 0.046107872009277344, 0.04594358444213867, 0.04606486511230469, 0.046518463134765625, 0.046434913635253906, 0.04636841583251953, 0.04864233779907227, 0.046163841247558596, 0.045297664642333986, 0.04524249649047852, 0.04565971374511719, 0.04552262496948242, 0.04529024124145508, 0.04550451278686524, 0.04563750457763672, 0.04560441589355469, 0.045402687072753904, 0.04558848190307617, 0.04574003219604492, 0.04577280044555664, 0.045927871704101564, 0.04580409622192383, 0.04604431915283203, 0.04602054214477539, 0.045833118438720705, 0.045830142974853515, 0.04624361419677735, 0.046055553436279296, 0.04616790390014648, 0.04598969650268555, 0.0458160629272461, 0.045811904907226565, 0.04601769638061524, 0.04584320068359375, 0.04563158416748047, 0.04605132675170898, 0.04582809448242187, 0.045838207244873044, 0.04569305419921875, 0.04625305557250976, 0.04619353485107422, 0.04616534423828125, 0.04596611022949219, 0.04619059371948242, 0.0458092155456543, 0.045900222778320315, 0.046442497253417966, 0.04638515090942383, 0.046274559020996094, 0.046276607513427735, 0.04642611312866211, 0.04627648162841797, 0.046176383972167966, 0.04599193572998047, 0.046369953155517576, 0.046209888458251955, 0.04618854522705078, 0.046258174896240234, 0.046300865173339846, 0.04631788635253906, 0.046102527618408204, 0.046319454193115235, 0.046000286102294924, 0.046316959381103515, 0.04773846435546875, 0.0459950065612793, 0.046382270812988284, 0.046532703399658204, 0.04658150482177734, 0.04873788833618164, 0.04627011108398438, 0.04568966293334961, 0.04546688079833985, 0.04542086410522461, 0.04538617706298828, 0.04570111846923828, 0.045649921417236325, 0.04565536117553711, 0.045596607208251955, 0.045574783325195316, 0.045582462310791015, 0.045747329711914066, 0.045706111907958986, 0.045690879821777344, 0.04585023880004883, 0.046246078491210936, 0.04601465606689453, 0.04595097732543945, 0.04582988739013672, 0.04611836624145508, 0.046297889709472656, 0.04645891189575195, 0.04633542251586914, 0.0460120964050293, 0.045988929748535155, 0.04579919815063477, 0.04601023864746094, 0.045631614685058594, 0.045791233062744144, 0.0461475830078125, 0.045830142974853515, 0.045661376953125, 0.046054206848144534, 0.04593670272827149, 0.04588947296142578, 0.045932544708251956, 0.046489601135253904, 0.04622463989257813, 0.045918270111083986, 
0.04635833740234375, 0.04641471862792969, 0.046327808380126956, 0.046327808380126956, 0.046660606384277346, 0.046429183959960936, 0.046358047485351564, 0.04631804656982422, 0.04626383972167969, 0.046037471771240235, 0.04619651031494141, 0.04614115142822266, 0.0460880012512207, 0.04601721572875977, 0.046239070892333985, 0.04634022521972656, 0.04610915374755859, 0.04632787322998047, 0.046435806274414064, 0.04654671859741211, 0.046488319396972656, 0.046781822204589846, 0.04657187271118164, 0.04790537643432617, 0.045893791198730466, 0.045526912689208984, 0.045326366424560546, 0.04537472152709961, 0.04554991912841797, 0.045596511840820315, 0.045493919372558596, 0.045714305877685546, 0.045821502685546876, 0.04591177749633789, 0.0471448974609375, 0.045625919342041014, 0.04584368133544922, 0.04574720001220703, 0.04577231979370117, 0.04630486297607422, 0.046045089721679686, 0.04583932876586914, 0.04605747222900391, 0.04646297454833984, 0.046132926940917966, 0.04598188781738281, 0.04592038345336914, 0.04597145462036133, 0.04579878234863281, 0.046016670227050784, 0.04581795120239258, 0.04568307113647461, 0.045657470703125, 0.04600064086914062, 0.04617792129516601, 0.04601619338989258, 0.04600876617431641, 0.04591654586791992, 0.04675379180908203, 0.04589977645874024, 0.04614963150024414, 0.046282176971435544, 0.04606991958618164, 0.04646303939819336, 0.046604095458984376, 0.04626444625854492, 0.04631155014038086, 0.04632953643798828, 0.0463917121887207, 0.04593068695068359, 0.04619059371948242, 0.04610784149169922, 0.04691846466064453, 0.046004222869873046, 0.046491649627685545, 0.04612076950073242, 0.0461354866027832, 0.04622335815429687, 0.04640768051147461, 0.04603084945678711, 0.04639091110229492, 0.046381439208984375, 0.04655712127685547, 0.04635859298706055, 0.0466998405456543, 0.04660089492797852, 0.04867212677001953, 0.04618918228149414, 0.045563488006591796, 0.04525507354736328, 0.04563504028320312, 0.04556035232543945, 0.04551475143432617, 0.04540825653076172, 0.04581292724609375, 0.045947711944580076, 0.04576208114624023, 0.04677014541625977, 0.04558694458007812, 0.04596736145019531, 0.0459356803894043, 0.045884353637695316, 0.04599398422241211, 0.04596736145019531, 0.04580966567993164, 0.045752479553222654, 0.04623344039916992, 0.04632166290283203, 0.04632355117797852, 0.04611625671386719, 0.04594768142700195, 0.04576457595825195, 0.04604108810424805, 0.0458342399597168, 0.04581292724609375, 0.04592723083496094, 0.04601187133789063, 0.04588188934326172, 0.045811038970947265, 0.04600284957885742, 0.04631347274780274, 0.04600937652587891, 0.04603798294067383, 0.04625161743164063, 0.04624835205078125, 0.04603257751464844, 0.046128929138183596, 0.046602783203125, 0.04655923080444336, 0.04659404754638672, 0.04640915298461914, 0.04615948867797852, 0.04621964645385742, 0.04620316696166992, 0.04649398422241211, 0.04604470443725586, 0.046225887298583984, 0.0463927993774414, 0.046034847259521484, 0.04643699264526367, 0.04635238265991211, 0.04637392044067383, 0.04600252914428711, 0.04631001663208008, 0.04648099136352539, 0.04620636749267578, 0.046492385864257815, 0.047624481201171874, 0.04640358352661133, 0.04907440185546875, 0.04619059371948242, 0.0454093132019043, 0.04544947052001953, 0.045601505279541016, 0.045629150390625, 0.04539420700073242, 0.04563558578491211, 0.04596950531005859, 0.04584233474731445, 0.04561305618286133, 0.04579942321777344, 0.045795326232910154, 0.04585472106933594, 0.04577062225341797, 0.04592380905151367, 0.04595779037475586, 0.045864959716796876, 0.0461431999206543, 
0.04607392120361328, 0.046080223083496095, 0.04616191864013672, 0.046481407165527344, 0.04616502380371094, 0.04610556793212891, 0.04600831985473633, 0.04604927825927734, 0.04584447860717773, 0.045674495697021485, 0.04597555160522461, 0.04612300872802735, 0.045932544708251956, 0.04595507049560547, 0.04632156753540039, 0.04603084945678711, 0.04580995178222656, 0.04600511932373047, 0.04632057571411133, 0.04620236968994141, 0.05368252944946289, 0.0453858871459961, 0.045788959503173826, 0.04608419036865234, 0.047185630798339845, 0.0467457275390625, 0.04661183929443359, 0.046349151611328125, 0.04642387390136719, 0.04591436767578125, 0.04627865600585938, 0.046182239532470706, 0.04614899063110352, 0.046277183532714844, 0.04658982467651367, 0.04651827239990235, 0.04622985458374024, 0.046491649627685545, 0.04641177749633789, 0.04621279907226562, 0.046808895111083985, 0.04669411087036133, 0.046246688842773435, 0.046145023345947264, 0.04850543975830078, 0.04600323104858398, 0.045709983825683594, 0.04560723114013672, 0.04550041580200195, 0.04542259216308594, 0.04567977523803711, 0.045953887939453125, 0.04592230224609375, 0.04589158248901367, 0.04575347137451172, 0.0456703987121582, 0.045797279357910156, 0.045747169494628905, 0.04624319839477539, 0.04597209548950195, 0.04610867309570312, 0.045872894287109375, 0.04603315353393555, 0.045956382751464846, 0.046508865356445314, 0.046892959594726565, 0.046358528137207033, 0.04583391952514648, 0.04598969650268555, 0.04617267227172851, 0.045879295349121094, 0.046083328247070315, 0.04652518463134766, 0.04623932647705078, 0.045975967407226564, 0.04597555160522461, 0.04612681579589844, 0.045959457397460934, 0.04617987060546875, 0.04641839981079102, 0.046399486541748046, 0.046230560302734376, 0.04623580932617188, 0.04614217758178711, 0.046336097717285155, 0.046567073822021486, 0.046614879608154296, 0.046431774139404296, 0.04627299118041992, 0.046419647216796874, 0.04644646453857422, 0.04598214340209961, 0.046516223907470705, 0.046499839782714845, 0.04628041458129883, 0.0463150405883789, 0.05188479995727539, 0.04571305465698242, 0.04636819076538086, 0.04635443115234375, 0.046519073486328125, 0.04638528060913086, 0.04653987121582031, 0.046615455627441404, 0.04645865631103516, 0.04684553527832031, 0.046613121032714845, 0.04818534469604492, 0.04624492645263672, 0.0456693115234375, 0.04551590347290039, 0.04536819076538086, 0.04576243209838867, 0.04550259017944336, 0.04558195114135742, 0.045682113647460935, 0.04582700729370117, 0.04604655838012695, 0.04581846237182617, 0.04579129409790039, 0.04571136093139649, 0.046004222869873046, 0.04582191848754883, 0.04615766525268555, 0.04613542556762695, 0.045959232330322265, 0.046239585876464845, 0.04652457427978516, 0.04637491226196289, 0.04633744049072266, 0.04647945785522461, 0.046184833526611326, 0.045987968444824216, 0.04603219223022461, 0.04607027053833008, 0.04582009506225586, 0.04614963150024414, 0.04621311950683594, 0.046276607513427735, 0.045866943359375, 0.046139488220214846, 0.04600393676757813, 0.04598313522338867, 0.045953887939453125, 0.046519905090332034, 0.046537120819091796, 0.04618585586547851, 0.04653731155395508, 0.04680502319335938, 0.04643840026855469, 0.0466756477355957, 0.04676025772094727, 0.04656742477416992, 0.04628396987915039, 0.047683582305908204, 0.04593747329711914, 0.04617599868774414, 0.04625958251953125, 0.04656729507446289, 0.046285118103027344, 0.04638175964355469, 0.04639852905273437, 0.04620956802368164, 0.04618406295776367, 0.04642076873779297, 0.04663881683349609, 0.0464664306640625, 
0.0463733139038086, 0.04677180862426758, 0.04661747360229492, 0.04902617645263672, 0.04616486358642578, 0.045690494537353514, 0.04564982223510742, 0.04561967849731445, 0.04573183822631836, 0.04566150283813476, 0.045464256286621096, 0.04576233673095703, 0.045883617401123046, 0.04585062408447266, 0.04582809448242187, 0.0457625617980957, 0.046302974700927736, 0.0464447021484375, 0.04615996932983398, 0.04606060791015625, 0.047233985900878905, 0.046069759368896485, 0.04611686325073242, 0.04649369430541992, 0.046540542602539064, 0.04652671813964844, 0.046150657653808595, 0.04593900680541992, 0.04613190460205078, 0.046225406646728515, 0.04591206359863281, 0.045956863403320315, 0.04620518493652344, 0.04619059371948242, 0.045890720367431644, 0.046147903442382815, 0.04631363296508789, 0.046219230651855465, 0.04617436981201172, 0.04652671813964844, 0.04626227188110352, 0.046284801483154295, 0.049309696197509766, 0.04605542373657227, 0.046007423400878905, 0.047815521240234374, 0.04632783889770508, 0.046266368865966793, 0.046378719329833985, 0.04641952133178711, 0.04609302520751953, 0.04627251052856445, 0.04637615966796875, 0.04648969650268555, 0.0461739501953125, 0.04657977676391602, 0.04652889633178711, 0.04643686294555664, 0.04636188888549805, 0.04652515029907227, 0.04649897766113281, 0.04681609725952148, 0.04698448181152344, 0.04694489669799805, 0.0486440315246582, 0.0464796142578125]",tokens/s,21.665214133538704,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File 
""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = 
nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code 
{isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,8807.899136,10152.18176,0.0,9749.659648,9714.717184,s,1,14.7033388671875,14.7033388671875,0.0,14.7033388671875,14.7033388671875,14.7033388671875,14.7033388671875,[14.7033388671875],,kWh,0.00021300866355418292,2.348912013070779e-05,6.296005036800223e-05,0.00029945783405289293,,MB,1883.185152,10580.000768,0.0,10162.798592,10037.824,s,10,4.624969512939454,0.46249695129394536,0.0008901268197805085,0.4622973480224609,0.4633814025878906,0.46371123962402344,0.4639751092529297,"[0.46327786254882813, 0.46115023803710936, 0.46257376098632813, 0.4632738647460938, 0.4618979797363281, 0.46404107666015626, 0.46330810546875, 0.46158438110351563, 0.46202093505859376, 0.46184130859375]",tokens/s,553.5171621862135,kWh,1.3503292427651903e-05,1.4891767433209699e-06,8.928075324272922e-06,2.3920544495245795e-05,tokens/kWh,10702097.523360305,MB,1892.073472,10791.81312,0.0,10374.610944,10299.609088,s,10,34.453828369140616,3.4453828369140624,0.0023691816873914993,3.445557373046875,3.448095703125,3.4484241943359377,3.4486869873046877,"[3.442867431640625, 3.4428173828125, 3.44151513671875, 3.448022705078125, 3.445501220703125, 3.448752685546875, 3.445613525390625, 3.447486328125, 3.447258056640625, 3.443993896484375]",tokens/s,18.285340985917088,kWh,0.00010048558879068236,1.1082811911940027e-05,6.691595757312707e-05,0.00017848435827574945,tokens/kWh,352972.10696002917,,s,630,34.450136367797896,0.054682756139361674,0.0003925622648369922,0.05466936111450195,0.05514907455444336,0.055284803199768064,0.055477677421569824,"[0.054749183654785157, 0.05431296157836914, 0.05388505554199219, 0.053888576507568356, 0.054233409881591796, 0.05413824081420898, 0.05423081588745117, 0.05438915252685547, 0.05436867141723633, 0.05407132720947266, 0.05438083267211914, 0.05449903869628906, 0.05411430358886719, 0.05410201644897461, 0.054513473510742184, 0.05451948928833008, 0.05456880187988281, 0.054680225372314456, 0.05420441436767578, 0.05443123245239258, 0.05442934417724609, 0.05425964736938477, 0.05416207885742187, 0.05410224151611328, 0.05443552017211914, 0.05452016067504883, 0.05458745574951172, 0.05467903900146484, 0.0544939193725586, 0.05451891326904297, 0.05433750534057617, 0.05458419036865234, 0.05657785415649414, 0.05419820785522461, 0.05464246368408203, 0.05472687911987305, 0.05466521453857422, 0.05460358428955078, 0.054644927978515626, 0.05478400039672852, 0.05490655899047851, 0.054704288482666015, 0.054618431091308595, 0.054709247589111325, 0.054817886352539064, 0.05484848022460938, 
0.05504489517211914, 0.054924766540527345, 0.054991390228271486, 0.05507683181762695, 0.054844863891601564, 0.05492780685424805, 0.05471459197998047, 0.05493727874755859, 0.05505580902099609, 0.055314369201660156, 0.055015617370605466, 0.055049121856689455, 0.05537897491455078, 0.05523699188232422, 0.05503014373779297, 0.054910911560058596, 0.055200000762939454, 0.054613311767578124, 0.05439350509643555, 0.05401513671875, 0.053922561645507815, 0.05393212890625, 0.054145023345947264, 0.05402550506591797, 0.0543034896850586, 0.05416547012329102, 0.05412825775146484, 0.054011455535888674, 0.05429724884033203, 0.05443161773681641, 0.05411590576171875, 0.05424361419677735, 0.054346176147460935, 0.05471846389770508, 0.054443649291992184, 0.0543768310546875, 0.054316574096679685, 0.05486217498779297, 0.05460902404785156, 0.054520832061767575, 0.05433718490600586, 0.05448668670654297, 0.05446844863891601, 0.05447897720336914, 0.054397472381591795, 0.054282176971435545, 0.054818401336669924, 0.054601600646972656, 0.05446057510375977, 0.05455116653442383, 0.05468982315063477, 0.05462217712402344, 0.05454000091552735, 0.05440335845947265, 0.054521663665771485, 0.05483129501342773, 0.055041057586669925, 0.05485385513305664, 0.05468646240234375, 0.054836414337158204, 0.055026302337646486, 0.05501142501831055, 0.05484553527832031, 0.05485772705078125, 0.05487206268310547, 0.055009311676025394, 0.054962207794189456, 0.05504819107055664, 0.05497644805908203, 0.05490483093261719, 0.05507276916503906, 0.05511103820800781, 0.055072830200195315, 0.055126590728759764, 0.05540454483032226, 0.055169025421142576, 0.055209598541259765, 0.05536175918579102, 0.05547622299194336, 0.05502377700805664, 0.05459699249267578, 0.05414771270751953, 0.0543001594543457, 0.054516223907470705, 0.05418960189819336, 0.05466892623901367, 0.054080352783203126, 0.054087711334228517, 0.054116321563720704, 0.05411401748657227, 0.054248832702636716, 0.054344032287597654, 0.05418656158447266, 0.054476062774658204, 0.054407806396484376, 0.05432876968383789, 0.05430953598022461, 0.05450320053100586, 0.05453859329223633, 0.05455449676513672, 0.05495507049560547, 0.05445523071289062, 0.055003135681152344, 0.054496513366699216, 0.054629375457763675, 0.054437633514404296, 0.0545689582824707, 0.05461196899414063, 0.05467939376831055, 0.05451696014404297, 0.05438518524169922, 0.054686111450195314, 0.05435801696777344, 0.05449932861328125, 0.05445663833618164, 0.054539966583251956, 0.05468364715576172, 0.054765312194824216, 0.054876415252685544, 0.05464057540893555, 0.05490697479248047, 0.05474505615234375, 0.05483327865600586, 0.054534015655517576, 0.05447484970092773, 0.05460163116455078, 0.054642913818359375, 0.05471619033813477, 0.054822784423828125, 0.05498659133911133, 0.05476367950439453, 0.05483113479614258, 0.05508940887451172, 0.055141822814941406, 0.05529232025146484, 0.05521820831298828, 0.05494355010986328, 0.05491062545776367, 0.05504000091552735, 0.0549711685180664, 0.05489823913574219, 0.05494607925415039, 0.05491011047363281, 0.057283935546875, 0.05477443313598633, 0.054075393676757816, 0.054236351013183595, 0.05424812698364258, 0.05440918350219726, 0.05400371170043945, 0.054061214447021486, 0.05418188858032227, 0.054347232818603514, 0.05409571075439453, 0.05402489471435547, 0.054295936584472654, 0.0543526725769043, 0.05417763137817383, 0.054617984771728516, 0.05466739273071289, 0.05446656036376953, 0.054502559661865235, 0.05450409698486328, 0.05433555221557617, 0.05427212905883789, 0.0546201286315918, 0.05486595153808594, 
0.05429411315917969, 0.054505630493164064, 0.05470793533325195, 0.05463004684448242, 0.05464688110351563, 0.05479443359375, 0.054698078155517575, 0.05446297454833984, 0.05466883087158203, 0.05474124908447266, 0.05504022216796875, 0.05496585464477539, 0.054761409759521484, 0.05498518371582031, 0.05477788925170898, 0.05503359985351562, 0.05504764938354492, 0.05489126586914062, 0.054781951904296876, 0.05494358444213867, 0.05505449676513672, 0.054824542999267575, 0.05476742553710937, 0.054889057159423826, 0.05497478485107422, 0.05482463836669922, 0.05493350219726562, 0.05500905609130859, 0.054970558166503904, 0.05506051254272461, 0.05497446441650391, 0.0551956787109375, 0.05497209548950195, 0.0552279052734375, 0.05520790481567383, 0.05480044937133789, 0.05479916763305664, 0.05506860733032227, 0.05531238555908203, 0.05454643249511719, 0.054106113433837894, 0.05406671905517578, 0.054061534881591794, 0.05392326354980469, 0.05418985748291016, 0.05429676818847656, 0.054273727416992185, 0.053916576385498044, 0.053960544586181644, 0.054053024291992186, 0.05420470428466797, 0.0540013427734375, 0.05424979019165039, 0.05419385528564453, 0.05423721694946289, 0.05448646545410156, 0.05432156753540039, 0.05447603225708008, 0.05455923080444336, 0.05464310455322265, 0.054814720153808595, 0.05439459228515625, 0.05439516830444336, 0.05471641540527344, 0.05465087890625, 0.054542335510253906, 0.05450137710571289, 0.05470159912109375, 0.054591552734375, 0.054626846313476564, 0.05461747360229492, 0.054636608123779296, 0.05457539367675781, 0.05484969711303711, 0.05496352005004883, 0.05480438232421875, 0.054774559020996094, 0.05500928115844726, 0.05493971252441406, 0.054879905700683594, 0.05487830352783203, 0.054972766876220706, 0.054941505432128904, 0.05476339340209961, 0.05527561569213867, 0.05533446502685547, 0.055060958862304686, 0.054908958435058594, 0.054941505432128904, 0.05491139221191406, 0.054869087219238284, 0.055149055480957034, 0.055312576293945315, 0.05531584167480469, 0.054941505432128904, 0.05529267120361328, 0.055377983093261716, 0.05506470489501953, 0.05525900650024414, 0.05544780731201172, 0.055154399871826174, 0.05512195205688476, 0.0547295036315918, 0.05420441436767578, 0.05415753555297852, 0.0540546875, 0.05419827270507813, 0.055907646179199216, 0.05405766296386719, 0.054356254577636716, 0.05448044967651367, 0.05402230453491211, 0.05420431900024414, 0.0543026237487793, 0.054262111663818356, 0.05421884918212891, 0.05460534286499023, 0.05430467224121094, 0.05450579071044922, 0.05475849533081055, 0.05470505523681641, 0.054429855346679684, 0.054497119903564456, 0.05452777481079102, 0.05469443130493164, 0.05452339172363281, 0.05464083099365234, 0.05452799987792969, 0.05446656036376953, 0.054370494842529295, 0.05469164657592773, 0.054740993499755856, 0.05446246337890625, 0.05483103942871094, 0.05471382522583008, 0.05463622283935547, 0.0548054084777832, 0.05465292739868164, 0.05487411117553711, 0.05502975845336914, 0.05492940902709961, 0.054669281005859376, 0.0558177261352539, 0.05519993591308594, 0.05488880157470703, 0.05501440048217773, 0.054795135498046876, 0.05479436874389648, 0.05476105499267578, 0.054906368255615234, 0.05499382400512695, 0.054956127166748046, 0.05477088165283203, 0.055012065887451174, 0.055045726776123044, 0.05514924621582031, 0.055004894256591795, 0.055166046142578126, 0.05525187301635742, 0.055109630584716796, 0.0550645751953125, 0.05504800033569336, 0.05534124755859375, 0.05525708770751953, 0.0552573127746582, 0.054730751037597655, 0.05428192138671875, 0.05421500778198242, 
0.05430636978149414, 0.054206878662109374, 0.05429862213134766, 0.054187904357910155, 0.05414857482910156, 0.05398931121826172, 0.05414985656738281, 0.054284286499023435, 0.054060161590576174, 0.054182785034179684, 0.054220096588134765, 0.054239936828613285, 0.05415043258666992, 0.05441974258422851, 0.054538177490234374, 0.054485343933105466, 0.0542251205444336, 0.054579135894775394, 0.05433699035644531, 0.05450601577758789, 0.0545054702758789, 0.05436620712280273, 0.05459276962280273, 0.05461840057373047, 0.05467951965332031, 0.05464640045166016, 0.05456780624389648, 0.05461382293701172, 0.05443193435668945, 0.05464188766479492, 0.05463119888305664, 0.054593505859375, 0.05460995101928711, 0.05483724975585937, 0.05511167907714844, 0.05491664123535156, 0.05500156784057617, 0.05474508666992187, 0.05482700729370117, 0.054921215057373046, 0.055225921630859376, 0.05512422561645508, 0.05482688140869141, 0.05498246383666992, 0.05506032180786133, 0.055026432037353516, 0.05496207809448242, 0.054945793151855465, 0.05506662368774414, 0.05505382537841797, 0.05539619064331055, 0.05514102554321289, 0.05510095977783203, 0.05499951934814453, 0.055109630584716796, 0.055748001098632816, 0.05507132720947266, 0.05531033706665039, 0.05533900833129883, 0.055173057556152344, 0.05503023910522461, 0.054284320831298825, 0.05409004974365234, 0.05400175857543945, 0.053912929534912106, 0.05420089721679688, 0.05442559814453125, 0.054115615844726565, 0.05429116821289062, 0.05409996795654297, 0.05401744079589844, 0.054096481323242185, 0.05430185699462891, 0.054203231811523436, 0.05456281661987305, 0.05459747314453125, 0.0542127685546875, 0.054617599487304686, 0.054710369110107425, 0.05477228927612305, 0.054576862335205076, 0.054624385833740234, 0.05457875061035156, 0.05450166320800781, 0.054605022430419925, 0.05460063934326172, 0.05472262573242188, 0.05476688003540039, 0.054651679992675783, 0.05485894393920898, 0.05462108612060547, 0.054632095336914065, 0.0553177261352539, 0.05450640106201172, 0.05446246337890625, 0.05484543991088867, 0.05499881744384766, 0.05479983901977539, 0.054687553405761716, 0.05481286239624023, 0.05515750503540039, 0.055053695678710934, 0.05494643020629883, 0.05488569641113281, 0.0546965103149414, 0.054967681884765626, 0.05502051162719727, 0.05510102462768555, 0.054917312622070315, 0.055184894561767575, 0.05512790298461914, 0.05488620758056641, 0.05501007843017578, 0.05508512115478516, 0.05502566528320312, 0.05512192153930664, 0.05503788757324219, 0.055220287322998045, 0.055261119842529294, 0.05539993667602539, 0.055261856079101564, 0.05498255920410156, 0.055054561614990234, 0.05475804901123047, 0.054111774444580076, 0.05411660766601562, 0.053843582153320316, 0.05441596984863281, 0.054386688232421876, 0.053954719543457035, 0.05414281463623047, 0.05444403076171875, 0.05424870300292969, 0.054156063079833984, 0.054452190399169924, 0.054787521362304685, 0.054478431701660154, 0.054219711303710935, 0.05414915084838867, 0.054468608856201174, 0.05462019348144531, 0.05465494537353516, 0.05450956726074219, 0.05429817581176758, 0.054583744049072264, 0.05438409423828125, 0.05447734451293945, 0.05464678573608398, 0.05457628631591797, 0.0545513916015625, 0.05447225570678711, 0.054544830322265626, 0.05454848098754883, 0.05497446441650391, 0.05496137619018555, 0.054629249572753905, 0.0545871696472168, 0.0545445442199707, 0.05491900634765625, 0.05475532913208008, 0.05466944122314453, 0.054840736389160157, 0.05495049667358398, 0.055045982360839844, 0.05493324661254883, 0.05511209487915039, 0.05490028762817383, 
0.05468403244018555, 0.054859840393066406, 0.0547039680480957, 0.055021183013916015, 0.055050785064697266, 0.055119873046875, 0.055209983825683595, 0.05504204940795898, 0.05505827331542969, 0.055132320404052734, 0.05501958465576172, 0.05534304046630859, 0.05527555084228516, 0.055311840057373045, 0.05537424087524414, 0.055500064849853516, 0.05541151809692383, 0.05484134292602539, 0.05514636611938477, 0.054790145874023435, 0.0542371826171875, 0.05392809677124023, 0.05418806457519531, 0.054259521484375, 0.054083553314208985, 0.054461952209472655, 0.05432579040527344, 0.05399347305297852, 0.05394841766357422, 0.05418582534790039, 0.05413820648193359, 0.054477630615234376, 0.05425971221923828, 0.054378017425537106, 0.05446031951904297, 0.05440166473388672, 0.05406611251831055, 0.05458227157592774, 0.05459299087524414, 0.054223167419433595, 0.05461948776245117, 0.05458217620849609, 0.0546283187866211, 0.05445632171630859, 0.05458313751220703, 0.05431926345825195, 0.05438470458984375, 0.054515392303466796, 0.05461382293701172, 0.054501823425292965, 0.05463619232177734, 0.05485388946533203, 0.05474675369262695, 0.05468617630004883, 0.054687744140625, 0.05475942230224609, 0.0548109130859375, 0.05472380828857422, 0.05477337646484375, 0.05486095809936523, 0.0551421127319336, 0.0547512321472168, 0.054724609375, 0.054884384155273434, 0.05476553726196289, 0.05494374465942383, 0.05489030456542969, 0.05512406539916992, 0.05534297561645508, 0.05515657424926758, 0.054935359954833986, 0.05482473754882813, 0.05521907043457031, 0.05526057434082031, 0.054827072143554687, 0.05487651062011719, 0.05522441482543945, 0.05513216018676758, 0.05488230514526367, 0.05521408081054688, 0.055478271484375, 0.05535065460205078]",tokens/s,18.28730061541615,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in 
enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File 
""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,4972.916736,7338.917888,0.0,6943.670272,6539.1744,s,1,11.5805625,11.5805625,0.0,11.5805625,11.5805625,11.5805625,11.5805625,[11.5805625],,kWh,0.00012860236552916527,1.4178698316460877e-05,3.937392038799725e-05,0.00018215498423362339,,MB,4882.82112,7634.61632,0.0,7224.68864,6917.39904,s,10,2.0647067413330076,0.20647067413330075,0.000531730646215658,0.20658805084228515,0.20704199523925781,0.20706782684326172,0.20708849212646485,"[0.20546876525878907, 0.20618710327148437, 0.2056812744140625, 0.20671177673339844, 0.2066510772705078, 0.20633375549316407, 0.2065250244140625, 0.20709365844726563, 0.2070362548828125, 0.20701805114746094]",tokens/s,1239.8855240561782,kWh,6.064707978571061e-06,6.688289937004971e-07,4.0114261116328494e-06,1.0744963083904409e-05,tokens/kWh,23825116.754796427,MB,4887.052288,7636.713472,0.0,7226.785792,6917.4016,s,10,21.867404052734372,2.1867404052734374,0.012244226575833619,2.1830804443359373,2.2044777587890625,2.2071846801757813,2.2093502172851562,"[2.192820556640625, 2.176710205078125, 2.203876220703125, 2.170734130859375, 2.184782470703125, 2.18137841796875, 2.2098916015625, 2.192957763671875, 2.1802666015625, 2.173986083984375]",tokens/s,28.810004080992993,kWh,6.411299401559425e-05,7.070647166017867e-06,4.264413899056652e-05,0.00011382778017217864,tokens/kWh,553467.7027409713,,s,630,21.86504935073855,0.034706427540854796,0.0005855559582517047,0.03455833435058594,0.035169828033447266,0.035670738410949705,0.03758546409606934,"[0.035383296966552735, 0.03546527862548828, 0.034865119934082034, 0.034340545654296874, 0.0344681282043457, 0.03508019256591797, 0.03487744140625, 0.03448604965209961, 0.034574558258056644, 0.03421184158325195, 0.034609153747558595, 0.034402305603027344, 0.0343900146484375, 0.03457206344604492, 0.03493500900268555, 0.03437564849853516, 0.03462047958374023, 0.03440534210205078, 0.03441401672363281, 0.03501523208618164, 0.03827711868286133, 0.03582566452026367, 0.03512115097045899, 0.03474163055419922, 0.034491008758544925, 0.03439759826660156, 0.03435708618164063, 0.03478598403930664, 0.03515558242797852, 0.035756481170654296, 0.03479347229003906, 0.03462870407104492, 0.03438431930541992, 0.03434134292602539, 0.034705406188964845, 0.03537052917480469, 0.03477689743041992, 0.03476505661010742, 0.034521503448486326, 0.037220352172851565, 0.03622873687744141, 0.03523827362060547, 
0.035151264190673825, 0.034589279174804685, 0.034840576171875, 0.03438796615600586, 0.034369537353515625, 0.03425404739379883, 0.0344359359741211, 0.03440428924560547, 0.0343521614074707, 0.03429475021362305, 0.03449446487426758, 0.034680831909179685, 0.034566368103027344, 0.0342606086730957, 0.03461955261230469, 0.03428707122802734, 0.03444380950927734, 0.034484222412109376, 0.03466854476928711, 0.03540486526489258, 0.03423235321044922, 0.03514518356323242, 0.034751007080078125, 0.034376895904541016, 0.034328895568847655, 0.034202110290527346, 0.03420159912109375, 0.034181121826171876, 0.03435724639892578, 0.03453952026367187, 0.034252799987792966, 0.03416985702514649, 0.03419558334350586, 0.0340975341796875, 0.03408127975463867, 0.03436646270751953, 0.034349086761474606, 0.034210784912109375, 0.03429296112060547, 0.034315040588378906, 0.0341130256652832, 0.034143936157226565, 0.03417782211303711, 0.0341341438293457, 0.03432172775268555, 0.034269790649414066, 0.03410259246826172, 0.03460156631469727, 0.034645824432373046, 0.03487772750854492, 0.034444480895996096, 0.034327136993408204, 0.035087711334228514, 0.034548095703125, 0.03440867233276367, 0.03445993423461914, 0.034351104736328124, 0.03418521499633789, 0.034289600372314454, 0.034439231872558596, 0.034326526641845705, 0.034327968597412106, 0.03429548645019531, 0.03474905776977539, 0.03491459274291992, 0.034969600677490234, 0.034523136138916014, 0.03429580688476563, 0.03424665451049805, 0.034178943634033204, 0.0342795524597168, 0.03480083084106445, 0.03489436721801758, 0.034826526641845705, 0.034590721130371094, 0.034614368438720705, 0.0349002571105957, 0.03524774551391602, 0.03514265441894531, 0.034912254333496096, 0.03476844787597656, 0.03858598327636719, 0.03489056015014649, 0.03483395385742188, 0.035860481262207033, 0.034970783233642576, 0.03497865676879883, 0.034830337524414064, 0.034852863311767575, 0.034842464447021486, 0.03505372619628906, 0.034774654388427736, 0.034885089874267576, 0.03499030303955078, 0.036324031829833986, 0.03523539352416992, 0.03790665435791016, 0.034979934692382815, 0.03522496032714844, 0.03500262451171875, 0.03609446334838867, 0.03530137634277344, 0.035108863830566404, 0.03628441619873047, 0.034846145629882815, 0.034873920440673827, 0.03482009506225586, 0.034590721130371094, 0.03460300827026367, 0.03469903945922852, 0.03451087951660156, 0.034609344482421874, 0.03575603103637695, 0.0371234245300293, 0.03498604965209961, 0.03466819381713867, 0.03481011199951172, 0.03500921630859375, 0.03504489517211914, 0.03557833480834961, 0.035133438110351564, 0.034857185363769534, 0.03485878372192383, 0.034977790832519534, 0.03533148956298828, 0.03495792007446289, 0.034971649169921876, 0.03507814407348633, 0.03519833755493164, 0.034982528686523434, 0.03481510543823242, 0.03504729461669922, 0.03437875366210937, 0.034307262420654294, 0.03418195343017578, 0.03435059356689453, 0.03434288024902344, 0.03417961502075195, 0.03421734237670898, 0.03421043014526367, 0.03466239929199219, 0.03456819152832031, 0.03448953628540039, 0.03433555221557617, 0.034309791564941405, 0.03424812698364258, 0.03462851333618164, 0.03585871887207031, 0.03474198532104492, 0.03442512130737305, 0.03425689697265625, 0.03439334487915039, 0.034706016540527344, 0.034422977447509766, 0.03537267303466797, 0.03451555252075195, 0.034283199310302735, 0.03457855987548828, 0.034547134399414064, 0.03437318420410156, 0.034390399932861325, 0.03418310546875, 0.03422681427001953, 0.034487648010253905, 0.03431046295166015, 0.03424895858764648, 0.03416796875, 
0.034243518829345704, 0.03425075149536133, 0.03499008178710938, 0.035166206359863283, 0.03484844970703125, 0.034462017059326173, 0.034342910766601564, 0.0343633918762207, 0.03454489517211914, 0.034530048370361326, 0.03426508712768555, 0.03424460983276367, 0.03423436737060547, 0.03437158584594727, 0.03433062362670898, 0.03426496124267578, 0.034307872772216794, 0.03436576080322266, 0.034359329223632815, 0.03426303863525391, 0.034275230407714845, 0.03422627258300781, 0.03446499252319336, 0.03455875015258789, 0.034264225006103516, 0.03445817565917969, 0.034414878845214845, 0.03435443115234375, 0.03443584060668945, 0.03439311981201172, 0.03420665740966797, 0.034514976501464845, 0.03445673751831055, 0.03426089477539063, 0.03437593460083008, 0.034673118591308595, 0.03444758224487305, 0.03443273544311523, 0.03468521499633789, 0.03430604934692383, 0.03439411163330078, 0.03429929733276367, 0.03437424087524414, 0.03569558334350586, 0.03451644897460938, 0.03454127883911133, 0.03460185623168945, 0.034391841888427734, 0.03443059158325195, 0.034431358337402344, 0.03421184158325195, 0.03422822570800781, 0.03460710525512695, 0.034369537353515625, 0.03435273742675781, 0.03429417419433594, 0.034536705017089844, 0.03442764663696289, 0.034770942687988284, 0.0347852783203125, 0.03446755218505859, 0.03488796615600586, 0.03423231887817383, 0.034557918548583984, 0.03499193572998047, 0.034810142517089845, 0.03436076736450195, 0.0343741455078125, 0.034293758392333985, 0.034525184631347655, 0.03470745468139649, 0.034668254852294925, 0.03450806427001953, 0.034411518096923825, 0.034631679534912106, 0.03495657730102539, 0.03825328063964844, 0.03508838272094727, 0.035054656982421876, 0.03502166366577148, 0.03472127914428711, 0.03472035217285156, 0.034772991180419925, 0.03480166244506836, 0.03455292892456055, 0.03433564758300781, 0.03418102264404297, 0.034428417205810545, 0.034474273681640626, 0.03449055862426758, 0.03452121734619141, 0.03449046325683594, 0.034385887145996094, 0.03457222366333008, 0.03500783920288086, 0.03731737518310547, 0.03470121765136719, 0.03450182342529297, 0.03433292770385742, 0.03451347351074219, 0.03483238220214844, 0.03473628616333008, 0.034369281768798825, 0.03430307388305664, 0.03420876693725586, 0.03427139282226563, 0.03526095962524414, 0.034813438415527344, 0.03427148818969727, 0.034119937896728514, 0.037425151824951174, 0.03510015869140625, 0.034859073638916015, 0.03441196823120117, 0.0342210578918457, 0.03433657455444336, 0.03422841644287109, 0.0346126708984375, 0.034230144500732425, 0.034143070220947265, 0.034312030792236325, 0.034179073333740234, 0.03425475311279297, 0.03425654220581055, 0.03444710540771485, 0.03420230484008789, 0.03418511962890625, 0.034993473052978515, 0.03419331359863281, 0.034382495880126956, 0.03446185684204101, 0.0343573112487793, 0.03478483200073242, 0.03506227111816406, 0.03484988784790039, 0.035019615173339846, 0.03506089782714844, 0.03451737594604492, 0.03441872024536133, 0.034319072723388674, 0.03426444625854492, 0.03526092910766602, 0.034631233215332034, 0.03420332717895508, 0.03447663879394531, 0.03438713455200195, 0.034433502197265625, 0.03515119934082031, 0.03472803115844727, 0.034501537322998044, 0.03450201416015625, 0.03431897735595703, 0.034576385498046876, 0.0354463996887207, 0.03474060821533203, 0.034516990661621096, 0.03443119812011719, 0.034244384765625, 0.03504127883911133, 0.0344535026550293, 0.03428966522216797, 0.03451903915405274, 0.03449174499511719, 0.03457011032104492, 0.03472054290771484, 0.034715648651123046, 0.034631679534912106, 
0.03510067367553711, 0.035460670471191405, 0.03617792129516602, 0.03544588851928711, 0.035445632934570315, 0.03527651214599609, 0.03516854476928711, 0.03502454376220703, 0.03522800064086914, 0.035053569793701174, 0.0350904312133789, 0.03529852676391602, 0.035643775939941405, 0.03508060836791992, 0.03479462432861328, 0.03515071868896484, 0.03520512008666992, 0.03471887969970703, 0.0346673583984375, 0.03489177703857422, 0.034767967224121094, 0.03474319839477539, 0.034988033294677735, 0.03450611114501953, 0.035928638458251956, 0.034592830657958984, 0.03826073455810547, 0.035218814849853515, 0.03497395324707031, 0.0348834228515625, 0.03549443054199219, 0.03580867385864258, 0.035465217590332034, 0.035076095581054685, 0.03491219329833985, 0.03510953521728516, 0.03474764633178711, 0.034599391937255856, 0.03511548614501953, 0.03511865615844727, 0.03622886276245117, 0.03478486251831055, 0.03472025680541992, 0.034793407440185546, 0.036690399169921874, 0.034783233642578126, 0.035119102478027346, 0.034598911285400394, 0.03458047866821289, 0.034729984283447264, 0.03481923294067383, 0.03510099029541015, 0.0346794548034668, 0.03464944076538086, 0.03460051345825195, 0.0350579833984375, 0.03479420852661133, 0.034616737365722655, 0.03467932891845703, 0.03500611114501953, 0.03433232116699219, 0.03457913589477539, 0.034662208557128905, 0.0347421760559082, 0.03463372802734375, 0.03555049514770508, 0.0348436164855957, 0.034705310821533206, 0.03475667190551758, 0.034533409118652346, 0.03448831939697266, 0.03509455871582031, 0.0346333122253418, 0.03463161468505859, 0.03461103820800781, 0.034553886413574215, 0.034652225494384764, 0.03765094375610351, 0.03582361602783203, 0.035243297576904295, 0.034902751922607424, 0.03481190490722656, 0.03511705780029297, 0.034598911285400394, 0.03457024002075195, 0.03466239929199219, 0.0345272331237793, 0.03461939239501953, 0.03454742431640625, 0.03454611206054688, 0.03457443237304687, 0.03480678558349609, 0.03468767929077148, 0.034695232391357425, 0.035844097137451174, 0.034871295928955076, 0.034732032775878906, 0.034789215087890624, 0.03477231979370117, 0.03518137741088867, 0.035127296447753906, 0.03534963226318359, 0.03474956893920898, 0.03492633438110351, 0.03499327850341797, 0.03481484985351563, 0.035098785400390624, 0.03511689758300781, 0.035091552734375, 0.034753406524658206, 0.03512527847290039, 0.034523136138916014, 0.034484222412109376, 0.03450265502929688, 0.034340801239013674, 0.03436345672607422, 0.0342724494934082, 0.03434364700317383, 0.03437577438354492, 0.03443283081054688, 0.03461344146728516, 0.034514846801757815, 0.0344351692199707, 0.03446988677978516, 0.03461478424072266, 0.034531455993652344, 0.034523521423339844, 0.03461686325073242, 0.035692798614501954, 0.03473372650146484, 0.034605087280273436, 0.03464681625366211, 0.034457599639892575, 0.03476684951782227, 0.034952865600585935, 0.03733462524414063, 0.03452620697021484, 0.03428326416015625, 0.034223583221435545, 0.03419395065307617, 0.03425791931152344, 0.0342083511352539, 0.034216190338134764, 0.03414236831665039, 0.03446579360961914, 0.03442486572265625, 0.034203262329101564, 0.03424905776977539, 0.034917377471923826, 0.03472710418701172, 0.03467977523803711, 0.03486403274536133, 0.03462044906616211, 0.03430249786376953, 0.0341712646484375, 0.034260990142822266, 0.034723838806152346, 0.034602718353271486, 0.034471424102783206, 0.034377761840820316, 0.034406558990478515, 0.03420537567138672, 0.034331390380859375, 0.034216094970703125, 0.03435724639892578, 0.03419443130493164, 0.03419647979736328, 
0.03409100723266602, 0.034541568756103515, 0.03470876693725586, 0.03433567810058594, 0.03512092971801758, 0.035901439666748046, 0.03781017684936523, 0.03492643356323242, 0.034694366455078125, 0.03455481719970703, 0.034385921478271485, 0.03443241500854492, 0.03452959823608399, 0.034936481475830075, 0.034527870178222654, 0.034293758392333985, 0.03423027038574219, 0.03423385620117188, 0.034386238098144534, 0.03443471908569336, 0.03435996627807617, 0.03460287857055664, 0.034431262969970705, 0.034362529754638674, 0.035893310546875, 0.034924831390380856, 0.03462732696533203, 0.03475900650024414, 0.03450028610229492, 0.034411903381347655, 0.0344073600769043, 0.03512704086303711, 0.03439231872558594, 0.03427315139770508, 0.03467891311645508, 0.03491420745849609, 0.03474147033691406, 0.03458038330078125, 0.03452387237548828, 0.034383262634277344, 0.03433087921142578, 0.03433942413330078, 0.03460095977783203, 0.034582527160644534, 0.03457228851318359, 0.034617439270019534, 0.03452099227905273, 0.03501049423217773, 0.0344532470703125, 0.03424310302734375, 0.034367488861083983, 0.03439139175415039, 0.03419180679321289, 0.034362846374511716, 0.03429840087890625, 0.03424870300292969, 0.03447795104980469, 0.034479969024658205, 0.034483486175537106, 0.03453440093994141, 0.03418684768676758, 0.034191776275634765, 0.03423846435546875, 0.03419955062866211, 0.03426444625854492, 0.03470195388793945, 0.03451023864746094, 0.034802112579345706, 0.034436927795410154, 0.03456195068359375, 0.03440806579589844, 0.034502494812011716, 0.0343087043762207, 0.03430822372436523, 0.034234622955322265, 0.03435520172119141, 0.0347852783203125, 0.034290687561035156, 0.03451916885375977, 0.03442777633666992, 0.034385921478271485, 0.034371200561523436, 0.034649856567382814, 0.03453152084350586, 0.03454572677612305, 0.034378047943115234, 0.034401599884033206]",tokens/s,28.8131067025797,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", 
line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL 
CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,11138.854912,12227.3792,0.0,11848.9088,11814.752256,s,1,16.331890625,16.331890625,0.0,16.331890625,16.331890625,16.331890625,16.331890625,[16.331890625],,kWh,0.000269950213837501,2.977020798288645e-05,8.404562279201722e-05,0.0003837660446124047,,MB,2093.58848,14033.027072,0.0,13625.196544,13298.00192,s,10,23.36663818359375,2.336663818359375,0.0021511089569970668,2.337625244140625,2.338669384765625,2.3388350585937503,2.33896759765625,"[2.338632568359375, 2.338152099609375, 2.339000732421875, 2.338243408203125, 2.337871337890625, 2.33632177734375, 2.337379150390625, 2.3351865234375, 2.33296484375, 2.3328857421875]",tokens/s,109.55790815460284,kWh,6.795378911708592e-05,7.494725000763435e-06,4.506350827299943e-05,0.00012051202239084878,tokens/kWh,2124269.387578045,MB,2099.965952,14184.022016,0.0,13776.191488,13689.859584,s,10,1365.8407656250001,136.5840765625,0.14099416845937957,136.63813281249998,136.729778125,136.73538906250002,136.73987781250003,"[136.741, 136.71475, 136.72853125, 136.681078125, 136.67503125, 136.601234375, 136.51703125, 136.45, 136.38490625, 136.347203125]",tokens/s,0.46125435398885317,kWh,0.0039742203255445805,0.00043838653352902454,0.0026434413647514004,0.007056048223825005,tokens/kWh,8928.510407182053,,s,630,1365.835356933595,2.167992630053323,0.00245803797327439,2.168363525390625,2.170982055664062,2.1715647216796876,2.172534157714844,"[2.169362548828125, 2.169524169921875, 2.169290283203125, 2.169293212890625, 2.17150390625, 2.17208203125, 2.170980224609375, 2.17135791015625, 2.16968408203125, 2.171004150390625, 2.171822998046875, 2.1700087890625, 2.169798583984375, 2.170569580078125, 2.1727841796875, 2.170962158203125, 2.171938720703125, 2.170955810546875, 2.1710234375, 2.17062109375, 2.170538818359375, 2.170964111328125, 2.17099853515625, 2.170173828125, 2.16871630859375, 2.170280517578125, 2.170905029296875, 2.170300537109375, 2.169556884765625, 2.1686904296875, 2.17009716796875, 2.168494873046875, 2.171104736328125, 2.1698603515625, 2.170408447265625, 2.169908203125, 2.169794677734375, 2.17245068359375, 2.171545654296875, 2.17277783203125, 2.17004931640625, 2.170314697265625, 2.170281494140625, 2.1694814453125, 2.170204345703125, 2.16982763671875, 2.17168896484375, 2.169891845703125, 2.171984375, 2.170481201171875, 2.170078857421875, 2.17049658203125, 2.17175439453125, 2.170583984375, 2.17089013671875, 2.168748046875, 2.1698232421875, 2.170303955078125, 2.170503662109375, 2.170760498046875, 2.169337646484375, 2.17056982421875, 2.170290283203125, 2.16931982421875, 2.1692529296875, 2.1683291015625, 2.17041455078125, 2.1696865234375, 2.16855517578125, 2.169948486328125, 2.168610107421875, 2.167562255859375, 2.168133544921875, 2.1682509765625, 2.170392333984375, 2.169739501953125, 2.167556396484375, 2.168411865234375, 2.168475341796875, 2.168783447265625, 2.16992578125, 2.170560546875, 2.169288818359375, 2.16925390625, 2.167908203125, 2.17023095703125, 2.16987451171875, 2.1698251953125, 2.1699169921875, 2.17007080078125, 2.168732421875, 2.17103369140625, 2.170037841796875, 2.1694931640625, 2.1693408203125, 2.16977001953125, 2.16997216796875, 2.171922607421875, 2.16950390625, 2.169200439453125, 2.170351806640625, 2.170050537109375, 2.17153125, 2.17074072265625, 2.169933349609375, 2.1697294921875, 2.16966552734375, 2.17122216796875, 2.170271484375, 2.171137939453125, 2.170353515625, 
2.1701796875, 2.17076318359375, 2.172542724609375, 2.16906591796875, 2.170171142578125, 2.17135302734375, 2.17271728515625, 2.17183642578125, 2.171159912109375, 2.17287255859375, 2.17251318359375, 2.171107421875, 2.17227880859375, 2.172880615234375, 2.170507080078125, 2.17068896484375, 2.17104150390625, 2.17034228515625, 2.171580322265625, 2.171598876953125, 2.1708017578125, 2.17172216796875, 2.1705146484375, 2.16976220703125, 2.168747802734375, 2.168450927734375, 2.1688330078125, 2.1695283203125, 2.16977197265625, 2.169739013671875, 2.170224853515625, 2.170828857421875, 2.169964599609375, 2.17046435546875, 2.16966552734375, 2.171493896484375, 2.1710830078125, 2.1718798828125, 2.169415771484375, 2.16923876953125, 2.169569580078125, 2.169186767578125, 2.169117919921875, 2.170444580078125, 2.170138671875, 2.171592529296875, 2.1686962890625, 2.169596435546875, 2.168596435546875, 2.168890625, 2.17133349609375, 2.169258056640625, 2.169445556640625, 2.17002001953125, 2.169176513671875, 2.171047607421875, 2.1707880859375, 2.16980419921875, 2.17070458984375, 2.171602294921875, 2.171460205078125, 2.171439208984375, 2.171408447265625, 2.1716806640625, 2.17069482421875, 2.170970947265625, 2.1697451171875, 2.1705810546875, 2.168915283203125, 2.169745849609375, 2.16980859375, 2.17103662109375, 2.1712587890625, 2.171217041015625, 2.16958056640625, 2.1689609375, 2.17033935546875, 2.172760009765625, 2.16794189453125, 2.168345947265625, 2.168537109375, 2.169506103515625, 2.16912109375, 2.16852880859375, 2.169100341796875, 2.16913037109375, 2.1684619140625, 2.168895263671875, 2.169536865234375, 2.16985400390625, 2.170060791015625, 2.16954248046875, 2.170759521484375, 2.169196533203125, 2.1683466796875, 2.1684892578125, 2.1680517578125, 2.169093017578125, 2.16945263671875, 2.16905859375, 2.170333984375, 2.168133544921875, 2.168465087890625, 2.1704296875, 2.170662841796875, 2.169109619140625, 2.169588623046875, 2.167689208984375, 2.168756103515625, 2.169784423828125, 2.16871533203125, 2.169174072265625, 2.169288818359375, 2.169388427734375, 2.171112060546875, 2.1708349609375, 2.16998095703125, 2.169499755859375, 2.170353759765625, 2.17191015625, 2.1708798828125, 2.171993896484375, 2.17151513671875, 2.17060546875, 2.168620849609375, 2.169801025390625, 2.170787841796875, 2.169780029296875, 2.16985986328125, 2.169104736328125, 2.1704267578125, 2.169712890625, 2.170366455078125, 2.16974951171875, 2.16888330078125, 2.169206787109375, 2.170090576171875, 2.168535888671875, 2.169146484375, 2.169555908203125, 2.169720947265625, 2.168891357421875, 2.16675927734375, 2.16760546875, 2.16756640625, 2.168850341796875, 2.168493408203125, 2.168814208984375, 2.168524658203125, 2.168668212890625, 2.167869140625, 2.169588134765625, 2.167732177734375, 2.167242431640625, 2.168169921875, 2.16750537109375, 2.167734619140625, 2.169392333984375, 2.1687666015625, 2.16872802734375, 2.167601318359375, 2.16911669921875, 2.1690546875, 2.16907958984375, 2.16874853515625, 2.167593017578125, 2.169420166015625, 2.16717626953125, 2.168871337890625, 2.16750537109375, 2.168604736328125, 2.16865966796875, 2.1699580078125, 2.169855712890625, 2.17080419921875, 2.16951708984375, 2.169409423828125, 2.169374755859375, 2.17058935546875, 2.1702998046875, 2.17072021484375, 2.1687197265625, 2.170218505859375, 2.168195068359375, 2.170111572265625, 2.170776123046875, 2.170766845703125, 2.170847900390625, 2.171146240234375, 2.170884033203125, 2.17183544921875, 2.171052978515625, 2.168711181640625, 2.17054833984375, 2.169819091796875, 2.170898193359375, 
2.1709560546875, 2.17090869140625, 2.17165576171875, 2.17193115234375, 2.170635498046875, 2.17216796875, 2.171198486328125, 2.171590576171875, 2.166392822265625, 2.1678125, 2.167912353515625, 2.16743115234375, 2.167509033203125, 2.16749072265625, 2.168342529296875, 2.167932373046875, 2.167357666015625, 2.16689013671875, 2.166894775390625, 2.1672587890625, 2.16954345703125, 2.168363037109375, 2.166921142578125, 2.167406005859375, 2.167665283203125, 2.16841015625, 2.168255859375, 2.16693408203125, 2.16949755859375, 2.167058349609375, 2.1681904296875, 2.16927294921875, 2.168059814453125, 2.167815673828125, 2.168355224609375, 2.168860595703125, 2.1682236328125, 2.169315185546875, 2.16764208984375, 2.167444580078125, 2.168672119140625, 2.1676962890625, 2.171187255859375, 2.1680087890625, 2.1676337890625, 2.167380126953125, 2.1674638671875, 2.168512451171875, 2.167825927734375, 2.16818115234375, 2.167401611328125, 2.1679296875, 2.168057373046875, 2.169559326171875, 2.16978173828125, 2.168203857421875, 2.168690673828125, 2.167989501953125, 2.170017578125, 2.16817041015625, 2.168364013671875, 2.168296875, 2.16830615234375, 2.16907568359375, 2.169552978515625, 2.168143798828125, 2.169496826171875, 2.168421142578125, 2.169944091796875, 2.170005615234375, 2.170220458984375, 2.170427490234375, 2.16810498046875, 2.16730615234375, 2.167529541015625, 2.16633349609375, 2.16644921875, 2.166615966796875, 2.16589404296875, 2.16842626953125, 2.166503173828125, 2.165834228515625, 2.166220703125, 2.16660986328125, 2.16643994140625, 2.168268798828125, 2.1664296875, 2.16646240234375, 2.166199951171875, 2.167081298828125, 2.167373779296875, 2.165665771484375, 2.16609375, 2.1672197265625, 2.16670458984375, 2.16618798828125, 2.166564208984375, 2.1669462890625, 2.16717919921875, 2.167109619140625, 2.165676025390625, 2.16823291015625, 2.166917724609375, 2.16533154296875, 2.165361572265625, 2.16673486328125, 2.167796875, 2.167021728515625, 2.16681884765625, 2.16614794921875, 2.167887939453125, 2.1662373046875, 2.168397216796875, 2.16859716796875, 2.166555908203125, 2.167212646484375, 2.168458251953125, 2.167673828125, 2.166921142578125, 2.16628759765625, 2.166884765625, 2.167589111328125, 2.166208740234375, 2.16766796875, 2.16665185546875, 2.166697021484375, 2.1662607421875, 2.16687548828125, 2.16793505859375, 2.16679248046875, 2.1663173828125, 2.1664091796875, 2.166824951171875, 2.166919189453125, 2.165501953125, 2.1651494140625, 2.16407666015625, 2.164482177734375, 2.167457763671875, 2.165520263671875, 2.166302734375, 2.165307373046875, 2.165833740234375, 2.166099609375, 2.166630859375, 2.1645966796875, 2.16576416015625, 2.16566796875, 2.165785888671875, 2.1655, 2.165670654296875, 2.165544921875, 2.1652744140625, 2.16552783203125, 2.16616552734375, 2.16677685546875, 2.166128662109375, 2.165551025390625, 2.16499609375, 2.16361083984375, 2.167022216796875, 2.1652890625, 2.165182373046875, 2.165781494140625, 2.164755126953125, 2.1667412109375, 2.165505615234375, 2.166676025390625, 2.1669765625, 2.16542626953125, 2.166435791015625, 2.16665087890625, 2.16688037109375, 2.166635986328125, 2.165787109375, 2.166326904296875, 2.167841064453125, 2.16712109375, 2.165900146484375, 2.166140869140625, 2.165682373046875, 2.16465771484375, 2.165961181640625, 2.16589306640625, 2.167142333984375, 2.1649755859375, 2.16568212890625, 2.16656884765625, 2.167205810546875, 2.165231689453125, 2.166363525390625, 2.16429736328125, 2.16599853515625, 2.16683251953125, 2.16555908203125, 2.1658466796875, 2.16557763671875, 2.164621337890625, 
2.164137451171875, 2.164543212890625, 2.164142333984375, 2.16470361328125, 2.165547119140625, 2.16657275390625, 2.1632021484375, 2.16575390625, 2.16450244140625, 2.163293212890625, 2.16456298828125, 2.164375244140625, 2.163614013671875, 2.163464111328125, 2.164674560546875, 2.164279296875, 2.16469287109375, 2.16445947265625, 2.164264892578125, 2.1646865234375, 2.16453759765625, 2.166032470703125, 2.163771240234375, 2.16451806640625, 2.163108642578125, 2.1642138671875, 2.1642236328125, 2.16492626953125, 2.165387451171875, 2.163768798828125, 2.164283203125, 2.164267822265625, 2.166134765625, 2.165104736328125, 2.1657763671875, 2.16464990234375, 2.165478515625, 2.16481005859375, 2.16541455078125, 2.165032470703125, 2.165539306640625, 2.164387939453125, 2.164507568359375, 2.166477783203125, 2.1652705078125, 2.16556298828125, 2.165947998046875, 2.165300048828125, 2.16605224609375, 2.165786865234375, 2.16467822265625, 2.16627490234375, 2.164760498046875, 2.16550927734375, 2.16473388671875, 2.164972412109375, 2.1643857421875, 2.164823486328125, 2.16431396484375, 2.165788818359375, 2.165119873046875, 2.16464794921875, 2.164505126953125, 2.163439208984375, 2.16489013671875, 2.16233544921875, 2.16322314453125, 2.16285791015625, 2.163093505859375, 2.16430126953125, 2.163749267578125, 2.164066162109375, 2.162819091796875, 2.163954833984375, 2.163837890625, 2.165333984375, 2.16505712890625, 2.162974365234375, 2.165384033203125, 2.163882080078125, 2.164766357421875, 2.16448193359375, 2.163464599609375, 2.163142578125, 2.163967041015625, 2.16417578125, 2.165642333984375, 2.163696533203125, 2.162642822265625, 2.162888671875, 2.16344580078125, 2.165002197265625, 2.164923828125, 2.16365234375, 2.16443359375, 2.163681396484375, 2.1636767578125, 2.1659365234375, 2.1631240234375, 2.1641806640625, 2.165008544921875, 2.16462744140625, 2.165252197265625, 2.165598388671875, 2.1647626953125, 2.164516845703125, 2.16500439453125, 2.164822021484375, 2.166564208984375, 2.165697021484375, 2.163859375, 2.163810302734375, 2.16506494140625, 2.164789794921875, 2.16618408203125, 2.16356201171875, 2.163326416015625, 2.16323486328125, 2.164265869140625, 2.164746337890625, 2.16414208984375, 2.163759033203125, 2.162353515625, 2.165184814453125, 2.1658955078125]",tokens/s,0.4612561805504866,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,3550.654464,4495.179776,0.0,4116.709376,3980.386816,s,1,9.758755859375,9.758755859375,0.0,9.758755859375,9.758755859375,9.758755859375,9.758755859375,[9.758755859375],,kWh,8.898960568749884e-05,9.809050610969499e-06,2.695057711600015e-05,0.00012574923341446848,,MB,3549.011968,4826.529792,0.0,4418.699264,4245.89568,s,10,6.605035339355468,0.6605035339355468,0.0006230609261115123,0.6603740234375,0.6612575622558594,0.661468197631836,0.6616367059326171,"[0.6597408447265625, 0.6600413818359375, 0.6596790771484375, 0.660224853515625, 0.6602737426757812, 0.6604743041992187, 0.6611260375976562, 0.6612107543945313, 0.6616788330078125, 0.6605855102539062]",tokens/s,387.5830890330724,kWh,1.9281606503906238e-05,2.1255516435658325e-06,1.287422904937502e-05,3.428138719684709e-05,tokens/kWh,7467609.1294095805,MB,3556.94592,4837.015552,0.0,4429.185024,4245.89824,s,10,385.3308124999999,38.53308125,0.0249248607635426,38.5474375,38.55015078125,38.55081171875,38.55134046875,"[38.47213671875, 38.50425390625, 38.5239609375, 38.53500390625, 38.54686328125, 38.55147265625, 38.54991015625, 38.54801171875, 38.55000390625, 38.5491953125]",tokens/s,1.63495879271269,kWh,0.0011238995098256774,0.0001239736496154392,0.0007469280732364251,0.0019948012326775418,tokens/kWh,31582.093979076613,,s,630,385.32706719970645,0.6116302653963603,0.0005920977871824857,0.6116809997558594,0.6123485229492188,0.6124803436279297,0.6127918884277344,"[0.6100867309570313, 0.610197021484375, 0.6103533325195313, 0.6096705932617188, 0.6108343505859375, 0.6095738525390625, 0.6110203857421875, 0.6089094848632812, 0.6107077026367187, 0.610044921875, 0.609010009765625, 0.6115314331054688, 0.60957080078125, 0.6103983154296875, 0.6107708740234375, 0.610209716796875, 0.6111192016601562, 
0.6099435424804688, 0.610407470703125, 0.6109091186523438, 0.6098042602539062, 0.6107578125, 0.6106380004882812, 0.6103272094726563, 0.6102745361328125, 0.6112899169921875, 0.6105045166015625, 0.610654296875, 0.6111459350585937, 0.6102752075195312, 0.610801513671875, 0.6112227783203125, 0.6105524291992187, 0.61095556640625, 0.6106604614257812, 0.611110107421875, 0.610850830078125, 0.6102777709960937, 0.6112688598632813, 0.6107890014648437, 0.610859375, 0.6106542358398438, 0.610566162109375, 0.6109616088867188, 0.6111590576171875, 0.610697998046875, 0.6113421630859375, 0.6108383178710938, 0.6102860107421875, 0.6111743774414062, 0.6108426513671875, 0.6111047973632813, 0.610873046875, 0.6104456176757812, 0.6114224243164063, 0.6110021362304687, 0.6109299926757813, 0.61133447265625, 0.61034521484375, 0.6125866088867188, 0.6106936645507812, 0.6112772827148437, 0.6109339599609375, 0.6115963134765625, 0.61043017578125, 0.6112429809570312, 0.6107515258789062, 0.61059912109375, 0.6110541381835938, 0.6098611450195313, 0.611631591796875, 0.6110841674804688, 0.610819580078125, 0.61090234375, 0.6101548461914063, 0.6113565673828125, 0.6110004272460937, 0.6110596923828125, 0.6109173583984375, 0.6111201171875, 0.6114240112304687, 0.6107047729492188, 0.6108209228515625, 0.6112788696289062, 0.6108262329101563, 0.6119588012695313, 0.61079345703125, 0.61098388671875, 0.6112564086914063, 0.6110984497070312, 0.61134423828125, 0.6106360473632813, 0.6111801147460938, 0.6110778198242187, 0.6114282836914062, 0.6116187744140625, 0.6108777465820312, 0.6109251098632813, 0.6113710327148437, 0.61138330078125, 0.6113792114257812, 0.6106234741210937, 0.611322998046875, 0.6113616943359375, 0.61153076171875, 0.6110494995117187, 0.6110392456054687, 0.6116618041992188, 0.6112166748046876, 0.6115838012695313, 0.6112612915039063, 0.6110740356445312, 0.6114365234375, 0.611292724609375, 0.6114259643554687, 0.6112550659179687, 0.6111948852539062, 0.6115687255859374, 0.61121630859375, 0.6115549926757813, 0.6113662719726562, 0.6114887084960937, 0.6114703979492188, 0.6114799194335937, 0.6118580932617188, 0.61162548828125, 0.6115798950195312, 0.611135498046875, 0.6108765258789063, 0.6112225952148438, 0.6110739135742187, 0.611142822265625, 0.6112203369140625, 0.610957275390625, 0.6111561279296875, 0.6113258056640625, 0.611061767578125, 0.6114877319335937, 0.6107731323242187, 0.6111189575195313, 0.6112454833984375, 0.61106640625, 0.6115083618164062, 0.611123046875, 0.6112849731445312, 0.6114754638671875, 0.6112337646484375, 0.6113956298828125, 0.6112267456054687, 0.611590087890625, 0.611854248046875, 0.6115601806640625, 0.6119142456054687, 0.6109790649414063, 0.6119214477539062, 0.612299560546875, 0.611412109375, 0.6113074340820313, 0.611765869140625, 0.6118466186523438, 0.611181884765625, 0.611301513671875, 0.6116542358398438, 0.6109094848632812, 0.6116719360351562, 0.6122107543945312, 0.611000244140625, 0.611877685546875, 0.6112620239257812, 0.61207763671875, 0.6121326293945313, 0.6109374389648438, 0.6120220947265625, 0.6113857421875, 0.6120875244140624, 0.6114581909179687, 0.6120641479492187, 0.6115491943359375, 0.6118441162109375, 0.6118250732421875, 0.611924560546875, 0.6112235717773438, 0.6115874633789062, 0.6117986450195313, 0.6119608154296875, 0.611535400390625, 0.6116593017578125, 0.6118470458984375, 0.6114503173828125, 0.612738037109375, 0.6118358764648437, 0.6109839477539063, 0.6119075927734375, 0.611090576171875, 0.6118807983398438, 0.611280029296875, 0.6113063354492188, 0.6113076782226563, 0.6112477416992188, 
0.6117296142578125, 0.6115123291015625, 0.6111682739257812, 0.611694580078125, 0.6109653930664063, 0.6115628662109375, 0.6117786254882812, 0.6112447509765625, 0.611982666015625, 0.611343017578125, 0.6116022338867187, 0.6114140014648437, 0.6116100463867188, 0.6115374145507813, 0.6111929931640625, 0.61177880859375, 0.6119177856445313, 0.611346435546875, 0.6115819702148437, 0.6109470825195312, 0.6124107666015625, 0.6109736938476562, 0.6119307250976562, 0.6117291870117187, 0.6115064086914063, 0.61217578125, 0.6114890747070313, 0.61170166015625, 0.6121551513671875, 0.6119916381835937, 0.6127471923828125, 0.6110349731445313, 0.6121996459960938, 0.6112449951171876, 0.6121649169921874, 0.612045166015625, 0.6112271118164062, 0.6116300048828125, 0.6117802734375, 0.6122683715820313, 0.6113378295898437, 0.6117656860351562, 0.6114736328125, 0.611730224609375, 0.6118562622070313, 0.611344482421875, 0.6121122436523437, 0.6114164428710938, 0.6118440551757812, 0.6117517700195313, 0.6120194091796874, 0.6115458374023437, 0.6125361938476562, 0.612640380859375, 0.6116910400390625, 0.6108280029296875, 0.6117782592773438, 0.6113224487304687, 0.612083740234375, 0.6109819946289062, 0.6113761596679688, 0.6123519897460937, 0.6109807739257812, 0.6120018920898438, 0.61100634765625, 0.6117019653320312, 0.6111486206054687, 0.611885009765625, 0.6125772705078125, 0.6108948364257812, 0.6113424682617188, 0.61180810546875, 0.6117007446289062, 0.6113034057617187, 0.6120345458984375, 0.6122250366210937, 0.611577880859375, 0.6113272705078125, 0.6122667236328125, 0.6115052490234375, 0.6119617309570312, 0.6121383056640625, 0.6112713012695312, 0.6125997924804687, 0.6112788696289062, 0.612013671875, 0.6117134399414063, 0.6122352905273437, 0.6120238647460937, 0.6116519775390625, 0.6117332763671876, 0.6118280029296875, 0.6121653442382813, 0.6120798950195312, 0.6116488647460937, 0.6122659301757812, 0.6116287841796875, 0.6124349365234375, 0.6120017700195313, 0.61205908203125, 0.6118953247070312, 0.6121962280273437, 0.6119605712890624, 0.6128274536132813, 0.6122230834960938, 0.6122086181640625, 0.6117847290039062, 0.6124800415039062, 0.6116911010742188, 0.6117849731445313, 0.6125952758789063, 0.6111460571289062, 0.6127905883789062, 0.6115914306640625, 0.6121683349609375, 0.61209716796875, 0.61191796875, 0.612640625, 0.6110637817382812, 0.6115594482421876, 0.612121826171875, 0.6112201538085937, 0.6117783203125, 0.6113529663085937, 0.611819091796875, 0.6113018798828125, 0.6122822265625, 0.6118338623046875, 0.61203662109375, 0.61149951171875, 0.6117914428710938, 0.611531982421875, 0.6117403564453125, 0.6119852294921875, 0.6121980590820313, 0.6109639892578125, 0.6124329833984375, 0.6110625, 0.6117787475585937, 0.611724365234375, 0.6115828857421876, 0.612011962890625, 0.6118659057617187, 0.6120718383789062, 0.6121231689453125, 0.6124175415039063, 0.6121798095703125, 0.6114788818359375, 0.61227294921875, 0.6122002563476563, 0.6121082763671875, 0.6117191772460937, 0.6123735961914063, 0.611915771484375, 0.6119144897460937, 0.6118585815429688, 0.6122119140625, 0.6117993774414062, 0.6126126098632813, 0.6121094360351562, 0.61227099609375, 0.6115380249023438, 0.6126572875976563, 0.6113551025390624, 0.61220458984375, 0.6124115600585938, 0.6120941162109375, 0.6118339233398438, 0.6122511596679687, 0.6117440795898438, 0.6119541015625, 0.6115374145507813, 0.6120458984375, 0.6121747436523437, 0.612713623046875, 0.6117813110351562, 0.6124095458984375, 0.6112662963867187, 0.61240771484375, 0.6112577514648437, 0.6117216796875, 0.61115185546875, 
0.6116004028320312, 0.6115552978515625, 0.6116796875, 0.6114078979492188, 0.612432373046875, 0.6116531372070313, 0.6120514526367188, 0.612039794921875, 0.611510498046875, 0.6129464111328125, 0.6108401489257812, 0.612366943359375, 0.611108642578125, 0.612112548828125, 0.6117539672851563, 0.6120386352539062, 0.6122014770507812, 0.610892822265625, 0.6124089965820313, 0.6112544555664062, 0.612296875, 0.6117885131835937, 0.6124813232421875, 0.6114488525390624, 0.6121001586914062, 0.6117293701171875, 0.6118292846679687, 0.6114818725585938, 0.6121261596679688, 0.6116583862304688, 0.611817626953125, 0.6119771728515625, 0.6119376220703125, 0.6120693359375, 0.6117487182617187, 0.6118214111328125, 0.6121790771484374, 0.6123486328125, 0.6122989501953126, 0.6123724975585938, 0.6121533203125, 0.6122147827148438, 0.6122823486328125, 0.6118863525390625, 0.6121823120117188, 0.6119112548828125, 0.6119692993164062, 0.6123930053710938, 0.6113568115234375, 0.6123175659179687, 0.6118001098632813, 0.6119844970703125, 0.6119932250976563, 0.6120860595703125, 0.6121143188476562, 0.6115952758789063, 0.6121318359375, 0.6116773681640625, 0.6123018188476562, 0.6116881103515625, 0.6114631958007812, 0.6121328735351562, 0.6111272583007813, 0.6119588623046875, 0.61170263671875, 0.6119564208984375, 0.6112684326171876, 0.6118877563476562, 0.6115038452148438, 0.6114746704101562, 0.6119205932617188, 0.6120281372070312, 0.6116987915039063, 0.6114064331054687, 0.6124113159179687, 0.6112781372070313, 0.6120066528320313, 0.611626953125, 0.6123621215820313, 0.61093798828125, 0.6119782104492187, 0.6115983276367187, 0.6121513061523437, 0.61161474609375, 0.6117186889648437, 0.6113179321289063, 0.611909912109375, 0.6120181884765625, 0.6120342407226562, 0.6114204711914063, 0.612114013671875, 0.6120016479492187, 0.612050537109375, 0.6117672729492187, 0.6117969360351563, 0.61220361328125, 0.6111314086914063, 0.6126417846679687, 0.611280029296875, 0.6126109008789062, 0.6116351318359375, 0.6120182495117188, 0.6121041259765625, 0.6115387573242187, 0.6120286254882813, 0.6117396240234375, 0.6120509643554688, 0.61136279296875, 0.6125336303710938, 0.61175439453125, 0.6116763305664062, 0.6122394409179688, 0.61179443359375, 0.6122379760742187, 0.6120364379882812, 0.6118072509765625, 0.6126284790039063, 0.6119752807617187, 0.6128693237304688, 0.6118898315429687, 0.6119874267578125, 0.6124451904296875, 0.6117816162109375, 0.6116331176757812, 0.6122645874023438, 0.6111027221679688, 0.6122823486328125, 0.61180419921875, 0.6120498657226563, 0.6111559448242188, 0.611862548828125, 0.6112052001953125, 0.6119134521484375, 0.61221826171875, 0.6110249633789062, 0.6118604736328125, 0.6112611694335938, 0.6116834106445312, 0.6120723876953125, 0.6109813842773437, 0.6128661499023438, 0.6112119140625, 0.612406494140625, 0.6119605712890624, 0.6113814086914062, 0.6115191040039063, 0.6121307983398437, 0.6115082397460937, 0.6124724731445312, 0.611697021484375, 0.6117130126953125, 0.6124393310546875, 0.6120467529296875, 0.6117588500976563, 0.6121447143554688, 0.6123012084960937, 0.611454345703125, 0.6122400512695313, 0.6113724365234375, 0.6123485107421875, 0.6113218383789063, 0.6121974487304688, 0.6117467041015625, 0.612235107421875, 0.61198291015625, 0.6116659545898437, 0.6121889038085937, 0.6113687744140625, 0.612431884765625, 0.6117457885742188, 0.612025634765625, 0.612070068359375, 0.6119446411132813, 0.61252490234375, 0.6114900512695313, 0.612707275390625, 0.6114993896484375, 0.6124589233398438, 0.6123704223632812, 0.612073486328125, 0.6117560424804688, 
0.6119668579101563, 0.6117354736328126, 0.6124805908203125, 0.61143701171875, 0.6127924194335937, 0.6124965209960938, 0.6111691284179688, 0.61187890625, 0.6114877319335937, 0.6118578491210938, 0.6114903564453125, 0.61153076171875, 0.6115978393554687, 0.61136962890625, 0.6120692138671875, 0.6116220092773438, 0.6122158813476563, 0.6112848510742187, 0.6118174438476562, 0.6116864013671875, 0.6121880493164062, 0.611337646484375, 0.6113819580078125, 0.6126708374023437, 0.6112508544921875, 0.6123948974609374, 0.61131787109375, 0.6128312377929688, 0.6115405883789062, 0.6118854370117187, 0.6120525512695313, 0.6114081420898437, 0.6123910522460938, 0.6116823120117187, 0.6123521118164063, 0.6113831787109375, 0.6121242065429687, 0.611533447265625, 0.6117701416015625, 0.6116065063476562, 0.6117473754882813, 0.6116456909179687, 0.6114653930664062, 0.61201611328125, 0.6121922607421875, 0.6118807373046875, 0.612284912109375, 0.6114402465820312, 0.612060791015625, 0.6114349975585938, 0.6126071166992187, 0.6115418701171875, 0.61243798828125, 0.6115164184570312, 0.61290673828125, 0.611835693359375, 0.61220703125, 0.6122926025390625, 0.6111846313476562, 0.6127697143554688, 0.6120305786132813, 0.6122406616210937, 0.6120680541992187, 0.6117254638671875, 0.6124521484375, 0.6115799560546875, 0.6124517822265625, 0.6120985717773437]",tokens/s,1.6349746841778028,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,5182.85312,5584.584704,0.0,5182.062592,5181.108736,s,1,11.2801416015625,11.2801416015625,0.0,11.2801416015625,11.2801416015625,11.2801416015625,11.2801416015625,[11.2801416015625],,kWh,0.0001153400779125036,1.2715570198330854e-05,3.361030466599718e-05,0.00016166595277683162,,MB,5159.092224,5739.773952,0.0,5322.571776,5283.621376,s,10,2.5152559204101563,0.25152559204101566,0.0006134836204615277,0.25144146728515626,0.2524150741577148,0.2524170860290527,0.25241869552612306,"[0.2505451202392578, 0.2510733184814453, 0.2507513580322266, 0.25143788146972657, 0.25144505310058596, 0.2517642822265625, 0.25241909790039063, 0.25133126831054686, 0.2524146270751953, 0.25207391357421877]",tokens/s,1017.789076342795,kWh,7.422230555104174e-06,8.18522820740633e-07,4.954385907950109e-06,1.3195139283794918e-05,tokens/kWh,19401083.572827168,MB,5167.259648,5756.551168,0.0,5339.348992,5283.623936,s,10,28.361325927734374,2.8361325927734375,0.018969364521891238,2.8370953369140626,2.8564384033203125,2.8598361450195315,2.8625543383789065,"[2.810791015625, 2.82039013671875, 2.80667431640625, 2.825640625, 2.83059521484375, 2.85177490234375, 2.843595458984375, 2.86323388671875, 2.855683349609375, 
2.852947021484375]",tokens/s,22.21334790923603,kWh,8.2129030527396e-05,9.058927448725363e-06,5.234896549024954e-05,0.0001435369234663709,tokens/kWh,438911.45552356914,,s,630,28.3581591758728,0.04501295107281397,0.0005839222223718378,0.044969408035278324,0.04556121368408203,0.045784611892700194,0.04739607604980469,"[0.0451861457824707, 0.045018016815185545, 0.04541193771362305, 0.04488179016113281, 0.04458345413208008, 0.044635711669921876, 0.04494198226928711, 0.04477315139770508, 0.044635456085205076, 0.04441977691650391, 0.044478462219238284, 0.04455014419555664, 0.044494400024414064, 0.04445228958129883, 0.04458720016479492, 0.04489628982543945, 0.04466284942626953, 0.04442876815795899, 0.04440089416503906, 0.04435126495361328, 0.04469168090820313, 0.044497951507568356, 0.04462704086303711, 0.04464627075195313, 0.04477337646484375, 0.04461481475830078, 0.0445305290222168, 0.044459968566894534, 0.044607551574707034, 0.044677120208740234, 0.04444367980957031, 0.04468454360961914, 0.0444950065612793, 0.04457113647460938, 0.04498179244995117, 0.04451996612548828, 0.04415427017211914, 0.04430089569091797, 0.04466387176513672, 0.044526241302490235, 0.04441116714477539, 0.04445798492431641, 0.04422655868530274, 0.044199935913085936, 0.04416214370727539, 0.044657569885253906, 0.04470988845825195, 0.04449280166625977, 0.044711936950683595, 0.04447983932495117, 0.04467363357543945, 0.044877311706542966, 0.044612064361572265, 0.04449248123168945, 0.044493217468261716, 0.04468751907348633, 0.04476707077026367, 0.04462182235717774, 0.04470988845825195, 0.0445665283203125, 0.04469760131835938, 0.04483900833129883, 0.04472016143798828, 0.0451624641418457, 0.04474687957763672, 0.04461203384399414, 0.04450099182128906, 0.044916736602783204, 0.04592230224609375, 0.04470723342895508, 0.045160385131835935, 0.045058624267578125, 0.04490864181518555, 0.04563363265991211, 0.044687488555908206, 0.044412704467773435, 0.04620083236694336, 0.04498153686523437, 0.04480873489379883, 0.04472441482543945, 0.04470374298095703, 0.04469164657592774, 0.04434310531616211, 0.044240768432617185, 0.044822689056396484, 0.04432268905639648, 0.04426147079467774, 0.04422406387329102, 0.044003776550292965, 0.04409139251708984, 0.04454934310913086, 0.04431967926025391, 0.04435337448120117, 0.044625694274902344, 0.04473436737060547, 0.04497436904907227, 0.04451724624633789, 0.044585025787353516, 0.044799423217773436, 0.04486956787109375, 0.044659423828125, 0.04467113494873047, 0.04461100769042969, 0.04532675170898438, 0.04475494384765625, 0.044701694488525394, 0.04436377716064453, 0.04439244842529297, 0.044668800354003904, 0.04523408126831055, 0.04447049713134766, 0.04454633712768555, 0.04460726547241211, 0.044771263122558594, 0.04467686462402344, 0.044669185638427734, 0.04463846588134766, 0.044295425415039065, 0.044854015350341794, 0.04444291305541992, 0.044730464935302736, 0.04509142303466797, 0.047123584747314456, 0.045125598907470706, 0.044714305877685545, 0.044791839599609376, 0.04482003021240234, 0.04424454498291015, 0.04442940902709961, 0.044208927154541014, 0.04475081634521484, 0.04430031967163086, 0.04445907211303711, 0.044182464599609374, 0.044611583709716796, 0.04501465606689453, 0.04419641494750977, 0.044437503814697264, 0.044257022857666015, 0.044328990936279296, 0.04443334579467773, 0.044240478515625, 0.044773887634277344, 0.044646625518798826, 0.044469951629638675, 0.04438230514526367, 0.04648556900024414, 0.04488819122314453, 0.04463391876220703, 0.04515030288696289, 0.04483881759643555, 0.044910209655761715, 
0.046985790252685546, 0.044642112731933595, 0.04428505706787109, 0.044249664306640624, 0.04400774383544922, 0.04484019088745117, 0.044316638946533204, 0.04436867141723633, 0.04447654342651367, 0.04428787231445312, 0.044305599212646485, 0.04405478286743164, 0.04409360122680664, 0.04421263885498047, 0.04446828842163086, 0.04433500671386719, 0.04437404632568359, 0.04449894332885742, 0.04415078353881836, 0.044259071350097656, 0.044601024627685545, 0.04434560012817383, 0.04435385513305664, 0.04432896041870117, 0.04432044982910156, 0.04433337783813476, 0.04415683364868164, 0.044101119995117184, 0.04803644943237305, 0.04430847930908203, 0.04421836853027344, 0.04408115386962891, 0.044455936431884766, 0.04460688018798828, 0.04413631820678711, 0.044297183990478516, 0.04440796661376953, 0.045266750335693356, 0.04471231842041016, 0.04524031829833984, 0.04482867050170898, 0.0446196174621582, 0.044765342712402345, 0.044609535217285154, 0.04475699234008789, 0.045416065216064457, 0.0446710090637207, 0.044620128631591795, 0.044892288208007815, 0.04461094284057617, 0.04450342559814453, 0.04463324737548828, 0.04461423873901367, 0.0446959342956543, 0.04477318572998047, 0.04450860977172852, 0.04455295944213867, 0.04445167922973633, 0.044631423950195315, 0.044638175964355466, 0.04465558242797851, 0.04449161529541015, 0.04514416122436524, 0.04461865615844727, 0.0448001937866211, 0.044986175537109374, 0.044805919647216794, 0.04465891265869141, 0.04507852935791016, 0.04502048110961914, 0.04488057708740235, 0.04502102279663086, 0.044996383666992185, 0.044869598388671876, 0.04462742233276367, 0.04475795364379883, 0.044830432891845705, 0.04501532745361328, 0.04494960021972656, 0.04518697738647461, 0.04466262435913086, 0.04503535842895508, 0.04495187377929687, 0.04619222259521484, 0.04503798294067383, 0.044944511413574216, 0.0447836799621582, 0.04477347183227539, 0.04516672134399414, 0.04544736099243164, 0.044997024536132815, 0.044853534698486325, 0.04460297775268555, 0.04493529510498047, 0.044677120208740234, 0.04459929656982422, 0.04465996932983399, 0.044826496124267576, 0.04476172637939453, 0.044990718841552736, 0.04525260925292969, 0.04502665710449219, 0.044962558746337894, 0.04495779037475586, 0.044937023162841795, 0.04490854263305664, 0.04478275299072266, 0.04447840118408203, 0.04444784164428711, 0.04406937789916992, 0.04405855941772461, 0.04399472045898437, 0.04427193450927734, 0.04473664093017578, 0.044644702911376954, 0.04471814346313477, 0.04521539306640625, 0.04488425445556641, 0.04474879837036133, 0.044660030364990236, 0.04472902297973633, 0.04485529708862305, 0.044834815979003906, 0.04499193572998047, 0.045115039825439456, 0.044770111083984376, 0.0447957763671875, 0.04478694534301758, 0.04602159881591797, 0.04478540802001953, 0.045062080383300784, 0.04855839920043945, 0.044805503845214846, 0.04486563110351562, 0.04491228866577148, 0.04491312026977539, 0.044687774658203124, 0.044638046264648436, 0.04475305557250977, 0.04496723175048828, 0.04495862579345703, 0.04489113616943359, 0.04497910308837891, 0.044942657470703126, 0.044912353515625, 0.04492092895507813, 0.04489043045043945, 0.04490079879760742, 0.045197311401367186, 0.04501606369018555, 0.04488889694213867, 0.04503753662109375, 0.04497158432006836, 0.044927646636962894, 0.04492287826538086, 0.04494131088256836, 0.044989761352539064, 0.04490719985961914, 0.045106464385986325, 0.04505462265014649, 0.04537139129638672, 0.04505401611328125, 0.04490403366088867, 0.045779102325439455, 0.04530934524536133, 0.04498697662353516, 0.04530899047851562, 
0.04504467010498047, 0.045125633239746096, 0.044991905212402344, 0.04470140838623047, 0.04476553726196289, 0.04701779174804688, 0.044748863220214846, 0.04506691360473633, 0.04543625640869141, 0.045134624481201174, 0.044834686279296876, 0.04482640075683594, 0.046123233795166016, 0.04509872055053711, 0.04488016128540039, 0.04498636627197266, 0.04509641647338867, 0.045271263122558594, 0.04500640106201172, 0.04520332717895508, 0.04512857437133789, 0.04519036865234375, 0.04541024017333985, 0.04508758544921875, 0.045088768005371094, 0.04489215850830078, 0.04485529708862305, 0.04514543914794922, 0.04493174362182617, 0.044826881408691406, 0.044754688262939456, 0.044935169219970705, 0.04493856048583984, 0.04475769424438476, 0.0452567024230957, 0.044875774383544925, 0.04484067153930664, 0.044959102630615234, 0.04515049743652344, 0.04963350296020508, 0.04539993667602539, 0.04519363021850586, 0.04522406387329102, 0.04520345687866211, 0.045764606475830076, 0.045039615631103515, 0.04553113555908203, 0.04512153625488281, 0.0450334701538086, 0.0452751350402832, 0.04541996765136719, 0.04516719818115234, 0.04555587387084961, 0.04564476776123047, 0.04566412734985351, 0.04546579360961914, 0.04567267227172851, 0.045382175445556644, 0.04529155349731445, 0.04640892791748047, 0.04569363021850586, 0.04766857528686524, 0.04542892837524414, 0.04546166229248047, 0.045335006713867185, 0.0454466552734375, 0.045496864318847655, 0.045270912170410155, 0.045099136352539065, 0.044918785095214846, 0.04573606491088867, 0.04484844970703125, 0.045066814422607425, 0.04503257751464844, 0.045053855895996094, 0.04523311996459961, 0.04535456085205078, 0.04515843200683594, 0.045117855072021484, 0.045156352996826174, 0.04501708984375, 0.045658016204833986, 0.04522608184814453, 0.045330432891845705, 0.04514815902709961, 0.045330047607421875, 0.045300289154052736, 0.04512876892089844, 0.044900096893310544, 0.04477030563354492, 0.04516864013671875, 0.044990463256835936, 0.045702751159667966, 0.04481884765625, 0.04486134338378906, 0.04476457595825195, 0.044816638946533205, 0.04478611373901367, 0.044777473449707034, 0.04501440048217773, 0.045085311889648434, 0.045004127502441406, 0.04507920074462891, 0.04531337738037109, 0.04475766372680664, 0.04482252883911133, 0.04452083206176758, 0.044584766387939456, 0.04421635055541992, 0.04408195114135742, 0.04453171157836914, 0.04499276733398438, 0.045233505249023434, 0.04523984146118164, 0.045079425811767576, 0.044984321594238284, 0.0450437126159668, 0.044849056243896485, 0.0456233901977539, 0.04505132675170898, 0.04494803237915039, 0.04477452850341797, 0.04603209686279297, 0.04529436874389649, 0.04483071899414062, 0.044972030639648435, 0.04478060913085938, 0.045247425079345704, 0.04525056076049805, 0.044865535736083983, 0.04499596786499024, 0.04546214294433594, 0.04491231918334961, 0.04502320098876953, 0.04872022247314453, 0.04524031829833984, 0.045006847381591795, 0.04511331176757812, 0.04501641464233398, 0.045513153076171875, 0.045590782165527345, 0.045069534301757815, 0.04514691162109375, 0.04506009674072266, 0.04589932632446289, 0.04497571182250976, 0.045077342987060544, 0.045294944763183596, 0.04523491287231445, 0.04534451293945312, 0.04533881759643555, 0.04566835021972656, 0.04558028793334961, 0.045592575073242186, 0.04518889617919922, 0.04541667175292969, 0.045295616149902344, 0.04525696182250977, 0.04525235366821289, 0.04508467102050781, 0.04547993469238281, 0.045184223175048825, 0.045638656616210936, 0.045378849029541014, 0.04521011352539062, 0.045625343322753906, 0.04589977645874024, 
0.04516390228271484, 0.046706527709960935, 0.045980384826660156, 0.045752384185791015, 0.045748222351074216, 0.045768672943115235, 0.04542806243896484, 0.04542319869995117, 0.045631423950195316, 0.04545756912231445, 0.04555926513671875, 0.04558428955078125, 0.0457529296875, 0.04578911972045899, 0.045463649749755856, 0.04535500717163086, 0.04583833694458008, 0.04530790328979492, 0.045641502380371096, 0.04517315292358398, 0.04521551895141602, 0.044988414764404294, 0.045338623046875, 0.04546559906005859, 0.04549836730957031, 0.0451778564453125, 0.045593185424804686, 0.04554297637939453, 0.0451973762512207, 0.04544182586669922, 0.04498799896240235, 0.045289886474609374, 0.04511667251586914, 0.04546579360961914, 0.045951553344726566, 0.04574720001220703, 0.04574310302734375, 0.0455096321105957, 0.04552975845336914, 0.045418846130371095, 0.045109310150146485, 0.04505184173583984, 0.04602675247192383, 0.04735795211791992, 0.045350910186767575, 0.04511743927001953, 0.04527260971069336, 0.045299999237060545, 0.044992702484130856, 0.04636876678466797, 0.04554547119140625, 0.04537343978881836, 0.04501504135131836, 0.04487369537353516, 0.04489628982543945, 0.04527056121826172, 0.04557875061035156, 0.0452070083618164, 0.04543539047241211, 0.045895263671875, 0.04502345657348633, 0.045037761688232425, 0.04504956817626953, 0.0452322883605957, 0.0452704963684082, 0.04500243377685547, 0.04466310501098633, 0.04469532775878906, 0.04484131240844726, 0.0450437126159668, 0.04470454406738281, 0.04516972732543945, 0.04480684661865234, 0.0446668815612793, 0.045047103881835936, 0.0448903694152832, 0.04741164779663086, 0.04531814575195312, 0.04534211349487305, 0.04536790466308594, 0.04468668746948242, 0.04611385726928711, 0.04510079956054688, 0.04536521530151367, 0.04532368087768555, 0.045275390625, 0.04520499038696289, 0.04505702209472656, 0.04496198272705078, 0.04547155380249023, 0.04515430450439453, 0.045301025390625, 0.044994400024414065, 0.04508777618408203, 0.04506198501586914, 0.045467647552490234, 0.045146110534667966, 0.04503142547607422, 0.04805836868286133, 0.04520550537109375, 0.045553375244140625, 0.044937503814697265, 0.04518473434448242, 0.04524025726318359, 0.04519971084594727, 0.04564377593994141, 0.04507033538818359, 0.04510438537597656, 0.044924671173095704, 0.04531110382080078, 0.045176513671875, 0.045209503173828124, 0.04506639862060547, 0.04532646560668945, 0.04509247970581055, 0.045230464935302736, 0.04581785583496094, 0.045238273620605465, 0.045348926544189455, 0.04529296112060547, 0.04549276733398438, 0.045315807342529296, 0.04553123092651367, 0.045606849670410156, 0.04531600189208984, 0.0452446403503418, 0.04500492858886719, 0.044935169219970705, 0.044918880462646485, 0.04560031890869141, 0.04615622329711914, 0.04523203277587891, 0.04513587188720703, 0.045174785614013675, 0.04501504135131836, 0.04492287826538086, 0.044816383361816405, 0.04504572677612305, 0.04504988861083985, 0.04521561431884766, 0.045428478240966796, 0.044988800048828125, 0.04511967849731445, 0.045010433197021485]",tokens/s,22.215828470841142,, 
4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,876.408832,655.294464,0.0,260.046848,258.555392,s,1,7.42957080078125,7.42957080078125,0.0,7.42957080078125,7.42957080078125,7.42957080078125,7.42957080078125,[7.42957080078125],,kWh,1.47450138791631e-05,1.617255663913867e-06,4.5072258280029476e-06,2.0869495371079913e-05,,MB,1354.1376,751.763456,0.0,341.835776,317.950464,s,14,0.18944127941131592,0.013531519957951137,0.00010986742841942603,0.01349833583831787,0.013646256065368654,0.01368941445350647,0.013752180376052857,"[0.013313983917236328, 0.013539551734924316, 0.013508831977844239, 0.01347913646697998, 0.013476960182189942, 0.01360489559173584, 0.01364412784576416, 0.013487839698791504, 0.013767871856689453, 0.013647168159484864, 0.013454751968383789, 0.013474368095397949, 0.013429311752319336, 0.013612480163574218]",tokens/s,18918.791148039072,kWh,3.933527302672926e-07,4.33739737116998e-08,2.0916496218059023e-07,6.458916661595826e-07,tokens/kWh,396351297.6133512,MB,1394.151424,779.026432,0.0,369.098752,317.953024,s,14,9.84946746826172,0.7035333905901229,0.003768367487343936,0.7027530212402344,0.7062368774414063,0.7098903015136719,0.7145298669433594,"[0.703157470703125, 0.7049987182617188, 0.7024894409179687, 0.7156897583007813, 0.7067675170898438, 0.7030166015625, 0.703255859375, 0.7013170166015625, 0.7020364990234375, 0.7000364379882813, 0.7017915649414063, 0.7006768188476562, 0.7031799926757812, 0.7010537719726563]",tokens/s,89.54798854274092,kWh,2.0345932114177316e-05,2.243730290200346e-06,7.366507765019225e-06,2.9956170169396877e-05,tokens/kWh,2103072.5771601,,s,882,9.842919075965877,0.011159772194972654,0.00023924831474364988,0.01113099193572998,0.01124176025390625,0.01131808614730835,0.01177602387428283,"[0.010941344261169434, 0.011363776206970214, 0.011177696228027344, 0.011175007820129394, 0.011122431755065918, 0.011148544311523437, 0.011139840126037597, 0.011110239982604981, 0.011110560417175293, 0.011093024253845216, 0.011267040252685547, 0.011154656410217284, 0.01115011215209961, 0.011115551948547363, 0.011127488136291504, 0.011277728080749512, 0.011207488059997559, 0.01118012809753418, 0.011095711708068848, 0.011179936408996583, 0.01119375991821289, 0.011166720390319825, 0.011177087783813476, 0.01110431957244873, 0.011184608459472656, 0.011194720268249511, 0.011208703994750976, 0.01113702392578125, 0.011073247909545898, 0.011125023841857911, 0.011114496231079102, 0.011100128173828125, 0.011062751770019532, 0.01110108757019043, 0.011156959533691406, 0.011157600402832032, 0.011135071754455566, 0.011164928436279297, 0.011156576156616211, 0.011145888328552246, 0.011122879981994628, 0.011342687606811524, 0.011103743553161622, 0.011163135528564454, 0.011170720100402832, 0.011165760040283202, 0.011130816459655761, 0.01114476776123047, 0.011173664093017578, 0.011193056106567383, 0.011124640464782716, 0.011087008476257324, 
0.011164192199707032, 0.01119491195678711, 0.011165568351745606, 0.011206175804138184, 0.01113491153717041, 0.011127327919006348, 0.011151231765747071, 0.011148991584777833, 0.01112720012664795, 0.011130911827087402, 0.011213120460510253, 0.010954303741455078, 0.011210463523864745, 0.01124176025390625, 0.01118825626373291, 0.011138848304748535, 0.01108182430267334, 0.011115167617797852, 0.011206656455993653, 0.011228896141052247, 0.01124176025390625, 0.01117360019683838, 0.011184415817260741, 0.01120684814453125, 0.011245375633239746, 0.011136096000671386, 0.011216128349304199, 0.01130281639099121, 0.011754591941833497, 0.011418368339538574, 0.011681695938110352, 0.011278240203857422, 0.011237471580505372, 0.01115340805053711, 0.011183199882507324, 0.011211199760437012, 0.011125375747680664, 0.011070464134216309, 0.011231583595275879, 0.011270591735839844, 0.011148351669311524, 0.01110319995880127, 0.011022432327270508, 0.011134048461914063, 0.01113379192352295, 0.011118528366088867, 0.011077535629272462, 0.011219103813171387, 0.011156607627868652, 0.011118911743164062, 0.011112992286682128, 0.011052864074707031, 0.011137344360351563, 0.011151007652282715, 0.011179583549499512, 0.011157535552978515, 0.011227231979370117, 0.011225855827331543, 0.011244864463806152, 0.011121408462524414, 0.01120847988128662, 0.011055071830749512, 0.011143296241760255, 0.011314656257629395, 0.011210463523864745, 0.011153632164001465, 0.011084063529968262, 0.011165216445922851, 0.011153344154357911, 0.0111080961227417, 0.011110752105712891, 0.011097760200500488, 0.011101152420043945, 0.011098112106323242, 0.011083776473999024, 0.01118575954437256, 0.011216832160949706, 0.011106783866882325, 0.011353631973266602, 0.011212736129760743, 0.011196672439575195, 0.011125023841857911, 0.011074655532836914, 0.011131808280944825, 0.011109503746032715, 0.011152192115783692, 0.011098176002502442, 0.011136832237243653, 0.01120684814453125, 0.011119903564453125, 0.011059776306152343, 0.011032768249511719, 0.011138175964355469, 0.011160575866699218, 0.011132767677307129, 0.01115932846069336, 0.01107590389251709, 0.011148832321166991, 0.01115135955810547, 0.011134943962097167, 0.01102892780303955, 0.01109398365020752, 0.011085503578186036, 0.011180000305175781, 0.01107808017730713, 0.011048928260803222, 0.011134719848632813, 0.011190303802490234, 0.01112723159790039, 0.011104160308837891, 0.01115071964263916, 0.011108415603637695, 0.011082112312316894, 0.011053055763244628, 0.011026432037353515, 0.011161120414733886, 0.011169631958007813, 0.01121555233001709, 0.011244607925415038, 0.011281279563903809, 0.011198464393615723, 0.011200511932373047, 0.011091967582702637, 0.011086848258972168, 0.01109324836730957, 0.011120351791381836, 0.01112713623046875, 0.011166943550109863, 0.011259424209594726, 0.011164416313171386, 0.011220319747924805, 0.011131744384765625, 0.011110400199890137, 0.011106207847595214, 0.01116374397277832, 0.011292672157287598, 0.011222175598144531, 0.010917311668395996, 0.01114412784576416, 0.011158623695373536, 0.011120896339416503, 0.01117692756652832, 0.011144255638122559, 0.011136992454528809, 0.011213472366333008, 0.011282431602478027, 0.011269696235656738, 0.011231679916381836, 0.011124383926391602, 0.011125439643859864, 0.011226335525512695, 0.011428288459777832, 0.01146675205230713, 0.011367808341979981, 0.011214879989624024, 0.011143775939941406, 0.011190272331237794, 0.011162848472595214, 0.011170592308044434, 0.011122688293457032, 0.01109347152709961, 0.011268544197082519, 0.011224703788757324, 
0.011176416397094727, 0.011530240058898926, 0.01113315200805664, 0.011179903984069824, 0.011193535804748534, 0.011199135780334473, 0.011156736373901367, 0.011137855529785157, 0.011186176300048829, 0.011183520317077637, 0.01116425609588623, 0.011165696144104004, 0.01173299217224121, 0.014544320106506348, 0.01618182373046875, 0.011280384063720703, 0.011198464393615723, 0.011116543769836425, 0.011068863868713379, 0.01122873592376709, 0.011391776084899902, 0.011184224128723145, 0.011110527992248535, 0.0110632963180542, 0.011188223838806152, 0.011187647819519043, 0.011209088325500488, 0.011177696228027344, 0.011166175842285157, 0.011193568229675294, 0.011149087905883788, 0.011174847602844238, 0.012023776054382325, 0.011214943885803222, 0.011070528030395509, 0.011652031898498534, 0.011185824394226074, 0.0108602876663208, 0.011147520065307618, 0.01116710376739502, 0.011112832069396972, 0.01118233585357666, 0.011255935668945312, 0.011179648399353027, 0.011198304176330566, 0.011126367568969727, 0.011052255630493163, 0.011148768424987793, 0.011163776397705077, 0.011129920005798339, 0.011135999679565429, 0.011160863876342773, 0.011546591758728027, 0.011295424461364747, 0.011179360389709472, 0.011199359893798827, 0.01105840015411377, 0.011227871894836426, 0.011154975891113281, 0.011258303642272949, 0.011161760330200195, 0.011081472396850586, 0.011168928146362304, 0.011164287567138671, 0.01113219165802002, 0.011135295867919922, 0.011189184188842773, 0.011196096420288086, 0.011202527999877929, 0.011180064201354981, 0.011032032012939454, 0.011132672309875488, 0.011184896469116211, 0.011122688293457032, 0.011323488235473633, 0.011308320045471191, 0.011419391632080078, 0.01197555160522461, 0.011309056282043458, 0.011228704452514648, 0.011330016136169433, 0.011111647605895995, 0.011074336051940917, 0.011108351707458495, 0.011108415603637695, 0.011100095748901367, 0.01107148838043213, 0.011192031860351562, 0.011247903823852538, 0.011620351791381836, 0.011659263610839844, 0.011290623664855956, 0.011243488311767578, 0.011213855743408203, 0.011274304389953613, 0.011286623954772949, 0.011286527633666991, 0.01120751953125, 0.01111843204498291, 0.011159711837768555, 0.010941344261169434, 0.01117407989501953, 0.011140224456787109, 0.01110700798034668, 0.011132927894592285, 0.011065343856811523, 0.01115340805053711, 0.011149312019348144, 0.011103520393371582, 0.011050880432128906, 0.011127072334289551, 0.011153984069824218, 0.011129920005798339, 0.011140031814575195, 0.01112451171875, 0.011139295578002929, 0.011143168449401856, 0.011104479789733886, 0.011104031562805176, 0.011103903770446778, 0.011126463890075683, 0.011117216110229491, 0.011110400199890137, 0.01110166358947754, 0.01118671989440918, 0.011131072044372558, 0.011116448402404786, 0.011068896293640136, 0.011125184059143067, 0.011189696311950683, 0.011315775871276855, 0.011141152381896972, 0.011334624290466309, 0.011891712188720703, 0.011173824310302734, 0.011238752365112304, 0.011229920387268066, 0.011102208137512207, 0.011038368225097656, 0.011263615608215332, 0.011154144287109376, 0.011144191741943359, 0.011089920043945312, 0.011070464134216309, 0.011159903526306152, 0.011126432418823242, 0.011186431884765625, 0.011048992156982422, 0.011149312019348144, 0.011097824096679688, 0.011166848182678223, 0.01110108757019043, 0.011067008018493652, 0.011212639808654785, 0.011179648399353027, 0.011158528327941895, 0.011100031852722169, 0.011157695770263671, 0.011138879776000976, 0.011160767555236817, 0.011203136444091797, 0.011086079597473144, 
0.011300864219665528, 0.01085209560394287, 0.011110048294067382, 0.011188159942626954, 0.011169728279113769, 0.011150015830993652, 0.011061471939086914, 0.011114303588867187, 0.011476991653442382, 0.011181952476501465, 0.011524127960205079, 0.011185600280761718, 0.01116431999206543, 0.01186739158630371, 0.011273056030273438, 0.011318207740783692, 0.011138143539428711, 0.011102399826049804, 0.01105299186706543, 0.011117728233337402, 0.011106911659240723, 0.011102463722229004, 0.01154800033569336, 0.011113216400146484, 0.011158592224121093, 0.01113491153717041, 0.01111673641204834, 0.011033087730407716, 0.011097760200500488, 0.011194304466247558, 0.01108620834350586, 0.011058719635009765, 0.011040767669677735, 0.01112112045288086, 0.011242560386657715, 0.011100959777832032, 0.011059616088867187, 0.011164416313171386, 0.011144191741943359, 0.011106304168701172, 0.011068896293640136, 0.011023167610168458, 0.011128543853759765, 0.011149184226989746, 0.01115347194671631, 0.011100223541259766, 0.011143168449401856, 0.011169728279113769, 0.011177984237670899, 0.01112070369720459, 0.011041119575500488, 0.011107872009277344, 0.011077407836914063, 0.01109023952484131, 0.011016415596008302, 0.011112256050109863, 0.011370495796203613, 0.011130847930908204, 0.011155679702758789, 0.011083583831787109, 0.011202079772949219, 0.011153887748718262, 0.011153632164001465, 0.011073311805725098, 0.010983519554138184, 0.011087776184082031, 0.011039744377136231, 0.011114336013793946, 0.011288800239562989, 0.011171903610229492, 0.0110665283203125, 0.011108575820922851, 0.011151616096496581, 0.01116748809814453, 0.011157312393188477, 0.011087583541870118, 0.011172831535339356, 0.011235103607177735, 0.011172063827514648, 0.01112063980102539, 0.011113632202148437, 0.01115017604827881, 0.011139072418212891, 0.011148384094238281, 0.01109235191345215, 0.011116064071655274, 0.011148287773132324, 0.01147871971130371, 0.01111036777496338, 0.011050751686096191, 0.011104543685913086, 0.011149824142456055, 0.011140928268432618, 0.011120384216308593, 0.011069696426391601, 0.011169792175292969, 0.011143168449401856, 0.011140576362609863, 0.011098400115966797, 0.011114496231079102, 0.011147487640380859, 0.011125120162963867, 0.011101152420043945, 0.011135135650634766, 0.011196255683898926, 0.011119296073913574, 0.011065055847167968, 0.011036959648132325, 0.0110731840133667, 0.011099552154541016, 0.011201472282409667, 0.01113491153717041, 0.011075615882873535, 0.011122336387634277, 0.011143551826477052, 0.011057120323181152, 0.011052800178527831, 0.011088255882263184, 0.011153311729431152, 0.011129152297973633, 0.011118176460266114, 0.01103270435333252, 0.011129920005798339, 0.011178144454956055, 0.01110636806488037, 0.011047616004943847, 0.011058367729187012, 0.010780768394470215, 0.011065312385559082, 0.011125375747680664, 0.011120448112487793, 0.011062687873840332, 0.011113247871398925, 0.011122688293457032, 0.011100159645080567, 0.011126784324645997, 0.01109769630432129, 0.011084480285644531, 0.011134431838989258, 0.011124992370605468, 0.011141119956970215, 0.011108351707458495, 0.011134880065917969, 0.01115328025817871, 0.011472319602966309, 0.011174176216125488, 0.011065855979919433, 0.011192383766174317, 0.011132512092590332, 0.011147487640380859, 0.011114399909973144, 0.011091391563415527, 0.011176608085632323, 0.011175392150878906, 0.011188384056091308, 0.011311200141906739, 0.011192511558532715, 0.011206368446350097, 0.011184672355651855, 0.011143008232116699, 0.011181695938110352, 0.011068191528320313, 
0.01114076805114746, 0.011122336387634277, 0.011076000213623047, 0.011078816413879394, 0.011148032188415527, 0.01115555191040039, 0.011147199630737304, 0.011223103523254395, 0.011112575531005859, 0.011203776359558106, 0.01115993595123291, 0.011137344360351563, 0.011116543769836425, 0.011128255844116211, 0.011247615814208984, 0.01117360019683838, 0.011164511680603027, 0.01103872013092041, 0.011104415893554688, 0.01111571216583252, 0.011096927642822265, 0.011063103675842284, 0.01112063980102539, 0.011143168449401856, 0.01112063980102539, 0.011144479751586915, 0.011129568099975586, 0.011147232055664062, 0.010875935554504395, 0.011146016120910644, 0.011135071754455566, 0.01112822437286377, 0.011112704277038574, 0.011060959815979004, 0.01110694408416748, 0.011091456413269044, 0.011142815589904785, 0.011023263931274414, 0.01111836814880371, 0.011116703987121583, 0.011122783660888673, 0.011101280212402344, 0.011078463554382325, 0.011095616340637207, 0.011108799934387207, 0.011075391769409179, 0.011032768249511719, 0.011097503662109374, 0.011090432167053223, 0.011096256256103516, 0.011050911903381347, 0.011053055763244628, 0.011085824012756347, 0.011154784202575683, 0.011037343978881837, 0.011053055763244628, 0.011124735832214355, 0.011106176376342774, 0.011104607582092285, 0.011034399986267089, 0.011155136108398437, 0.011104576110839844, 0.011065343856811523, 0.011054752349853515, 0.011069791793823242, 0.011179327964782715, 0.011141695976257325, 0.011108799934387207, 0.01104851245880127, 0.01109004783630371, 0.011122688293457032, 0.011383935928344727, 0.011096832275390624, 0.011093407630920411, 0.01112985610961914, 0.011126496315002442, 0.011110400199890137, 0.011102208137512207, 0.011108351707458495, 0.0111627836227417, 0.011139583587646485, 0.011086112022399902, 0.011061311721801758, 0.011130880355834961, 0.011193504333496094, 0.011162464141845702, 0.011124608039855956, 0.011068799972534179, 0.011145024299621583, 0.011116607666015625, 0.011118911743164062, 0.010893952369689942, 0.011158687591552734, 0.011143520355224609, 0.011192352294921874, 0.01118620777130127, 0.01112492847442627, 0.011116543769836425, 0.011062784194946289, 0.011164159774780273, 0.011168095588684082, 0.011359295845031737, 0.011165599822998046, 0.011120927810668946, 0.011157919883728027, 0.011146431922912597, 0.011131967544555664, 0.011105152130126953, 0.011119071960449218, 0.011147680282592774, 0.011108672142028809, 0.01107545566558838, 0.011011296272277833, 0.011091903686523437, 0.011102527618408202, 0.011107904434204102, 0.01114566421508789, 0.011201184272766113, 0.01152787208557129, 0.01115555191040039, 0.011142399787902832, 0.011077983856201172, 0.011073856353759766, 0.01113424015045166, 0.011128800392150879, 0.01109705638885498, 0.011064288139343261, 0.01110099220275879, 0.011134176254272462, 0.01113145637512207, 0.011017951965332031, 0.01108784008026123, 0.011156000137329102, 0.011167296409606934, 0.011186047554016114, 0.011082176208496095, 0.011183584213256836, 0.011114432334899902, 0.01113161563873291, 0.011051008224487305, 0.011079775810241698, 0.011113887786865234, 0.011139552116394042, 0.011268128395080566, 0.011103967666625976, 0.011116191864013672, 0.011129471778869629, 0.011186176300048829, 0.011184127807617187, 0.011046560287475585, 0.011173919677734375, 0.011143487930297851, 0.011087424278259277, 0.011038240432739257, 0.01078700828552246, 0.0110632963180542, 0.011118399620056152, 0.011126976013183593, 0.01125376033782959, 0.011085824012756347, 0.0110448637008667, 0.011282431602478027, 0.01118723201751709, 
0.011090911865234376, 0.011081119537353516, 0.011073823928833007, 0.011167167663574219, 0.011213695526123047, 0.011106304168701172, 0.011085824012756347, 0.011071552276611328, 0.011092000007629395, 0.011117695808410644, 0.011154208183288574, 0.011398271560668945, 0.011228032112121582, 0.011223039627075194, 0.011195391654968261, 0.011092415809631348, 0.011085856437683105, 0.011086655616760253, 0.01111366367340088, 0.01108022403717041, 0.011011199951171875, 0.011122943878173829, 0.011145855903625488, 0.01109830379486084, 0.011044447898864745, 0.011092191696166992, 0.011116800308227539, 0.011321056365966796, 0.011131199836730957, 0.011031295776367188, 0.01110319995880127, 0.011089920043945312, 0.011059391975402831, 0.011020352363586425, 0.01109552001953125, 0.011090208053588867, 0.01111638355255127, 0.011085887908935547, 0.011051103591918946, 0.011122688293457032, 0.011141119956970215, 0.011161248207092285, 0.011260255813598633, 0.011106304168701172, 0.011166815757751464, 0.011076319694519042, 0.011063488006591796, 0.01105072021484375, 0.011149600028991699, 0.011097887992858886, 0.011094240188598633, 0.01103001594543457, 0.011089728355407715, 0.011077407836914063, 0.01087980842590332, 0.011154879570007324, 0.01110358428955078, 0.011074527740478516, 0.011075231552124023, 0.01111689567565918, 0.011067647933959961, 0.011042336463928222, 0.011151679992675781, 0.011315103530883788, 0.011077055931091309, 0.011061408042907716, 0.011084351539611816, 0.011010944366455078, 0.011112992286682128, 0.01108627223968506, 0.011069439888000488, 0.011048959732055665, 0.01162668800354004, 0.011478752136230468, 0.012269887924194336, 0.011263775825500488, 0.01119001579284668, 0.011268351554870605, 0.011083583831787109, 0.011171392440795899, 0.011149888038635253, 0.011400287628173827, 0.011195008277893066, 0.011087200164794923, 0.011322591781616211, 0.011118751525878906, 0.011222368240356445, 0.011125023841857911, 0.01122713565826416, 0.011135135650634766, 0.011127840042114258, 0.01108886432647705, 0.011122207641601562, 0.011157279968261718, 0.011148127555847168, 0.011218111991882324, 0.011096896171569825, 0.01106707191467285, 0.011073663711547851, 0.011097984313964844, 0.011128576278686523, 0.011086367607116699, 0.011052767753601074, 0.011158559799194336, 0.011148480415344239, 0.01110428810119629, 0.011009792327880859, 0.011145119667053222, 0.011122400283813477, 0.01107148838043213, 0.0110546236038208, 0.01108905601501465, 0.01108140754699707, 0.01107529640197754, 0.011081055641174316, 0.011011008262634278, 0.0112391357421875, 0.01091443157196045, 0.011130623817443848, 0.011122048377990723, 0.011098079681396485, 0.011203519821166992, 0.011013759613037109, 0.011123231887817383, 0.011066240310668945, 0.01099068832397461, 0.010989439964294434, 0.011079296112060547, 0.011090304374694824, 0.011075391769409179, 0.01202400016784668, 0.011206815719604492, 0.011120256423950195, 0.01212054443359375, 0.011155200004577637, 0.011122528076171876, 0.011075584411621094, 0.011036704063415528, 0.01120032024383545, 0.011189760208129883, 0.011067520141601563, 0.010996352195739746, 0.011004192352294922, 0.011168895721435547, 0.011096735954284669, 0.011144831657409668, 0.01099232006072998, 0.011218624114990234, 0.011103936195373535, 0.011016511917114257, 0.011026559829711914, 0.011050111770629882, 0.011076352119445802, 0.011061504364013671, 0.0110829439163208, 0.011075551986694337, 0.011119199752807616, 0.011152704238891602, 0.011086175918579102, 0.010969440460205078, 0.01120240020751953, 0.011106399536132813, 0.01124358367919922, 
0.011048959732055665, 0.011015520095825196, 0.011176959991455078, 0.01108137607574463, 0.011034848213195801, 0.011040543556213379, 0.011073823928833007, 0.011076512336730958, 0.01101318359375, 0.010989312171936036, 0.011177248001098632, 0.011097087860107421, 0.011202272415161132, 0.011142592430114746, 0.011035039901733398, 0.011093888282775879, 0.011084416389465331]",tokens/s,89.60756389368666,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,4192.362496,4878.958592,0.0,4483.710976,4465.672704,s,1,10.4747197265625,10.4747197265625,0.0,10.4747197265625,10.4747197265625,10.4747197265625,10.4747197265625,[10.4747197265625],,kWh,0.00010290706125000497,1.1344051261301709e-05,3.134030284999284e-05,0.0001455914153612995,,MB,2009.448448,5302.583296,0.0,4892.655616,4841.339904,s,10,1.9589540557861327,0.19589540557861326,0.000730593187748986,0.1959886245727539,0.19668135223388672,0.19678909378051757,0.19687528701782225,"[0.19454258728027343, 0.1952676544189453, 0.1957604522705078, 0.19513203430175782, 0.19553926086425782, 0.1963337860107422, 0.19689683532714844, 0.19665740966796874, 0.19660723876953126, 0.196216796875]",tokens/s,1306.8198268553401,kWh,5.72639562581696e-06,6.315141935190448e-07,3.7844801517646213e-06,1.0142389971100627e-05,tokens/kWh,25240599.181202605,MB,2013.175808,5470.355456,0.0,5060.427776,5012.931584,s,10,18.542546630859377,1.8542546630859378,0.0027287882253230152,1.8539893188476562,1.8574702392578124,1.8578740966796876,1.8581971826171877,"[1.8564677734375, 1.8568209228515624, 1.8552591552734374, 1.851202392578125, 1.8520284423828124, 1.8582779541015626, 1.852719482421875, 1.8573804931640625, 1.850620361328125, 1.8517696533203125]",tokens/s,33.97591563564006,kWh,5.4722095011683493e-05,6.035683385533565e-06,3.645494092863534e-05,9.72127193258524e-05,tokens/kWh,648063.3443533968,,s,630,18.539297084808336,0.029427455690171984,0.00034974845589844236,0.02939247989654541,0.029673522758483888,0.0298257905960083,0.030526356506347663,"[0.029909503936767577, 0.029638656616210936, 0.02937411117553711, 0.02945574378967285, 0.029524160385131837, 0.029345855712890626, 0.029495616912841797, 0.029454751968383788, 0.029765151977539064, 0.029397472381591797, 0.02957107162475586, 0.029327360153198243, 0.029491199493408202, 0.029284191131591798, 0.02931884765625, 0.029153120040893554, 0.029159135818481445, 0.029221151351928713, 0.029237119674682618, 0.029270368576049803, 0.029288063049316405, 0.029253631591796874, 0.029272096633911133, 0.02945305633544922, 0.029200096130371094, 0.029105855941772462, 0.029194431304931642, 0.02931065559387207, 0.02925049591064453, 0.029285503387451173, 0.02926825523376465, 0.02973734474182129, 0.029673471450805664, 0.029378559112548826, 0.029515264511108398, 0.02967398452758789, 0.0293621768951416, 0.02944236755371094, 0.029636287689208986, 0.02928816032409668, 0.029359615325927735, 0.029237951278686523, 
0.029365983963012696, 0.029413087844848633, 0.02949964714050293, 0.02945712089538574, 0.029346656799316407, 0.029310911178588868, 0.029416351318359374, 0.029439552307128906, 0.029510208129882812, 0.029767328262329102, 0.029622495651245116, 0.029453407287597655, 0.029606559753417968, 0.02973311996459961, 0.029749248504638674, 0.029574304580688476, 0.02967638397216797, 0.02955628776550293, 0.029507423400878908, 0.029516384124755858, 0.031001888275146484, 0.029954143524169922, 0.029574752807617188, 0.029323904037475586, 0.029220800399780273, 0.02999692726135254, 0.03162860870361328, 0.029438655853271486, 0.02944819259643555, 0.029464544296264647, 0.02954185676574707, 0.030299808502197264, 0.029459360122680665, 0.029310592651367186, 0.02936832046508789, 0.03026959991455078, 0.02949955177307129, 0.029281536102294923, 0.029370655059814454, 0.02941168022155762, 0.029419967651367188, 0.02921436882019043, 0.029162687301635744, 0.02950422477722168, 0.029546207427978515, 0.02944630432128906, 0.02931724739074707, 0.029560192108154297, 0.029442943572998048, 0.029304191589355467, 0.029278495788574218, 0.029324640274047853, 0.029187007904052733, 0.029247488021850586, 0.029232767105102538, 0.0294465274810791, 0.029461503982543946, 0.029527040481567384, 0.029529951095581056, 0.02947603225708008, 0.02949734306335449, 0.029506528854370117, 0.029345344543457032, 0.02918649673461914, 0.029361631393432615, 0.029876319885253907, 0.029235328674316406, 0.02920444869995117, 0.029301088333129884, 0.029220256805419922, 0.029301599502563478, 0.029248447418212892, 0.029299520492553712, 0.029245664596557617, 0.029251359939575197, 0.0292044792175293, 0.029278207778930664, 0.029365503311157226, 0.02936284828186035, 0.0295402889251709, 0.029642208099365235, 0.029563583374023438, 0.02944755172729492, 0.02956755256652832, 0.030022655487060547, 0.029667455673217772, 0.029375328063964843, 0.02900752067565918, 0.029028736114501952, 0.028999679565429686, 0.02918396759033203, 0.029216800689697266, 0.02949862480163574, 0.02939571189880371, 0.029153408050537108, 0.029073280334472658, 0.02902016067504883, 0.02914454460144043, 0.029032991409301757, 0.029065439224243164, 0.029169439315795898, 0.02944144058227539, 0.029257535934448242, 0.029057472229003907, 0.029147359848022462, 0.029644031524658204, 0.029641408920288086, 0.029490976333618163, 0.029284767150878906, 0.029294496536254884, 0.02913699150085449, 0.02909388732910156, 0.02928428840637207, 0.02946361541748047, 0.02939187240600586, 0.0336176643371582, 0.02991119956970215, 0.029376895904541015, 0.029505247116088866, 0.02959526443481445, 0.029585792541503907, 0.029623359680175782, 0.029445152282714843, 0.029308832168579102, 0.029360288619995116, 0.0293703670501709, 0.030125280380249024, 0.02945270347595215, 0.029357631683349608, 0.029270687103271485, 0.029290496826171877, 0.029256704330444337, 0.02927462387084961, 0.029406848907470702, 0.029352096557617186, 0.02949193572998047, 0.029442079544067384, 0.029292512893676757, 0.029324928283691407, 0.029614463806152343, 0.02952342414855957, 0.029528608322143556, 0.029421567916870117, 0.029503488540649415, 0.02954863929748535, 0.0294289608001709, 0.029684415817260744, 0.0297523193359375, 0.029770175933837892, 0.02915385627746582, 0.029159423828125, 0.029099807739257813, 0.029118688583374023, 0.029166784286499024, 0.029563104629516602, 0.028977760314941405, 0.029261503219604492, 0.029386911392211914, 0.02908380889892578, 0.029038591384887694, 0.029088991165161133, 0.029020063400268553, 0.028937088012695313, 0.029073055267333985, 
0.02918147277832031, 0.029166303634643554, 0.029329504013061523, 0.029071359634399413, 0.029085920333862304, 0.029038368225097658, 0.02931657600402832, 0.029262367248535155, 0.029195552825927736, 0.029118335723876954, 0.029426048278808594, 0.029277791976928712, 0.02915622329711914, 0.029306400299072267, 0.029336000442504884, 0.029235231399536134, 0.029220863342285155, 0.029423551559448244, 0.029370304107666015, 0.029495168685913085, 0.029702655792236327, 0.029483999252319336, 0.02942051124572754, 0.02956883239746094, 0.029426816940307618, 0.029334400177001955, 0.029464128494262696, 0.029690303802490235, 0.029503231048583985, 0.029387008666992186, 0.02944112014770508, 0.029430335998535156, 0.0292926082611084, 0.0293798713684082, 0.029733312606811522, 0.02996895980834961, 0.031061183929443358, 0.029795135498046875, 0.0295546875, 0.029417184829711913, 0.029473056793212892, 0.029652992248535157, 0.029476255416870118, 0.02950819206237793, 0.02946406364440918, 0.029464448928833008, 0.029804447174072265, 0.029510431289672852, 0.029244863510131835, 0.028961343765258787, 0.02896895980834961, 0.029816831588745117, 0.02998454475402832, 0.02921084785461426, 0.029366207122802735, 0.02922310447692871, 0.029061151504516602, 0.029484895706176757, 0.029042144775390626, 0.029190656661987304, 0.029189727783203126, 0.029192352294921876, 0.029225248336791992, 0.029355648040771485, 0.029477247238159178, 0.029057024002075195, 0.029138944625854493, 0.029259584426879884, 0.029411231994628906, 0.02944643211364746, 0.029259136199951172, 0.029411008834838867, 0.029475008010864258, 0.029392831802368163, 0.029491327285766603, 0.02947747230529785, 0.0295251522064209, 0.029635520935058595, 0.029597183227539063, 0.02963817596435547, 0.029453279495239258, 0.029292640686035157, 0.02945167922973633, 0.029831680297851562, 0.029818592071533204, 0.02948534393310547, 0.02932326316833496, 0.029322719573974608, 0.029368032455444337, 0.029411264419555664, 0.029404256820678713, 0.02927788734436035, 0.02917180824279785, 0.02922700881958008, 0.029245311737060548, 0.02931622314453125, 0.02930521583557129, 0.02944063949584961, 0.029624319076538085, 0.029490367889404297, 0.029416095733642577, 0.029392127990722657, 0.02951046371459961, 0.029485151290893553, 0.029445600509643555, 0.029475360870361327, 0.029458080291748047, 0.029382848739624025, 0.02936627197265625, 0.029882623672485353, 0.029355968475341797, 0.029192256927490234, 0.02931711959838867, 0.029816640853881835, 0.029061311721801757, 0.028929279327392577, 0.029051647186279297, 0.029187616348266603, 0.029433887481689455, 0.02958790397644043, 0.0293951358795166, 0.029167104721069335, 0.029065536499023437, 0.029046432495117187, 0.029187744140625, 0.029477567672729493, 0.02921452713012695, 0.029044927597045897, 0.029197919845581056, 0.029217536926269532, 0.029451488494873047, 0.029360832214355467, 0.029515167236328126, 0.02936604881286621, 0.029381023406982423, 0.029409311294555665, 0.029412895202636718, 0.029237247467041014, 0.029327680587768554, 0.029423904418945313, 0.029560831069946288, 0.0295546875, 0.029480960845947264, 0.029542400360107423, 0.029582687377929687, 0.02946099281311035, 0.029913248062133788, 0.029570079803466796, 0.029487552642822264, 0.03219878387451172, 0.029562591552734375, 0.02949600028991699, 0.02947088050842285, 0.02940707206726074, 0.029658336639404297, 0.02957391929626465, 0.029539871215820312, 0.029364704132080078, 0.029391040802001955, 0.02971628761291504, 0.02975062370300293, 0.02956559944152832, 0.030261247634887696, 0.02964406394958496, 
0.029465311050415038, 0.029503488540649415, 0.029550048828125, 0.029575647354125975, 0.029562463760375978, 0.029472896575927734, 0.02958176040649414, 0.029796255111694335, 0.030254016876220702, 0.030231359481811524, 0.029406816482543945, 0.029314464569091796, 0.02925811195373535, 0.02926019287109375, 0.029103967666625978, 0.029227392196655273, 0.02911836814880371, 0.029110015869140624, 0.029147232055664062, 0.02934796714782715, 0.02911039924621582, 0.029204320907592774, 0.029208127975463866, 0.029505823135375978, 0.029337440490722656, 0.029181791305541993, 0.029131391525268554, 0.029216768264770508, 0.029294591903686523, 0.029347679138183595, 0.02925827217102051, 0.02935308837890625, 0.029478687286376953, 0.02923593521118164, 0.02925574493408203, 0.029232704162597656, 0.029306976318359376, 0.02943824005126953, 0.02947804832458496, 0.029408096313476562, 0.029459775924682616, 0.029377216339111327, 0.029669376373291017, 0.029638240814208985, 0.029526432037353514, 0.029834367752075194, 0.02954489517211914, 0.029353952407836913, 0.02929916763305664, 0.029177215576171874, 0.029147071838378905, 0.029189920425415038, 0.029250463485717772, 0.02930678367614746, 0.029253503799438477, 0.02937059211730957, 0.02940928077697754, 0.02977689552307129, 0.02944691276550293, 0.02974131202697754, 0.029578527450561522, 0.029375200271606446, 0.029525632858276366, 0.02943833541870117, 0.02935398483276367, 0.02931692886352539, 0.029595327377319337, 0.02960742378234863, 0.029578367233276368, 0.029712064743041992, 0.029792448043823243, 0.03041539192199707, 0.029513696670532226, 0.029206527709960937, 0.029146495819091796, 0.029067903518676757, 0.028960031509399416, 0.029075775146484375, 0.029071775436401368, 0.02916147232055664, 0.029495296478271486, 0.029437952041625977, 0.029400672912597656, 0.029184415817260743, 0.0295280647277832, 0.02902016067504883, 0.029052831649780272, 0.02913699150085449, 0.029251615524291993, 0.029282272338867186, 0.02923091125488281, 0.029300352096557618, 0.02954911994934082, 0.029376096725463867, 0.029476320266723633, 0.032368896484375, 0.029327392578125, 0.029391519546508788, 0.029566656112670897, 0.029581151962280273, 0.029303264617919923, 0.02929654312133789, 0.029492992401123047, 0.029550592422485353, 0.029417119979858398, 0.029340351104736328, 0.02944553565979004, 0.029550687789916992, 0.029647104263305663, 0.02958892822265625, 0.02938479995727539, 0.02926665687561035, 0.029251583099365236, 0.029176959991455077, 0.029232320785522462, 0.02932703971862793, 0.029315071105957033, 0.029339487075805665, 0.029685184478759764, 0.029428287506103514, 0.029359359741210938, 0.029597951889038087, 0.029844127655029296, 0.02974220848083496, 0.02961292839050293, 0.030263296127319338, 0.029531360626220703, 0.02958415985107422, 0.029485055923461914, 0.029638656616210936, 0.02959974479675293, 0.029732704162597656, 0.029634592056274413, 0.029853279113769532, 0.029878559112548827, 0.029492895126342775, 0.029895328521728517, 0.0290119686126709, 0.029081087112426757, 0.02926643180847168, 0.029258752822875978, 0.029293567657470702, 0.029197696685791016, 0.02942425537109375, 0.02954444885253906, 0.02915043258666992, 0.029033248901367187, 0.02915123176574707, 0.029173696517944336, 0.02922502326965332, 0.02914508819580078, 0.029165567398071288, 0.029062976837158205, 0.029172992706298827, 0.028998336791992187, 0.029112287521362305, 0.029335840225219727, 0.02954444885253906, 0.029403135299682616, 0.02924736022949219, 0.029265375137329103, 0.02913961601257324, 0.02915328025817871, 0.02918809509277344, 
0.02920243263244629, 0.029421567916870117, 0.02934988784790039, 0.02921388816833496, 0.02927712059020996, 0.029277503967285155, 0.029311424255371095, 0.02932748794555664, 0.029460639953613282, 0.02948899269104004, 0.02934988784790039, 0.029482080459594728, 0.029376480102539064, 0.029567935943603515, 0.030571680068969726, 0.029667167663574218, 0.029440128326416015, 0.029438848495483397, 0.02949734306335449, 0.02936841583251953, 0.029333568572998046, 0.029539520263671876, 0.02949734306335449, 0.029475488662719727, 0.02935807991027832, 0.02929254341125488, 0.029460479736328125, 0.02961756706237793, 0.029600351333618165, 0.02950886344909668, 0.0294849910736084, 0.029383487701416015, 0.029529407501220704, 0.029946079254150392, 0.029463008880615236, 0.02920243263244629, 0.029011648178100587, 0.028928319931030275, 0.02901798439025879, 0.028972320556640625, 0.02903536033630371, 0.029104127883911132, 0.02922652816772461, 0.029562623977661132, 0.029198463439941407, 0.029184415817260743, 0.029349855422973633, 0.029239519119262695, 0.029216768264770508, 0.029222272872924806, 0.029462623596191406, 0.029169792175292968, 0.02924790382385254, 0.02952191925048828, 0.029404352188110352, 0.029258207321166994, 0.029364160537719727, 0.029266336441040038, 0.02921881675720215, 0.029247104644775392, 0.02968409538269043, 0.029187328338623048, 0.029157760620117188, 0.029445663452148437, 0.029276159286499022, 0.02922172737121582, 0.029253568649291992, 0.029312480926513673, 0.02932566452026367, 0.029384223937988282, 0.029728607177734376, 0.02951628875732422, 0.029397375106811525, 0.029503488540649415, 0.02938265609741211, 0.02964860725402832, 0.029525503158569336, 0.029593503952026368, 0.029334016799926758, 0.0295548152923584, 0.029509632110595704, 0.02950320053100586, 0.029516319274902343, 0.02954572868347168, 0.029511615753173827, 0.02954323196411133, 0.029497152328491212, 0.02940889549255371, 0.0295350399017334, 0.029660703659057618, 0.02965116882324219, 0.029755392074584962, 0.02963862419128418, 0.029620256423950195, 0.02954841613769531, 0.029589632034301757]",tokens/s,33.98187089392082,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,7142.019072,7948.075008,0.0,7545.552896,7295.865344,s,1,13.7755009765625,13.7755009765625,0.0,13.7755009765625,13.7755009765625,13.7755009765625,13.7755009765625,[13.7755009765625],,kWh,0.00016670733450002142,1.8381498402713e-05,4.816531630999488e-05,0.0002332541492127293,,MB,3076.268032,8264.74496,0.0,7847.542784,7548.649984,s,10,3.3178253479003903,0.33178253479003905,0.0014311546756754076,0.331835693359375,0.3332368133544922,0.33350926055908203,0.3337272183227539,"[0.32855322265625, 0.3309906311035156, 0.3315560302734375, 0.3321153564453125, 0.3337817077636719, 0.33078115844726563, 0.33277313232421873, 0.3312745666503906, 0.33317626953125, 
0.3328232727050781]",tokens/s,771.5897407378714,kWh,9.664844486425048e-06,1.0658492624320035e-06,6.428437042387126e-06,1.7159130791244174e-05,tokens/kWh,14919170.622012489,MB,3091.816448,8558.34624,0.0,8141.144064,7829.444096,s,10,26.952484130859375,2.6952484130859373,0.005234273992958838,2.695465576171875,2.701098071289062,2.7014954467773435,2.7018133471679686,"[2.690505859375, 2.6835625, 2.6921923828125, 2.69528955078125, 2.701009765625, 2.6955673828125, 2.701892822265625, 2.69536376953125, 2.696771728515625, 2.700328369140625]",tokens/s,23.374468822288573,kWh,7.883778776190839e-05,8.695642465579662e-06,5.236274887941276e-05,0.00013989617910690084,tokens/kWh,450333.9576691292,,s,630,26.948327713012684,0.042775123353988406,0.00031395352876542853,0.04276924705505371,0.04311631278991699,0.04322626991271973,0.043786819648742675,"[0.04240991973876953, 0.042474369049072265, 0.044068862915039066, 0.042016769409179686, 0.042006526947021484, 0.042291168212890626, 0.042397216796875, 0.04220502471923828, 0.042248863220214844, 0.04230144119262695, 0.042339839935302735, 0.04239206314086914, 0.042565696716308596, 0.042655681610107424, 0.04247110366821289, 0.042348865509033204, 0.04255059051513672, 0.04257452774047851, 0.04265903854370117, 0.042973983764648435, 0.04305100631713867, 0.04279036712646484, 0.0426042251586914, 0.042641441345214845, 0.04277705764770508, 0.042690910339355466, 0.042417377471923826, 0.04256028747558594, 0.04273971176147461, 0.04275404739379883, 0.04256972885131836, 0.042517822265625, 0.04271174240112305, 0.04262406539916992, 0.04266463851928711, 0.04339030456542969, 0.04275641632080078, 0.04284681701660156, 0.04281731033325195, 0.04269814300537109, 0.0426910400390625, 0.043028865814208984, 0.04286163330078125, 0.04278137588500976, 0.042633438110351564, 0.042829822540283204, 0.04277657699584961, 0.04276220703125, 0.04272745513916015, 0.042797054290771484, 0.04269388961791992, 0.042879745483398436, 0.042897407531738284, 0.042616832733154295, 0.042690208435058594, 0.042786399841308595, 0.04301696014404297, 0.042962944030761716, 0.04273104095458984, 0.043222496032714844, 0.04312575912475586, 0.043036319732666015, 0.0429837760925293, 0.04263504028320313, 0.04261759948730469, 0.042266815185546876, 0.04240793609619141, 0.04228505706787109, 0.042256385803222656, 0.04217446517944336, 0.04206387329101562, 0.04211507034301758, 0.04226358413696289, 0.042163135528564454, 0.04213494491577149, 0.042238304138183594, 0.04253472137451172, 0.042205310821533205, 0.04223798370361328, 0.04269689559936524, 0.04339724731445312, 0.04318975830078125, 0.04261324691772461, 0.04243247985839844, 0.04250422286987305, 0.04257791900634766, 0.042477569580078124, 0.042390560150146486, 0.04257072067260742, 0.04242432022094727, 0.04244889450073242, 0.042407230377197264, 0.042609344482421874, 0.04231167984008789, 0.04228710556030273, 0.04247347259521484, 0.042509376525878904, 0.042460094451904296, 0.042620159149169924, 0.04263398361206055, 0.042753856658935545, 0.04255353546142578, 0.04250966262817383, 0.04287350463867187, 0.04309401702880859, 0.04261273574829102, 0.042967041015625, 0.043054622650146486, 0.04272304153442383, 0.04274252700805664, 0.04268409729003906, 0.04272284698486328, 0.04269750213623047, 0.042856063842773434, 0.04280969619750977, 0.04282748794555664, 0.04269686508178711, 0.04269641494750977, 0.042684864044189454, 0.042805118560791014, 0.04282492828369141, 0.042904319763183596, 0.042870944976806644, 0.0429334716796875, 0.04281423950195312, 0.042881023406982424, 0.04220108795166016, 
0.04232806396484375, 0.042403839111328126, 0.04242217636108398, 0.04241363143920898, 0.04228681564331055, 0.042361663818359374, 0.04233216094970703, 0.04414054489135742, 0.04256972885131836, 0.042786014556884765, 0.04256028747558594, 0.042676097869873045, 0.04279308700561523, 0.04233603286743164, 0.04277679824829102, 0.042600448608398435, 0.042538944244384765, 0.04282374572753906, 0.042641407012939454, 0.042716960906982425, 0.0427762565612793, 0.04272159957885742, 0.04250646209716797, 0.04266726303100586, 0.04267494583129883, 0.04266393661499023, 0.042647361755371094, 0.04256924819946289, 0.042472095489501954, 0.04242777633666992, 0.04257414245605469, 0.042436927795410154, 0.04259372711181641, 0.04294889450073242, 0.04279894256591797, 0.04274630355834961, 0.04268956756591797, 0.04275408172607422, 0.04258671951293945, 0.0427973747253418, 0.04302441787719727, 0.042810943603515624, 0.04285065460205078, 0.04266608047485351, 0.04283801651000976, 0.04283596801757812, 0.04280319976806641, 0.042850143432617185, 0.04318838500976562, 0.04311654281616211, 0.04312063980102539, 0.04277155303955078, 0.04280179214477539, 0.04264169692993164, 0.04282681655883789, 0.04280620956420898, 0.042889217376708984, 0.04308297729492187, 0.043014942169189455, 0.04293584060668945, 0.043106369018554684, 0.04307980728149414, 0.04275260925292969, 0.0425164794921875, 0.042446849822998046, 0.042349567413330076, 0.042111392974853515, 0.04228566360473633, 0.04253696060180664, 0.04253900909423828, 0.04235059356689453, 0.04231926345825195, 0.04270345687866211, 0.042567680358886716, 0.04254233551025391, 0.042359294891357424, 0.04223001480102539, 0.042403839111328126, 0.04252796936035156, 0.04271593475341797, 0.04279897689819336, 0.04263539123535156, 0.042690464019775394, 0.04277664184570312, 0.04302195358276367, 0.042869152069091795, 0.04253433609008789, 0.04302700805664063, 0.04265760040283203, 0.0428355827331543, 0.042503902435302734, 0.04255625534057617, 0.04272880172729492, 0.04261280059814453, 0.042525279998779295, 0.04272316741943359, 0.042485057830810545, 0.042888031005859376, 0.04294246292114258, 0.043052032470703126, 0.04259328079223633, 0.04288486480712891, 0.04289152145385742, 0.043023616790771484, 0.0429698257446289, 0.042903583526611326, 0.04281753540039063, 0.0429486083984375, 0.043034175872802734, 0.04300640106201172, 0.042979328155517575, 0.04292940902709961, 0.04299238586425781, 0.04322470474243164, 0.04293215942382812, 0.04303427124023437, 0.042902305603027345, 0.04297868728637695, 0.043016830444335935, 0.04368342590332031, 0.04291625595092773, 0.043448318481445314, 0.043202560424804685, 0.04312473678588867, 0.04329619216918945, 0.04259513473510742, 0.04260147094726562, 0.04274998474121094, 0.04249875259399414, 0.042503841400146486, 0.042600318908691405, 0.043628318786621094, 0.04263782501220703, 0.042486240386962894, 0.04291584014892578, 0.043259807586669925, 0.04268009567260742, 0.042810943603515624, 0.04257868957519531, 0.04245094299316406, 0.04306739044189453, 0.04303257751464844, 0.04296681594848633, 0.04260895919799805, 0.04275331115722656, 0.04282400131225586, 0.042895263671875, 0.042991584777832034, 0.043041217803955076, 0.04289945602416992, 0.042992992401123045, 0.04260931015014648, 0.04268851089477539, 0.04269875335693359, 0.042995712280273435, 0.04294041442871094, 0.04267212677001953, 0.042890975952148434, 0.04281292724609375, 0.04268521499633789, 0.042921504974365234, 0.042718814849853515, 0.04272623825073242, 0.042960479736328126, 0.04422256088256836, 0.04280543899536133, 0.04299292755126953, 
0.04294512176513672, 0.043003902435302735, 0.042952991485595705, 0.043022335052490236, 0.04286614227294922, 0.04287071990966797, 0.04270959854125977, 0.042874881744384766, 0.042668033599853515, 0.04276591873168945, 0.042723712921142576, 0.04299929428100586, 0.042963329315185546, 0.042768096923828124, 0.0427729263305664, 0.04278076934814453, 0.04311628723144531, 0.04315356826782227, 0.04298956680297852, 0.04311040115356445, 0.04313497543334961, 0.04229891204833985, 0.04242217636108398, 0.042492542266845706, 0.042471424102783206, 0.04245475387573242, 0.042468894958496095, 0.04237539291381836, 0.04229788970947266, 0.04229119873046875, 0.04267212677001953, 0.0423889274597168, 0.04264323043823242, 0.0425682258605957, 0.04267647933959961, 0.04276838302612305, 0.042602497100830077, 0.04269388961791992, 0.042748672485351566, 0.04267331314086914, 0.04288188934326172, 0.042708992004394535, 0.04272947311401367, 0.042896446228027345, 0.04263622283935547, 0.04252876663208008, 0.042888607025146484, 0.04251913452148438, 0.04284774398803711, 0.042748416900634766, 0.042848255157470705, 0.04379606246948242, 0.042848033905029295, 0.04248844909667969, 0.04261068725585938, 0.04281375885009766, 0.042770111083984375, 0.042751102447509765, 0.042912609100341795, 0.04295635223388672, 0.042791393280029295, 0.04302438354492188, 0.04308947372436524, 0.04289142227172851, 0.042928417205810546, 0.042998817443847655, 0.0431209602355957, 0.04287472152709961, 0.04279548645019531, 0.04278716659545898, 0.042831871032714845, 0.042700801849365234, 0.04298476791381836, 0.04296531295776367, 0.04288140869140625, 0.042925537109375, 0.04288771057128906, 0.0429279670715332, 0.04291516876220703, 0.04302227020263672, 0.04316783905029297, 0.04318697738647461, 0.04305100631713867, 0.04319027328491211, 0.04298950576782227, 0.042659454345703125, 0.042414047241210937, 0.04270742416381836, 0.042297344207763675, 0.042848255157470705, 0.04221542358398438, 0.042307071685791016, 0.04246768188476562, 0.042468799591064456, 0.04265852737426758, 0.04287228775024414, 0.042564128875732424, 0.04259635162353516, 0.0430571517944336, 0.04283801651000976, 0.04263935852050781, 0.04402928161621094, 0.042807071685791016, 0.04272627258300781, 0.042866687774658206, 0.042620288848876954, 0.04284070587158203, 0.04265369415283203, 0.04286444854736328, 0.04292012786865235, 0.04267212677001953, 0.04263888168334961, 0.04278239822387695, 0.04267702484130859, 0.042592254638671875, 0.04272470474243164, 0.042512351989746094, 0.04296160125732422, 0.042749088287353514, 0.0428809928894043, 0.04302937698364258, 0.043046783447265625, 0.043036800384521484, 0.042897407531738284, 0.04320460891723633, 0.04296908950805664, 0.04293836975097656, 0.042939456939697265, 0.04337452697753906, 0.04327731323242188, 0.04298489761352539, 0.043282463073730466, 0.04315584182739258, 0.04292204666137695, 0.043288673400878906, 0.04312854385375976, 0.04304105758666992, 0.0428851203918457, 0.04292607879638672, 0.04308540725708008, 0.043061504364013674, 0.043159679412841795, 0.04307056045532227, 0.042911903381347656, 0.04342377471923828, 0.04311321640014648, 0.04305068969726562, 0.04221132659912109, 0.04253286361694336, 0.042198177337646484, 0.04217913436889648, 0.042176799774169924, 0.042493343353271484, 0.04224233627319336, 0.042690208435058594, 0.04284892654418945, 0.04247942352294922, 0.04275628662109375, 0.042546432495117185, 0.0424005126953125, 0.04244060897827148, 0.042586208343505856, 0.042379264831542966, 0.04254515075683594, 0.042485633850097654, 0.04263129425048828, 0.0423702392578125, 
0.042649856567382814, 0.042609214782714844, 0.04243836975097656, 0.042485279083251955, 0.0425107536315918, 0.042526111602783204, 0.042471359252929684, 0.04260761642456055, 0.04262297439575195, 0.042774528503417966, 0.04268620681762696, 0.043358463287353516, 0.0437391357421875, 0.04275609588623047, 0.04264326477050781, 0.042952831268310544, 0.04291385650634766, 0.043069438934326174, 0.04292607879638672, 0.042964160919189455, 0.04308361434936524, 0.04327683258056641, 0.04314566421508789, 0.04313638305664062, 0.042984062194824216, 0.04286185455322265, 0.04278937530517578, 0.042780895233154294, 0.04287897491455078, 0.04268812942504883, 0.042987903594970706, 0.04297727966308594, 0.04277612686157226, 0.0429815673828125, 0.04303811264038086, 0.04300271987915039, 0.04310015869140625, 0.043028480529785154, 0.04315094375610352, 0.0431558723449707, 0.04359920120239258, 0.04337456130981445, 0.043186878204345705, 0.042423423767089845, 0.0425049934387207, 0.042293407440185546, 0.04213913726806641, 0.042205631256103514, 0.04268851089477539, 0.04246527862548828, 0.04260611343383789, 0.042586593627929686, 0.0424194221496582, 0.04241427230834961, 0.042594913482666016, 0.0424796142578125, 0.042658878326416017, 0.042670047760009766, 0.04241097640991211, 0.04286787033081055, 0.04300067138671875, 0.042521984100341796, 0.04258025741577148, 0.042823936462402346, 0.04258006286621094, 0.04260441589355469, 0.042832000732421875, 0.042579776763916014, 0.04266175842285156, 0.042746177673339845, 0.042584064483642575, 0.042425952911376956, 0.042457504272460936, 0.04257382583618164, 0.04256358337402344, 0.042840065002441405, 0.04279296112060547, 0.04280934524536133, 0.042889217376708984, 0.04266393661499023, 0.042724510192871094, 0.04270361709594726, 0.04281967926025391, 0.04284739303588867, 0.042922847747802736, 0.042971134185791016, 0.04309401702880859, 0.0430489616394043, 0.043022335052490236, 0.043112449645996094, 0.043128128051757815, 0.042766815185546876, 0.04296112060546875, 0.0432803840637207, 0.04268569564819336, 0.04457295989990234, 0.04376419067382813, 0.04284182357788086, 0.04302467346191406, 0.0431956787109375, 0.04309849548339844, 0.042912094116210935, 0.043294078826904295, 0.0429349136352539, 0.04333158493041992, 0.04336844635009766, 0.042807743072509764, 0.042552608489990235, 0.04336844635009766, 0.04235747146606445, 0.04249331283569336, 0.04235123062133789, 0.042774528503417966, 0.04231782531738281, 0.04275606536865234, 0.04236291122436524, 0.04264940643310547, 0.04256377410888672, 0.042529918670654296, 0.042904449462890626, 0.04257539367675781, 0.04253334426879883, 0.04269055938720703, 0.04285785675048828, 0.04270758438110352, 0.04275958251953125, 0.042920543670654294, 0.04258816146850586, 0.042627071380615236, 0.042842113494873046, 0.04290150451660156, 0.042651649475097655, 0.042625022888183595, 0.04283955383300781, 0.04275251388549805, 0.04272947311401367, 0.04263849639892578, 0.042914657592773436, 0.04287062454223633, 0.04303007888793945, 0.042686878204345705, 0.043337921142578124, 0.042927902221679685, 0.04304825592041016, 0.04408607864379883, 0.0429917106628418, 0.04300288009643555, 0.04295888137817383, 0.04298591995239258, 0.04319696044921875, 0.04317532730102539, 0.043192928314208984, 0.04291353607177734, 0.04292428970336914, 0.04283321762084961, 0.04280412673950195, 0.04295657730102539, 0.042772159576416016, 0.042866912841796875, 0.04282780838012695, 0.04280694580078125, 0.0432275505065918, 0.04296015930175781, 0.0429780158996582, 0.04307891082763672, 0.04319513702392578, 0.04309571075439453, 
0.0432151985168457, 0.04307353591918945]",tokens/s,23.378074020370036,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,4036.907008,4592.631808,0.0,4190.109696,3918.037504,s,1,10.7208134765625,10.7208134765625,0.0,10.7208134765625,10.7208134765625,10.7208134765625,10.7208134765625,[10.7208134765625],,kWh,9.623351788748853e-05,1.0608073579911668e-05,2.8292244855995796e-05,0.00013513383632339602,,MB,1794.53952,4678.61504,0.0,4261.412864,4088.623616,s,10,1.8303253173828125,0.18303253173828124,0.0006722698597633327,0.18282723236083984,0.18386539154052736,0.18410175552368163,0.18429084671020507,"[0.18224566650390625, 0.18280067443847656, 0.18265628051757812, 0.18285379028320312, 0.1822172546386719, 0.18433811950683593, 0.1832626495361328, 0.18250364685058593, 0.18363436889648438, 0.1838128662109375]",tokens/s,1398.6584656221396,kWh,5.368643045075755e-06,5.917491852117389e-07,3.5486493035637547e-06,9.509041533851248e-06,tokens/kWh,26921745.907688517,MB,1807.200256,4804.44416,0.0,4387.241984,4262.434304,s,10,16.522649047851562,1.6522649047851563,0.004253204444550381,1.6535601196289063,1.6554445922851562,1.6571782897949219,1.6585652478027344,"[1.6440238037109376, 1.6506837158203125, 1.654978271484375, 1.6460321044921875, 1.6511090087890625, 1.654428466796875, 1.6589119873046876, 1.655059326171875, 1.6547305908203125, 1.6526917724609376]",tokens/s,38.129479006389644,kWh,4.8078132796590615e-05,5.3016627544367325e-06,3.205196251023539e-05,8.543175806126274e-05,tokens/kWh,737430.6865465998,,s,630,16.51655425071717,0.026216752778916132,0.00023718311238803477,0.02622006416320801,0.026458319664001466,0.026555543899536134,0.026918071708679204,"[0.02627356719970703, 0.026228960037231446, 0.025956287384033203, 0.025813056945800782, 0.025702144622802733, 0.025702112197875975, 0.025559520721435545, 0.025534048080444335, 0.02564963150024414, 0.025972736358642577, 0.025858047485351563, 0.02602969551086426, 0.02585433578491211, 0.02595408058166504, 0.025835071563720703, 0.02577836799621582, 0.026154943466186523, 0.02603267288208008, 0.025845760345458983, 0.02594611167907715, 0.025926687240600585, 0.026161184310913087, 0.02642220878601074, 0.02611721611022949, 0.026112607955932617, 0.026220191955566408, 0.026144927978515625, 0.026255456924438477, 0.026046592712402342, 0.025930015563964844, 0.02596249580383301, 0.025899007797241212, 0.02607513618469238, 0.0259967041015625, 0.025938528060913086, 0.026087039947509764, 0.02631923294067383, 0.026512575149536134, 0.026564640045166017, 0.026249631881713868, 0.026272127151489258, 0.026172639846801758, 0.026197952270507814, 0.026218463897705078, 0.026139520645141603, 0.02608880043029785, 0.026009248733520507, 0.026004447937011718, 0.025982112884521485, 0.026321792602539064, 0.026283552169799804, 0.02638643264770508, 0.02637254333496094, 0.026353696823120117, 0.026263551712036134, 0.026457759857177736, 0.026249568939208986, 
0.026211519241333008, 0.026134912490844726, 0.026132928848266603, 0.026261503219604493, 0.026341087341308595, 0.026233119964599608, 0.026368000030517577, 0.02621356773376465, 0.025856224060058594, 0.02592211151123047, 0.025950176239013672, 0.025975936889648436, 0.025864927291870118, 0.025843936920166014, 0.025927104949951173, 0.026061216354370118, 0.026058464050292968, 0.0260614070892334, 0.02594723129272461, 0.02589116859436035, 0.025835935592651366, 0.026048511505126954, 0.026011648178100585, 0.025903104782104492, 0.02576383972167969, 0.025874431610107423, 0.02632908821105957, 0.02582636833190918, 0.025883583068847655, 0.026273088455200197, 0.026368703842163086, 0.026230464935302733, 0.0262740478515625, 0.026103872299194336, 0.026045536041259764, 0.025994144439697265, 0.026001407623291017, 0.026066495895385743, 0.026073535919189452, 0.02596601676940918, 0.025995840072631837, 0.02613043212890625, 0.026400480270385742, 0.026386367797851563, 0.026326656341552734, 0.02616803169250488, 0.02638572883605957, 0.02624924850463867, 0.026383007049560547, 0.02654412841796875, 0.026391807556152343, 0.026254079818725587, 0.026267648696899414, 0.026164928436279298, 0.026296640396118166, 0.02644134330749512, 0.0264400634765625, 0.02693529510498047, 0.02633318328857422, 0.026425344467163086, 0.026779647827148437, 0.026314144134521485, 0.026575456619262694, 0.026513408660888672, 0.02650489616394043, 0.026284351348876953, 0.026266815185546875, 0.02666102409362793, 0.026552671432495116, 0.026549024581909178, 0.02649087905883789, 0.026187776565551758, 0.0261079044342041, 0.02602720069885254, 0.02604524803161621, 0.025935871124267578, 0.02596249580383301, 0.02609766387939453, 0.026232736587524414, 0.02606224060058594, 0.026132768630981445, 0.02616499137878418, 0.026235551834106446, 0.02612019157409668, 0.026073087692260744, 0.02615705680847168, 0.026186912536621094, 0.026137439727783204, 0.026062751770019533, 0.026207584381103516, 0.02677631950378418, 0.026302463531494142, 0.026100032806396483, 0.026178783416748046, 0.026237407684326173, 0.026225664138793944, 0.02614918327331543, 0.02607993507385254, 0.026220544815063477, 0.026611200332641603, 0.02630431938171387, 0.02613104057312012, 0.026271839141845704, 0.027327808380126953, 0.026587839126586913, 0.02631667137145996, 0.026382015228271483, 0.026343360900878906, 0.026511871337890625, 0.026457887649536133, 0.026411231994628907, 0.02631884765625, 0.026315967559814454, 0.026303295135498048, 0.026355424880981446, 0.026274080276489257, 0.026324384689331053, 0.02644233512878418, 0.026411008834838868, 0.026266847610473633, 0.026204927444458008, 0.02626563262939453, 0.02631679916381836, 0.026193920135498046, 0.026262975692749022, 0.026253856658935548, 0.02629430389404297, 0.026292224884033204, 0.02620800018310547, 0.026134016036987305, 0.02611427116394043, 0.026042911529541017, 0.02623561668395996, 0.02633113670349121, 0.026034175872802736, 0.025972736358642577, 0.02564838409423828, 0.025641727447509765, 0.025683967590332032, 0.02570585632324219, 0.025896928787231446, 0.025868959426879883, 0.026201759338378906, 0.025790815353393556, 0.025796287536621092, 0.02593619155883789, 0.026842687606811525, 0.026186176300048828, 0.02604457664489746, 0.026104703903198242, 0.026131168365478515, 0.02596406364440918, 0.025827775955200194, 0.025910943984985352, 0.025821792602539063, 0.025922719955444335, 0.025805696487426758, 0.02596249580383301, 0.02614476776123047, 0.026187135696411134, 0.026183551788330078, 0.02616192054748535, 0.026060127258300782, 0.025964384078979493, 
0.02590959930419922, 0.025907680511474608, 0.025772031784057618, 0.025968639373779297, 0.026136159896850586, 0.026120607376098632, 0.026254911422729493, 0.026527776718139648, 0.026371967315673827, 0.026396671295166017, 0.02633113670349121, 0.026219039916992187, 0.026251264572143555, 0.026310655593872072, 0.026249216079711913, 0.02609561538696289, 0.026129600524902343, 0.02609833526611328, 0.026013856887817384, 0.026003456115722655, 0.02614271926879883, 0.02638768005371094, 0.026403615951538086, 0.02652774429321289, 0.026424896240234374, 0.02624732780456543, 0.026153247833251955, 0.026218496322631835, 0.026234880447387695, 0.026464256286621093, 0.026875904083251953, 0.0263372802734375, 0.026267648696899414, 0.026071039199829102, 0.02588412857055664, 0.025807392120361327, 0.025783647537231447, 0.02582499122619629, 0.02580099105834961, 0.025929695129394532, 0.026020544052124023, 0.025839456558227537, 0.02587414360046387, 0.025995712280273437, 0.025991167068481445, 0.025989120483398437, 0.02617344093322754, 0.02619375991821289, 0.026189760208129884, 0.026065120697021483, 0.02614067268371582, 0.0260250244140625, 0.026167520523071287, 0.02622719955444336, 0.026333408355712892, 0.026221567153930665, 0.026371007919311525, 0.026273855209350584, 0.026330720901489257, 0.026108320236206056, 0.026135648727416992, 0.026104736328125, 0.02615705680847168, 0.026172576904296876, 0.026168064117431642, 0.026143840789794922, 0.026403711318969725, 0.0263240966796875, 0.026348384857177734, 0.02630672073364258, 0.026372095108032227, 0.02630451202392578, 0.026320159912109373, 0.026292608261108397, 0.026470752716064454, 0.026261503219604493, 0.026408960342407226, 0.026380287170410157, 0.026227775573730468, 0.02624403190612793, 0.026355712890625, 0.02629199981689453, 0.02638051223754883, 0.026330400466918945, 0.026425504684448244, 0.02632499122619629, 0.026415615081787108, 0.026380352020263672, 0.026298368453979492, 0.026191455841064453, 0.026370464324951173, 0.026301631927490233, 0.026486976623535156, 0.02638912010192871, 0.0263372802734375, 0.026204160690307617, 0.026064895629882814, 0.02609328079223633, 0.025903392791748046, 0.026014720916748047, 0.02594918441772461, 0.026181631088256836, 0.02614681625366211, 0.026117919921875, 0.02605078315734863, 0.02611404800415039, 0.026005504608154296, 0.026011199951171876, 0.026054719924926757, 0.025988960266113283, 0.026057247161865235, 0.026089471817016603, 0.025982271194458006, 0.02594476890563965, 0.02611404800415039, 0.02614886474609375, 0.026168703079223633, 0.026096256256103515, 0.026161151885986327, 0.026107744216918947, 0.02622889518737793, 0.02611609649658203, 0.026261503219604493, 0.026130239486694337, 0.02619615936279297, 0.026142080307006835, 0.026319744110107422, 0.02616703987121582, 0.02634281539916992, 0.0262619514465332, 0.026263200759887695, 0.02635212707519531, 0.026327039718627928, 0.026518880844116213, 0.026268320083618166, 0.026161151885986327, 0.02629631996154785, 0.026390527725219725, 0.026394847869873048, 0.026801952362060545, 0.02642483139038086, 0.02642790412902832, 0.026322208404541015, 0.026555103302001955, 0.026226335525512696, 0.026177343368530274, 0.026442272186279297, 0.026779487609863283, 0.02709225654602051, 0.026469247817993164, 0.026449920654296875, 0.02637004852294922, 0.0263656005859375, 0.026487136840820314, 0.02625654411315918, 0.02619068717956543, 0.02632908821105957, 0.02661417579650879, 0.02645020866394043, 0.026462207794189452, 0.02636185646057129, 0.026109952926635743, 0.026177536010742186, 0.02615817642211914, 
0.0261231689453125, 0.026074655532836916, 0.026119712829589845, 0.026118751525878905, 0.02615449523925781, 0.02618172836303711, 0.026338048934936523, 0.026225696563720702, 0.026199264526367186, 0.02607244873046875, 0.026167680740356445, 0.026183679580688478, 0.026193920135498046, 0.026167295455932618, 0.026156415939331056, 0.0262478084564209, 0.02628112030029297, 0.026166112899780273, 0.026247167587280275, 0.026226688385009765, 0.026243072509765625, 0.02606278419494629, 0.026555904388427733, 0.026219072341918944, 0.026267648696899414, 0.026230783462524415, 0.02628995132446289, 0.026392799377441406, 0.02653183937072754, 0.026464256286621093, 0.026300416946411134, 0.0263372802734375, 0.026247167587280275, 0.026294271469116212, 0.026387903213500978, 0.026368576049804686, 0.026298368453979492, 0.026259456634521484, 0.02630428886413574, 0.02626495933532715, 0.02622060775756836, 0.026407712936401366, 0.026569984436035157, 0.02636854362487793, 0.02633545684814453, 0.02631785583496094, 0.026612703323364257, 0.026603519439697267, 0.02653984069824219, 0.02643168067932129, 0.026965024948120118, 0.02640380859375, 0.026382335662841795, 0.02636595153808594, 0.02659324836730957, 0.02648476791381836, 0.02627993583679199, 0.026187456130981446, 0.026011680603027342, 0.02626793670654297, 0.02608332824707031, 0.02609334373474121, 0.02596886444091797, 0.025937664031982423, 0.02588287925720215, 0.025956352233886718, 0.026355712890625, 0.02611974334716797, 0.026870208740234373, 0.026005504608154296, 0.026003103256225586, 0.026198368072509765, 0.025993215560913087, 0.025986623764038087, 0.02591584014892578, 0.026005504608154296, 0.025892192840576173, 0.025927616119384767, 0.026112735748291014, 0.026240575790405275, 0.0261144962310791, 0.026567840576171876, 0.026239231109619142, 0.026241632461547853, 0.026021888732910156, 0.025972736358642577, 0.026004480361938476, 0.026014623641967775, 0.026277824401855467, 0.026384544372558594, 0.02630201530456543, 0.026284479141235353, 0.02629395294189453, 0.026429407119750975, 0.026379743576049806, 0.026483583450317382, 0.026226112365722656, 0.02615340805053711, 0.026339456558227538, 0.026438943862915038, 0.02636854362487793, 0.026302047729492187, 0.026272287368774416, 0.0262259521484375, 0.026370208740234376, 0.02650553512573242, 0.027864704132080077, 0.026661472320556642, 0.026427488327026367, 0.02669753646850586, 0.026302656173706054, 0.02632089614868164, 0.02626710319519043, 0.026292768478393555, 0.026245119094848633, 0.02636185646057129, 0.02651955223083496, 0.026632192611694337, 0.02656870460510254, 0.026552928924560546, 0.026580831527709962, 0.026210464477539063, 0.026052608489990234, 0.026020959854125978, 0.026022815704345705, 0.025960447311401368, 0.025957759857177735, 0.025911104202270507, 0.02607391929626465, 0.02607923126220703, 0.026135711669921874, 0.02619068717956543, 0.026081279754638673, 0.026449920654296875, 0.02618940734863281, 0.02621196746826172, 0.026083871841430663, 0.026194175720214843, 0.02626665687561035, 0.02617238426208496, 0.026222591400146485, 0.02610585594177246, 0.026218496322631835, 0.026265600204467773, 0.026177536010742186, 0.025985023498535157, 0.025985023498535157, 0.026167295455932618, 0.02671615982055664, 0.026998783111572267, 0.026192928314208986, 0.02622972869873047, 0.02624502372741699, 0.026075231552124024, 0.026225984573364256, 0.02641971206665039, 0.02635385513305664, 0.026372095108032227, 0.026327039718627928, 0.026425344467163086, 0.02621993637084961, 0.026228351593017576, 0.026274784088134766, 0.026330848693847657, 
0.02646646308898926, 0.02640880012512207, 0.02616927909851074, 0.026273855209350584, 0.02639017677307129, 0.02626736068725586, 0.026330015182495118, 0.026376192092895507, 0.026453344345092774, 0.026438304901123047, 0.02632089614868164, 0.026212352752685547, 0.026201311111450194, 0.026238847732543945, 0.02647542381286621, 0.026427391052246094, 0.02643315124511719, 0.026304672241210938, 0.02636595153808594, 0.026265600204467773, 0.026013120651245118, 0.025993600845336914, 0.025839807510375977, 0.025994495391845705, 0.025884511947631837, 0.025930496215820314, 0.026028192520141602, 0.02610585594177246, 0.025985023498535157, 0.02608902359008789, 0.025976736068725585, 0.025944448471069335, 0.025903295516967774, 0.02601775932312012, 0.02602169609069824, 0.02599955177307129, 0.026101503372192383, 0.026142047882080077, 0.02608195114135742, 0.025962560653686524, 0.026048704147338866, 0.026117919921875, 0.02773219108581543, 0.026191871643066408, 0.026159040451049803, 0.026178911209106447, 0.02620182418823242, 0.02617843246459961, 0.026171520233154298, 0.026201152801513673, 0.026155967712402344, 0.026161151885986327, 0.02651740837097168, 0.026198015213012696, 0.026329183578491212, 0.026404863357543946, 0.026437536239624023, 0.026431583404541017, 0.026236032485961912, 0.026313535690307616, 0.026265119552612303, 0.026304479598999023, 0.026287872314453124, 0.02648966407775879, 0.026336671829223633, 0.026317407608032226, 0.026255359649658205, 0.026214399337768556, 0.026291744232177734, 0.026320991516113282, 0.026445472717285156, 0.026433696746826173, 0.02633375930786133, 0.02621379280090332, 0.02627555274963379, 0.026291072845458983, 0.026400768280029296, 0.02650111961364746, 0.02631817626953125, 0.026257888793945312, 0.026191295623779295]",tokens/s,38.143549219574346,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File 
""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,2199.990272,2551.119872,0.0,2155.872256,2032.413184,s,1,8.8898994140625,8.8898994140625,0.0,8.8898994140625,8.8898994140625,8.8898994140625,8.8898994140625,[8.8898994140625],,kWh,5.1338232924998315e-05,5.6559227750613706e-06,1.5266956657995767e-05,7.226111235805545e-05,,MB,2121.121792,2827.943936,0.0,2418.016256,2280.154112,s,10,0.9278674545288086,0.09278674545288088,9.926888700563933e-05,0.09277532958984375,0.09293991775512696,0.0929553981781006,0.0929677825164795,"[0.09284620666503907, 0.09277664184570313, 0.09267791748046875, 0.0927488021850586, 0.09266400146484376, 0.0927861099243164, 0.09293647766113282, 0.0926864013671875, 0.09297087860107422, 0.09277401733398437]",tokens/s,2759.0147574472517,kWh,2.7518656658488974e-06,3.0333095443466414e-07,1.8249469428224003e-06,4.880143563105961e-06,tokens/kWh,52457473.164389685,MB,2129.481728,2911.830016,0.0,2501.902336,2389.055488,s,10,18.071246215820313,1.807124621582031,0.008751352296018327,1.8075271606445313,1.816113879394531,1.8174338317871093,1.818489793701172,"[1.8187537841796875, 1.8053961181640625, 1.815820556640625, 1.802324951171875, 1.813374755859375, 1.8151702880859375, 1.8087320556640625, 1.806322265625, 1.7911900634765625, 1.794161376953125]",tokens/s,34.862011865483424,kWh,5.3613183272484675e-05,5.913438193190448e-06,2.6872853793177772e-05,8.63994752588529e-05,tokens/kWh,729171.0952092238,,s,630,18.068478263854985,0.028680124228341236,0.0003434489850813749,0.028610991477966308,0.029006570053100585,0.029212997245788574,0.030164768085479743,"[0.029655040740966795, 0.029017215728759767, 0.02900982475280762, 0.028979808807373046, 0.028851680755615235, 0.028976032257080078, 0.028837888717651368, 0.028603679656982423, 0.02885500717163086, 0.02993152046203613, 0.029683456420898438, 0.028732959747314452, 0.028680383682250978, 0.02863158416748047, 0.02895052719116211, 0.028675743103027344, 0.028694911956787108, 0.0288022403717041, 0.02867689514160156, 0.02865283203125, 0.028877536773681642, 0.028691904067993164, 0.02878316879272461, 0.028845535278320313, 0.0289531192779541, 0.02855526351928711, 0.028782848358154298, 0.02863692855834961, 0.02852835273742676, 0.028471200942993165, 0.028573280334472657, 0.028658464431762697, 0.030701568603515625, 0.030256895065307616, 
0.029075712203979493, 0.02891062355041504, 0.02873651123046875, 0.028727264404296876, 0.028610559463500978, 0.028595935821533202, 0.028573568344116212, 0.028499647140502928, 0.028508895874023436, 0.028601951599121093, 0.028731807708740235, 0.0289136962890625, 0.02876963233947754, 0.028734079360961916, 0.028598272323608398, 0.028560895919799805, 0.02863564872741699, 0.02876825523376465, 0.028690431594848635, 0.028630815505981445, 0.02858415985107422, 0.02857164764404297, 0.028469440460205078, 0.028546432495117187, 0.030658720016479492, 0.029196575164794923, 0.028792287826538084, 0.02889094352722168, 0.028686559677124024, 0.02922208023071289, 0.029038944244384766, 0.028816064834594726, 0.028639007568359375, 0.02857574462890625, 0.028700672149658202, 0.02855344009399414, 0.028642879486083985, 0.028591615676879883, 0.028614463806152343, 0.028529567718505858, 0.028656671524047852, 0.028592479705810546, 0.02866592025756836, 0.02871571159362793, 0.028731264114379883, 0.028831743240356447, 0.02876620864868164, 0.0285614070892334, 0.028700672149658202, 0.028741472244262694, 0.028772192001342775, 0.028516672134399415, 0.02860851287841797, 0.028496192932128905, 0.028419776916503905, 0.028667903900146483, 0.029261152267456056, 0.02869251251220703, 0.02863974380493164, 0.028478815078735353, 0.028584127426147462, 0.028397792816162108, 0.02846348762512207, 0.028493824005126952, 0.02898944091796875, 0.028656639099121094, 0.02864361572265625, 0.028402399063110352, 0.02852799987792969, 0.028610208511352538, 0.028746368408203125, 0.028778303146362306, 0.02878060722351074, 0.028714975357055663, 0.02854902458190918, 0.0284881591796875, 0.028561151504516602, 0.028458400726318358, 0.029123552322387697, 0.02862828826904297, 0.028676799774169922, 0.02864454460144043, 0.028584224700927734, 0.028554880142211914, 0.028801727294921874, 0.028661535263061522, 0.02856595230102539, 0.02851430320739746, 0.028528287887573243, 0.028430431365966798, 0.028540384292602538, 0.0285765438079834, 0.028842111587524415, 0.028895103454589843, 0.029041183471679687, 0.028963008880615235, 0.028927711486816405, 0.028891679763793945, 0.028706783294677733, 0.028389408111572267, 0.02842848014831543, 0.02841580772399902, 0.028573408126831054, 0.028743711471557618, 0.02894054412841797, 0.029085695266723634, 0.028811264038085937, 0.028792287826538084, 0.028962560653686523, 0.02970636749267578, 0.02868614387512207, 0.028551551818847658, 0.02843280029296875, 0.02850377655029297, 0.028445024490356446, 0.02849001693725586, 0.02839254379272461, 0.028439104080200197, 0.02839360046386719, 0.028571136474609377, 0.02848409652709961, 0.028484928131103517, 0.028781055450439453, 0.028665727615356445, 0.02858624076843262, 0.03141334342956543, 0.029481887817382812, 0.028786752700805666, 0.02859779167175293, 0.028537248611450194, 0.028956159591674805, 0.029706335067749022, 0.02926246452331543, 0.029290271759033204, 0.028962080001831054, 0.02879689598083496, 0.029346559524536135, 0.028729343414306642, 0.029544704437255858, 0.029818624496459962, 0.028756128311157227, 0.028516191482543946, 0.02894643211364746, 0.028560735702514647, 0.028541023254394532, 0.028480064392089843, 0.028504032135009766, 0.02872118377685547, 0.028591520309448244, 0.028793439865112305, 0.028475072860717772, 0.028709184646606444, 0.028597503662109374, 0.02865843200683594, 0.028446720123291015, 0.028595232009887697, 0.02862886428833008, 0.028650720596313475, 0.028820159912109376, 0.028595359802246093, 0.02863030433654785, 0.028553184509277345, 0.02885193634033203, 0.028508127212524412, 
0.028872703552246092, 0.02835251235961914, 0.028520383834838868, 0.028354623794555663, 0.028545024871826172, 0.028495328903198242, 0.028612991333007813, 0.028614175796508788, 0.028537471771240233, 0.028479488372802734, 0.028635135650634767, 0.028564640045166016, 0.028519264221191408, 0.028621055603027343, 0.028646400451660156, 0.02849827194213867, 0.02842460823059082, 0.02836675262451172, 0.028414047241210938, 0.028544672012329103, 0.028446144104003906, 0.028381919860839842, 0.028444864273071288, 0.028511327743530275, 0.02852137565612793, 0.02840985679626465, 0.028579519271850585, 0.028899648666381835, 0.028872703552246092, 0.02870675277709961, 0.028681663513183592, 0.028486175537109373, 0.028485727310180665, 0.028622783660888673, 0.028692544937133788, 0.02851020812988281, 0.02868783950805664, 0.02875347137451172, 0.02864022445678711, 0.028593215942382812, 0.028787647247314453, 0.029000864028930665, 0.028892000198364257, 0.02872422409057617, 0.0287324161529541, 0.028577184677124022, 0.028598175048828126, 0.028432640075683593, 0.02847964859008789, 0.028525951385498047, 0.028545951843261717, 0.028716543197631835, 0.02895088005065918, 0.028702335357666017, 0.029205184936523437, 0.028892864227294923, 0.028461376190185548, 0.028620031356811522, 0.02855603218078613, 0.02882508850097656, 0.028700319290161133, 0.028511072158813478, 0.028757631301879884, 0.029006208419799805, 0.028923904418945313, 0.02902835273742676, 0.028735488891601563, 0.0288603515625, 0.028934335708618163, 0.028783584594726564, 0.028593055725097655, 0.028536832809448243, 0.02855526351928711, 0.02872700881958008, 0.02885206413269043, 0.02891584014892578, 0.029085727691650392, 0.029161760330200195, 0.028882623672485352, 0.028706560134887694, 0.028604223251342775, 0.028723039627075196, 0.028631967544555666, 0.028659456253051756, 0.028569759368896483, 0.028569408416748047, 0.02863132858276367, 0.02852016067504883, 0.02850009536743164, 0.02847350311279297, 0.02856867218017578, 0.028887968063354492, 0.028762111663818358, 0.028909215927124022, 0.02888025665283203, 0.029024576187133787, 0.02918876838684082, 0.029164863586425782, 0.02900160026550293, 0.02892883110046387, 0.02937830352783203, 0.030646528244018555, 0.028692256927490233, 0.028600479125976564, 0.028473407745361327, 0.028676095962524413, 0.028569055557250977, 0.028524831771850587, 0.028532415390014648, 0.0287708797454834, 0.02876416015625, 0.028690431594848635, 0.028645376205444335, 0.028670272827148437, 0.028407487869262695, 0.028516288757324218, 0.028530752182006836, 0.02908585548400879, 0.028793312072753905, 0.02881724739074707, 0.02869059181213379, 0.02884841537475586, 0.029199647903442382, 0.028694143295288087, 0.028613664627075194, 0.028612384796142577, 0.028606752395629882, 0.02846281623840332, 0.02863260841369629, 0.028440736770629884, 0.02881772804260254, 0.028821504592895508, 0.02859132766723633, 0.028871456146240235, 0.028676095962524413, 0.028674272537231444, 0.02868332862854004, 0.028708927154541014, 0.028672767639160157, 0.02850806427001953, 0.02871014404296875, 0.028677888870239258, 0.028701087951660157, 0.030229087829589843, 0.030007295608520508, 0.0296013126373291, 0.028913536071777345, 0.02863929557800293, 0.028774944305419922, 0.028759647369384765, 0.0288505916595459, 0.028794879913330077, 0.028872480392456056, 0.02859644889831543, 0.02878463935852051, 0.02850201606750488, 0.028769760131835936, 0.028513887405395507, 0.028420703887939453, 0.0291560001373291, 0.02899510383605957, 0.02864703941345215, 0.02866640090942383, 0.028670015335083007, 
0.02863030433654785, 0.028653535842895508, 0.03055686378479004, 0.029673471450805664, 0.0292096004486084, 0.028605440139770507, 0.028512256622314453, 0.028440576553344726, 0.02851968002319336, 0.02858880043029785, 0.028581695556640627, 0.02860665512084961, 0.02873958396911621, 0.028499103546142577, 0.02865443229675293, 0.028655616760253907, 0.0291267204284668, 0.028964799880981447, 0.028579839706420897, 0.02869980812072754, 0.028738399505615235, 0.028733247756958007, 0.028421407699584962, 0.028478464126586913, 0.029215776443481445, 0.029442943572998048, 0.02885647964477539, 0.028866016387939453, 0.02874950408935547, 0.02873750305175781, 0.028666591644287108, 0.028591840744018555, 0.028423904418945312, 0.029784576416015625, 0.028698495864868164, 0.028936384201049804, 0.028433727264404296, 0.028495967864990233, 0.028361183166503906, 0.02851238441467285, 0.028588031768798827, 0.02857779121398926, 0.02855116844177246, 0.028576095581054686, 0.0284946231842041, 0.028414783477783204, 0.02845907211303711, 0.028512191772460938, 0.028776512145996094, 0.029321216583251954, 0.02893619155883789, 0.029080863952636718, 0.029092607498168947, 0.028893152236938478, 0.028665855407714845, 0.028673696517944335, 0.02866771125793457, 0.028600160598754882, 0.028442720413208007, 0.02849443244934082, 0.028432384490966797, 0.028436832427978516, 0.028466720581054688, 0.028653696060180665, 0.028672000885009766, 0.028622880935668945, 0.028446687698364257, 0.028622848510742187, 0.028956672668457032, 0.02860611152648926, 0.028698976516723634, 0.028589632034301756, 0.028919744491577148, 0.028467615127563475, 0.028373088836669922, 0.028669599533081055, 0.028522048950195313, 0.02901865577697754, 0.028883007049560545, 0.029074623107910157, 0.028592960357666015, 0.028493919372558595, 0.02871286392211914, 0.02911631965637207, 0.028698720932006837, 0.028816831588745116, 0.02825222396850586, 0.02843084716796875, 0.028477439880371092, 0.028637184143066406, 0.028626943588256838, 0.02846073532104492, 0.028547391891479493, 0.02851840019226074, 0.028841983795166014, 0.02871494483947754, 0.02856284713745117, 0.028295839309692383, 0.02831974411010742, 0.028704767227172853, 0.028665855407714845, 0.028591487884521486, 0.028723840713500978, 0.02874367904663086, 0.028495872497558594, 0.028393728256225586, 0.028466880798339842, 0.028408000946044922, 0.028510080337524415, 0.028987136840820313, 0.028516128540039064, 0.028479551315307616, 0.028307775497436523, 0.029233440399169922, 0.028370752334594726, 0.02831155204772949, 0.028413951873779295, 0.028362688064575196, 0.028829311370849608, 0.02856185531616211, 0.028839744567871094, 0.028342464447021484, 0.028516351699829103, 0.02874275207519531, 0.02854185676574707, 0.028812864303588866, 0.028712799072265625, 0.028295200347900392, 0.0283756160736084, 0.029234752655029297, 0.02956742477416992, 0.028704767227172853, 0.028851648330688477, 0.028619327545166016, 0.02933350372314453, 0.029197952270507813, 0.029055360794067384, 0.0291362247467041, 0.02885209655761719, 0.028748575210571288, 0.028868608474731446, 0.028436479568481447, 0.02851878356933594, 0.02854262351989746, 0.02828633689880371, 0.02849072074890137, 0.028405311584472657, 0.028421728134155274, 0.028377952575683593, 0.028260351181030274, 0.02831564712524414, 0.028415199279785155, 0.028248863220214845, 0.028231679916381838, 0.028167327880859374, 0.028455072402954102, 0.028259008407592774, 0.028939584732055663, 0.028782943725585937, 0.028592479705810546, 0.028514400482177734, 0.02849577522277832, 0.028424192428588867, 0.028467199325561524, 
0.02840131187438965, 0.028295520782470704, 0.028459007263183594, 0.028416000366210937, 0.028430335998535155, 0.028626943588256838, 0.028254207611083985, 0.028430335998535155, 0.02852659225463867, 0.028470720291137695, 0.0283571834564209, 0.02838528060913086, 0.028330015182495116, 0.028626720428466798, 0.028719295501708986, 0.028424192428588867, 0.028315008163452147, 0.028310304641723634, 0.02833827209472656, 0.02840278434753418, 0.028240480422973634, 0.02843244743347168, 0.028424192428588867, 0.028399616241455077, 0.02838118362426758, 0.028411903381347657, 0.028276735305786133, 0.028514272689819337, 0.028329696655273438, 0.028436672210693358, 0.02829324722290039, 0.028446720123291015, 0.028499967575073244, 0.028411615371704103, 0.028303232192993164, 0.028336063385009765, 0.02826697540283203, 0.02859014320373535, 0.0288023681640625, 0.02840230369567871, 0.028608480453491212, 0.028696159362792968, 0.028512256622314453, 0.02868614387512207, 0.028213855743408203, 0.028379135131835938, 0.02846211242675781, 0.028353504180908203, 0.028438528060913085, 0.02828886413574219, 0.028268703460693358, 0.02816204833984375, 0.028184576034545897, 0.028241504669189454, 0.02844076728820801, 0.02834636878967285, 0.028566911697387697, 0.02876608085632324, 0.028514495849609377, 0.028277503967285157, 0.028249216079711915, 0.028265024185180666, 0.028303680419921876, 0.02828822326660156, 0.028564287185668946, 0.028290943145751955, 0.028395263671875, 0.02833148765563965, 0.02848246383666992, 0.029099424362182616, 0.028584543228149413, 0.028395519256591797, 0.028588031768798827, 0.028217344284057616, 0.028274015426635744, 0.028446687698364257, 0.028703168869018556, 0.028379520416259765, 0.028482751846313478, 0.028207872390747072, 0.028409791946411134, 0.028409055709838867, 0.028672800064086915, 0.02840985679626465, 0.02851840019226074, 0.02838937568664551, 0.028280832290649413, 0.028329984664916992, 0.02856755256652832, 0.028530815124511718, 0.028338048934936525, 0.028294879913330077, 0.02853638458251953, 0.02846793556213379, 0.02846112060546875, 0.028453887939453124, 0.029133760452270507, 0.028489471435546875, 0.028583295822143556, 0.02861142349243164, 0.029011999130249023, 0.028804447174072264, 0.02887740707397461, 0.028934207916259766]",tokens/s,34.867352457693194,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1173.385216,1101.98784,0.0,706.740224,681.6384,s,1,7.9774697265625,7.9774697265625,0.0,7.9774697265625,7.9774697265625,7.9774697265625,7.9774697265625,[7.9774697265625],,kWh,3.170235369582694e-05,3.489330949884943e-06,9.687785528005033e-06,4.487947017371692e-05,,MB,1504.735232,1406.07488,0.0,996.1472,949.238272,s,10,0.30593103981018066,0.030593103981018065,0.0002849342081377975,0.030481040000915526,0.031006976318359376,0.031121599769592286,0.031213298530578613,"[0.031236223220825195, 0.030656864166259765, 0.03054080009460449, 0.030380895614624023, 0.030413471221923827, 0.030314720153808594, 0.030421279907226564, 0.03062700843811035, 0.030981504440307617, 0.030358272552490233]",tokens/s,8367.898862398497,kWh,9.069501885869118e-07,1.0001952580079885e-07,5.511840158260737e-07,1.5581537302137844e-06,tokens/kWh,164297010.64533335,MB,1538.02752,1414.463488,0.0,1004.535808,949.240832,s,10,14.960107177734374,1.4960107177734376,0.0069726967375618835,1.4946002197265624,1.5034655639648438,1.507519464111328,1.5107625842285155,"[1.49989990234375, 1.4917362060546875, 1.4974642333984376, 1.4908616943359374, 1.497645751953125, 1.502564697265625, 1.488336181640625, 1.4883565673828125, 1.4916685791015625, 1.5115733642578124]",tokens/s,42.111997762800115,kWh,4.406717211932881e-05,4.8602477698235655e-06,1.7025524489974185e-05,6.595294437912654e-05,tokens/kWh,955226.4966041285,,s,630,14.958072921752935,0.023742972891671318,0.0004972487734326744,0.023646528244018555,0.02399850559234619,0.024191565704345705,0.025756715698242196,"[0.0238920955657959, 0.023654272079467773, 0.023745567321777343, 0.02382703971862793, 0.023885536193847656, 0.02376156806945801, 0.023725088119506837, 0.024677343368530273, 0.023870784759521483, 0.023814847946166992, 0.023971839904785155, 0.023897504806518553, 0.02444758415222168, 0.024135679244995118, 0.02389580726623535, 0.023705856323242187, 0.02391196823120117, 0.023658016204833984, 0.023755647659301757, 0.023525440216064453, 0.023517183303833008, 0.02353971290588379, 0.02370969581604004, 0.023607295989990236, 0.023822336196899413, 0.023699424743652345, 0.023924736022949217, 0.02362985610961914, 0.023901407241821288, 0.02396454429626465, 0.023752607345581055, 0.023817535400390624, 0.02366320037841797, 0.023678592681884766, 0.023622112274169924, 0.023478239059448243, 0.023652032852172853, 0.023728479385375978, 0.02414556884765625, 0.02365884780883789, 0.0237260799407959, 0.02378495979309082, 0.023675392150878907, 0.023558176040649415, 0.023850175857543947, 0.023820383071899414, 0.02391110420227051, 0.023975807189941405, 0.02596284866333008, 0.02398134422302246, 0.023884288787841795, 0.023661632537841797, 0.023696319580078125, 0.02366166305541992, 0.023554624557495116, 0.023478336334228515, 
0.023506656646728515, 0.023402240753173827, 0.023532352447509765, 0.023711103439331055, 0.023646848678588867, 0.023658496856689453, 0.02378278350830078, 0.02378976058959961, 0.0236112003326416, 0.023476224899291992, 0.02346188735961914, 0.023562240600585937, 0.023410688400268553, 0.023617536544799804, 0.023404735565185547, 0.023539520263671874, 0.023471519470214842, 0.023356000900268556, 0.023464096069335937, 0.02350592041015625, 0.02359996795654297, 0.023672224044799805, 0.02400320053100586, 0.02384009552001953, 0.023814783096313477, 0.023559520721435547, 0.023621408462524415, 0.023564863204956054, 0.02370591926574707, 0.023459808349609375, 0.023504735946655274, 0.02372380828857422, 0.024148384094238282, 0.024465599060058595, 0.023997983932495116, 0.023940383911132814, 0.02381670379638672, 0.02376550483703613, 0.024311040878295897, 0.02413369560241699, 0.0237795524597168, 0.0238637752532959, 0.023680639266967774, 0.023707103729248048, 0.023808927536010743, 0.023803903579711915, 0.023560192108154295, 0.023592960357666014, 0.02345779228210449, 0.0236395206451416, 0.02342108726501465, 0.023621984481811523, 0.02348569679260254, 0.023857952117919922, 0.023580671310424805, 0.02370684814453125, 0.0236408634185791, 0.023848543167114256, 0.02374083137512207, 0.02370355224609375, 0.023479551315307618, 0.02351923179626465, 0.023674688339233398, 0.02378438377380371, 0.023554048538208007, 0.023619583129882812, 0.02358460807800293, 0.023527584075927734, 0.02352924728393555, 0.023446752548217775, 0.023617504119873046, 0.02362166404724121, 0.023708736419677735, 0.023718528747558594, 0.023900480270385743, 0.023969791412353517, 0.023959711074829103, 0.02392848014831543, 0.02376313591003418, 0.02369331169128418, 0.02344960021972656, 0.02361302375793457, 0.02344905662536621, 0.023319488525390626, 0.02344688034057617, 0.023536224365234375, 0.023565568923950196, 0.023497535705566407, 0.023414783477783203, 0.02334467124938965, 0.023496992111206056, 0.023496896743774413, 0.02350601577758789, 0.02366582489013672, 0.02581068801879883, 0.023883775711059572, 0.023609344482421874, 0.02353958320617676, 0.023476352691650392, 0.023654399871826173, 0.02388159942626953, 0.023726463317871094, 0.023613183975219727, 0.023547903060913086, 0.023568384170532225, 0.02352025604248047, 0.02357046318054199, 0.02351203155517578, 0.023586559295654296, 0.02359321594238281, 0.02428313636779785, 0.02404761505126953, 0.02369126319885254, 0.023748607635498048, 0.023826208114624024, 0.023574304580688477, 0.023705312728881836, 0.02383535957336426, 0.023810176849365233, 0.023713632583618163, 0.02376406478881836, 0.024500543594360352, 0.025285247802734376, 0.024777759552001954, 0.02402777671813965, 0.02388115119934082, 0.023776159286499024, 0.02366464042663574, 0.023738048553466798, 0.02401299285888672, 0.023677055358886718, 0.023554048538208007, 0.023588863372802735, 0.02651683235168457, 0.027435680389404297, 0.023688671112060546, 0.023634271621704103, 0.02352761650085449, 0.023463552474975585, 0.02341516876220703, 0.023565824508666993, 0.023623424530029295, 0.023630592346191408, 0.023575679779052734, 0.02353660774230957, 0.023527328491210937, 0.023584768295288085, 0.02354380798339844, 0.023459840774536132, 0.02346188735961914, 0.02349465560913086, 0.02344960021972656, 0.023384063720703126, 0.023343103408813477, 0.023417024612426757, 0.023719743728637697, 0.02355311965942383, 0.023613855361938475, 0.023706111907958984, 0.023842079162597656, 0.02378607940673828, 0.023995807647705078, 0.023763679504394532, 0.02370515251159668, 
0.023648704528808594, 0.023568384170532225, 0.023488512039184572, 0.02351513671875, 0.023537311553955078, 0.023556255340576173, 0.023514911651611327, 0.02364361572265625, 0.023466943740844726, 0.02348236846923828, 0.023358720779418946, 0.023522016525268554, 0.02350601577758789, 0.023714208602905275, 0.023570976257324218, 0.023617727279663086, 0.02346988868713379, 0.023434751510620116, 0.023302528381347658, 0.02348044776916504, 0.023435264587402343, 0.023410240173339845, 0.02344799995422363, 0.023459840774536132, 0.02344550323486328, 0.023537664413452147, 0.02352742385864258, 0.02349635124206543, 0.023596704483032225, 0.023621599197387697, 0.023651008605957032, 0.02367081642150879, 0.023865440368652343, 0.023478176116943358, 0.023646207809448243, 0.02352332878112793, 0.023597055435180665, 0.02364348793029785, 0.023633920669555664, 0.023861791610717772, 0.023758975982666016, 0.023631872177124022, 0.023455680847167967, 0.02335491180419922, 0.0233538875579834, 0.023797567367553712, 0.023684543609619142, 0.02394598388671875, 0.02389798355102539, 0.02368320083618164, 0.023658496856689453, 0.023635551452636717, 0.023480735778808593, 0.023541759490966797, 0.02361315155029297, 0.0234803524017334, 0.028160255432128908, 0.023734272003173826, 0.02369945526123047, 0.023613183975219727, 0.023668991088867188, 0.023822336196899413, 0.023793472290039062, 0.02366044807434082, 0.02385334396362305, 0.023746496200561525, 0.023670848846435548, 0.023650400161743163, 0.023701408386230468, 0.02367283248901367, 0.023576576232910155, 0.023528768539428712, 0.023548511505126952, 0.023664255142211914, 0.02386172866821289, 0.0237238712310791, 0.02363612747192383, 0.023692607879638672, 0.023715776443481447, 0.023724800109863282, 0.02372345542907715, 0.023970367431640625, 0.023705440521240233, 0.023459999084472657, 0.023611391067504883, 0.023667808532714843, 0.024595359802246093, 0.025053119659423827, 0.02361759948730469, 0.023590911865234376, 0.02371788787841797, 0.023484287261962892, 0.02350726318359375, 0.023835935592651368, 0.023568063735961913, 0.024106496810913085, 0.02358732795715332, 0.023638015747070314, 0.023400447845458985, 0.023506336212158203, 0.023501472473144533, 0.02361939239501953, 0.023484544754028322, 0.02359702491760254, 0.02338819122314453, 0.024063615798950194, 0.025059711456298827, 0.023543039321899415, 0.02350966453552246, 0.023421024322509764, 0.023375104904174805, 0.023519680023193358, 0.02355232048034668, 0.02393052864074707, 0.023877792358398438, 0.02459872055053711, 0.025624576568603515, 0.024131584167480468, 0.024153919219970704, 0.02418911933898926, 0.02391993522644043, 0.02420307159423828, 0.024927104949951172, 0.02432614326477051, 0.024131584167480468, 0.024252416610717774, 0.02410220718383789, 0.024013504028320313, 0.023901952743530273, 0.02386764717102051, 0.024004608154296874, 0.02386124801635742, 0.023799808502197265, 0.023782880783081054, 0.023681568145751952, 0.02364825630187988, 0.02368921661376953, 0.023641216278076173, 0.023401344299316406, 0.02346134376525879, 0.02345212745666504, 0.023467296600341796, 0.023490976333618165, 0.02362713623046875, 0.0238023681640625, 0.023919103622436523, 0.023779327392578126, 0.02386528015136719, 0.024145055770874023, 0.023959711074829103, 0.024110912322998047, 0.023845823287963867, 0.023741727828979493, 0.02358550453186035, 0.023525087356567383, 0.023758655548095704, 0.023585248947143554, 0.02368307113647461, 0.02375379180908203, 0.023647167205810546, 0.02371379280090332, 0.02349260711669922, 0.023917600631713866, 0.023833568572998048, 
0.023750656127929686, 0.023805952072143553, 0.023812095642089845, 0.02364825630187988, 0.023727487564086915, 0.02373081588745117, 0.02365235137939453, 0.02361142349243164, 0.023481311798095702, 0.023634944915771484, 0.02344550323486328, 0.023478271484375, 0.02338515281677246, 0.023528064727783203, 0.023850400924682616, 0.023707584381103517, 0.023720928192138672, 0.023535167694091797, 0.023518783569335937, 0.023571264266967772, 0.023476287841796874, 0.023479551315307618, 0.023488672256469726, 0.023569087982177734, 0.023566240310668944, 0.023546880722045898, 0.02345235252380371, 0.023388351440429687, 0.023478399276733397, 0.023557855606079103, 0.023511072158813477, 0.023388320922851563, 0.023482271194458008, 0.023654367446899412, 0.02412566375732422, 0.023699583053588866, 0.023651647567749023, 0.023853311538696288, 0.023595327377319335, 0.023514911651611327, 0.02369968032836914, 0.02393087959289551, 0.023635967254638672, 0.023433216094970705, 0.023549951553344727, 0.023541952133178713, 0.023494464874267578, 0.02348236846923828, 0.02384000015258789, 0.023757087707519532, 0.02349235153198242, 0.023627552032470703, 0.02356934356689453, 0.023602848052978517, 0.023673120498657226, 0.02357004737854004, 0.023801792144775392, 0.023581375122070314, 0.023445695877075196, 0.023418752670288086, 0.02340255928039551, 0.0234432315826416, 0.023419168472290038, 0.02339027214050293, 0.023357440948486328, 0.023373760223388673, 0.023778335571289062, 0.023823328018188476, 0.023533504486083986, 0.0236945915222168, 0.023545759201049805, 0.023580671310424805, 0.023509920120239256, 0.02370512008666992, 0.02346236801147461, 0.02351923179626465, 0.023514175415039064, 0.023491199493408204, 0.023521600723266603, 0.023566144943237305, 0.02358086395263672, 0.02382195281982422, 0.02401523208618164, 0.023830528259277343, 0.023763071060180663, 0.023690208435058594, 0.023704479217529297, 0.023799392700195314, 0.02363155174255371, 0.02359574317932129, 0.024160255432128908, 0.023770624160766602, 0.023710399627685546, 0.02361724853515625, 0.023607391357421875, 0.02365644836425781, 0.02367897605895996, 0.023818239212036133, 0.023644159317016602, 0.02365644836425781, 0.023517183303833008, 0.023584192276000976, 0.02411782455444336, 0.02386025619506836, 0.02382080078125, 0.023628255844116212, 0.023550975799560548, 0.023591936111450194, 0.023549951553344727, 0.023625728607177734, 0.02351696014404297, 0.02349603271484375, 0.02354470443725586, 0.02344550323486328, 0.023379199981689452, 0.02353433609008789, 0.023574079513549805, 0.02368556785583496, 0.023517183303833008, 0.023764352798461914, 0.023732511520385743, 0.02350432014465332, 0.023540191650390625, 0.023801952362060546, 0.023756479263305662, 0.02350111961364746, 0.02357481575012207, 0.02376675224304199, 0.02349465560913086, 0.02352060890197754, 0.02338268852233887, 0.023472127914428712, 0.02352742385864258, 0.023615455627441405, 0.023563711166381836, 0.02394918441772461, 0.023687904357910156, 0.023696479797363282, 0.023825311660766603, 0.023432384490966796, 0.02360198402404785, 0.023541408538818358, 0.02349007987976074, 0.023571264266967772, 0.023561920166015625, 0.023548223495483397, 0.023594335556030275, 0.023567007064819335, 0.023479488372802733, 0.023530303955078127, 0.02371401596069336, 0.023621408462524415, 0.02374380874633789, 0.024146240234375, 0.023884159088134765, 0.02369923210144043, 0.023642335891723633, 0.02358787155151367, 0.023841760635375978, 0.023601152420043944, 0.023506591796875, 0.02368342399597168, 0.023487903594970702, 0.02343948745727539, 
0.023555904388427733, 0.023920799255371095, 0.02368054389953613, 0.02377398490905762, 0.023646080017089843, 0.023786815643310547, 0.02373324775695801, 0.02388991928100586, 0.023820287704467775, 0.023615264892578126, 0.023450944900512697, 0.023567264556884765, 0.023588064193725587, 0.023624479293823244, 0.024071775436401367, 0.02362614440917969, 0.02446335983276367, 0.024461311340332033, 0.023755903244018554, 0.023903104782104494, 0.02375699234008789, 0.02372364807128906, 0.02379385566711426, 0.02369536018371582, 0.02373017692565918, 0.02354380798339844, 0.023576576232910155, 0.023626976013183594, 0.023567136764526368, 0.023570432662963867, 0.02375600051879883, 0.023857152938842774, 0.02377996826171875, 0.023670944213867187, 0.02356163215637207, 0.023685728073120117, 0.023803647994995118, 0.02412486457824707, 0.025300031661987306, 0.02402639961242676, 0.023986400604248045, 0.024169759750366213, 0.02516870307922363, 0.02745974349975586, 0.024098751068115234, 0.024070207595825194, 0.023814144134521483, 0.023736320495605468, 0.02353775978088379, 0.023548095703125, 0.023594079971313478, 0.023865440368652343, 0.02353721618652344, 0.023485183715820312, 0.025204959869384765, 0.023816192626953125, 0.023580671310424805, 0.030139583587646485, 0.024097440719604492, 0.023752864837646485, 0.023604671478271486, 0.023658464431762696, 0.023744287490844725, 0.023666912078857422, 0.02359891128540039, 0.023440160751342774, 0.023369728088378908, 0.023519392013549804, 0.023502687454223632, 0.02352345657348633, 0.023521408081054688, 0.023670528411865236, 0.023545856475830077, 0.02465177536010742, 0.02525539207458496, 0.024193567276000978, 0.023969791412353517, 0.02370150375366211, 0.023560287475585938, 0.0236911678314209, 0.023731391906738283, 0.023533920288085937]",tokens/s,42.117724876432185,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1845.112832,2760.835072,0.0,2365.587456,2314.318336,s,1,8.816744140625,8.816744140625,0.0,8.816744140625,8.816744140625,8.816744140625,8.816744140625,[8.816744140625],,kWh,5.478485249999494e-05,6.034266840466255e-06,1.6985013588002862e-05,7.780413292846406e-05,,MB,1848.582144,3092.185088,0.0,2682.257408,2607.60832,s,10,0.5065773773193359,0.05065773773193359,0.00020496744571672432,0.050588720321655276,0.05074995651245117,0.05099966545104981,0.05119943260192871,"[0.05124937438964844, 0.05058329772949219, 0.05051811218261719, 0.05058911895751953, 0.05054745483398437, 0.05058832168579101, 0.05066713714599609, 0.05069446563720703, 0.05062723159790039, 0.05051286315917969]",tokens/s,5053.522155977031,kWh,1.5431936126095368e-06,1.701864793126454e-07,1.024990586073712e-06,2.738370677995894e-06,tokens/kWh,93486247.88348827,MB,1848.582144,3092.185088,0.0,2682.257408,2607.61088,s,10,15.920609619140626,1.5920609619140627,0.005971955870236001,1.5915422973632811,1.5992711547851561,1.5996481506347657,1.5999497473144533,"[1.5907589111328124, 
1.58003759765625, 1.5858822021484376, 1.58977587890625, 1.5933565673828125, 1.5989788818359374, 1.5991873779296875, 1.5902813720703124, 1.59232568359375, 1.600025146484375]",tokens/s,39.57134902941026,kWh,4.6056754418223436e-05,5.079726248708899e-06,2.1663277564525806e-05,7.279975823145815e-05,tokens/kWh,865387.4893333988,,s,630,15.918542459487922,0.02526752771347288,0.0003416172748956932,0.025196640014648435,0.025569696426391602,0.02576088399887085,0.02677633987426758,"[0.025562816619873047, 0.02546329689025879, 0.025427711486816405, 0.025194496154785157, 0.02530076789855957, 0.025295072555541993, 0.025429439544677735, 0.025172544479370118, 0.02522889518737793, 0.025182207107543944, 0.025069984436035156, 0.025357664108276366, 0.025485984802246092, 0.02529484748840332, 0.025363679885864257, 0.02517411231994629, 0.02503353691101074, 0.024985471725463868, 0.025083904266357423, 0.025057279586791992, 0.02493440055847168, 0.02507776069641113, 0.02521887969970703, 0.025130559921264648, 0.024956832885742186, 0.024984224319458008, 0.02503481674194336, 0.024971263885498047, 0.024905344009399415, 0.026468671798706055, 0.025141311645507813, 0.025143295288085937, 0.024983135223388672, 0.025294496536254884, 0.02518502426147461, 0.025177248001098634, 0.0251539192199707, 0.025160160064697266, 0.025309183120727538, 0.025032703399658202, 0.025001344680786134, 0.02510233688354492, 0.025060224533081054, 0.02537388801574707, 0.025319135665893555, 0.02654819107055664, 0.027015615463256835, 0.025450944900512695, 0.025375776290893555, 0.02513814353942871, 0.02512895965576172, 0.025047136306762696, 0.025536415100097656, 0.02519183921813965, 0.02511926460266113, 0.025080095291137694, 0.025061151504516602, 0.025075168609619142, 0.02508140754699707, 0.025048032760620117, 0.025155359268188477, 0.02506332778930664, 0.02517228889465332, 0.02506342315673828, 0.025057279586791992, 0.024922111511230468, 0.025108480453491212, 0.025011520385742187, 0.025055744171142577, 0.02504489517211914, 0.024883487701416015, 0.02488319969177246, 0.02512838363647461, 0.02513727951049805, 0.02548601531982422, 0.025392480850219726, 0.02538332748413086, 0.025212127685546874, 0.02510108757019043, 0.02511052894592285, 0.02497331237792969, 0.025016511917114258, 0.025068544387817384, 0.02501046371459961, 0.025004064559936524, 0.024963584899902344, 0.024980735778808594, 0.024869632720947266, 0.024823392868041992, 0.02522972869873047, 0.02546441650390625, 0.025477535247802736, 0.02536390495300293, 0.02524575996398926, 0.025303552627563477, 0.024878656387329102, 0.024830400466918947, 0.025059328079223633, 0.025022464752197264, 0.024800928115844726, 0.024828256607055663, 0.025054431915283202, 0.025043743133544922, 0.025047040939331053, 0.02493440055847168, 0.025037887573242188, 0.025155712127685546, 0.025187135696411133, 0.02509993553161621, 0.025123008728027342, 0.025067680358886717, 0.024851551055908205, 0.024963327407836914, 0.025196512222290038, 0.025000640869140625, 0.025067520141601563, 0.02509775924682617, 0.0251048641204834, 0.025010175704956054, 0.025020288467407226, 0.0250097599029541, 0.025133600234985353, 0.025069568634033205, 0.025092096328735353, 0.02512886428833008, 0.02512406349182129, 0.02547999954223633, 0.025442047119140623, 0.026634111404418945, 0.025792896270751955, 0.025153535842895508, 0.025012287139892578, 0.025120704650878907, 0.024936447143554686, 0.024856576919555663, 0.02494054412841797, 0.024808639526367186, 0.02492089653015137, 0.025060928344726563, 0.025067968368530272, 0.02499577522277832, 0.025087520599365233, 
0.02572496032714844, 0.02522982406616211, 0.02513462448120117, 0.025126976013183595, 0.02499398422241211, 0.02495417594909668, 0.02509712028503418, 0.0249487361907959, 0.024907039642333983, 0.024896352767944337, 0.025071487426757813, 0.024971263885498047, 0.025184255599975586, 0.025079328536987303, 0.025069311141967775, 0.024902368545532228, 0.02497439956665039, 0.024921024322509765, 0.02545452880859375, 0.025142719268798828, 0.024986240386962892, 0.024999935150146483, 0.025251840591430662, 0.024983327865600587, 0.024883424758911133, 0.025053184509277345, 0.025387008666992186, 0.024989696502685548, 0.02527846336364746, 0.02548121643066406, 0.025179391860961915, 0.02482275199890137, 0.025018144607543945, 0.025099872589111328, 0.025316959381103517, 0.02490451240539551, 0.025087520599365233, 0.025002464294433594, 0.024997888565063478, 0.0271824951171875, 0.025160287857055662, 0.02529484748840332, 0.02525379180908203, 0.02526963233947754, 0.025129695892333985, 0.025356128692626954, 0.02521718406677246, 0.02517913627624512, 0.024976160049438475, 0.02503670310974121, 0.025268320083618165, 0.025006080627441408, 0.02510220718383789, 0.02514508819580078, 0.02500160026550293, 0.024887359619140625, 0.025094432830810545, 0.026052703857421877, 0.02498796844482422, 0.02509414482116699, 0.025097600936889647, 0.025031295776367188, 0.02494054412841797, 0.026804224014282226, 0.026626016616821287, 0.025489343643188476, 0.025425376892089843, 0.025498239517211915, 0.025311296463012695, 0.02537811279296875, 0.025293439865112306, 0.025260032653808592, 0.025184255599975586, 0.025247743606567383, 0.02505846405029297, 0.024981792449951173, 0.024998176574707032, 0.0250163516998291, 0.025008544921875, 0.025038463592529297, 0.025436384201049805, 0.02671308708190918, 0.02548863983154297, 0.025276159286499022, 0.025283967971801758, 0.02562704086303711, 0.025222816467285157, 0.024955007553100587, 0.02493280029296875, 0.02512825584411621, 0.02484908866882324, 0.02526924705505371, 0.025400352478027344, 0.025415008544921874, 0.02491628837585449, 0.025424192428588867, 0.025188352584838865, 0.025109823226928712, 0.024806079864501954, 0.025032703399658202, 0.025040960311889647, 0.025032480239868163, 0.025065696716308594, 0.025091552734375, 0.025051616668701173, 0.025049087524414062, 0.025034175872802735, 0.02505743980407715, 0.02516828727722168, 0.02498150444030762, 0.02511564826965332, 0.025138912200927736, 0.025090335845947265, 0.025085248947143556, 0.024858943939208983, 0.02497983932495117, 0.025169919967651368, 0.02494438362121582, 0.024942848205566408, 0.02502569580078125, 0.025082719802856444, 0.02495078468322754, 0.027107231140136717, 0.02651875114440918, 0.025428895950317384, 0.025292768478393554, 0.025234687805175782, 0.025207551956176757, 0.02527027130126953, 0.025034303665161132, 0.025467231750488283, 0.025417503356933595, 0.02549763107299805, 0.025315616607666017, 0.025209856033325196, 0.025154048919677735, 0.025164287567138673, 0.02507161521911621, 0.025163200378417967, 0.02524012756347656, 0.025608192443847655, 0.025370304107666015, 0.025320959091186524, 0.0253243522644043, 0.025785503387451173, 0.025219104766845704, 0.025606143951416017, 0.02561270332336426, 0.025569696426391602, 0.025124864578247072, 0.025108480453491212, 0.025214975357055663, 0.025268415451049804, 0.02504889678955078, 0.025364479064941405, 0.025366687774658205, 0.025642847061157225, 0.025130783081054688, 0.025196767807006835, 0.025103776931762696, 0.025227872848510743, 0.025208671569824218, 0.025346208572387695, 0.025290752410888673, 
0.02532352066040039, 0.02529644775390625, 0.025213375091552734, 0.025285696029663084, 0.025242559432983397, 0.025083904266357423, 0.025109567642211915, 0.025191360473632813, 0.02516377639770508, 0.025141248703002928, 0.025177087783813477, 0.025731903076171875, 0.02512646484375, 0.025293216705322266, 0.02548918342590332, 0.02528857612609863, 0.025231935501098632, 0.0252860164642334, 0.0251757755279541, 0.025150367736816406, 0.02516377639770508, 0.025315008163452148, 0.0250729923248291, 0.025238496780395508, 0.025126911163330077, 0.025284032821655273, 0.025088575363159178, 0.025157632827758788, 0.02515545654296875, 0.02533967971801758, 0.025356639862060548, 0.02521446418762207, 0.025055744171142577, 0.025089408874511717, 0.02518284797668457, 0.025085887908935546, 0.025235456466674806, 0.025258047103881836, 0.02545254325866699, 0.025685792922973634, 0.026071264266967775, 0.025286272048950197, 0.02522137641906738, 0.025206079483032228, 0.02528748893737793, 0.025274368286132814, 0.02539244842529297, 0.028379840850830076, 0.025569280624389647, 0.025430015563964844, 0.024995712280273436, 0.02526425552368164, 0.025415103912353517, 0.026460128784179686, 0.025956960678100587, 0.02551807975769043, 0.025651199340820312, 0.025610240936279297, 0.02536419105529785, 0.026054208755493163, 0.025277408599853515, 0.025278207778930663, 0.0252205753326416, 0.02519708824157715, 0.025210304260253905, 0.025155328750610353, 0.025242431640625, 0.02512892723083496, 0.025363840103149415, 0.02535696029663086, 0.02512076759338379, 0.025165824890136718, 0.025953632354736328, 0.025737888336181642, 0.025761791229248047, 0.02558585548400879, 0.02567558479309082, 0.025600000381469725, 0.025486623764038086, 0.025348831176757812, 0.02527132797241211, 0.025258975982666014, 0.026195968627929687, 0.025302911758422853, 0.02547110366821289, 0.025251840591430662, 0.02535424041748047, 0.025235456466674806, 0.025233407974243165, 0.025182144165039062, 0.025597600936889647, 0.025569696426391602, 0.025181663513183593, 0.025165983200073242, 0.025561471939086915, 0.025075712203979493, 0.02523910331726074, 0.025227712631225585, 0.02532352066040039, 0.025092096328735353, 0.025860095977783205, 0.0257322883605957, 0.025446847915649416, 0.02504252815246582, 0.025368640899658203, 0.02559859275817871, 0.025475168228149415, 0.025038463592529297, 0.025147775650024413, 0.025380224227905274, 0.025256832122802733, 0.025105663299560547, 0.025266687393188478, 0.025367904663085937, 0.025326240539550782, 0.02525174331665039, 0.02534204864501953, 0.025752992630004884, 0.025367136001586913, 0.025050880432128907, 0.02528281593322754, 0.025174016952514647, 0.02513689613342285, 0.025244960784912108, 0.02513580894470215, 0.02525814437866211, 0.02533724784851074, 0.02527440071105957, 0.025340383529663085, 0.0254814395904541, 0.02577123260498047, 0.02542367935180664, 0.025148351669311522, 0.025401376724243165, 0.025443359375, 0.02538444709777832, 0.02524211120605469, 0.02515260887145996, 0.02510531234741211, 0.024983552932739257, 0.02509823989868164, 0.025266176223754884, 0.025108480453491212, 0.025206783294677734, 0.025188352584838865, 0.02512281608581543, 0.025010112762451173, 0.025212255477905274, 0.025133312225341795, 0.02504547119140625, 0.025495552062988282, 0.0253767032623291, 0.025411264419555664, 0.0255567684173584, 0.02542243194580078, 0.025364479064941405, 0.025233024597167968, 0.02522297668457031, 0.025010751724243163, 0.025114751815795897, 0.025173248291015624, 0.025156511306762695, 0.024923295974731446, 0.02527084732055664, 0.02532966423034668, 
0.02507366371154785, 0.025014272689819338, 0.025284608840942382, 0.02572902488708496, 0.02548121643066406, 0.025454591751098633, 0.02520591926574707, 0.025195295333862305, 0.025378240585327148, 0.02522585678100586, 0.025341951370239257, 0.02507766342163086, 0.02528879928588867, 0.025279935836791993, 0.025457216262817384, 0.02515260887145996, 0.02523638343811035, 0.02520579147338867, 0.025336799621582032, 0.025182207107543944, 0.02529280090332031, 0.025188352584838865, 0.025151487350463866, 0.02565894317626953, 0.025184703826904298, 0.025126911163330077, 0.02523750305175781, 0.025148544311523437, 0.025154176712036132, 0.025045248031616212, 0.025444416046142577, 0.02530246353149414, 0.025215263366699218, 0.025411584854125976, 0.025278528213500975, 0.025108415603637694, 0.025200639724731445, 0.02506342315673828, 0.025136320114135743, 0.02512553596496582, 0.02566508865356445, 0.02549225616455078, 0.025070943832397462, 0.025089887619018553, 0.025209312438964845, 0.025163936614990234, 0.025135103225708007, 0.025208831787109375, 0.025217119216918944, 0.02506332778930664, 0.025089056015014648, 0.025180288314819336, 0.025032896041870117, 0.024936960220336913, 0.0250984001159668, 0.025212928771972655, 0.025153343200683593, 0.024930496215820313, 0.02559347152709961, 0.025034656524658205, 0.025057279586791992, 0.02508220863342285, 0.025106559753417967, 0.025083904266357423, 0.025006080627441408, 0.02511996841430664, 0.025209375381469726, 0.025317632675170898, 0.025300991058349608, 0.02533171272277832, 0.02517363166809082, 0.02521116828918457, 0.025184288024902343, 0.025401216506958008, 0.025126880645751953, 0.02520044708251953, 0.025759775161743163, 0.025148799896240234, 0.025122880935668945, 0.025269184112548828, 0.025091487884521483, 0.025391616821289063, 0.02613408088684082, 0.02554319953918457, 0.025694208145141603, 0.025429119110107423, 0.0257524471282959, 0.025447872161865233, 0.02529542350769043, 0.02514739227294922, 0.025201791763305663, 0.025278432846069336, 0.025056127548217774, 0.025933855056762694, 0.026226688385009765, 0.025382911682128906, 0.025218399047851562, 0.025318048477172853, 0.02508902359008789, 0.025289728164672853, 0.025263872146606445, 0.025333215713500976, 0.025242399215698243, 0.02496512031555176, 0.025134239196777344, 0.025068384170532226, 0.02514672088623047, 0.02637481689453125, 0.025581567764282227, 0.025260032653808592, 0.02546272087097168, 0.025276479721069337, 0.025200639724731445, 0.025210880279541017, 0.02525164794921875, 0.025080223083496094, 0.02512873649597168, 0.027023359298706053, 0.026802175521850585, 0.025605695724487305, 0.02593222427368164, 0.025550815582275392, 0.025417695999145506, 0.0256690559387207, 0.02597865676879883, 0.025449087142944336, 0.025179424285888673, 0.025260992050170898, 0.025427743911743163, 0.025229280471801757, 0.025220384597778322, 0.025230304718017578, 0.025378816604614256, 0.025154848098754883, 0.025258560180664063, 0.025437952041625977, 0.025645471572875975, 0.025390592575073243, 0.025353952407836913, 0.02558236885070801, 0.025425920486450194, 0.0252509765625, 0.02518649673461914, 0.025247808456420898, 0.025330272674560547, 0.02527027130126953, 0.02525951957702637, 0.025192960739135743, 0.025100288391113282, 0.02532761573791504, 0.025333759307861328, 0.026474496841430665, 0.025116672515869142, 0.025154815673828126, 0.02512575912475586, 0.02518809509277344, 0.025337984085083008, 0.025079616546630858, 0.02531564712524414]",tokens/s,39.57648770943232,, 
4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,824.741888,554.631168,0.0,159.383552,143.673856,s,1,7.478853515625,7.478853515625,0.0,7.478853515625,7.478853515625,7.478853515625,7.478853515625,[7.478853515625],,kWh,1.0678520679164194e-05,1.170553645839295e-06,2.7013910500056637e-06,1.4550465375009153e-05,,MB,1334.976512,609.15712,0.0,199.22944,186.684928,s,26,0.20124217653274534,0.007740083712797898,8.766793833376148e-05,0.007727728128433227,0.007832304000854492,0.007928336143493652,0.00797323191165924,"[0.007979519844055176, 0.007784575939178467, 0.007776576042175293, 0.0077358717918396, 0.007770431995391846, 0.0077610878944396975, 0.007806848049163819, 0.007745632171630859, 0.007740416049957275, 0.007731872081756592, 0.007612895965576172, 0.007652063846588134, 0.007627200126647949, 0.007639679908752441, 0.007671711921691894, 0.007814367771148682, 0.007954368114471436, 0.007850240230560303, 0.007647424221038818, 0.007723584175109864, 0.007691872119903564, 0.007701504230499268, 0.007705183982849121, 0.007722176074981689, 0.007712160110473633, 0.00768291187286377]",tokens/s,33074.57767888413,kWh,2.3046161862339852e-07,2.5411420892259663e-08,9.955721373323103e-08,3.5543025324888916e-07,tokens/kWh,720253826.6227344,MB,1374.285824,611.254272,0.0,201.326592,186.687488,s,26,10.150502136230466,0.39040392831655646,0.002830543431388422,0.3892741394042969,0.3940727081298828,0.3958575744628906,0.3988802185058594,"[0.39970950317382814, 0.38872210693359377, 0.3894590148925781, 0.3931019897460937, 0.3898243103027344, 0.3889993896484375, 0.3889587707519531, 0.3901562805175781, 0.3920888366699219, 0.38913449096679686, 0.38812295532226565, 0.3888493347167969, 0.38840655517578127, 0.3893247375488281, 0.3899820251464844, 0.3942532043457031, 0.39639236450195314, 0.39110336303710935, 0.3938922119140625, 0.38784860229492185, 0.38820147705078123, 0.3884596252441406, 0.38892135620117185, 0.3893332214355469, 0.3880328674316406, 0.3892235412597656]",tokens/s,161.3713270551849,kWh,1.1370478016312588e-05,1.2539743753767515e-06,3.9676896298051265e-06,1.6592142021494465e-05,tokens/kWh,3796978.10676801,,s,1638,10.138676737308518,0.006189668337795178,0.00015010116841278822,0.006172384023666383,0.006271177625656128,0.0063377505302429195,0.0067069341897964445,"[0.006173952102661133, 0.006246623992919922, 0.006181119918823242, 0.006245664119720459, 0.006173855781555175, 0.006268864154815674, 0.00621292781829834, 0.006273695945739746, 0.007050464153289795, 0.00792742395401001, 0.007961567878723144, 0.00797705602645874, 0.007917471885681152, 0.006174047946929931, 0.006233920097351074, 0.006347616195678711, 0.00642790412902832, 0.006175487995147705, 0.006289408206939697, 0.006230016231536865, 0.006166528224945069, 0.0061972479820251464, 0.006258687973022461, 0.006250495910644531, 0.006127391815185547, 0.006266335964202881, 0.006126336097717285, 0.006236159801483154, 
0.006182911872863769, 0.006148384094238281, 0.0061718721389770505, 0.006175136089324951, 0.006199391841888427, 0.006127615928649902, 0.0062321281433105466, 0.006131648063659668, 0.0062197761535644535, 0.0061066560745239255, 0.006230303764343262, 0.006174911975860595, 0.006319680213928223, 0.006214079856872559, 0.006130943775177002, 0.0062041602134704586, 0.006108704090118408, 0.0062015681266784665, 0.006108543872833252, 0.00656492805480957, 0.006303071975708008, 0.006158304214477539, 0.006210048198699952, 0.0061296639442443845, 0.006237760066986084, 0.006144639968872071, 0.0063071041107177735, 0.006178783893585205, 0.006289984226226807, 0.006260735988616943, 0.006172671794891358, 0.006244383811950684, 0.00617468786239624, 0.006208831787109375, 0.006131904125213623, 0.006114848136901855, 0.006083040237426758, 0.0061972479820251464, 0.006131840229034424, 0.006174592018127442, 0.006196544170379638, 0.006159039974212646, 0.006188064098358154, 0.0061279358863830565, 0.006481728076934814, 0.006204127788543701, 0.006236288070678711, 0.006166783809661866, 0.006196000099182129, 0.006201632022857666, 0.006105663776397705, 0.006170752048492431, 0.006119423866271972, 0.006167712211608886, 0.006113408088684082, 0.006212319850921631, 0.006088704109191895, 0.006227968215942382, 0.006080383777618408, 0.006205535888671875, 0.006090784072875976, 0.006154240131378174, 0.006164480209350586, 0.006244128227233887, 0.006176383972167969, 0.006111839771270752, 0.006186592102050781, 0.006255008220672607, 0.006194431781768799, 0.006099679946899414, 0.006213664054870605, 0.006088736057281494, 0.006207456111907959, 0.0062481918334960935, 0.00610643196105957, 0.006170656204223633, 0.006091904163360595, 0.006170400142669678, 0.006086656093597412, 0.006207488059997559, 0.006111104011535645, 0.006193408012390137, 0.006071392059326172, 0.006202144145965576, 0.006078271865844726, 0.006167840003967285, 0.006120160102844239, 0.006146207809448242, 0.00616809606552124, 0.0061077442169189455, 0.006149919986724853, 0.0061175041198730466, 0.006162079811096191, 0.006107039928436279, 0.006227488040924072, 0.006108128070831299, 0.0062258877754211425, 0.006128672122955322, 0.006184832096099853, 0.006167103767395019, 0.006141952037811279, 0.006189055919647217, 0.00610643196105957, 0.00617910385131836, 0.006213024139404297, 0.006206816196441651, 0.006098015785217285, 0.0061773438453674315, 0.006066431999206543, 0.006172416210174561, 0.006108320236206054, 0.0061346240043640135, 0.0061421761512756344, 0.006100255966186524, 0.00613651180267334, 0.006088511943817139, 0.006162015914916992, 0.006089119911193847, 0.006164415836334229, 0.006076767921447754, 0.006176640033721923, 0.006084447860717774, 0.006213632106781006, 0.006115392208099365, 0.006266816139221192, 0.006196767807006836, 0.006263199806213379, 0.00625055980682373, 0.006180863857269287, 0.0062259521484375, 0.006635488033294678, 0.006211584091186524, 0.0061214399337768555, 0.006153439998626709, 0.00620198392868042, 0.006125760078430176, 0.006240255832672119, 0.006209536075592041, 0.006229023933410644, 0.006101984024047852, 0.006224991798400879, 0.00611740779876709, 0.006222720146179199, 0.006153823852539063, 0.006174272060394287, 0.00621449613571167, 0.006158336162567139, 0.006188543796539306, 0.006123231887817383, 0.006304543972015381, 0.00608790397644043, 0.006193952083587646, 0.006080319881439209, 0.006238656044006348, 0.00611081600189209, 0.006142111778259277, 0.0061521921157836916, 0.006109183788299561, 0.006160384178161621, 0.006350399971008301, 0.006186431884765625, 
0.006044223785400391, 0.006187007904052734, 0.006105088233947754, 0.006185184001922608, 0.006106592178344726, 0.0061907520294189455, 0.006178688049316406, 0.006144927978515625, 0.006193183898925782, 0.006118783950805664, 0.006168352127075195, 0.006105760097503662, 0.006190207958221436, 0.00606873607635498, 0.006181280136108399, 0.006067647933959961, 0.006385471820831299, 0.006174975872039795, 0.006232063770294189, 0.006155968189239502, 0.00611411190032959, 0.006192768096923828, 0.0061032319068908695, 0.006282815933227539, 0.006098591804504395, 0.0061859521865844725, 0.0060928001403808595, 0.006176767826080322, 0.0060863041877746585, 0.006250080108642578, 0.006159039974212646, 0.006139071941375732, 0.006153088092803955, 0.0060928001403808595, 0.00616156816482544, 0.006111584186553955, 0.006152512073516846, 0.006134208202362061, 0.006202720165252686, 0.006105408191680908, 0.006185056209564209, 0.006063168048858643, 0.006153151988983154, 0.006107135772705078, 0.0061421761512756344, 0.006355967998504639, 0.006161183834075928, 0.006227519989013672, 0.006147744178771973, 0.006255551815032959, 0.0061049280166625974, 0.006220032215118408, 0.006097792148590088, 0.006187903881072998, 0.006128960132598877, 0.00618345594406128, 0.00683958387374878, 0.008580991744995117, 0.006486815929412842, 0.007170271873474121, 0.0062542400360107425, 0.006131040096282959, 0.0062904319763183595, 0.006124576091766357, 0.006222623825073242, 0.0061970877647399905, 0.006148416042327881, 0.006188223838806153, 0.00610975980758667, 0.0062297601699829105, 0.006095232009887696, 0.006189087867736817, 0.00610316801071167, 0.006217504024505615, 0.006191328048706055, 0.006170400142669678, 0.006172895908355713, 0.006144000053405762, 0.0061972479820251464, 0.0061380801200866695, 0.0062576642036437985, 0.006117856025695801, 0.006202847957611084, 0.006103392124176025, 0.006199808120727539, 0.006141952037811279, 0.006177887916564941, 0.006199520111083984, 0.006130112171173096, 0.006160639762878418, 0.006111423969268798, 0.006174431800842285, 0.0060867519378662105, 0.006164480209350586, 0.006070367813110352, 0.006209375858306884, 0.006156352043151856, 0.006256351947784424, 0.0061485438346862795, 0.00617574405670166, 0.006181727886199951, 0.006160384178161621, 0.006209536075592041, 0.006151711940765381, 0.0064058880805969234, 0.006130208015441894, 0.006245888233184814, 0.0061151041984558105, 0.006211520195007324, 0.0062137598991394045, 0.006155072212219238, 0.0061996798515319826, 0.006565536022186279, 0.006258687973022461, 0.006137887954711914, 0.006241951942443848, 0.006146368026733399, 0.006178624153137207, 0.006181056022644043, 0.006236159801483154, 0.006196447849273682, 0.006112192153930664, 0.006213471889495849, 0.00608787202835083, 0.006212224006652832, 0.006117216110229492, 0.0061562881469726565, 0.006193280220031738, 0.0061950721740722655, 0.006215007781982422, 0.006146719932556152, 0.0062259202003479, 0.006184288024902344, 0.006214079856872559, 0.00611894416809082, 0.006215968132019043, 0.006113791942596435, 0.006227424144744873, 0.006195519924163818, 0.006156576156616211, 0.006203199863433838, 0.006141983985900879, 0.006205440044403076, 0.0061214399337768555, 0.0061972799301147465, 0.0060965437889099125, 0.006205984115600586, 0.0061038718223571774, 0.006227295875549316, 0.006116608142852783, 0.006186431884765625, 0.006158400058746338, 0.0061244478225708, 0.0061799359321594236, 0.006122399806976318, 0.006185184001922608, 0.006125376224517822, 0.006211647987365723, 0.006099008083343506, 0.006217567920684814, 
0.006146143913269043, 0.006229919910430908, 0.006119423866271972, 0.0061500477790832515, 0.006155807971954346, 0.006115808010101319, 0.006194399833679199, 0.006117280006408691, 0.0061933121681213375, 0.006119647979736328, 0.006203999996185303, 0.006098944187164307, 0.006177023887634277, 0.006100800037384033, 0.006206624031066895, 0.00628111982345581, 0.006189727783203125, 0.006224095821380615, 0.006139776229858398, 0.0061829757690429685, 0.006109119892120362, 0.006205088138580323, 0.006094687938690185, 0.006193984031677246, 0.006124576091766357, 0.006204192161560059, 0.006139904022216797, 0.006187007904052734, 0.0061561279296875, 0.0060414719581604005, 0.0061970877647399905, 0.006132800102233887, 0.006231904029846191, 0.006140223979949951, 0.00623145580291748, 0.006110496044158935, 0.006194303989410401, 0.006137983798980713, 0.006147903919219971, 0.006258304119110107, 0.006266975879669189, 0.006224095821380615, 0.006154016017913818, 0.006250720024108886, 0.006215519905090332, 0.006215712070465088, 0.0061703681945800785, 0.006166687965393066, 0.006172671794891358, 0.006129888057708741, 0.006178944110870361, 0.006129631996154785, 0.006193056106567383, 0.006117568016052246, 0.006186016082763672, 0.006103839874267578, 0.006192160129547119, 0.0061140480041503905, 0.006213856220245361, 0.006105343818664551, 0.006210912227630616, 0.006156832218170166, 0.00610700798034668, 0.006356959819793701, 0.006127776145935059, 0.006209248065948486, 0.006096992015838623, 0.006225984096527099, 0.006092319965362549, 0.006189536094665527, 0.006081535816192627, 0.006198400020599365, 0.0061418237686157225, 0.006129183769226074, 0.006170207977294922, 0.00611030387878418, 0.006165472030639649, 0.006115839958190918, 0.006169151782989502, 0.0061080641746521, 0.006216639995574951, 0.006104991912841797, 0.0062259202003479, 0.006106527805328369, 0.0062206401824951175, 0.006176544189453125, 0.0061560640335083005, 0.006199488162994385, 0.006111487865447998, 0.006198239803314209, 0.0061194877624511715, 0.006193888187408447, 0.005999040126800537, 0.006200799942016602, 0.006185247898101807, 0.006334720134735107, 0.006686336040496827, 0.0061627840995788575, 0.006373407840728759, 0.006572319984436035, 0.006242047786712646, 0.006223840236663818, 0.006174623966217041, 0.006199391841888427, 0.0061645441055297855, 0.006243264198303223, 0.0061242241859436036, 0.006224192142486572, 0.006196224212646485, 0.00621235179901123, 0.006292064189910889, 0.006149151802062988, 0.006162303924560547, 0.006136064052581787, 0.006199808120727539, 0.006121471881866455, 0.0061968002319335936, 0.006078911781311035, 0.006190144062042237, 0.006101280212402344, 0.006202303886413575, 0.006087423801422119, 0.006182112216949463, 0.006164224147796631, 0.006188032150268555, 0.006173120021820068, 0.006129248142242432, 0.0061898880004882815, 0.006113056182861328, 0.006219295978546142, 0.006097760200500488, 0.006204671859741211, 0.006089471817016602, 0.006213791847229004, 0.006110527992248535, 0.006177311897277832, 0.006141600131988526, 0.006142303943634033, 0.006170623779296875, 0.006123519897460937, 0.006243936061859131, 0.006128032207489014, 0.006212831974029541, 0.006087552070617676, 0.00620688009262085, 0.006101791858673096, 0.006200384140014648, 0.0061200962066650395, 0.006189055919647217, 0.006178944110870361, 0.00614796781539917, 0.006221824169158936, 0.006121471881866455, 0.006180863857269287, 0.006105088233947754, 0.006118368148803711, 0.006082560062408447, 0.006242015838623047, 0.006096223831176758, 0.006180960178375244, 0.006153312206268311, 
0.006145792007446289, 0.00616864013671875, 0.006115263938903809, 0.006166528224945069, 0.006127615928649902, 0.0061682238578796384, 0.0061073598861694335, 0.006203519821166992, 0.006088704109191895, 0.006158080101013184, 0.0060841598510742185, 0.006183775901794433, 0.006243743896484375, 0.007389279842376709, 0.006201695919036865, 0.0060945281982421875, 0.006195519924163818, 0.006130015850067139, 0.006121119976043701, 0.0061541438102722164, 0.00610700798034668, 0.00614576005935669, 0.0060871682167053225, 0.006156576156616211, 0.006090528011322022, 0.006180960178375244, 0.0060793919563293455, 0.006198016166687012, 0.006087999820709228, 0.006204288005828857, 0.006072319984436035, 0.006187263965606689, 0.006192031860351562, 0.006116064071655274, 0.006174816131591797, 0.0061008319854736325, 0.006144383907318115, 0.00609987211227417, 0.006162432193756104, 0.006199359893798828, 0.006858848094940186, 0.006587135791778565, 0.0067358717918396, 0.006393856048583985, 0.00617574405670166, 0.006964223861694336, 0.006443039894104004, 0.006099167823791504, 0.0062481918334960935, 0.006105055809020996, 0.006192512035369873, 0.006161056041717529, 0.006168896198272705, 0.006166207790374756, 0.006139904022216797, 0.006174592018127442, 0.006126016139984131, 0.006082592010498047, 0.00609830379486084, 0.006261375904083252, 0.006381504058837891, 0.006215424060821533, 0.006175007820129395, 0.006134880065917969, 0.006193247795104981, 0.00613702392578125, 0.006212480068206787, 0.006124512195587158, 0.006182240009307861, 0.006177631855010987, 0.006182752132415772, 0.006090079784393311, 0.0062009282112121585, 0.0061874880790710445, 0.006162879943847656, 0.006187168121337891, 0.006119167804718018, 0.006201663970947266, 0.006143519878387451, 0.006274367809295654, 0.006105984210968017, 0.0062197761535644535, 0.006117055892944336, 0.006205760002136231, 0.006115327835083008, 0.006160416126251221, 0.006162335872650146, 0.006135424137115478, 0.006147712230682373, 0.006124351978302002, 0.006180543899536133, 0.0060910720825195315, 0.006237760066986084, 0.006120128154754639, 0.006196896076202393, 0.006094431877136231, 0.006244639873504639, 0.006244512081146241, 0.00611894416809082, 0.006144032001495361, 0.0061114559173583985, 0.006160672187805176, 0.006180863857269287, 0.006205344200134277, 0.006217823982238769, 0.006208896160125732, 0.006092576026916504, 0.006197440147399902, 0.0062304000854492185, 0.0062507839202880855, 0.006182911872863769, 0.006148096084594727, 0.006201344013214111, 0.006119423866271972, 0.006215167999267578, 0.006106815814971924, 0.006183680057525635, 0.0061010560989379885, 0.006158336162567139, 0.0060657281875610355, 0.006072319984436035, 0.006121471881866455, 0.0061411519050598145, 0.0061502718925476076, 0.006101664066314697, 0.006169663906097412, 0.006115808010101319, 0.006150815963745117, 0.006090816020965576, 0.006191199779510498, 0.006106783866882324, 0.0062156801223754886, 0.00609388780593872, 0.0062145919799804685, 0.006119391918182373, 0.0061785922050476075, 0.006156544208526612, 0.006105088233947754, 0.006174528121948243, 0.006109344005584717, 0.006193183898925782, 0.0061066560745239255, 0.006224671840667725, 0.006097695827484131, 0.006216608047485352, 0.006121312141418457, 0.0062046079635620114, 0.006159327983856201, 0.0061561279296875, 0.006190847873687744, 0.006128032207489014, 0.0061764798164367675, 0.006115935802459717, 0.006166207790374756, 0.006155807971954346, 0.006209472179412842, 0.006109119892120362, 0.00622160005569458, 0.006118207931518555, 0.0062197761535644535, 0.00614412784576416, 
0.006129536151885986, 0.006170944213867187, 0.006131392002105713, 0.006170623779296875, 0.00612556791305542, 0.00618009614944458, 0.006138239860534668, 0.00619484806060791, 0.006062272071838379, 0.006180895805358887, 0.006189407825469971, 0.006240255832672119, 0.0061625919342041015, 0.006120831966400147, 0.006204095840454102, 0.006133920192718506, 0.006188416004180909, 0.006123936176300049, 0.006197567939758301, 0.006079264163970947, 0.006194272041320801, 0.006112383842468262, 0.006110400199890137, 0.0061305279731750486, 0.006189919948577881, 0.006168575763702393, 0.006111328125, 0.006166431903839111, 0.006119423866271972, 0.006158336162567139, 0.006123072147369385, 0.006214431762695313, 0.006108255863189698, 0.006234687805175781, 0.00613100814819336, 0.00636575984954834, 0.006233312129974365, 0.006153120040893555, 0.006230016231536865, 0.006137856006622314, 0.006191359996795654, 0.006133600234985352, 0.0062135357856750485, 0.006100992202758789, 0.006203680038452148, 0.0060984320640563965, 0.006197472095489502, 0.006166528224945069, 0.006144000053405762, 0.0061931519508361815, 0.006121471881866455, 0.006264544010162353, 0.006107423782348633, 0.006180416107177735, 0.006088992118835449, 0.006201632022857666, 0.006080383777618408, 0.006213791847229004, 0.00607747220993042, 0.0063127679824829105, 0.006158656120300293, 0.006135424137115478, 0.006185023784637451, 0.006182496070861816, 0.006190591812133789, 0.006112160205841065, 0.006196896076202393, 0.006092864036560059, 0.006314559936523438, 0.006102240085601807, 0.00619484806060791, 0.0061307201385498045, 0.006125376224517822, 0.006155744075775147, 0.006106815814971924, 0.006171487808227539, 0.006119423866271972, 0.006193120002746582, 0.006131680011749268, 0.006219840049743653, 0.006108736038208008, 0.006213471889495849, 0.006102719783782959, 0.006210464000701904, 0.006163584232330322, 0.00611516809463501, 0.006199295997619629, 0.006129824161529541, 0.006199295997619629, 0.006118783950805664, 0.006257215976715088, 0.006084671974182129, 0.006192736148834229, 0.006077119827270508, 0.0061803522109985356, 0.0061559038162231445, 0.006159039974212646, 0.006173920154571533, 0.006129600048065185, 0.0061877760887146, 0.006195199966430664, 0.006203264236450195, 0.006131487846374512, 0.006214272022247314, 0.006096608161926269, 0.006196512222290039, 0.006355679988861084, 0.006189055919647217, 0.006178815841674804, 0.006135968208312989, 0.006229856014251709, 0.006120800018310547, 0.00617244815826416, 0.006123904228210449, 0.006210048198699952, 0.006097087860107422, 0.0061868162155151365, 0.006083648204803466, 0.006161375999450684, 0.006131103992462158, 0.006150112152099609, 0.006156352043151856, 0.006136032104492187, 0.006162144184112549, 0.006115551948547363, 0.006173056125640869, 0.006107039928436279, 0.006207583904266357, 0.0060867519378662105, 0.006184671878814697, 0.006068031787872314, 0.006295360088348388, 0.006119584083557129, 0.00614847993850708, 0.006141791820526123, 0.0061289920806884765, 0.0061571521759033205, 0.006113599777221679, 0.006148863792419434, 0.006095808029174804, 0.006224127769470214, 0.006088448047637939, 0.0061933121681213375, 0.00609878396987915, 0.006205535888671875, 0.006108352184295654, 0.00619001579284668, 0.006176544189453125, 0.006088479995727539, 0.006166783809661866, 0.006109087944030762, 0.006182464122772216, 0.006111199855804443, 0.006208255767822266, 0.006094751834869384, 0.0062642240524291995, 0.006125631809234619, 0.0062176318168640134, 0.006131807804107666, 0.0061504640579223635, 0.006203616142272949, 
0.006109183788299561, 0.0061662721633911136, 0.006096896171569824, 0.006193408012390137, 0.006137856006622314, 0.0062271361351013185, 0.006090943813323975, 0.006226560115814209, 0.006303872108459473, 0.006264544010162353, 0.006214144229888916, 0.006139552116394043, 0.006186495780944825, 0.006121312141418457, 0.00617958402633667, 0.006090752124786377, 0.006188960075378418, 0.006080704212188721, 0.006214943885803223, 0.006114175796508789, 0.006219711780548096, 0.006191904067993164, 0.006173503875732422, 0.006219903945922852, 0.006217023849487305, 0.0061918082237243655, 0.006135807991027832, 0.0062259202003479, 0.006131711959838867, 0.006187039852142334, 0.0060927681922912594, 0.006244351863861084, 0.0061662721633911136, 0.00626854419708252, 0.006206399917602539, 0.006126944065093994, 0.006426976203918457, 0.006102015972137451, 0.006214655876159668, 0.006108960151672363, 0.006217951774597168, 0.006181024074554444, 0.006188896179199219, 0.006168575763702393, 0.006126719951629639, 0.006155136108398437, 0.006117728233337403, 0.006172287940979004, 0.006094304084777832, 0.006204127788543701, 0.00598195219039917, 0.006206975936889648, 0.006071040153503418, 0.00620854377746582, 0.00611846399307251, 0.006164383888244629, 0.006315616130828858, 0.006221343994140625, 0.006166463851928711, 0.006116288185119629, 0.006187007904052734, 0.006072351932525635, 0.006188767910003662, 0.006076032161712646, 0.006181503772735596, 0.006094655990600586, 0.006199776172637939, 0.006170688152313232, 0.0061641278266906735, 0.006170559883117676, 0.006146111965179443, 0.006207488059997559, 0.006184351921081543, 0.006181663990020752, 0.0060878081321716305, 0.006174528121948243, 0.006075039863586425, 0.006158559799194336, 0.006067776203155518, 0.0061651840209960935, 0.006118591785430908, 0.006128416061401367, 0.00613705587387085, 0.00610591983795166, 0.006133503913879394, 0.006097119808197021, 0.006147935867309571, 0.006082496166229248, 0.006258687973022461, 0.00610316801071167, 0.00618016004562378, 0.00609503984451294, 0.006200992107391358, 0.006077151775360107, 0.006182784080505371, 0.006563519954681397, 0.006335231781005859, 0.006338240146636963, 0.006274591922760009, 0.006287839889526367, 0.006211647987365723, 0.006373311996459961, 0.006240255832672119, 0.006195199966430664, 0.006299647808074951, 0.006245728015899658, 0.006271647930145264, 0.006175968170166015, 0.006265632152557373, 0.006261792182922363, 0.006187808036804199, 0.006232255935668945, 0.0062009282112121585, 0.006176928043365479, 0.006148640155792236, 0.006262784004211426, 0.006334464073181153, 0.006250495910644531, 0.006241951942443848, 0.006256991863250732, 0.00628326416015625, 0.0062046399116516115, 0.006303616046905517, 0.0062024321556091305, 0.006346816062927246, 0.006411744117736816, 0.006244671821594238, 0.006303135871887207, 0.006172287940979004, 0.006286367893218994, 0.006196352005004882, 0.006319200038909912, 0.006275968074798584, 0.006217984199523926, 0.006293471813201904, 0.00635148811340332, 0.006282815933227539, 0.0061807999610900876, 0.0062754878997802735, 0.006260992050170899, 0.006199488162994385, 0.006231776237487793, 0.006247392177581787, 0.006279200077056885, 0.006146431922912598, 0.006232639789581299, 0.006146175861358643, 0.006256063938140869, 0.006286079883575439, 0.006185664176940918, 0.006230559825897217, 0.006316127777099609, 0.006285727977752686, 0.00616649580001831, 0.006245791912078857, 0.006222432136535645, 0.00620908784866333, 0.006230463981628418, 0.006199295997619629, 0.006209792137145996, 0.006148863792419434, 
0.006343008041381836, 0.006185152053833008, 0.006244319915771485, 0.006283455848693847, 0.006205760002136231, 0.006354623794555664, 0.006203936100006103, 0.006686495780944824, 0.006151487827301025, 0.0062707200050354005, 0.006242591857910157, 0.006175680160522461, 0.00621062421798706, 0.006219744205474853, 0.00625273609161377, 0.006111072063446045, 0.006349664211273193, 0.006262815952301025, 0.00631820821762085, 0.006315936088562011, 0.006198751926422119, 0.006341119766235351, 0.006180704116821289, 0.006717152118682861, 0.006281888008117676, 0.006287168025970459, 0.006315711975097656, 0.006211584091186524, 0.006320032119750976, 0.00680998420715332, 0.006266880035400391, 0.006314112186431885, 0.00625161600112915, 0.006359456062316894, 0.0062631678581237795, 0.006272768020629883, 0.006321695804595948, 0.006329247951507569, 0.006326015949249268, 0.0062032961845397945, 0.006296768188476563, 0.006213791847229004, 0.00630406379699707, 0.006287871837615967, 0.006217728137969971, 0.006254591941833496, 0.006205632209777832, 0.006284863948822022, 0.006160704135894776, 0.0062871999740600586, 0.006266335964202881, 0.0062921919822692875, 0.006342559814453125, 0.006551136016845703, 0.006318496227264404, 0.006192416191101074, 0.006288095951080323, 0.006270400047302246, 0.006183616161346436, 0.00624832010269165, 0.006232063770294189, 0.006288479804992675, 0.006154304027557373, 0.0062679038047790524, 0.006217343807220459, 0.006244351863861084, 0.0062568001747131344, 0.006193215847015381, 0.006291456222534179, 0.006311935901641846, 0.006284543991088867, 0.006222559928894043, 0.006379551887512207, 0.006257919788360595, 0.0062000641822814945, 0.006253888130187988, 0.006189760208129883, 0.006302752017974853, 0.006072319984436035, 0.006305600166320801, 0.006354176044464112, 0.006210847854614258, 0.006264544010162353, 0.006193088054656982, 0.006348991870880127, 0.006192255973815918, 0.006265312194824219, 0.006220032215118408, 0.0063504958152771, 0.006315648078918457, 0.006419007778167725, 0.006330495834350586, 0.006233727931976319, 0.006428800106048584, 0.006207744121551514, 0.006150144100189209, 0.006260096073150635, 0.0061421761512756344, 0.006201759815216064, 0.0061194877624511715, 0.006418176174163818, 0.00609939193725586, 0.00621449613571167, 0.006185887813568115, 0.006146048069000244, 0.006182911872863769, 0.006107135772705078, 0.006168416023254394, 0.006096479892730713, 0.006149824142456055, 0.006103648185729981, 0.00618723201751709, 0.006082496166229248, 0.006205088138580323, 0.006689536094665527, 0.006420256137847901, 0.006267072200775147, 0.006102784156799317, 0.006166528224945069, 0.006072415828704834, 0.00617571210861206, 0.00605679988861084, 0.006178016185760498, 0.006082687854766846, 0.0061550078392028805, 0.006111231803894043, 0.006123104095458984, 0.0061324481964111326, 0.006176447868347168, 0.006261087894439697, 0.006110879898071289, 0.006166528224945069, 0.006123519897460937, 0.006268928050994873, 0.006063392162322998, 0.0061675839424133305, 0.006059967994689942, 0.006246143817901611, 0.0061147198677062985, 0.006091104030609131, 0.0061337919235229495, 0.006010752201080322, 0.006124800205230713, 0.006286431789398193, 0.006368256092071533, 0.006089375972747803, 0.0061931519508361815, 0.006231776237487793, 0.006203680038452148, 0.006143392086029053, 0.006103456020355225, 0.006164768218994141, 0.006110496044158935, 0.006303616046905517, 0.006189824104309082, 0.006205440044403076, 0.006068287849426269, 0.006189184188842773, 0.0060659837722778324, 0.00618064022064209, 0.0061875200271606446, 
0.006358751773834229, 0.006431007862091064, 0.006843999862670898, 0.00625871992111206, 0.0065821118354797365, 0.0071478400230407715, 0.006469567775726318, 0.006234432220458984, 0.006172351837158203, 0.0061931519508361815, 0.006137856006622314, 0.00618720006942749, 0.006184768199920654, 0.006209504127502441, 0.006232096195220947, 0.006479872226715088, 0.006178815841674804, 0.006123519897460937, 0.006371327877044678, 0.006107135772705078, 0.006187007904052734, 0.006090079784393311, 0.006215936183929444, 0.006120192050933838, 0.006217567920684814, 0.006267871856689453, 0.006204351902008056, 0.006539167881011963, 0.006270976066589356, 0.006508543968200684, 0.006148352146148682, 0.006258431911468506, 0.006188672065734863, 0.00620147180557251, 0.00627888011932373, 0.006156832218170166, 0.006233151912689209, 0.006136064052581787, 0.006220479965209961, 0.006156576156616211, 0.006211296081542968, 0.00611737585067749, 0.006194623947143554, 0.00602291202545166, 0.006112927913665771, 0.006146399974822998, 0.0061151041984558105, 0.006158559799194336, 0.0060928001403808595, 0.006301504135131836, 0.006090943813323975, 0.0061699838638305665, 0.006066815853118896, 0.006174719810485839, 0.006055935859680176, 0.006187007904052734, 0.00606447982788086, 0.006151840209960937, 0.006158336162567139, 0.006129280090332031, 0.006156703948974609, 0.006176447868347168, 0.006171072006225586, 0.006100992202758789, 0.0061847038269042965, 0.006065343856811524, 0.006173952102661133, 0.006067071914672852, 0.006179840087890625, 0.006106912136077881, 0.006146048069000244, 0.006094848155975342, 0.006129151821136475, 0.006129824161529541, 0.006090176105499267, 0.0061281280517578125, 0.006105343818664551, 0.006125728130340576, 0.00606822395324707, 0.0066130561828613285, 0.006204991817474365, 0.006161888122558593, 0.006059135913848877, 0.006186783790588379, 0.0061289920806884765, 0.0061200962066650395, 0.006232063770294189, 0.006107135772705078, 0.006258687973022461, 0.006098336219787598, 0.006226784229278565, 0.006090496063232422, 0.006213632106781006, 0.0060878400802612306, 0.00619212818145752, 0.006091968059539795, 0.006199967861175537, 0.006163839817047119, 0.006341248035430908, 0.0061784000396728515, 0.006109600067138672, 0.0062230081558227535, 0.006081376075744629, 0.0062176637649536135, 0.006060256004333496, 0.006157567977905273, 0.0059502081871032715, 0.00615993595123291, 0.006111743927001953, 0.006131648063659668, 0.00614799976348877, 0.006110559940338134, 0.006169343948364258, 0.006100736141204834, 0.006338304042816162, 0.006103551864624023, 0.006189216136932373, 0.006075551986694336, 0.0061651840209960935, 0.00609062385559082, 0.0061809921264648435, 0.00607747220993042, 0.006169568061828614, 0.006133632183074951, 0.006127488136291504, 0.006156415939331055, 0.006125696182250976, 0.006157919883728027, 0.006094751834869384, 0.006148128032684326, 0.006155839920043945, 0.006163360118865967, 0.006143583774566651, 0.006171103954315186, 0.006072159767150879, 0.0064635839462280275, 0.006185311794281006, 0.006172063827514648, 0.0062120318412780765, 0.006110879898071289, 0.0062035517692565914, 0.00612886381149292, 0.006243103981018066, 0.006117055892944336, 0.0061848959922790525, 0.0060993280410766605, 0.006176767826080322, 0.00611737585067749, 0.006160031795501709, 0.00614844799041748, 0.00613702392578125, 0.006187839984893799, 0.006129312038421631, 0.006170976161956787, 0.0060908799171447755, 0.0061765117645263675, 0.006088223934173584, 0.006173408031463623, 0.006073728084564209, 0.006387968063354493, 0.00612172794342041, 
0.006182079792022705, 0.006274943828582764, 0.0061244797706604005, 0.006159808158874512, 0.006105216026306153, 0.006263552188873291, 0.0060795841217041015, 0.0061855678558349605, 0.005990015983581543, 0.006193535804748535, 0.006113279819488526, 0.006154240131378174, 0.006146048069000244, 0.006174719810485839, 0.0061561598777771, 0.0060908479690551754, 0.006169727802276611, 0.006086880207061767, 0.006199359893798828, 0.006068128108978272, 0.006173408031463623, 0.006139904022216797, 0.006191008090972901, 0.006353280067443848, 0.006235712051391602, 0.0062527041435241695, 0.006120672225952148, 0.006187263965606689, 0.006099487781524658, 0.006238207817077636, 0.006153215885162353, 0.006185887813568115, 0.006143167972564697, 0.006203423976898193, 0.006280064105987549, 0.0061437439918518065, 0.006154496192932129, 0.006105088233947754, 0.006163936138153076, 0.006131264209747314, 0.006179808139801025, 0.006122879981994629, 0.0061896958351135255, 0.006090752124786377, 0.0061701440811157224, 0.006070752143859864, 0.006176352024078369, 0.006101408004760742, 0.006425792217254638, 0.0063376641273498536, 0.0060815677642822264, 0.006170559883117676, 0.006099679946899414, 0.006191103935241699, 0.006090271949768067, 0.006195680141448974, 0.0061040959358215335, 0.00618390417098999, 0.006114560127258301, 0.00613427209854126, 0.006146304130554199, 0.006114336013793945, 0.006247392177581787, 0.006116928100585938, 0.0061550397872924805, 0.0061164479255676266, 0.006193600177764893, 0.006071616172790528, 0.006167359828948974, 0.006082592010498047, 0.006178751945495605, 0.005960800170898437, 0.006173439979553223, 0.00612502384185791, 0.006124320030212402, 0.006139904022216797, 0.00616755199432373, 0.006232160091400147, 0.006148064136505127, 0.006178880214691162, 0.00607913589477539, 0.006183135986328125, 0.00608460807800293, 0.006269152164459228, 0.0060917119979858395, 0.006187359809875489, 0.006127359867095947, 0.006103519916534424, 0.006211872100830078, 0.006113279819488526, 0.0061519680023193355, 0.006123744010925293, 0.0061708478927612305, 0.006135168075561523, 0.006221568107604981, 0.006283552169799805, 0.006261119842529297, 0.0061374402046203615, 0.006165088176727295, 0.006596255779266358, 0.006148255825042724, 0.0062156801223754886, 0.006106336116790772, 0.006185919761657715, 0.006072159767150879, 0.0061972479820251464, 0.00611078405380249, 0.006207935810089112, 0.0061868162155151365, 0.006139711856842041, 0.0062073922157287595, 0.00617465591430664, 0.006171167850494385, 0.006221248149871826, 0.006192895889282226, 0.006074656009674073, 0.0061998400688171385, 0.006096320152282715, 0.0061866559982299806, 0.006138656139373779, 0.006159840106964111, 0.006186975955963135, 0.006118080139160156, 0.0062046079635620114, 0.006118207931518555, 0.0061983041763305664, 0.006108128070831299, 0.006189343929290772, 0.0061356801986694335, 0.006256383895874023, 0.006051680088043213, 0.006168831825256347, 0.006290783882141113, 0.006113823890686035, 0.006031551837921143, 0.0060845761299133305, 0.006146016120910644, 0.006089727878570556, 0.00616480016708374, 0.006097536087036132, 0.00617299222946167, 0.006074048042297363, 0.006172063827514648, 0.00606441593170166, 0.006302015781402588, 0.006107391834259034, 0.006141695976257324, 0.006143712043762207, 0.00617091178894043, 0.006176767826080322, 0.006113279819488526, 0.006180895805358887, 0.006103072166442871, 0.006178751945495605, 0.00609497594833374, 0.007206111907958984, 0.006517471790313721, 0.006143807888031006, 0.006250463962554932, 0.006157599925994873, 0.0062350077629089356, 
0.006146080017089843, 0.006153567790985108, 0.006261631965637207, 0.0061265921592712404, 0.006304863929748535, 0.006113279819488526, 0.006237855911254883, 0.006086656093597412, 0.006170048236846924, 0.006059711933135987, 0.00615718412399292, 0.006144192218780518, 0.006252575874328614, 0.006130784034729004, 0.006095359802246094, 0.006111072063446045, 0.0060991039276123045, 0.006112448215484619, 0.006065343856811524, 0.006154399871826172, 0.006076384067535401, 0.006211264133453369, 0.006080512046813965, 0.006166304111480713, 0.006258592128753662, 0.006342527866363526, 0.006160831928253173, 0.006140960216522217, 0.006146304130554199, 0.006195775985717774, 0.006193376064300537, 0.00609609603881836, 0.0061972799301147465, 0.006068927764892578, 0.006184959888458252, 0.006060192108154297, 0.006086368083953858, 0.0061018881797790525, 0.006192607879638672, 0.006165023803710938, 0.006123104095458984, 0.006154496192932129, 0.006146207809448242, 0.0061699519157409665, 0.006165152072906494, 0.0061842241287231444, 0.006288095951080323, 0.0061943359375, 0.006077375888824463, 0.006188320159912109, 0.0061027522087097165, 0.006154816150665283, 0.006172255992889404, 0.0061173119544982914, 0.006173503875732422, 0.0061064958572387695, 0.00617852783203125, 0.006089983940124511, 0.006190080165863037, 0.006077280044555664, 0.006174528121948243, 0.00605785608291626, 0.006174848079681396, 0.006068287849426269, 0.00616428804397583, 0.006087007999420166, 0.006149919986724853, 0.0061521921157836916, 0.006090752124786377, 0.006151840209960937, 0.006093152046203613, 0.006139488220214844, 0.0060829758644104, 0.006145567893981934, 0.006088319778442383, 0.006277984142303467, 0.006068128108978272, 0.006285344123840332, 0.006094912052154541, 0.006166528224945069, 0.006110208034515381, 0.006140927791595459, 0.006203392028808594, 0.006203392028808594, 0.0062008957862854, 0.0060830078125, 0.006169663906097412, 0.006107679843902588, 0.006187136173248291, 0.006308127880096435, 0.006217887878417969, 0.006126751899719238, 0.006160704135894776, 0.006162047863006592, 0.006337503910064698, 0.006180543899536133, 0.006093952178955078, 0.006188064098358154, 0.006094783782958984, 0.006118656158447266, 0.006049471855163574, 0.006165535926818848, 0.006074687957763672, 0.006185632228851318, 0.006135647773742676, 0.006133920192718506, 0.006150144100189209, 0.0061211838722229, 0.006180672168731689, 0.006129983901977539, 0.006166687965393066, 0.006105088233947754, 0.006186304092407227, 0.006081215858459473, 0.006178624153137207, 0.00608076810836792, 0.006188992023468017, 0.006154240131378174, 0.006180384159088135, 0.006191584110260009, 0.006241375923156738, 0.006236991882324219, 0.006129983901977539, 0.006217504024505615, 0.006131487846374512, 0.0063654079437255855, 0.00620963191986084, 0.0062195839881896975, 0.006183008193969727, 0.006141952037811279, 0.006253632068634033, 0.006185919761657715, 0.00619923210144043, 0.006109248161315918, 0.006244448184967041, 0.006119264125823975, 0.006209216117858887, 0.0061075201034545895, 0.006219295978546142, 0.006213856220245361, 0.0061357121467590335, 0.006222015857696534, 0.006146431922912598, 0.006183712005615234, 0.006132863998413086, 0.006207359790802002, 0.006097087860107422, 0.006196544170379638, 0.006095359802246094, 0.006189055919647217, 0.006150144100189209, 0.006182623863220215, 0.006248000144958496, 0.006150496006011963, 0.006195583820343018, 0.00630790376663208, 0.006225855827331543, 0.006108992099761963, 0.006248960018157959, 0.0061047677993774415, 0.006188735961914062, 
0.006148223876953125]",tokens/s,161.55954494263096,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: 
Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,820.404224,538.836992,0.0,136.31488,130.303488,s,1,7.61305126953125,7.61305126953125,0.0,7.61305126953125,7.61305126953125,7.61305126953125,7.61305126953125,[7.61305126953125],,kWh,1.4275024983339788e-05,1.5674343510408715e-06,4.2555589600049015e-06,2.009801829438556e-05,,MB,1300.97152,616.431616,0.0,199.22944,174.868992,s,19,0.21869004917144774,0.011510002587970934,0.00018160719681779692,0.011484319686889649,0.01171907196044922,0.01178399076461792,0.011921470241546632,"[0.011479424476623536, 0.011707615852355958, 0.011455488204956055, 0.011571968078613281, 0.011217503547668458, 0.011325216293334961, 0.011372896194458007, 0.011293439865112305, 0.011428576469421386, 0.011764896392822266, 0.011484319686889649, 0.011572704315185547, 0.011686944007873535, 0.011457183837890624, 0.011955840110778809, 0.011517727851867676, 0.011611328125, 0.011548255920410156, 0.011238719940185547]",tokens/s,22241.524104220858,kWh,3.3857509911542186e-07,3.7338877544917535e-08,1.6881716263805107e-07,5.447311392983905e-07,tokens/kWh,469956610.75980717,MB,1334.02624,629.014528,0.0,211.812352,174.871552,s,19,10.04919241333008,0.5289048638594779,0.01081307573427528,0.532142333984375,0.5409972167968751,0.5432424438476563,0.5443411645507813,"[0.5385031127929687, 0.5404740600585938, 0.5446158447265625, 0.5362752075195313, 0.5186209106445312, 0.5128590087890625, 0.5140167236328125, 0.5120408325195313, 0.5302291259765625, 0.54308984375, 0.5396038818359375, 0.532142333984375, 0.5337122192382813, 0.5311636352539062, 0.5323350219726563, 0.53248876953125, 0.5276687622070313, 0.519033203125, 0.5103199157714844]",tokens/s,119.11404924560905,kWh,1.5312466089701253e-05,1.6886957079988829e-06,5.477035307256724e-06,2.247819710495686e-05,tokens/kWh,2802715.8808971974,,s,1197,10.03867111492157,0.00838652557637558,0.00024285614379801823,0.008392064094543458,0.00866423053741455,0.008724435424804687,0.009046640663146968,"[0.008494943618774413, 0.008554431915283203, 0.00850220775604248, 0.008453791618347167, 0.00840499210357666, 0.008269439697265625, 0.008371711730957031, 0.00825830364227295, 0.008298303604125977, 0.008263104438781739, 0.00842636775970459, 0.00859171199798584, 0.008570528030395508, 0.008701312065124511, 0.008853407859802246, 0.008761311531066894, 0.008678144454956054, 0.008691904067993163, 0.00874060821533203, 0.00877347183227539, 0.0087608003616333, 0.008683360099792481, 0.008637344360351563, 0.008763392448425293, 0.008790016174316406, 0.008724160194396972, 0.008479040145874023, 0.00848025608062744, 0.008513824462890625, 0.00852400016784668, 0.00846998405456543, 0.008427776336669923, 0.008610367774963379, 0.008584447860717773, 0.008534079551696777, 0.008553088188171387, 0.00868329620361328, 0.008611231803894044, 0.008665696144104004, 0.008572928428649903, 0.008632320404052735, 0.008637439727783204, 0.00846566390991211, 
0.00868278408050537, 0.008591391563415528, 0.008669280052185058, 0.008607551574707032, 0.008561183929443359, 0.00834108829498291, 0.008290143966674804, 0.008329792022705078, 0.008384736061096191, 0.00837814426422119, 0.008355104446411132, 0.008231743812561036, 0.008329119682312012, 0.00851318359375, 0.0086080961227417, 0.008519680023193359, 0.008487135887145995, 0.008507200241088867, 0.008498559951782227, 0.008532768249511718, 0.008343392372131348, 0.008534496307373046, 0.008527456283569336, 0.008575360298156738, 0.008595168113708496, 0.008793760299682617, 0.008422911643981934, 0.008410112380981445, 0.008337247848510742, 0.00862003231048584, 0.008366080284118652, 0.00828544044494629, 0.008229632377624512, 0.008322239875793457, 0.008302783966064453, 0.008809087753295898, 0.008593215942382812, 0.008405183792114258, 0.008351743698120117, 0.008375807762145996, 0.00832153606414795, 0.008361184120178222, 0.008985376358032227, 0.008920639991760253, 0.010494400024414062, 0.008705920219421386, 0.008582304000854492, 0.008684127807617188, 0.008419712066650391, 0.008443103790283204, 0.008315072059631348, 0.008276576042175294, 0.008765439987182617, 0.008719615936279297, 0.008673791885375976, 0.008613344192504882, 0.00845680046081543, 0.008535455703735352, 0.008646592140197754, 0.008525856018066406, 0.008556639671325684, 0.008461248397827149, 0.008561568260192871, 0.008408032417297363, 0.008433088302612305, 0.008592063903808594, 0.008549280166625976, 0.008639360427856445, 0.008679424285888672, 0.008761343955993652, 0.008681471824645997, 0.008429023742675781, 0.008667263984680176, 0.008699359893798829, 0.008592320442199706, 0.008466400146484376, 0.008558624267578125, 0.008637855529785157, 0.008664704322814942, 0.008542495727539063, 0.008508095741271973, 0.008453696250915528, 0.00863481616973877, 0.008277312278747558, 0.008440447807312011, 0.008556608200073243, 0.008725536346435548, 0.008632351875305175, 0.00854700756072998, 0.008460288047790527, 0.008528223991394043, 0.008519583702087403, 0.008474176406860351, 0.008389056205749512, 0.008365599632263184, 0.00833788776397705, 0.008382464408874512, 0.008460288047790527, 0.008570879936218261, 0.008620320320129394, 0.008506464004516602, 0.008487104415893554, 0.008458687782287597, 0.008457599639892578, 0.008495648384094239, 0.008525919914245606, 0.008576255798339844, 0.008644895553588867, 0.008536352157592773, 0.008601792335510254, 0.008631584167480468, 0.008556768417358399, 0.008665599822998048, 0.008702112197875976, 0.008744095802307129, 0.008753952026367187, 0.008718239784240722, 0.008787967681884766, 0.008678560256958007, 0.00874182415008545, 0.00867039966583252, 0.008681504249572754, 0.008665056228637695, 0.00866335964202881, 0.008677696228027344, 0.00868182373046875, 0.008589056015014649, 0.00867852783203125, 0.00861888027191162, 0.008644607543945313, 0.00870195198059082, 0.008727871894836425, 0.008698559761047364, 0.00870195198059082, 0.009248767852783203, 0.009025504112243652, 0.008736800193786622, 0.008726176261901855, 0.00868556785583496, 0.008642911911010742, 0.00870620822906494, 0.009041728019714356, 0.008904735565185546, 0.008775456428527832, 0.008720000267028809, 0.008771871566772461, 0.00871014404296875, 0.008696063995361328, 0.00869155216217041, 0.008664383888244628, 0.008682080268859863, 0.008831999778747558, 0.008776448249816894, 0.008539392471313476, 0.008375295639038086, 0.008302592277526855, 0.008401023864746093, 0.008664128303527831, 0.008725567817687989, 0.008625887870788574, 0.008675359725952148, 0.008650239944458007, 
0.008677568435668945, 0.008613439559936523, 0.008702719688415527, 0.008574975967407226, 0.008621279716491698, 0.008595775604248047, 0.008605695724487305, 0.008604127883911133, 0.008748576164245606, 0.008665151596069336, 0.008636832237243652, 0.008650752067565918, 0.008603679656982421, 0.008587231636047363, 0.008495103836059571, 0.008579360008239747, 0.008604864120483398, 0.009265695571899414, 0.008612095832824707, 0.008619392395019531, 0.008657535552978516, 0.008701760292053223, 0.008564672470092773, 0.00857430362701416, 0.00842409610748291, 0.008500384330749513, 0.008469344139099121, 0.008423423767089844, 0.008378368377685547, 0.008580960273742676, 0.008433088302612305, 0.008338144302368164, 0.008407039642333984, 0.008372127532958984, 0.00821615982055664, 0.008191519737243653, 0.008108511924743652, 0.008063712120056153, 0.008085280418395997, 0.008115455627441407, 0.008065440177917481, 0.008105376243591308, 0.008147551536560058, 0.008284607887268066, 0.00817347240447998, 0.0081693115234375, 0.00812668800354004, 0.00793398380279541, 0.008240287780761718, 0.008104479789733887, 0.00815494441986084, 0.008122112274169922, 0.008190688133239745, 0.00826534366607666, 0.00835804843902588, 0.00839897632598877, 0.00841327953338623, 0.008282112121582032, 0.008307840347290039, 0.008229375839233399, 0.008247679710388183, 0.008280223846435546, 0.008222623825073242, 0.008393856048583984, 0.008233823776245118, 0.008175392150878907, 0.008204480171203614, 0.008216575622558593, 0.008282112121582032, 0.00819814395904541, 0.00820633602142334, 0.008209471702575683, 0.008270784378051758, 0.008218624114990235, 0.008216575622558593, 0.00817955207824707, 0.008144831657409668, 0.008181983947753907, 0.00817302417755127, 0.008202783584594727, 0.0082227201461792, 0.008191712379455566, 0.008132160186767578, 0.008141535758972168, 0.008113920211791992, 0.008141056060791015, 0.008105183601379394, 0.00810086441040039, 0.00831056022644043, 0.008466431617736817, 0.008368127822875977, 0.008372223854064942, 0.00818518352508545, 0.008166239738464356, 0.008470335960388183, 0.008177984237670898, 0.008171199798583984, 0.008241312026977539, 0.008387776374816894, 0.00825171184539795, 0.008206687927246093, 0.008347200393676759, 0.008141247749328614, 0.008155136108398438, 0.008155136108398438, 0.008152607917785645, 0.008132863998413085, 0.008151264190673828, 0.008214495658874511, 0.008271552085876465, 0.007925824165344238, 0.0082161283493042, 0.00814732837677002, 0.008153087615966797, 0.008160479545593262, 0.008204256057739258, 0.008149824142456054, 0.008139007568359375, 0.008228384017944335, 0.008173567771911621, 0.00827609634399414, 0.008199616432189942, 0.008190624237060547, 0.008163328170776368, 0.008200287818908691, 0.008191904067993165, 0.008181759834289551, 0.008136927604675293, 0.008097760200500488, 0.008059743881225587, 0.008086496353149415, 0.008039615631103515, 0.008180543899536133, 0.008084511756896973, 0.007993728160858153, 0.00803110408782959, 0.008066975593566894, 0.008068927764892578, 0.008068832397460937, 0.008046815872192383, 0.008131744384765625, 0.008084447860717773, 0.008087488174438476, 0.008067071914672852, 0.008054688453674316, 0.008061280250549316, 0.00807910442352295, 0.008076959609985352, 0.008091456413269043, 0.008081536293029785, 0.008075679779052734, 0.008077312469482421, 0.00813436794281006, 0.008141087532043458, 0.008122400283813476, 0.00809721565246582, 0.00804099178314209, 0.008039775848388673, 0.008043519973754883, 0.008076959609985352, 0.008069024085998536, 0.008052831649780273, 0.008175135612487793, 
0.008090080261230469, 0.008077312469482421, 0.008122015953063964, 0.008114336013793945, 0.008249823570251464, 0.008189984321594238, 0.008100831985473633, 0.008126432418823243, 0.008082304000854492, 0.009402239799499511, 0.007903232097625732, 0.008114144325256348, 0.008132672309875489, 0.00810812759399414, 0.008242207527160645, 0.008098208427429199, 0.008137056350708008, 0.008126560211181641, 0.008072768211364746, 0.008036800384521484, 0.008089599609375, 0.008121600151062012, 0.00825216007232666, 0.008964096069335938, 0.008062975883483887, 0.008032256126403809, 0.008095744132995606, 0.008026111602783203, 0.008275967597961426, 0.008295840263366699, 0.00834982395172119, 0.008071359634399413, 0.008038687705993652, 0.008054783821105957, 0.008056511878967285, 0.008154623985290528, 0.008125247955322266, 0.00809779167175293, 0.008105183601379394, 0.008099776268005371, 0.00816982364654541, 0.00807372760772705, 0.008065024375915527, 0.008042336463928223, 0.008059264183044434, 0.00822156810760498, 0.008054880142211914, 0.008358847618103028, 0.008089471817016601, 0.008194047927856446, 0.008203392028808593, 0.00820473575592041, 0.008619647979736328, 0.008153247833251952, 0.008139583587646484, 0.008170495986938477, 0.008090527534484863, 0.008229951858520507, 0.008162272453308106, 0.008069024085998536, 0.00805497646331787, 0.008066880226135253, 0.008130559921264649, 0.008103936195373536, 0.008119968414306641, 0.008254976272583007, 0.008331711769104005, 0.008075519561767578, 0.008122719764709473, 0.008195520401000977, 0.008106207847595215, 0.00810371208190918, 0.00812060832977295, 0.007834527969360352, 0.00811564826965332, 0.00811411190032959, 0.00819264030456543, 0.008082847595214843, 0.008091327667236328, 0.008122528076171875, 0.008061759948730469, 0.008077247619628906, 0.008159232139587403, 0.008091648101806641, 0.008099072456359864, 0.008093695640563964, 0.008315648078918457, 0.008138751983642578, 0.008316032409667969, 0.008264575958251953, 0.00809603214263916, 0.008086912155151367, 0.008124768257141113, 0.008185855865478516, 0.008180928230285645, 0.008165439605712891, 0.00816204833984375, 0.00824892807006836, 0.008134688377380371, 0.008129183769226073, 0.008077343940734864, 0.008210111618041992, 0.008156864166259766, 0.008077919960021973, 0.008053952217102051, 0.008101568222045899, 0.008074080467224121, 0.008093152046203613, 0.00804304027557373, 0.008154144287109375, 0.008012639999389648, 0.008216064453125, 0.008065664291381835, 0.008099583625793457, 0.008048895835876466, 0.008202239990234375, 0.008180800437927247, 0.008065216064453125, 0.008073023796081543, 0.00810694408416748, 0.008136704444885253, 0.00812179183959961, 0.008118847846984863, 0.008189248085021974, 0.008045248031616211, 0.00809596824645996, 0.008100831985473633, 0.00809177589416504, 0.008188896179199219, 0.008216287612915039, 0.008095744132995606, 0.008097951889038085, 0.008046272277832032, 0.008050848007202148, 0.00812172794342041, 0.00805337619781494, 0.007907616138458253, 0.008196512222290038, 0.008311840057373046, 0.0083538236618042, 0.008358271598815917, 0.008265631675720215, 0.00826809597015381, 0.009249024391174317, 0.008733792304992676, 0.008923135757446288, 0.008488639831542969, 0.009190624237060546, 0.00858512020111084, 0.008515423774719238, 0.008507072448730469, 0.008396160125732422, 0.008440799713134765, 0.008454015731811523, 0.008462495803833008, 0.008396736145019531, 0.008392224311828613, 0.008395584106445312, 0.008340576171875, 0.008210495948791504, 0.00810604763031006, 0.008087615966796875, 0.008153471946716308, 
0.008199647903442383, 0.00840937614440918, 0.008132287979125977, 0.008108799934387206, 0.008099648475646972, 0.00851353645324707, 0.008445952415466309, 0.008763680458068848, 0.00838422393798828, 0.008257280349731446, 0.00823526382446289, 0.008739871978759766, 0.008511551856994629, 0.00854099178314209, 0.008619872093200684, 0.008447711944580077, 0.00843132781982422, 0.008383296012878417, 0.008308735847473145, 0.008295743942260743, 0.008380736351013184, 0.008507776260375976, 0.008349696159362792, 0.008369600296020508, 0.008402976036071777, 0.008352288246154785, 0.008359935760498047, 0.00830463981628418, 0.008277055740356445, 0.00829148769378662, 0.008273695945739746, 0.00830025577545166, 0.008403488159179687, 0.008421119689941406, 0.008525856018066406, 0.00857699203491211, 0.008585375785827637, 0.008619232177734375, 0.008625856399536133, 0.009655360221862794, 0.010676032066345214, 0.009283647537231446, 0.008781824111938476, 0.008577024459838867, 0.00846553611755371, 0.008500096321105956, 0.008472576141357421, 0.008386560440063476, 0.008432671546936036, 0.008465120315551758, 0.008464608192443848, 0.008439840316772461, 0.008429568290710449, 0.008476672172546386, 0.008441344261169433, 0.008525759696960448, 0.008387136459350585, 0.008493023872375488, 0.00841919994354248, 0.00835804843902588, 0.008429727554321288, 0.008481760025024414, 0.008506272315979004, 0.008697664260864258, 0.008555904388427734, 0.008813183784484863, 0.00862019157409668, 0.008683520317077637, 0.00855027198791504, 0.008607040405273438, 0.00855942440032959, 0.008544544219970703, 0.008546112060546875, 0.00847862434387207, 0.008476287841796874, 0.008488863945007323, 0.008533503532409668, 0.008681856155395507, 0.008577631950378419, 0.008681471824645997, 0.008715871810913087, 0.008690208435058593, 0.00865993595123291, 0.008575008392333985, 0.008704895973205567, 0.008826272010803222, 0.008624320030212402, 0.008694175720214845, 0.008593664169311524, 0.008677120208740235, 0.008635711669921876, 0.008637120246887207, 0.008462271690368652, 0.008461664199829102, 0.008426207542419434, 0.008439519882202148, 0.008438048362731933, 0.008407103538513183, 0.008375967979431152, 0.008317055702209473, 0.008514495849609375, 0.008512448310852051, 0.0085032958984375, 0.008426976203918458, 0.008401408195495605, 0.009185312271118164, 0.008466431617736817, 0.008540160179138183, 0.008886272430419923, 0.008466560363769532, 0.008400704383850098, 0.008452159881591797, 0.008339679718017579, 0.008359871864318847, 0.00837820816040039, 0.008435711860656739, 0.008386560440063476, 0.008382719993591308, 0.008420991897583007, 0.008697376251220704, 0.008702591896057129, 0.008621408462524413, 0.008647616386413575, 0.00864019203186035, 0.009003007888793945, 0.008646656036376953, 0.008812031745910644, 0.009284095764160156, 0.00844166374206543, 0.00833574390411377, 0.008424384117126465, 0.008495360374450684, 0.008513407707214355, 0.008518400192260743, 0.008447967529296874, 0.008633631706237793, 0.008731136322021485, 0.008755359649658203, 0.008640607833862305, 0.008548192024230957, 0.008549951553344726, 0.008616543769836426, 0.008582943916320801, 0.0085731201171875, 0.00856383991241455, 0.008561823844909668, 0.008647839546203613, 0.008518239974975587, 0.008648287773132325, 0.008722847938537597, 0.008632320404052735, 0.00857744026184082, 0.008469632148742676, 0.008495712280273437, 0.008490240097045899, 0.00846083164215088, 0.00856054401397705, 0.008461503982543946, 0.008510111808776855, 0.008401247978210449, 0.008325119972229005, 0.008392704010009766, 
0.008228128433227538, 0.008389216423034668, 0.008337535858154298, 0.008421183586120606, 0.008460479736328125, 0.008443648338317872, 0.008497407913208008, 0.008523776054382324, 0.008622079849243165, 0.008623392105102538, 0.008597215652465821, 0.008542655944824219, 0.00858512020111084, 0.008532671928405762, 0.008549983978271485, 0.008505727767944336, 0.008466431617736817, 0.008376319885253907, 0.008376319885253907, 0.008288031578063964, 0.008309280395507813, 0.008232959747314453, 0.008183712005615235, 0.008175040245056152, 0.00824124813079834, 0.008381888389587403, 0.008299039840698243, 0.008323360443115234, 0.008450048446655273, 0.008415103912353516, 0.00838259220123291, 0.0084203519821167, 0.008403743743896484, 0.008348992347717285, 0.008329631805419922, 0.008552063941955566, 0.008504192352294921, 0.008407039642333984, 0.008421088218688965, 0.008325152397155762, 0.008347904205322266, 0.008456192016601562, 0.00851683235168457, 0.008429535865783691, 0.008475616455078125, 0.00844489574432373, 0.008356608390808106, 0.00827353572845459, 0.008327936172485352, 0.00838748836517334, 0.008438624382019044, 0.008525823593139649, 0.008623552322387696, 0.008612416267395019, 0.008637887954711914, 0.008657471656799316, 0.008566143989562989, 0.008583456039428712, 0.008572735786437989, 0.008459967613220215, 0.008438176155090332, 0.008478431701660157, 0.008495840072631836, 0.008333312034606934, 0.008370016098022461, 0.008314080238342285, 0.008443072319030761, 0.008494560241699218, 0.008628512382507324, 0.008621088027954102, 0.00857596778869629, 0.008589247703552245, 0.008634431838989258, 0.008631936073303223, 0.008737088203430176, 0.008588959693908691, 0.008492639541625976, 0.008485312461853028, 0.008708383560180664, 0.00857532787322998, 0.008574239730834961, 0.008472288131713867, 0.00844876766204834, 0.00838377571105957, 0.0084203519821167, 0.008375295639038086, 0.008321375846862792, 0.008344223976135254, 0.008432703971862793, 0.00848095989227295, 0.008462816238403321, 0.008465791702270508, 0.008736448287963867, 0.008639391899108886, 0.008495455741882324, 0.008713631629943848, 0.008546624183654785, 0.008527839660644531, 0.00841113567352295, 0.008447999954223634, 0.008502304077148437, 0.008498368263244628, 0.008488960266113281, 0.008418848037719726, 0.008448351860046386, 0.00846771240234375, 0.008366304397583008, 0.008225215911865234, 0.008332768440246581, 0.008260128021240234, 0.00821065616607666, 0.008216352462768554, 0.008331583976745606, 0.008296128273010253, 0.008349216461181641, 0.008552927970886231, 0.008523776054382324, 0.008633791923522949, 0.008466591835021972, 0.008401151657104491, 0.008351903915405273, 0.008441632270812988, 0.008443584442138672, 0.008278528213500976, 0.008304512023925781, 0.008384672164916993, 0.008364383697509765, 0.008419103622436523, 0.008309087753295898, 0.008338720321655273, 0.008374048233032226, 0.008342464447021485, 0.008431391716003418, 0.008511232376098634, 0.0085664644241333, 0.008591808319091798, 0.008589311599731446, 0.008575039863586426, 0.00846777629852295, 0.008489952087402344, 0.00829849624633789, 0.008344608306884766, 0.008388992309570312, 0.008260191917419434, 0.008179488182067872, 0.008224479675292969, 0.008326656341552734, 0.008406335830688477, 0.008259391784667969, 0.008454015731811523, 0.008574496269226075, 0.008592000007629394, 0.00860758399963379, 0.008499360084533692, 0.0084551362991333, 0.008482912063598632, 0.008258432388305663, 0.008239359855651855, 0.008349311828613282, 0.008424896240234375, 0.008304736137390138, 0.008396672248840333, 
0.008435744285583496, 0.008407903671264648, 0.008439552307128907, 0.008664671897888183, 0.008420063972473144, 0.008453503608703613, 0.00835206413269043, 0.008339743614196777, 0.00840828800201416, 0.008406815528869628, 0.00844057559967041, 0.008377375602722168, 0.0084716157913208, 0.008441727638244629, 0.008398847579956055, 0.008403039932250977, 0.00840617561340332, 0.008370944023132324, 0.008361984252929687, 0.008352031707763672, 0.008459391593933106, 0.00863702392578125, 0.008552448272705078, 0.008459712028503418, 0.008659808158874512, 0.008378080368041992, 0.008409088134765624, 0.008456416130065918, 0.008393216133117675, 0.008346624374389648, 0.008403231620788574, 0.008367072105407715, 0.008506591796875, 0.008403583526611328, 0.00850268840789795, 0.008497664451599121, 0.008424639701843262, 0.008418111801147462, 0.008357728004455566, 0.008347488403320313, 0.008354111671447754, 0.008497376441955567, 0.00858518409729004, 0.008480640411376953, 0.008502528190612793, 0.00859615993499756, 0.008425791740417481, 0.00847225570678711, 0.008357760429382324, 0.00839078426361084, 0.008423423767089844, 0.008431615829467774, 0.008435359954833984, 0.008399200439453126, 0.008546367645263672, 0.008343487739562989, 0.008359935760498047, 0.008400896072387695, 0.008378368377685547, 0.008556544303894043, 0.008491007804870606, 0.008521727561950684, 0.008461407661437988, 0.008414175987243653, 0.008587167739868165, 0.008509183883666992, 0.008489248275756835, 0.00840617561340332, 0.008340319633483887, 0.008299776077270508, 0.00828223991394043, 0.008330880165100097, 0.008350111961364747, 0.008479328155517578, 0.008455679893493653, 0.008424063682556153, 0.009164544105529785, 0.008439840316772461, 0.008439295768737793, 0.008466783523559571, 0.00865068817138672, 0.00843500804901123, 0.00838758373260498, 0.008347071647644044, 0.008278592109680176, 0.008310943603515626, 0.008465632438659668, 0.008358816146850585, 0.008373984336853027, 0.008425472259521484, 0.008328319549560547, 0.008501919746398925, 0.00840230369567871, 0.008464927673339845, 0.00837446403503418, 0.008442208290100098, 0.008392064094543458, 0.008366304397583008, 0.008454560279846191, 0.008398688316345215, 0.008505120277404785, 0.008361824035644531, 0.008561023712158203, 0.008441951751708985, 0.008533503532409668, 0.008517184257507325, 0.00857583999633789, 0.008454143524169922, 0.008495136260986328, 0.008396767616271972, 0.008410623550415039, 0.008471039772033692, 0.008555520057678222, 0.008423680305480958, 0.00847100830078125, 0.008415807723999023, 0.008410112380981445, 0.008590368270874024, 0.008539999961853027, 0.008726304054260253, 0.008527935981750489, 0.008439807891845704, 0.00841932773590088, 0.008439807891845704, 0.008621791839599609, 0.008397088050842286, 0.008586624145507812, 0.008335264205932617, 0.008326016426086425, 0.00832051181793213, 0.008386303901672363, 0.0083372163772583, 0.008313887596130371, 0.0082772798538208, 0.008276384353637695, 0.00830675220489502, 0.008388607978820802, 0.008505151748657227, 0.008503487586975098, 0.008548352241516113, 0.008381792068481445, 0.008345824241638183, 0.008401344299316407, 0.008398271560668946, 0.008319552421569825, 0.008643967628479004, 0.008421728134155274, 0.008395008087158203, 0.008689023971557618, 0.008560864448547364, 0.008410655975341796, 0.008335807800292968, 0.008372544288635254, 0.008464927673339845, 0.00847657585144043, 0.008560223579406738, 0.008482912063598632, 0.008475168228149414, 0.008355104446411132, 0.008311871528625489, 0.00833296012878418, 0.008300543785095215, 0.008312704086303711, 
0.008259872436523437, 0.008268768310546876, 0.008278911590576171, 0.008367456436157227, 0.008387104034423828, 0.008419039726257325, 0.008344223976135254, 0.008255231857299804, 0.008246463775634766, 0.008223615646362304, 0.008238783836364746, 0.008180224418640136, 0.008443167686462402, 0.008312383651733398, 0.00825011157989502, 0.00835331153869629, 0.008348287582397461, 0.008335359573364258, 0.008311903953552247, 0.00817859172821045, 0.00818169593811035, 0.00821168041229248, 0.008178624153137207, 0.008257439613342285, 0.00823481559753418, 0.008363295555114746, 0.008307071685791015, 0.008293184280395507, 0.008281760215759278, 0.00821401596069336, 0.008241312026977539, 0.008414624214172363, 0.008360960006713868, 0.008234304428100586, 0.008252351760864259, 0.008306431770324706, 0.008354144096374512, 0.008498751640319824, 0.008904800415039063, 0.008599552154541015, 0.008613887786865235, 0.008480095863342285, 0.00843228816986084, 0.00841113567352295, 0.008386624336242677, 0.008599488258361817, 0.008525823593139649, 0.008445952415466309, 0.008523008346557618, 0.008560992240905762, 0.008442272186279296, 0.008482815742492676, 0.008431615829467774, 0.008332415580749512, 0.00853433609008789, 0.008550975799560547, 0.008726271629333496, 0.008485119819641113, 0.008429568290710449, 0.00828384017944336, 0.008256863594055176, 0.008285152435302734, 0.00823852825164795, 0.008297023773193359, 0.008253439903259278, 0.00828006362915039, 0.008265727996826172, 0.008256671905517578, 0.00839129638671875, 0.008560416221618652, 0.008515647888183594, 0.008474687576293945, 0.00841536045074463, 0.00841097640991211, 0.008351327896118164, 0.008311360359191895, 0.008370368003845215, 0.00831283187866211, 0.008327487945556641, 0.00832051181793213, 0.008195808410644531, 0.008200672149658203, 0.008290495872497559, 0.008173376083374024, 0.008159168243408203, 0.008128640174865722, 0.008150976181030273, 0.008113856315612794, 0.008140831947326661, 0.00818819236755371, 0.008112128257751466, 0.008302335739135741, 0.008132863998413085, 0.0081080322265625, 0.008071392059326173, 0.008060192108154297, 0.00812399959564209, 0.008067263603210448, 0.00810700798034668, 0.008060416221618653, 0.008117792129516601, 0.008080127716064454, 0.008087488174438476, 0.008141823768615723, 0.008086527824401855, 0.008091648101806641, 0.008071167945861817, 0.0081080322265625, 0.008070560455322265, 0.008131168365478515, 0.008046688079833985, 0.00807753562927246, 0.008046272277832032, 0.008170528411865234, 0.00805945587158203, 0.008034720420837402, 0.007895008087158203, 0.008077343940734864, 0.008042495727539062, 0.008052576065063476, 0.008064288139343262, 0.008016736030578613, 0.008074655532836914, 0.008026752471923828, 0.008025535583496094, 0.008054368019104004, 0.008076255798339843, 0.008087552070617676, 0.00808569622039795, 0.008107135772705078, 0.008108480453491211, 0.008087807655334473, 0.008091648101806641, 0.008058527946472167, 0.007999519824981689, 0.008085824012756348, 0.008009407997131348, 0.00812012767791748, 0.008181952476501464, 0.008057151794433594, 0.008087552070617676, 0.008160575866699219, 0.00809993553161621, 0.008002143859863281, 0.00806499195098877, 0.00806704044342041, 0.00807753562927246, 0.00820412826538086, 0.008550496101379394, 0.00817471981048584, 0.008040287971496581, 0.008038592338562012, 0.008028927803039551, 0.008175616264343261, 0.008046367645263671, 0.008103167533874512, 0.008098431587219238, 0.008214879989624023, 0.008232959747314453, 0.008083871841430664, 0.008050432205200195, 0.008022015571594238, 0.008082528114318848, 
0.008179807662963867, 0.008049311637878418, 0.0080382080078125, 0.00802947235107422, 0.00803273582458496, 0.008048224449157714, 0.008100704193115234, 0.008042816162109376, 0.00806060791015625, 0.008042112350463867, 0.008036800384521484, 0.008159199714660644, 0.008459520339965821, 0.008213215827941895, 0.0081014404296875, 0.008068703651428222]",tokens/s,119.2388899184842,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3846, in from_pretrained hf_quantizer.preprocess_model( File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 182, in preprocess_model return self._process_model_before_weight_loading(model, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_gptq.py"", line 76, in _process_model_before_weight_loading model = self.optimum_quantizer.convert_model(model) File ""/usr/local/lib/python3.10/dist-packages/optimum/gptq/quantizer.py"", line 218, in convert_model self.block_name_to_quantize = get_block_name_with_pattern(model) File 
""/usr/local/lib/python3.10/dist-packages/optimum/gptq/utils.py"", line 77, in get_block_name_with_pattern raise ValueError(""Block pattern could not be match. Pass `block_name_to_quantize` argument in `quantize_model`"") ValueError: Block pattern could not be match. Pass `block_name_to_quantize` argument in `quantize_model` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,933.347328,641.59744,0.0,239.075328,225.530368,s,1,7.75225439453125,7.75225439453125,0.0,7.75225439453125,7.75225439453125,7.75225439453125,7.75225439453125,[7.75225439453125],,kWh,2.1314118991654142e-05,2.3431838227763432e-06,5.978060338002189e-06,2.9635363152432676e-05,,MB,1327.538176,733.872128,0.0,316.669952,285.824512,s,10,0.2095629138946533,0.02095629138946533,0.0003355476054913902,0.020923359870910645,0.0213248104095459,0.02144235725402832,0.02153639472961426,"[0.021559904098510742, 0.021187456130981445, 0.021298688888549806, 0.02093552017211914, 0.021255840301513673, 0.02059881591796875, 0.02059600067138672, 0.02091119956970215, 0.020631168365478517, 0.020588319778442384]",tokens/s,12215.901909470991,kWh,6.150000256228325e-07,6.78235697591607e-08,3.6823713669473087e-07,1.051060732076724e-06,tokens/kWh,243563470.87021875,MB,1360.592896,775.815168,0.0,358.612992,297.747968,s,10,10.140543029785157,1.0140543029785156,0.013402947383342408,1.011234375,1.0319357666015625,1.0376974365234375,1.0423067724609374,"[1.0306553955078126, 1.0434591064453125, 1.0155003662109374, 1.016322509765625, 1.0176226806640625, 1.0064497680664062, 1.005771484375, 0.99901904296875, 0.9987742919921875, 1.0069683837890624]",tokens/s,62.1268504210812,kWh,2.8881392158960423e-05,3.1850991709041666e-06,1.0540021589906274e-05,4.260651291977086e-05,tokens/kWh,1478647.1758116088,,s,630,10.13371453285218,0.016085261163257418,0.00040710155788790703,0.016052288055419923,0.01638685359954834,0.016480169296264648,0.017334088726043702,"[0.016094112396240236, 0.016222047805786132, 0.01616502380371094, 0.016066591262817384, 0.016148448944091797, 0.01619353675842285, 0.016187583923339844, 0.01634409523010254, 0.016257087707519532, 0.016245471954345704, 0.016194879531860353, 0.016239295959472655, 0.01643519973754883, 0.016477888107299804, 0.016200000762939454, 0.01619264030456543, 0.01641152000427246, 0.016271583557128905, 0.01679462432861328, 0.016325408935546876, 0.016183296203613282, 0.01615667152404785, 0.016242687225341796, 0.016210079193115234, 0.01617078399658203, 0.016316320419311522, 0.016445215225219727, 0.016200063705444336, 0.016270591735839845, 0.01630080032348633, 0.016471584320068358, 0.016253087997436525, 0.01628003120422363, 0.01634102439880371, 0.016249887466430663, 0.01626316833496094, 0.016433120727539063, 0.016796480178833006, 0.01701683235168457, 0.01810867118835449, 0.01663974380493164, 0.016328704833984374, 
0.016355552673339845, 0.0163623046875, 0.016300895690917968, 0.016260639190673828, 0.016411231994628905, 0.016441343307495117, 0.016393247604370116, 0.01627872085571289, 0.01618511962890625, 0.016451583862304688, 0.016192672729492187, 0.016525888442993165, 0.016172479629516602, 0.016311391830444336, 0.016426368713378905, 0.016268800735473633, 0.01639718437194824, 0.016443584442138674, 0.016186464309692384, 0.016227039337158203, 0.016199680328369142, 0.016230655670166017, 0.016246528625488282, 0.016302080154418946, 0.016764255523681642, 0.016333280563354494, 0.01635862350463867, 0.01627606391906738, 0.01677289581298828, 0.016486431121826173, 0.016554527282714844, 0.016316448211669922, 0.01637276840209961, 0.01645257568359375, 0.01650092887878418, 0.016480064392089842, 0.016543519973754882, 0.01631439971923828, 0.016381439208984376, 0.01638675117492676, 0.016500991821289064, 0.01622809600830078, 0.016480255126953124, 0.01636083221435547, 0.01638777542114258, 0.016263263702392578, 0.016325471878051757, 0.01637798309326172, 0.016432640075683593, 0.016375520706176757, 0.016234592437744142, 0.016339616775512697, 0.016374847412109376, 0.01672435188293457, 0.01631043243408203, 0.016379968643188476, 0.016306240081787108, 0.0164003849029541, 0.016419008255004884, 0.016343008041381835, 0.01629801559448242, 0.016381952285766603, 0.016359392166137694, 0.016267295837402343, 0.01620992088317871, 0.016284767150878905, 0.016356319427490235, 0.01653753662109375, 0.01734249687194824, 0.022081504821777342, 0.019544256210327147, 0.01731350326538086, 0.016452735900878906, 0.016309215545654298, 0.016300256729125977, 0.016347232818603515, 0.016237663269042968, 0.016392959594726562, 0.016369504928588866, 0.016416767120361327, 0.016363296508789062, 0.016330976486206055, 0.01639017677307129, 0.016230367660522462, 0.01631350326538086, 0.016311071395874024, 0.016291839599609375, 0.01632467269897461, 0.01623664093017578, 0.016797536849975585, 0.016385887145996095, 0.0163985595703125, 0.016385759353637695, 0.016298208236694336, 0.016180511474609374, 0.01680656051635742, 0.01630009651184082, 0.016334367752075196, 0.016329216003417968, 0.01630156707763672, 0.016232032775878907, 0.016319488525390623, 0.016334047317504884, 0.016409248352050782, 0.01627907180786133, 0.016289247512817382, 0.016260095596313476, 0.016310272216796876, 0.01622425651550293, 0.016074560165405274, 0.016306367874145508, 0.015851519584655763, 0.015699968338012696, 0.015715904235839843, 0.016081184387207032, 0.015893919944763184, 0.01576848030090332, 0.015971936225891114, 0.01565644836425781, 0.015860735893249513, 0.015824447631835936, 0.015722816467285156, 0.015761280059814455, 0.016031423568725587, 0.01604332733154297, 0.015805343627929687, 0.01580246353149414, 0.015703807830810546, 0.015839648246765137, 0.015648320198059083, 0.016083328247070313, 0.01572876834869385, 0.015712063789367677, 0.0158822078704834, 0.01594598388671875, 0.015972096443176268, 0.01639638328552246, 0.016217151641845704, 0.0169564151763916, 0.016164703369140623, 0.01599897575378418, 0.01589616012573242, 0.015943552017211916, 0.016238176345825195, 0.0157838716506958, 0.016235519409179687, 0.0159617919921875, 0.016155040740966797, 0.016097280502319337, 0.01626963233947754, 0.01640166473388672, 0.01603014373779297, 0.016038976669311523, 0.015973312377929687, 0.015994848251342772, 0.015857791900634764, 0.016250112533569335, 0.01613260841369629, 0.01614614486694336, 0.016179616928100587, 0.01612771224975586, 0.016092992782592772, 0.016040447235107422, 0.016068607330322265, 
0.015980544090270995, 0.01588748836517334, 0.01607769584655762, 0.01604198455810547, 0.01620966339111328, 0.01593171215057373, 0.015902815818786623, 0.015880031585693358, 0.016021120071411134, 0.016130016326904296, 0.016091327667236328, 0.016056543350219728, 0.015988736152648925, 0.016211551666259767, 0.01629635238647461, 0.016115007400512697, 0.016304832458496094, 0.01636147117614746, 0.016125120162963868, 0.01608787155151367, 0.016134143829345703, 0.01615398406982422, 0.016235136032104493, 0.016295040130615234, 0.016067455291748047, 0.016095232009887696, 0.016019487380981447, 0.016103391647338868, 0.015913151741027833, 0.01583894443511963, 0.016193632125854493, 0.01617001533508301, 0.016028640747070312, 0.016252927780151367, 0.015986687660217287, 0.016123392105102538, 0.016001535415649415, 0.016170047760009767, 0.016024511337280275, 0.016027456283569337, 0.016011808395385744, 0.01634828758239746, 0.01625347137451172, 0.016037567138671875, 0.016138816833496095, 0.017370880126953123, 0.01664143943786621, 0.016497503280639647, 0.016160127639770507, 0.016234880447387696, 0.016232448577880858, 0.016258655548095705, 0.016349599838256835, 0.016463872909545898, 0.01635327911376953, 0.016359424591064452, 0.016261215209960937, 0.01638800048828125, 0.01622425651550293, 0.01626316833496094, 0.01624233627319336, 0.016144256591796875, 0.016734687805175782, 0.016060543060302735, 0.01620569610595703, 0.0161814079284668, 0.016364959716796874, 0.016312768936157226, 0.016234495162963866, 0.016162303924560546, 0.01614633560180664, 0.01610601615905762, 0.016211360931396485, 0.016194175720214844, 0.016065696716308593, 0.0162108154296875, 0.01618124771118164, 0.01616841506958008, 0.016147071838378907, 0.01611996841430664, 0.016130943298339844, 0.016155519485473634, 0.016011007308959962, 0.016102783203125, 0.01605235290527344, 0.01587715244293213, 0.01606825637817383, 0.01589891242980957, 0.01620560073852539, 0.01588742446899414, 0.0158373441696167, 0.015991583824157714, 0.015842559814453126, 0.016120447158813476, 0.01587347221374512, 0.016093887329101563, 0.015932640075683593, 0.015887136459350585, 0.015883744239807127, 0.015890975952148438, 0.016095232009887696, 0.01607865524291992, 0.016124448776245116, 0.01633833694458008, 0.01606233596801758, 0.015923583984375, 0.01602505683898926, 0.01579062366485596, 0.015929023742675782, 0.015742112159729003, 0.015895392417907713, 0.016057504653930663, 0.01608790397644043, 0.016052223205566405, 0.015961376190185547, 0.015798975944519043, 0.0159716796875, 0.015694047927856444, 0.015784704208374023, 0.01632534408569336, 0.0174704647064209, 0.016338144302368164, 0.016183807373046876, 0.016158336639404296, 0.01595779228210449, 0.015859871864318847, 0.015993568420410158, 0.016147775650024412, 0.01624700736999512, 0.016078720092773436, 0.015897279739379884, 0.015853823661804198, 0.016090431213378907, 0.01596076774597168, 0.015947104454040528, 0.015789759635925292, 0.015823488235473634, 0.015849472045898438, 0.01573468780517578, 0.015666720390319824, 0.015655488014221192, 0.015716032028198244, 0.015745344161987303, 0.015742976188659667, 0.015918335914611816, 0.015944064140319823, 0.016273792266845704, 0.01626140785217285, 0.01613590431213379, 0.01602505683898926, 0.015974944114685058, 0.016080896377563478, 0.016135711669921875, 0.016019935607910155, 0.016044288635253905, 0.015975872039794923, 0.015874367713928222, 0.016213024139404297, 0.016126176834106446, 0.0161329288482666, 0.01598400020599365, 0.01591967964172363, 0.015759360313415526, 0.015789088249206544, 
0.01585478401184082, 0.015775424003601074, 0.01561945629119873, 0.015571680068969727, 0.015558015823364258, 0.015584896087646485, 0.0161046085357666, 0.015884256362915038, 0.015647104263305664, 0.015533727645874023, 0.015802720069885253, 0.015805983543395997, 0.016064384460449218, 0.015872896194458006, 0.015914719581604005, 0.015922304153442382, 0.015926048278808592, 0.01585974407196045, 0.015678560256958008, 0.01566534423828125, 0.015792927742004394, 0.01584108829498291, 0.01610495948791504, 0.016130239486694335, 0.015981247901916504, 0.016101184844970702, 0.0160994873046875, 0.015909024238586426, 0.01584003162384033, 0.015872127532958986, 0.015909664154052733, 0.015932703971862795, 0.01583148765563965, 0.015882240295410157, 0.015770976066589355, 0.015744128227233888, 0.015748543739318846, 0.015644960403442383, 0.015694047927856444, 0.0159681282043457, 0.01603379249572754, 0.01589891242980957, 0.015850943565368654, 0.01593350410461426, 0.01621615982055664, 0.01605459213256836, 0.016228160858154296, 0.01603955268859863, 0.01598703956604004, 0.016324159622192382, 0.01616147232055664, 0.016074560165405274, 0.016016639709472657, 0.016395999908447267, 0.016776191711425782, 0.01637171173095703, 0.016125600814819337, 0.01601571273803711, 0.01604198455810547, 0.01579212760925293, 0.015955391883850097, 0.015970879554748536, 0.015888383865356445, 0.015871232032775878, 0.015745792388916015, 0.01581663990020752, 0.01589433574676514, 0.016222463607788087, 0.016074752807617186, 0.015921152114868165, 0.015902976036071777, 0.015799903869628908, 0.01597856044769287, 0.015870207786560057, 0.015826623916625978, 0.015823136329650878, 0.01584982395172119, 0.015824031829833985, 0.01593715190887451, 0.015860735893249513, 0.015585280418395997, 0.01568499183654785, 0.01583343982696533, 0.01593782424926758, 0.015804415702819825, 0.015884287834167482, 0.015938655853271484, 0.015845919609069826, 0.01572697639465332, 0.015730976104736328, 0.015728351593017578, 0.015596768379211427, 0.015624159812927247, 0.0156309757232666, 0.01556604766845703, 0.015425567626953124, 0.015385184288024902, 0.015445535659790038, 0.015399744033813476, 0.015557760238647462, 0.016046976089477537, 0.016017055511474608, 0.015919039726257325, 0.015968671798706056, 0.015862879753112794, 0.015798208236694335, 0.015891136169433592, 0.01581289577484131, 0.01582217597961426, 0.015827008247375488, 0.015953760147094726, 0.015992608070373535, 0.015971327781677248, 0.016149631500244142, 0.016040927886962892, 0.016109439849853517, 0.0160185604095459, 0.015972319602966308, 0.015929247856140135, 0.01579315185546875, 0.01580172824859619, 0.015823488235473634, 0.015956064224243165, 0.015824095726013183, 0.015879039764404298, 0.0157258882522583, 0.01581257629394531, 0.015839936256408692, 0.01686662483215332, 0.016329248428344725, 0.015986592292785644, 0.015949312210083007, 0.01574515247344971, 0.015786463737487794, 0.015943552017211916, 0.016042495727539064, 0.01619843292236328, 0.01611871910095215, 0.01591500759124756, 0.015728639602661132, 0.015814528465270997, 0.015992159843444823, 0.01587279987335205, 0.016462976455688477, 0.016175935745239258, 0.015843392372131348, 0.01582694435119629, 0.015793760299682616, 0.015970720291137695, 0.015945247650146484, 0.015886816024780273, 0.016044031143188475, 0.015796416282653807, 0.015726400375366212, 0.015737183570861818, 0.01567903995513916, 0.015711999893188475, 0.016034143447875977, 0.016021600723266603, 0.015979776382446288, 0.015782719612121583, 0.01568342399597168, 0.01593139171600342, 0.015848480224609374, 
0.015841376304626464, 0.015819583892822266, 0.015666848182678224, 0.0158887996673584, 0.015768896102905272, 0.016147136688232422, 0.015629599571228028, 0.01570479965209961, 0.015613951683044434, 0.015747072219848633, 0.015837183952331545, 0.015800352096557616, 0.015843392372131348, 0.016340255737304688, 0.015778431892395018, 0.015564448356628418, 0.015681471824645996, 0.01572473621368408, 0.015710016250610352, 0.015571328163146972, 0.015759391784667967, 0.015572223663330078, 0.015671711921691894, 0.015761759757995604, 0.01571020793914795, 0.015773695945739748, 0.015904767990112305, 0.016029695510864257, 0.015677056312561034, 0.01585993576049805, 0.015571071624755859, 0.01564470386505127, 0.016035839080810545, 0.01563024044036865, 0.015676480293273926, 0.015526944160461426, 0.015525792121887207, 0.01547878360748291, 0.015553759574890138, 0.015686112403869627, 0.015475135803222656, 0.015609727859497071, 0.015613951683044434, 0.015590559959411622, 0.015895392417907713, 0.017497760772705078, 0.016023807525634766, 0.015984800338745116, 0.01597772789001465, 0.015954015731811523, 0.01592307186126709, 0.015946656227111815, 0.01593513584136963, 0.01615273666381836, 0.01620969581604004, 0.016171232223510742, 0.016021535873413085, 0.01610044860839844, 0.016085376739501955, 0.016046592712402344, 0.015972352027893065, 0.015841279983520508, 0.016029535293579103, 0.01591926383972168, 0.015931424140930177, 0.01587401580810547, 0.016164127349853515, 0.015899359703063966, 0.0160317440032959, 0.015892736434936522, 0.01585446357727051, 0.015897472381591796, 0.01581500816345215, 0.015685279846191405, 0.015856767654418947, 0.0162108154296875, 0.016055519104003907, 0.01618409538269043, 0.016113279342651367, 0.016093568801879884, 0.01616896057128906, 0.01615011215209961, 0.016114368438720703, 0.016053983688354492, 0.016150751113891602, 0.016272832870483398, 0.016078880310058594, 0.016311647415161133, 0.01602457618713379, 0.01652355194091797, 0.01599462413787842, 0.01599251174926758, 0.01598431968688965, 0.015874303817749024, 0.01598291206359863, 0.01593776035308838]",tokens/s,62.16871394567339,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = 
scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,7436.88192,8041.463808,0.0,7646.216192,7627.584,s,1,13.0828779296875,13.0828779296875,0.0,13.0828779296875,13.0828779296875,13.0828779296875,13.0828779296875,[13.0828779296875],,kWh,0.00017310213232083242,1.908722760301612e-05,5.2883375640000105e-05,0.00024507273556384864,,MB,1765.060608,8687.386624,0.0,8277.458944,8199.8592,s,10,3.5461555175781245,0.35461555175781245,0.0007464210454196315,0.35487646484375,0.3554279327392578,0.35550009918212894,0.3555578323364258,"[0.35337371826171876, 0.35448025512695314, 0.3538238525390625, 0.3535220336914062, 0.354822509765625, 0.35504058837890623, 0.354930419921875, 0.35541189575195314, 0.355572265625, 0.355177978515625]",tokens/s,721.9085534489961,kWh,1.0389587062930928e-05,1.145673224278213e-06,6.860972921724143e-06,1.8396233208933282e-05,tokens/kWh,13915892.296673289,MB,1777.09056,9001.959424,0.0,8592.031744,8476.849152,s,10,29.368402099609373,2.9368402099609376,0.003935139654403692,2.9374609375,2.9412039794921876,2.9414275512695314,2.941606408691406,"[2.931014892578125, 2.928758544921875, 2.937771240234375, 2.941154296875, 2.937150634765625, 2.939466796875, 2.941651123046875, 2.935904541015625, 2.9364609375, 2.939069091796875]",tokens/s,21.451626747114698,kWh,8.633575688248555e-05,9.523114343253929e-06,5.7382856251075835e-05,0.00015324172747681528,tokens/kWh,411115.1775519601,,s,630,29.361263904571537,0.0466051808009072,0.0003901684382636195,0.04659067153930664,0.04708167533874512,0.04720060176849365,0.04764319000244141,"[0.04670352172851563, 0.046142688751220705, 0.045762462615966795, 0.046123905181884764, 0.045878944396972654, 0.04604118347167969, 0.04607001495361328, 0.04605324935913086, 0.04619046401977539, 0.045961471557617185, 0.04616191864013672, 0.04620470428466797, 0.04629718399047852, 0.04616220855712891, 0.046212959289550784, 0.046102527618408204, 0.04604288101196289, 0.04619651031494141, 0.04616649627685547, 0.04619878387451172, 
0.04636262512207031, 0.046534656524658206, 0.04632912063598633, 0.04638496017456055, 0.046219966888427735, 0.047315166473388674, 0.0464496955871582, 0.04650902557373047, 0.04641321563720703, 0.046432865142822265, 0.046189697265625, 0.04632665634155273, 0.046351390838623045, 0.046349281311035155, 0.04656332778930664, 0.04650188827514649, 0.04632275390625, 0.04637382507324219, 0.046581153869628904, 0.046578559875488285, 0.04644015884399414, 0.046499839782714845, 0.04759500885009765, 0.048302593231201174, 0.046728511810302735, 0.046682815551757816, 0.04698316955566406, 0.04741308975219727, 0.04673961639404297, 0.04672512054443359, 0.04695964813232422, 0.0466728630065918, 0.046604286193847655, 0.046581760406494144, 0.04689420700073242, 0.04684275054931641, 0.04688076782226563, 0.04661043167114258, 0.04679270553588867, 0.04677632141113281, 0.04688803100585937, 0.04679315185546875, 0.04718384170532226, 0.046684318542480466, 0.04623353576660156, 0.04602719879150391, 0.04585382461547852, 0.04574835205078125, 0.045943294525146484, 0.045917312622070314, 0.045853569030761716, 0.04598905563354492, 0.04589241409301758, 0.04598988723754883, 0.046040321350097654, 0.046279422760009764, 0.04622489547729492, 0.04590233612060547, 0.04646236801147461, 0.046211681365966796, 0.046137054443359374, 0.046270751953125, 0.04633804702758789, 0.04623686218261719, 0.04655724716186523, 0.046365440368652346, 0.046366367340087894, 0.04605987167358398, 0.046137344360351565, 0.046252033233642575, 0.046206878662109374, 0.046205024719238284, 0.04629913711547851, 0.046325759887695314, 0.04627241516113281, 0.04654499053955078, 0.04651193618774414, 0.04622518539428711, 0.04654655838012695, 0.04654569625854492, 0.04642611312866211, 0.046647296905517575, 0.04667747116088867, 0.04664579010009766, 0.04665139389038086, 0.04680681610107422, 0.04761008071899414, 0.04676198577880859, 0.046688255310058595, 0.04669164657592773, 0.04697875213623047, 0.046898174285888675, 0.046626399993896485, 0.04686064147949219, 0.04705900955200195, 0.04677427291870117, 0.04670259094238281, 0.04694015884399414, 0.047081024169921874, 0.04706963348388672, 0.04686000061035156, 0.046784801483154295, 0.04682547378540039, 0.0470200309753418, 0.04716080093383789, 0.047163681030273436, 0.04653670501708984, 0.04603481674194336, 0.04583817672729492, 0.04612739181518555, 0.045813758850097655, 0.04611072158813476, 0.04614144134521484, 0.04603289413452148, 0.0459521598815918, 0.046104896545410154, 0.04619318389892578, 0.04609843063354492, 0.04641177749633789, 0.04627190399169922, 0.046228065490722656, 0.04637286376953125, 0.04649369430541992, 0.04648527908325195, 0.04642627334594727, 0.046610496520996095, 0.04683699035644531, 0.04659027099609375, 0.046559680938720704, 0.04628844833374023, 0.04628688049316406, 0.0462459831237793, 0.04642438507080078, 0.04650982284545899, 0.046340351104736326, 0.0466143684387207, 0.046773536682128906, 0.046521278381347654, 0.04657497787475586, 0.04668454360961914, 0.048195137023925784, 0.04675443267822266, 0.04644454574584961, 0.046505630493164064, 0.046757568359375, 0.04736886215209961, 0.04674969482421875, 0.04678041458129883, 0.04675788879394531, 0.046886913299560545, 0.046718784332275394, 0.04657785415649414, 0.04682870483398437, 0.046926559448242186, 0.046752960205078124, 0.04687558364868164, 0.047249408721923826, 0.04696854400634766, 0.04701827239990235, 0.04686643218994141, 0.047010814666748044, 0.047010879516601566, 0.04689503860473633, 0.04681907272338867, 0.047368446350097654, 0.04724531173706055, 0.04713881683349609, 
0.04701113510131836, 0.0470552978515625, 0.04673491287231445, 0.04633951950073242, 0.04594099044799805, 0.046297855377197265, 0.046011489868164064, 0.046110847473144534, 0.04614377593994141, 0.04618844985961914, 0.04608265686035156, 0.046083263397216793, 0.046218048095703124, 0.046478687286376955, 0.04647769546508789, 0.04623593521118164, 0.04634444808959961, 0.04639254379272461, 0.04627510452270508, 0.046292991638183595, 0.046342144012451174, 0.04647731018066406, 0.046696449279785154, 0.046712833404541014, 0.04652860641479492, 0.04643756866455078, 0.04649801635742187, 0.04648191833496094, 0.04670873641967774, 0.046804126739501954, 0.04661033630371094, 0.04668307113647461, 0.04679679870605469, 0.04669440078735351, 0.046671871185302735, 0.04676607894897461, 0.046650753021240235, 0.04669279861450195, 0.046561729431152346, 0.04669385528564453, 0.046874912261962894, 0.046854145050048826, 0.04686431884765625, 0.046771358489990235, 0.04682640075683594, 0.04687865447998047, 0.04676409530639648, 0.047027488708496094, 0.047182464599609376, 0.047081409454345705, 0.046918846130371096, 0.04722787094116211, 0.04708051300048828, 0.04694931030273437, 0.04684799957275391, 0.046876129150390626, 0.04708377456665039, 0.04707731246948242, 0.0474579849243164, 0.046921470642089846, 0.04716432189941406, 0.04712038421630859, 0.0471615982055664, 0.047110942840576174, 0.04716003036499023, 0.04696604919433594, 0.04639104080200195, 0.04597414398193359, 0.046145313262939455, 0.04610924911499024, 0.046203937530517575, 0.04604412841796875, 0.04612668609619141, 0.046067615509033204, 0.045894142150878905, 0.046247390747070315, 0.04627920150756836, 0.04634009552001953, 0.04620646286010742, 0.04614809417724609, 0.04632748794555664, 0.046209152221679685, 0.04643420791625977, 0.0464117431640625, 0.046456897735595706, 0.04651375961303711, 0.04687094497680664, 0.04665164947509766, 0.04641791915893555, 0.04648531341552734, 0.046372928619384766, 0.04642214584350586, 0.04648294448852539, 0.04637270355224609, 0.046451038360595706, 0.04638956832885742, 0.04656060791015625, 0.04664591979980469, 0.04680729675292969, 0.04646271896362305, 0.04661372756958008, 0.04671551895141601, 0.04679030227661133, 0.04680755233764648, 0.0466778564453125, 0.04668771362304688, 0.04683436965942383, 0.04743167877197266, 0.04676607894897461, 0.04682710266113281, 0.04674959945678711, 0.047294975280761715, 0.04686617660522461, 0.04671641540527344, 0.046848766326904295, 0.046878719329833986, 0.04689894485473633, 0.046833919525146483, 0.046728382110595705, 0.047084095001220704, 0.0469854736328125, 0.046948158264160156, 0.04703251266479492, 0.0473105583190918, 0.047032608032226565, 0.047026176452636716, 0.04706304168701172, 0.04709894561767578, 0.046640640258789064, 0.04622079849243164, 0.046023681640625, 0.046086143493652344, 0.04595663833618164, 0.046072288513183596, 0.04617324829101563, 0.04613216018676758, 0.046033088684082034, 0.04609724807739258, 0.046373855590820315, 0.046225406646728515, 0.04653263854980469, 0.046393310546875, 0.04636876678466797, 0.04642201614379883, 0.04653875350952148, 0.04657561492919922, 0.046202880859375, 0.046440448760986325, 0.046451744079589845, 0.04651721572875977, 0.047470592498779295, 0.04656470489501953, 0.04640835189819336, 0.04631961441040039, 0.046392864227294925, 0.04652080154418945, 0.04652767944335937, 0.04652729415893555, 0.046481311798095705, 0.04655523300170898, 0.04655718231201172, 0.046581760406494144, 0.046383102416992186, 0.046712833404541014, 0.047923198699951174, 0.047924606323242185, 0.046795455932617185, 
0.0466759033203125, 0.04659807968139648, 0.04667308807373047, 0.04684684753417969, 0.04672284698486328, 0.04677030563354492, 0.046739425659179684, 0.04686656188964844, 0.04693753433227539, 0.046914112091064455, 0.04689433670043945, 0.046936416625976564, 0.04683407974243164, 0.046798336029052735, 0.04676409530639648, 0.047061439514160155, 0.04708467102050781, 0.04716838455200195, 0.04692582321166992, 0.04698316955566406, 0.04707942581176758, 0.04713676834106445, 0.04700748825073242, 0.04720256042480469, 0.046647296905517575, 0.046260223388671876, 0.04600774383544922, 0.046145889282226564, 0.046028705596923826, 0.04609260940551758, 0.046061569213867185, 0.04615292739868164, 0.046320415496826174, 0.04627046585083008, 0.04628684616088867, 0.046290145874023435, 0.04635113525390625, 0.04628070449829102, 0.046283935546875, 0.04647932815551758, 0.046535552978515624, 0.0463699836730957, 0.04630352020263672, 0.0464031982421875, 0.04647520065307617, 0.046695392608642576, 0.04640703964233398, 0.046391937255859376, 0.04637638473510742, 0.04637446212768555, 0.04663180923461914, 0.046623905181884764, 0.04644758224487305, 0.04659404754638672, 0.04646211242675781, 0.04658671951293945, 0.0468513298034668, 0.04685660934448242, 0.046559585571289065, 0.04684185409545898, 0.046630912780761716, 0.04664495849609375, 0.04692816162109375, 0.04699955368041992, 0.046845951080322266, 0.047099903106689454, 0.047265792846679686, 0.046951839447021484, 0.046840351104736326, 0.04687225723266602, 0.04700198364257813, 0.047175422668457034, 0.047728256225585936, 0.04702422332763672, 0.04697520065307617, 0.04754582214355469, 0.04709257507324219, 0.047032257080078126, 0.04721670532226562, 0.046947456359863284, 0.047030464172363284, 0.047104991912841794, 0.04699622344970703, 0.046926815032958986, 0.04720435333251953, 0.047034366607666016, 0.047083518981933595, 0.04696752166748047, 0.04640124893188476, 0.04600400161743164, 0.045973217010498044, 0.04587916946411133, 0.04609088134765625, 0.04597964859008789, 0.046053375244140625, 0.046061569213867185, 0.046002174377441404, 0.04613529586791992, 0.04616550445556641, 0.04633232116699219, 0.046137439727783204, 0.046142784118652344, 0.046416576385498044, 0.04632166290283203, 0.04624588775634766, 0.04617180633544922, 0.04643875122070312, 0.04659107208251953, 0.04672195053100586, 0.046374561309814454, 0.046317920684814454, 0.04638236618041992, 0.04622771072387695, 0.046631393432617185, 0.04638019180297852, 0.0463012466430664, 0.04651036834716797, 0.04645939254760742, 0.04668182373046875, 0.046771934509277344, 0.046633537292480466, 0.04657731246948242, 0.046784862518310544, 0.04666572952270508, 0.04679065704345703, 0.04689715194702149, 0.04702412796020508, 0.04693392181396484, 0.04694153594970703, 0.04687334442138672, 0.04679420852661133, 0.0467006721496582, 0.04686275100708008, 0.046920734405517577, 0.04684489440917969, 0.046884735107421874, 0.0480808334350586, 0.04663315200805664, 0.04676796722412109, 0.04676214218139649, 0.04666764831542969, 0.04681075286865234, 0.04692428970336914, 0.04685619354248047, 0.046746654510498045, 0.04680393600463867, 0.04716918563842774, 0.04715555191040039, 0.04708147048950195, 0.04725964736938477, 0.046662017822265624, 0.046229248046875, 0.04592051315307617, 0.04608943939208984, 0.046008735656738284, 0.046163551330566405, 0.04627536010742188, 0.04617340850830078, 0.045957920074462894, 0.04617580795288086, 0.04643260955810547, 0.04636476898193359, 0.046309024810791015, 0.046411136627197265, 0.04638614273071289, 0.04638719940185547, 0.046247905731201175, 
0.04640163040161133, 0.04622438430786133, 0.046541152954101564, 0.04643900680541992, 0.04657513427734375, 0.046372638702392575, 0.04650259017944336, 0.04654079818725586, 0.04674969482421875, 0.046430206298828124, 0.04656332778930664, 0.04640553665161133, 0.04646255874633789, 0.04644009780883789, 0.04651430511474609, 0.0466091194152832, 0.04666572952270508, 0.0464793586730957, 0.046757823944091795, 0.04653472137451172, 0.04657503890991211, 0.046757984161376956, 0.04764854431152344, 0.046868961334228514, 0.046739681243896485, 0.046941665649414065, 0.04683622360229492, 0.046790496826171875, 0.04684000015258789, 0.046835712432861325, 0.04681836700439453, 0.04667692947387695, 0.046710945129394534, 0.04672700881958008, 0.04673126220703125, 0.046667007446289065, 0.04677228927612305, 0.046873153686523436, 0.047042911529541015, 0.04688873672485352, 0.046922046661376955, 0.047122112274169924, 0.04707660675048828, 0.0471162223815918, 0.04718675231933594, 0.047158912658691404, 0.04683161544799805, 0.046196449279785154, 0.04594633483886719, 0.04615996932983398, 0.04591830444335938, 0.04616767883300781, 0.046099361419677735, 0.04613711929321289, 0.04616995239257812, 0.04611455917358399, 0.0462825927734375, 0.046340991973876956, 0.046330974578857424, 0.04618332672119141, 0.046159870147705076, 0.046368511199951175, 0.04638729476928711, 0.04652150344848633, 0.046410751342773435, 0.0466464958190918, 0.046478111267089846, 0.04660588836669922, 0.046452224731445314, 0.04651462554931641, 0.046421600341796876, 0.046303295135498045, 0.046357025146484376, 0.04622367858886719, 0.046467071533203126, 0.04689920043945312, 0.04649369430541992, 0.04648550415039063, 0.04674969482421875, 0.04659977722167969, 0.0464277114868164, 0.04715155029296875, 0.04659651184082031, 0.04755027389526367, 0.04680019378662109, 0.04687756729125977, 0.04674345779418945, 0.04670816040039062, 0.04681081771850586, 0.04692272186279297, 0.04688614273071289, 0.0468507194519043, 0.04671404647827149, 0.046701473236083986, 0.046675167083740234, 0.04666592025756836, 0.04733536148071289, 0.047630081176757814, 0.04695951843261719, 0.047007553100585936, 0.04720659255981445, 0.04687257766723633, 0.04715097427368164, 0.04707888031005859, 0.04702684783935547, 0.04708713531494141, 0.04719820785522461, 0.046985694885253906, 0.047286113739013674]",tokens/s,21.45684198226594,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most 
recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,4404.502528,4976.410624,0.0,4573.888512,4568.93696,s,1,11.1963173828125,11.1963173828125,0.0,11.1963173828125,11.1963173828125,11.1963173828125,11.1963173828125,[11.1963173828125],,kWh,0.00011957140402082585,1.318214900981315e-05,3.518030592199878e-05,0.0001679338589526378,,MB,2192.95744,5303.566336,0.0,4886.36416,4814.97344,s,10,2.093231201171875,0.2093231201171875,0.0007797172315558181,0.2089561462402344,0.21011659088134765,0.21054148025512695,0.21088139175415038,"[0.20884967041015626, 0.20860928344726562, 0.20850743103027344, 0.20868092346191405, 0.2087398986816406, 0.21096636962890625, 0.2090626220703125, 0.20997955322265624, 0.2098132781982422, 0.21002217102050783]",tokens/s,1222.9896050502252,kWh,6.164865391319394e-06,6.798674849971117e-07,4.066982420249949e-06,1.0911715296566454e-05,tokens/kWh,23461022.675376665,MB,2198.2208,5408.423936,0.0,4991.22176,4947.963904,s,10,25.41122998046875,2.541122998046875,0.019529103375626543,2.5415146484375,2.562885791015625,2.567808984375,2.5717475390625,"[2.515795166015625, 2.51910693359375, 2.524144287109375, 2.52001611328125, 2.540578857421875, 2.558209228515625, 2.542450439453125, 2.572732177734375, 2.556405029296875, 2.561791748046875]",tokens/s,24.792188354685013,kWh,7.462294721243101e-05,8.230870861251594e-06,4.549702945315009e-05,0.00012835084752683273,tokens/kWh,490842.1036084656,,s,630,25.40759236907958,0.040329511696951724,0.0006311088888960408,0.04027203178405762,0.04090074157714844,0.041167752838134765,0.042666635360717776,"[0.04029849624633789, 0.04018179321289062, 0.040214496612548827, 0.04018806457519531, 0.03997065734863281, 0.03990323257446289, 0.03958108901977539, 0.039676513671875, 0.039790687561035154, 0.03973654556274414, 
0.04009024047851562, 0.03983366394042969, 0.03982534408569336, 0.03974111938476563, 0.03980758285522461, 0.039560958862304686, 0.03975161743164062, 0.03970198440551758, 0.03963510513305664, 0.03958534240722656, 0.039713088989257815, 0.03973798370361328, 0.0396044807434082, 0.03965923309326172, 0.03969638442993164, 0.03964083099365234, 0.0397949104309082, 0.03974934387207031, 0.03965679931640625, 0.03988131332397461, 0.039591487884521485, 0.039743934631347656, 0.039706302642822267, 0.03961008071899414, 0.03972367858886719, 0.039970718383789065, 0.039755680084228515, 0.03975638580322265, 0.04039779281616211, 0.04015923309326172, 0.0402050552368164, 0.04036198425292969, 0.04067478561401367, 0.04032380676269531, 0.04036198425292969, 0.04019209671020508, 0.04043366241455078, 0.040246337890625, 0.040180545806884765, 0.04028400039672852, 0.03994047927856445, 0.0399189453125, 0.03996425628662109, 0.03993484878540039, 0.039742752075195314, 0.03995107269287109, 0.03999129486083984, 0.03993395233154297, 0.039867424011230466, 0.03990563201904297, 0.04019673538208008, 0.04003190231323242, 0.04020668792724609, 0.042608062744140626, 0.0401611213684082, 0.03988723373413086, 0.04080428695678711, 0.039828960418701174, 0.04088812637329101, 0.039790592193603515, 0.039869182586669924, 0.03999375915527344, 0.03967574310302734, 0.03971878433227539, 0.03997449493408203, 0.03999772644042969, 0.04007846450805664, 0.03981999969482422, 0.03984783935546875, 0.039952320098876955, 0.040425918579101563, 0.043878623962402344, 0.039959423065185545, 0.04154665756225586, 0.040083358764648434, 0.03960022354125976, 0.03965542221069336, 0.03958963012695312, 0.03974777603149414, 0.03970822525024414, 0.03971916961669922, 0.0397130241394043, 0.04006070327758789, 0.04080665588378906, 0.03979481506347656, 0.0395750732421875, 0.039669471740722655, 0.03961088180541992, 0.03953587341308594, 0.0395948486328125, 0.03965043258666992, 0.0397108154296875, 0.039416446685791015, 0.03954108810424805, 0.03977606582641602, 0.03983708953857422, 0.03973795318603516, 0.03960396957397461, 0.039747840881347654, 0.039569408416748046, 0.039485439300537106, 0.039613697052001955, 0.039370784759521486, 0.04039148712158203, 0.03967350387573242, 0.03935891342163086, 0.03939014434814453, 0.03963382339477539, 0.040548641204833986, 0.04002511978149414, 0.039790462493896485, 0.03986307144165039, 0.03977011108398437, 0.0400129280090332, 0.04004473495483398, 0.04001801681518555, 0.04023401641845703, 0.04007158279418945, 0.03955654525756836, 0.039641918182373045, 0.03979257583618164, 0.03974764633178711, 0.03993766403198242, 0.040268032073974606, 0.0411096305847168, 0.040408737182617185, 0.04154403305053711, 0.04038860702514648, 0.04118486404418945, 0.04026403045654297, 0.039936065673828125, 0.03981721496582031, 0.039642688751220706, 0.03981151962280274, 0.03962422561645508, 0.040048385620117186, 0.03977699279785156, 0.03962300872802734, 0.03964886474609375, 0.04018796920776367, 0.03998336029052734, 0.04038630294799805, 0.040594593048095706, 0.040445793151855466, 0.04022502517700195, 0.04019686508178711, 0.04000755310058594, 0.039886878967285155, 0.040127937316894534, 0.0398419189453125, 0.03979699325561523, 0.039928096771240235, 0.0397946891784668, 0.04070604705810547, 0.04012384033203125, 0.04002422332763672, 0.04002435302734375, 0.03985801696777344, 0.04068963241577148, 0.0409455680847168, 0.040062591552734374, 0.03986547088623047, 0.039618209838867186, 0.0399420166015625, 0.03982137680053711, 0.04006099319458008, 0.03973529434204102, 0.039763904571533205, 
0.04005263900756836, 0.039870624542236326, 0.03969020843505859, 0.039769824981689454, 0.03973500823974609, 0.03989891052246094, 0.03982156753540039, 0.040007518768310546, 0.039834400177001954, 0.03978790283203125, 0.040632705688476566, 0.039981887817382815, 0.03988396835327149, 0.03976275253295898, 0.03975372695922851, 0.039717086791992186, 0.040982017517089846, 0.04337420654296875, 0.040229534149169924, 0.04027715301513672, 0.04016624069213867, 0.03982876968383789, 0.03982614517211914, 0.03968582534790039, 0.03957587051391601, 0.03980035018920899, 0.039841503143310544, 0.0395968017578125, 0.03970873641967773, 0.039746910095214846, 0.040589920043945314, 0.039664958953857424, 0.039685951232910154, 0.039895103454589846, 0.03992559814453125, 0.03965171051025391, 0.03932995223999024, 0.039608352661132815, 0.03957612609863281, 0.039489376068115235, 0.03964092636108398, 0.0400549430847168, 0.03974553680419922, 0.03969023895263672, 0.039743679046630856, 0.03973305511474609, 0.03976166534423828, 0.03986198425292969, 0.04016182327270508, 0.04008883285522461, 0.039997344970703126, 0.03978652954101562, 0.0397496337890625, 0.039857311248779295, 0.04022441482543945, 0.040820735931396485, 0.040007648468017576, 0.03977606582641602, 0.03986054229736328, 0.039898399353027345, 0.040172000885009766, 0.04027571105957031, 0.04029276657104492, 0.04011391830444336, 0.03990143966674805, 0.04004249572753906, 0.039929855346679685, 0.03987830352783203, 0.04017596817016601, 0.04017692947387695, 0.040271678924560544, 0.04016038513183594, 0.04033884811401367, 0.040280223846435544, 0.04065513610839844, 0.041412479400634764, 0.04080640029907227, 0.0400849609375, 0.04034819030761719, 0.040134654998779294, 0.04005635070800781, 0.040226943969726564, 0.04019564819335938, 0.04000374221801758, 0.03996940612792969, 0.040218177795410155, 0.04041593551635742, 0.0402529296875, 0.04011983871459961, 0.04032806396484375, 0.04015520095825195, 0.040029983520507816, 0.03994646453857422, 0.04003142547607422, 0.040188865661621095, 0.040099742889404294, 0.04010367965698242, 0.039944255828857425, 0.04009088134765625, 0.040298526763916015, 0.04114089584350586, 0.03988275146484375, 0.04269055938720703, 0.04044822311401367, 0.040036384582519534, 0.04038025665283203, 0.040191455841064455, 0.04028460693359375, 0.04035379028320313, 0.040204288482666016, 0.04004249572753906, 0.04056063842773437, 0.040260929107666016, 0.04042822265625, 0.04041878509521484, 0.04031900787353516, 0.040269790649414064, 0.04030028915405273, 0.03991632080078125, 0.040152862548828126, 0.04040095901489258, 0.04018355178833008, 0.040349376678466796, 0.04111801528930664, 0.04022313690185547, 0.0411583366394043, 0.04049324798583984, 0.040110462188720705, 0.03999308776855469, 0.04031692886352539, 0.03996809768676758, 0.04011894226074219, 0.04026889419555664, 0.03998758316040039, 0.04007526397705078, 0.04056937789916992, 0.040459999084472655, 0.04104198455810547, 0.04078118515014648, 0.04163033676147461, 0.04068374252319336, 0.0409486083984375, 0.04088441467285156, 0.040556800842285155, 0.04153334426879883, 0.04078649520874023, 0.0404188461303711, 0.04063075256347656, 0.04084044647216797, 0.04053241729736328, 0.040976032257080075, 0.04099683380126953, 0.040805183410644534, 0.0407834243774414, 0.040640159606933596, 0.040753185272216795, 0.04073040008544922, 0.0412591667175293, 0.040787841796875, 0.040750110626220706, 0.04095564651489258, 0.04050492858886719, 0.04049555206298828, 0.04053740692138672, 0.0404384651184082, 0.04046255874633789, 0.04063209533691406, 
0.04046847915649414, 0.04033542251586914, 0.041672672271728516, 0.04043529510498047, 0.04065478515625, 0.04041689682006836, 0.0403628158569336, 0.04044758224487305, 0.04046473693847656, 0.04044319915771484, 0.04058915328979492, 0.040835361480712894, 0.04060140609741211, 0.04099769592285156, 0.040513534545898434, 0.041040897369384766, 0.0405794563293457, 0.04045644760131836, 0.040409473419189455, 0.040514881134033204, 0.040393409729003904, 0.04033744049072266, 0.040228832244873045, 0.04038159942626953, 0.04162031936645508, 0.04028188705444336, 0.04054447937011719, 0.040441856384277344, 0.039725055694580076, 0.03943862533569336, 0.039424991607666014, 0.0392704963684082, 0.0397441291809082, 0.03992015838623047, 0.03988515090942383, 0.04043907165527344, 0.03986812973022461, 0.03962777709960937, 0.039851486206054686, 0.03966934585571289, 0.03991584014892578, 0.04047526550292969, 0.04041523361206055, 0.04024729537963867, 0.040032257080078126, 0.040099327087402346, 0.03992374420166016, 0.03993443298339844, 0.039800830841064457, 0.03982684707641602, 0.039680606842041014, 0.040112033843994144, 0.041424991607666016, 0.04046851348876953, 0.040185985565185545, 0.040459518432617185, 0.04026015853881836, 0.04015756988525391, 0.03987401580810547, 0.039843551635742186, 0.04043414306640625, 0.04005683135986328, 0.03984384155273438, 0.039927806854248044, 0.04015513610839844, 0.040392257690429687, 0.040659393310546875, 0.04068796920776367, 0.04037801742553711, 0.040458240509033204, 0.040394016265869144, 0.04060211181640625, 0.040394977569580076, 0.04036377716064453, 0.040161537170410155, 0.04047872161865235, 0.04030035018920899, 0.040309951782226565, 0.0403507194519043, 0.0402815055847168, 0.04033145523071289, 0.040472606658935546, 0.04384969711303711, 0.0407628173828125, 0.0406945915222168, 0.04087033462524414, 0.0404804801940918, 0.040382366180419925, 0.04047081756591797, 0.040300670623779296, 0.04054547119140625, 0.040595966339111327, 0.04167241668701172, 0.04069721603393555, 0.040387489318847655, 0.04053926467895508, 0.040777599334716794, 0.04063126373291016, 0.0405436782836914, 0.04037609481811524, 0.040460769653320315, 0.04162591934204102, 0.041084449768066404, 0.04067951965332031, 0.04081039810180664, 0.04133116912841797, 0.040900543212890626, 0.041293888092041015, 0.04066204833984375, 0.04057964706420898, 0.041006912231445314, 0.041049793243408204, 0.04080940628051758, 0.040611553192138675, 0.04043356704711914, 0.04111142349243164, 0.04052425765991211, 0.04071571350097656, 0.04099334335327148, 0.040548351287841795, 0.040973472595214847, 0.040962913513183596, 0.04095948791503906, 0.04067935943603516, 0.04066550445556641, 0.040827041625976564, 0.04079411315917969, 0.040839168548583986, 0.040925182342529294, 0.04087603378295898, 0.041246177673339844, 0.040865951538085935, 0.04086412811279297, 0.04067734527587891, 0.040861438751220704, 0.04062966537475586, 0.04101824188232422, 0.04082825469970703, 0.040589599609375, 0.04072022247314453, 0.04108921432495117, 0.04076579284667969, 0.04068131256103515, 0.04063644790649414, 0.04068723297119141, 0.04081132888793945, 0.04087763214111328, 0.04068889617919922, 0.04082720184326172, 0.04069462585449219, 0.04156099319458008, 0.04125526428222656, 0.04088675308227539, 0.04060992050170899, 0.040820606231689455, 0.04084735870361328, 0.04076748657226562, 0.04075273513793945, 0.04070646286010742, 0.040804481506347655, 0.04064985656738281, 0.04147289657592773, 0.04081049728393555, 0.040850654602050784, 0.04076828765869141, 0.04069580841064453, 0.04094566345214844, 
0.040984737396240235, 0.040746337890625, 0.04070041656494141, 0.040645790100097653, 0.040794017791748044, 0.040870849609375, 0.040833343505859376, 0.04106172943115234, 0.04070230484008789, 0.04084870529174805, 0.04065555191040039, 0.040784896850585936, 0.0403702392578125, 0.04012108612060547, 0.04043798446655274, 0.03979183959960937, 0.04044217681884766, 0.04019449615478515, 0.04034156799316406, 0.040389728546142575, 0.04019801712036133, 0.040143840789794924, 0.04041535949707031, 0.04029859161376953, 0.03999107360839844, 0.040207584381103514, 0.03997980880737305, 0.04076688003540039, 0.04023971176147461, 0.04043529510498047, 0.04665340805053711, 0.04117545700073242, 0.04053097534179687, 0.0405015983581543, 0.04071615982055664, 0.04006991958618164, 0.039964672088623046, 0.03986227035522461, 0.04020016098022461, 0.04042150497436523, 0.04055440139770508, 0.04047872161865235, 0.04065689468383789, 0.03985513687133789, 0.040220993041992184, 0.04001449584960937, 0.039913471221923826, 0.03976595306396485, 0.03984419250488281, 0.04048406219482422, 0.040349952697753905, 0.04042982482910156, 0.04071187210083008, 0.04059552001953125, 0.04063462448120117, 0.04139414215087891, 0.04068451309204101, 0.04041830444335937, 0.040529918670654294, 0.04042252731323242, 0.040422401428222655, 0.04090252685546875, 0.04047679901123047, 0.040267681121826174, 0.04046793746948242, 0.04027238464355469, 0.04061183929443359, 0.04014009475708008, 0.040495777130126955, 0.040392223358154296, 0.040570945739746095, 0.04128607940673828, 0.0407039680480957, 0.04120284652709961, 0.040373470306396486, 0.04009884643554688, 0.04029718399047852, 0.04081449508666992, 0.04046454238891602, 0.040336288452148435, 0.040704959869384764, 0.040243198394775394, 0.04026367950439453, 0.042000385284423826, 0.040463935852050784, 0.040288318634033204, 0.04056921768188477, 0.04026582336425781, 0.04070556640625, 0.040431999206542966, 0.04079001617431641, 0.04107622528076172, 0.04029516983032227, 0.0402347526550293, 0.0401715202331543, 0.04013983917236328, 0.040498111724853514, 0.040172798156738285, 0.0408358383178711, 0.04012764739990234, 0.04040697479248047, 0.04099164962768555, 0.041062400817871096, 0.040417278289794925, 0.04002422332763672, 0.040195934295654295, 0.040504894256591796, 0.04031123352050781, 0.04031475067138672, 0.04040879821777344, 0.04073081588745117, 0.043383007049560544, 0.04082716751098633, 0.04056649780273437, 0.04118899154663086, 0.04451068878173828, 0.04067414474487305, 0.04060150527954102]",tokens/s,24.79573785852667,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1039.089664,904.855552,0.0,509.607936,491.434496,s,1,7.7753291015625,7.7753291015625,0.0,7.7753291015625,7.7753291015625,7.7753291015625,7.7753291015625,[7.7753291015625],,kWh,2.3769561345824284e-05,2.613634343838651e-06,8.089450916001772e-06,3.447264660566471e-05,,MB,1393.004544,1039.07328,0.0,629.1456,592.24832,s,10,0.2588756771087647,0.025887567710876462,0.00012938341568561115,0.025884959220886232,0.026041721725463866,0.026084604454040528,0.026118910636901856,"[0.026127487182617187, 0.025942752838134766, 0.02589206314086914, 0.02603219223022461, 0.02582262420654297, 0.02577039909362793, 0.025944799423217774, 0.025641632080078126, 0.02587785530090332, 0.02582387161254883]",tokens/s,9888.916674564354,kWh,7.569353342724582e-07,8.347716594352991e-08,4.6696856586011393e-07,1.307381066076102e-06,tokens/kWh,195811310.59848037,MB,1433.088,1053.753344,0.0,643.825664,605.085696,s,10,13.554338012695311,1.355433801269531,0.008772509112208286,1.3541259765625,1.3627170166015623,1.3704699584960938,1.3766723120117188,"[1.378222900390625, 1.355204833984375, 1.34927197265625, 1.356851318359375, 1.354110107421875, 1.354141845703125, 1.3519068603515625, 1.360994140625, 1.34954833984375, 1.344085693359375]",tokens/s,46.479584573582805,kWh,3.870019445531075e-05,4.268203905542278e-06,1.4181515375140097e-05,5.714991373599313e-05,tokens/kWh,1102363.8686670926,,s,630,13.548963447570797,0.021506291186620317,0.0005965968022652301,0.021377023696899415,0.02176020202636719,0.022067100620269776,0.024917819614410433,"[0.021605600357055665, 0.021792800903320312, 0.021680896759033202, 0.021649280548095704, 0.022975807189941407, 0.021523263931274413, 0.021261823654174804, 0.02147177505493164, 0.021694303512573242, 0.022093952178955076, 0.02187900733947754, 0.021880640029907226, 0.02219004821777344, 0.022295583724975587, 0.022047712326049806, 0.02183590316772461, 0.02166156768798828, 0.021579967498779298, 0.0215897274017334, 0.022064319610595705, 0.021475839614868163, 0.02144483184814453, 0.021657440185546876, 0.022732288360595702, 0.02273161506652832, 0.021560991287231445, 0.02210771179199219, 0.021524415969848634, 0.021602367401123045, 0.021600255966186522, 0.022296831130981444, 0.022540128707885743, 0.022129344940185546, 0.021938528060913086, 0.022011552810668945, 0.021779680252075197, 0.021781280517578126, 0.02230681610107422, 0.02163711929321289, 0.021552928924560545, 0.021431615829467773, 0.02162371253967285, 0.021443872451782225, 0.021514976501464844, 0.021552831649780273, 0.021574047088623045, 0.021441728591918945, 0.02137571144104004, 0.021816864013671874, 0.026312959671020507, 0.022155744552612305, 0.021871871948242187, 0.02197747230529785, 0.02200998306274414, 0.021794815063476563, 0.021606399536132814, 0.02148761558532715, 0.021831167221069335, 0.021527040481567384, 0.021514144897460938, 0.0215, 0.021485567092895508, 0.021611616134643553, 0.020992416381835938, 0.021509727478027343, 0.02124595260620117, 0.02125209617614746, 0.02175814437866211, 0.021278528213500975, 0.02127667236328125, 0.021362688064575194, 0.021270719528198243, 0.021317087173461913, 0.021282432556152343, 0.021279455184936524, 0.021603904724121093, 0.021422527313232423, 0.02117046356201172, 0.02134601593017578, 0.02123161506652832, 0.021356864929199217, 0.021398975372314454, 0.021328128814697266, 0.021354303359985352, 0.021288288116455077, 0.021238624572753908, 0.021284128189086916, 0.021654144287109375, 0.02170889663696289, 0.02143395233154297, 0.021449119567871093, 
0.02128691291809082, 0.021299039840698242, 0.021321887969970702, 0.02126233673095703, 0.023619264602661134, 0.023195968627929688, 0.02149488067626953, 0.021391519546508787, 0.02424083137512207, 0.02174086380004883, 0.021441280364990236, 0.0214835205078125, 0.021497856140136717, 0.021260480880737304, 0.02150102424621582, 0.021502368927001952, 0.021519968032836914, 0.02140438461303711, 0.021376256942749024, 0.021631744384765624, 0.02128281593322754, 0.021331968307495116, 0.02154924774169922, 0.021354272842407228, 0.021288991928100586, 0.021342111587524415, 0.021262432098388673, 0.02150409507751465, 0.021641120910644532, 0.02143756866455078, 0.02126969528198242, 0.02166524887084961, 0.02130556869506836, 0.02138422393798828, 0.021494752883911134, 0.020882368087768555, 0.021180383682250975, 0.021178400039672852, 0.02126755142211914, 0.02129158401489258, 0.021375423431396486, 0.021245248794555666, 0.021242431640625, 0.021190687179565428, 0.021315872192382814, 0.021313247680664064, 0.0212992000579834, 0.02124595260620117, 0.02121238327026367, 0.02147225570678711, 0.021624191284179688, 0.02530950355529785, 0.021676416397094726, 0.021705951690673828, 0.021457408905029295, 0.021397247314453124, 0.02128211212158203, 0.021379903793334962, 0.02166387176513672, 0.021587968826293946, 0.021583871841430666, 0.021295103073120117, 0.02134809684753418, 0.021139711380004884, 0.021501951217651367, 0.021531999588012694, 0.02132649612426758, 0.021348320007324218, 0.021368671417236328, 0.021274816513061522, 0.021432319641113282, 0.02144816017150879, 0.021560895919799806, 0.021347232818603516, 0.021299264907836915, 0.021551103591918946, 0.021518335342407227, 0.02125619125366211, 0.021331968307495116, 0.02122956848144531, 0.02122480010986328, 0.02142064094543457, 0.021229631423950197, 0.02124569511413574, 0.02132953643798828, 0.021461631774902342, 0.021179647445678712, 0.021197568893432616, 0.021178367614746094, 0.021180320739746093, 0.021125215530395508, 0.0214466552734375, 0.021338144302368165, 0.021380735397338868, 0.021356063842773436, 0.021359424591064453, 0.02122137641906738, 0.021368831634521485, 0.0251943359375, 0.021628095626831056, 0.021242847442626955, 0.021303295135498047, 0.02133718490600586, 0.02122230339050293, 0.021299072265625, 0.02125632095336914, 0.021159423828125, 0.02126825523376465, 0.021248735427856446, 0.02126848030090332, 0.021622783660888673, 0.021434207916259766, 0.021233823776245116, 0.021264352798461915, 0.021231647491455077, 0.021300991058349608, 0.021405088424682618, 0.02131030464172363, 0.02126646423339844, 0.02140771293640137, 0.02134022331237793, 0.02125721549987793, 0.02172163200378418, 0.021404064178466797, 0.021243904113769533, 0.021431968688964843, 0.021358943939208983, 0.021323776245117186, 0.021370847702026366, 0.02231839942932129, 0.021557056427001953, 0.021398048400878906, 0.021262720108032228, 0.021308799743652344, 0.02149407958984375, 0.02175382423400879, 0.02163484764099121, 0.021381696701049804, 0.021356351852416994, 0.021370912551879884, 0.0214466552734375, 0.021483295440673827, 0.021275007247924804, 0.02123776054382324, 0.021448703765869142, 0.021542783737182616, 0.025877983093261718, 0.021785247802734376, 0.021552160263061525, 0.021607744216918946, 0.021421728134155275, 0.021355520248413085, 0.021430784225463868, 0.021395103454589844, 0.02133078384399414, 0.021299264907836915, 0.0213370246887207, 0.02130636787414551, 0.02127462387084961, 0.02131059265136719, 0.021405599594116212, 0.020952192306518555, 0.02130624008178711, 0.021290912628173828, 0.021399648666381835, 
0.021413888931274414, 0.02151356887817383, 0.02130169677734375, 0.02143254470825195, 0.02122547149658203, 0.021215232849121093, 0.021376096725463867, 0.021252960205078126, 0.021239871978759765, 0.021413440704345702, 0.02124435234069824, 0.02112512016296387, 0.021212160110473634, 0.021206016540527343, 0.021235071182250976, 0.021238399505615235, 0.021321727752685548, 0.02134121513366699, 0.02202524757385254, 0.02237843132019043, 0.021837472915649414, 0.021713247299194337, 0.021507551193237304, 0.02140332794189453, 0.021338111877441408, 0.0215797119140625, 0.021490591049194336, 0.02143177604675293, 0.025245824813842774, 0.02139792060852051, 0.021419551849365233, 0.02140208053588867, 0.021377023696899415, 0.02124595260620117, 0.02127872085571289, 0.021243904113769533, 0.021208576202392578, 0.02150169563293457, 0.02126006317138672, 0.021238752365112305, 0.021665599822998045, 0.02137660789489746, 0.021264991760253905, 0.021379072189331053, 0.021213184356689452, 0.021204992294311522, 0.02124595260620117, 0.021178304672241213, 0.021965919494628908, 0.02206937599182129, 0.022513952255249024, 0.02148204803466797, 0.021298879623413085, 0.021582143783569336, 0.021303295135498047, 0.021302976608276368, 0.02194384002685547, 0.021410591125488283, 0.021354496002197267, 0.02090598487854004, 0.021147647857666017, 0.021131263732910157, 0.021311296463012695, 0.021257408142089845, 0.02118966484069824, 0.021187616348266602, 0.021095359802246094, 0.021296735763549804, 0.021409791946411134, 0.021262144088745116, 0.021362815856933594, 0.02134614372253418, 0.021114879608154297, 0.021236352920532227, 0.021571584701538086, 0.026343360900878906, 0.02183788871765137, 0.021646751403808593, 0.021422592163085938, 0.021389408111572264, 0.021722335815429688, 0.02177872085571289, 0.021418495178222655, 0.021377023696899415, 0.02129100799560547, 0.021243904113769533, 0.021339744567871095, 0.021247520446777343, 0.021220224380493164, 0.021319679260253906, 0.021243743896484375, 0.02117238426208496, 0.021356544494628905, 0.0225218563079834, 0.021720895767211913, 0.021468416213989257, 0.02156025505065918, 0.0213723201751709, 0.021313663482666015, 0.021328351974487306, 0.021699871063232422, 0.021481472015380858, 0.021301984786987305, 0.021317632675170898, 0.02130496025085449, 0.02161712074279785, 0.021510047912597655, 0.021297088623046877, 0.021435455322265626, 0.02135321617126465, 0.021383424758911133, 0.02144256019592285, 0.021313343048095703, 0.021229759216308593, 0.021493696212768556, 0.021548255920410157, 0.021646175384521484, 0.021510015487670897, 0.021411968231201173, 0.021720287322998046, 0.02158252716064453, 0.02148182487487793, 0.02103091239929199, 0.02153654479980469, 0.021522655487060546, 0.021663808822631837, 0.021450687408447265, 0.02155708885192871, 0.021665952682495118, 0.021436416625976562, 0.021952384948730468, 0.02170857620239258, 0.021691936492919922, 0.021551296234130858, 0.021505727767944335, 0.021633695602416993, 0.02150831985473633, 0.021268543243408204, 0.02205683135986328, 0.02187788772583008, 0.02141900825500488, 0.021336063385009766, 0.021448415756225588, 0.021436704635620116, 0.02132374382019043, 0.02122979164123535, 0.021370687484741212, 0.021608448028564452, 0.02186057662963867, 0.021749088287353516, 0.021592512130737304, 0.021597312927246093, 0.021597055435180663, 0.021651456832885742, 0.02145280075073242, 0.021195968627929686, 0.021359424591064453, 0.02122956848144531, 0.02122547149658203, 0.021208351135253906, 0.021267168045043944, 0.02124185562133789, 0.02143596839904785, 0.021629695892333985, 
0.02132054328918457, 0.02133087921142578, 0.021356000900268554, 0.021438623428344728, 0.021339584350585937, 0.021453216552734376, 0.021377471923828124, 0.021698144912719725, 0.021485984802246092, 0.021315584182739256, 0.021380416870117186, 0.021342912673950196, 0.021306400299072267, 0.021269472122192382, 0.02127020835876465, 0.021225183486938477, 0.02124220848083496, 0.02125644874572754, 0.021321727752685548, 0.021312704086303712, 0.021277376174926758, 0.02118492889404297, 0.021561344146728514, 0.021549055099487305, 0.021307743072509766, 0.021263872146606445, 0.02130668830871582, 0.021815967559814454, 0.02876025581359863, 0.02145039939880371, 0.021473535537719725, 0.021507328033447265, 0.021248863220214843, 0.02122889518737793, 0.02126892852783203, 0.021532896041870118, 0.02151628875732422, 0.021327871322631836, 0.021396991729736328, 0.021278528213500975, 0.022399551391601564, 0.021446399688720703, 0.021229759216308593, 0.021256128311157228, 0.02143257522583008, 0.02136515235900879, 0.02139299201965332, 0.021372928619384765, 0.02126028823852539, 0.02136636734008789, 0.021350816726684572, 0.021327871322631836, 0.021336063385009766, 0.021410911560058594, 0.02144534492492676, 0.021442176818847657, 0.02145337677001953, 0.02133795166015625, 0.021279232025146484, 0.022124191284179688, 0.02256924819946289, 0.02160611152648926, 0.021407743453979493, 0.021309440612792968, 0.021204992294311522, 0.021528543472290038, 0.021796607971191408, 0.022412832260131837, 0.021838623046875, 0.021879871368408202, 0.021707679748535155, 0.02140771293640137, 0.021536800384521486, 0.02149616050720215, 0.021366432189941408, 0.021336063385009766, 0.021193727493286133, 0.02129622459411621, 0.021338016510009765, 0.02147327995300293, 0.021661056518554687, 0.021492351531982423, 0.02130732727050781, 0.021278783798217772, 0.02151219177246094, 0.021647232055664063, 0.021543039321899413, 0.02147942352294922, 0.021395456314086913, 0.021596160888671875, 0.021420032501220702, 0.021321727752685548, 0.021310815811157225, 0.02123638343811035, 0.021198848724365234, 0.021168127059936523, 0.021462432861328123, 0.02162508773803711, 0.021428575515747072, 0.02161769676208496, 0.021523199081420898, 0.021730623245239257, 0.021402399063110353, 0.021304864883422852, 0.02126860809326172, 0.02131769561767578, 0.021249631881713867, 0.02128339195251465, 0.021258432388305663, 0.02125212860107422, 0.021361888885498045, 0.021441696166992187, 0.02145449638366699, 0.021415807723999022, 0.02125222396850586, 0.02117750358581543, 0.02134841537475586, 0.02145359992980957, 0.02122547149658203, 0.021327199935913085, 0.021357215881347657, 0.021188959121704102, 0.021443296432495117, 0.02119980812072754, 0.021230720520019532, 0.021437311172485353, 0.021334016799926758, 0.021301248550415038, 0.021204992294311522, 0.021347360610961916, 0.021293695449829102, 0.02139904022216797, 0.0213656005859375, 0.021335391998291015, 0.0213592643737793, 0.021369951248168945, 0.02143734359741211, 0.021372928619384765, 0.02390425682067871, 0.02165760040283203, 0.02142207908630371, 0.021411008834838867, 0.02127881622314453, 0.02142076873779297, 0.02136003112792969, 0.02129680061340332, 0.02125715255737305, 0.021032960891723632, 0.021559328079223634, 0.02142633628845215, 0.0211777286529541, 0.02126892852783203, 0.0212541446685791, 0.021552255630493164, 0.02146329689025879, 0.021227359771728516, 0.02118934440612793, 0.02126582336425781, 0.021340160369873046, 0.021207712173461915, 0.021180416107177736, 0.021187583923339845, 0.021139936447143556, 0.021217567443847656, 
0.021160192489624023, 0.021229248046875, 0.021233983993530273, 0.021309440612792968, 0.02123481559753418, 0.021245920181274414, 0.02121334457397461, 0.021309856414794923, 0.021303647994995116, 0.021565376281738283, 0.021469247817993163, 0.02142207908630371, 0.02134966468811035, 0.021412448883056642, 0.021150848388671876, 0.021238784790039062, 0.021182464599609374, 0.021202272415161132, 0.021325855255126952, 0.021299840927124024, 0.021219327926635743, 0.021472383499145507, 0.02142838478088379, 0.021471935272216795, 0.021546144485473633, 0.021367679595947264, 0.021387264251708983, 0.021481472015380858, 0.021450143814086914, 0.021316192626953126, 0.021307392120361326, 0.021729183197021485, 0.02168022346496582, 0.021317632675170898, 0.02138697624206543, 0.021297439575195313, 0.02121891212463379, 0.021203359603881835, 0.02125209617614746, 0.021301248550415038, 0.021526496887207033, 0.021347551345825194, 0.021371711730957033, 0.021265472412109375, 0.02141484832763672, 0.02129715156555176]",tokens/s,46.49802196587614,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,4412.592128,4707.975168,0.0,4305.453056,4304.491008,s,1,10.4071025390625,10.4071025390625,0.0,10.4071025390625,10.4071025390625,10.4071025390625,10.4071025390625,[10.4071025390625],,kWh,9.888320817079451e-05,1.0900108380933137e-05,3.075863571799786e-05,0.0001405419522697255,,MB,4447.096832,4829.609984,0.0,4412.407808,4373.842432,s,10,1.9709659881591797,0.19709659881591796,0.0009136969979040734,0.1969654235839844,0.19806541290283203,0.19866162033081053,0.19913858627319336,"[0.19614093017578124, 0.19593606567382812, 0.19677565002441405, 0.19731097412109375, 0.19689251708984376, 0.197038330078125, 0.19639730834960936, 0.19793292236328125, 0.19925782775878906, 0.19728346252441406]",tokens/s,1298.8554928798947,kWh,5.804917726633897e-06,6.401814714006342e-07,3.830460580705801e-06,1.0275559778740331e-05,tokens/kWh,24913484.570412643,MB,4457.79968,4844.290048,0.0,4429.185024,4373.844992,s,10,18.095225463867187,1.809522546386719,0.009216393917116204,1.8056864013671876,1.8201615112304688,1.8234003356933592,1.825991395263672,"[1.8029951171875, 1.7980750732421875, 1.8045372314453125, 1.799474853515625, 1.8036649169921875, 1.82663916015625, 1.8194417724609375, 1.81436328125, 
1.819198486328125, 1.8068355712890625]",tokens/s,34.81581377684368,kWh,5.268680201961281e-05,5.809376723954254e-06,3.465118131569406e-05,9.314736005926111e-05,tokens/kWh,676347.6706148074,,s,630,18.09113057518008,0.028716080278063577,0.0004462183527656419,0.028671728134155274,0.029064616775512697,0.029322348499298094,0.030622859077453622,"[0.02946393585205078, 0.028597375869750977, 0.028632736206054686, 0.028476608276367187, 0.028307424545288087, 0.028375648498535157, 0.02865567970275879, 0.0285546875, 0.028598976135253907, 0.02841334342956543, 0.02822547149658203, 0.028336959838867186, 0.02817433547973633, 0.028209375381469726, 0.02847427177429199, 0.02879372787475586, 0.028568927764892577, 0.028551136016845703, 0.028836511611938478, 0.028641536712646486, 0.0288305606842041, 0.028593088150024416, 0.028494976043701173, 0.02848761558532715, 0.028427200317382814, 0.029197471618652344, 0.02913190460205078, 0.02892361640930176, 0.028579200744628906, 0.028725183486938477, 0.02864303970336914, 0.02860700798034668, 0.029428159713745117, 0.028729343414306642, 0.029278207778930664, 0.028614656448364258, 0.02866156768798828, 0.028747648239135743, 0.02881977653503418, 0.029207807540893554, 0.02878335952758789, 0.02889727973937988, 0.028888639450073243, 0.02876255989074707, 0.028511520385742187, 0.028545759201049806, 0.02853887939453125, 0.028360383987426758, 0.02836307144165039, 0.028266496658325195, 0.028262367248535158, 0.028227615356445312, 0.028294303894042968, 0.02825916862487793, 0.028317888259887694, 0.028462688446044923, 0.02836092758178711, 0.02865670394897461, 0.0290129280090332, 0.02864134407043457, 0.02837411117553711, 0.028326751708984375, 0.028309503555297853, 0.03039289665222168, 0.028694528579711914, 0.028463104248046874, 0.02839049530029297, 0.028502527236938476, 0.028312063217163085, 0.028385183334350587, 0.028317792892456055, 0.028370431900024414, 0.028424575805664064, 0.02823129653930664, 0.028213663101196287, 0.02826406478881836, 0.02833036804199219, 0.028399072647094726, 0.028469791412353517, 0.0285614070892334, 0.028442623138427735, 0.02832383918762207, 0.028264448165893553, 0.02838118362426758, 0.028246015548706056, 0.02838118362426758, 0.0286167049407959, 0.031107072830200196, 0.029613183975219726, 0.029070175170898438, 0.02862918472290039, 0.02831955146789551, 0.028676000595092774, 0.02851443290710449, 0.028721151351928712, 0.028803071975708007, 0.02850201606750488, 0.028917760848999025, 0.02864681625366211, 0.028389055252075194, 0.02851468849182129, 0.02830771255493164, 0.028247392654418946, 0.028416959762573243, 0.028494943618774415, 0.02839369583129883, 0.02845977592468262, 0.028514015197753907, 0.028352319717407228, 0.028303775787353515, 0.02845280075073242, 0.028495935440063475, 0.028293119430541993, 0.02828825569152832, 0.028271360397338866, 0.028430335998535155, 0.02816409683227539, 0.028172128677368163, 0.028340608596801757, 0.028309280395507813, 0.028880704879760744, 0.02835196876525879, 0.028601055145263673, 0.028315071105957032, 0.02835103988647461, 0.028338176727294922, 0.0283907527923584, 0.02832057571411133, 0.0284117431640625, 0.028726879119873046, 0.028288543701171873, 0.02821209526062012, 0.028274879455566407, 0.028531776428222657, 0.028467615127563475, 0.02839353561401367, 0.0282258243560791, 0.028450111389160156, 0.028340991973876954, 0.02824287986755371, 0.028247039794921876, 0.028216320037841795, 0.0283637752532959, 0.02853887939453125, 0.028434431076049805, 0.028397535324096678, 0.02870582389831543, 0.02851737594604492, 0.02879897689819336, 
0.028670976638793946, 0.028803712844848634, 0.028600032806396485, 0.028760480880737304, 0.029258207321166994, 0.028673824310302735, 0.028509504318237306, 0.028517248153686524, 0.028446527481079103, 0.02856755256652832, 0.028437728881835937, 0.028658464431762697, 0.0290119686126709, 0.02841596794128418, 0.028635103225708006, 0.02850003242492676, 0.028516351699829103, 0.03160883140563965, 0.02870444869995117, 0.029512351989746093, 0.028698272705078125, 0.028882272720336916, 0.029406015396118163, 0.029026496887207032, 0.02852739143371582, 0.028696575164794923, 0.02853977584838867, 0.028505311965942384, 0.02843507194519043, 0.028551071166992188, 0.028824960708618164, 0.028486528396606446, 0.02876006317138672, 0.029655040740966795, 0.02857164764404297, 0.028524415969848633, 0.028538591384887697, 0.02835830307006836, 0.0284003849029541, 0.02852835273742676, 0.029204544067382814, 0.02853071975708008, 0.028554271697998047, 0.02846774482727051, 0.02958582305908203, 0.02838025665283203, 0.02850864028930664, 0.028477888107299804, 0.02855731201171875, 0.028595455169677736, 0.02876406478881836, 0.028566368103027345, 0.028516319274902342, 0.028624927520751953, 0.028360992431640624, 0.028357887268066408, 0.028391071319580078, 0.028435264587402344, 0.028445920944213866, 0.028449567794799804, 0.02860032081604004, 0.028438528060913085, 0.02835251235961914, 0.028803071975708007, 0.028645376205444335, 0.028493824005126952, 0.028485343933105468, 0.028391712188720702, 0.028266496658325195, 0.028317695617675782, 0.028227584838867188, 0.0282739200592041, 0.028555103302001953, 0.02833500862121582, 0.028684288024902343, 0.028478944778442383, 0.029341344833374024, 0.028711519241333007, 0.02865385627746582, 0.02831155204772949, 0.02836275291442871, 0.02840707206726074, 0.028357343673706056, 0.02859769630432129, 0.028717119216918944, 0.028579744338989257, 0.02871766471862793, 0.028405567169189454, 0.0286680965423584, 0.02852457618713379, 0.02841609573364258, 0.02840972709655762, 0.028483583450317384, 0.028345439910888674, 0.028418560028076172, 0.028495296478271485, 0.028635744094848634, 0.028541311264038086, 0.0287127685546875, 0.02874998474121094, 0.028667488098144532, 0.028762432098388673, 0.029046911239624024, 0.029714431762695313, 0.028680192947387696, 0.028563360214233398, 0.02875014305114746, 0.02873107147216797, 0.02875811195373535, 0.028270591735839845, 0.028360000610351564, 0.028693119049072267, 0.02865567970275879, 0.028365888595581056, 0.028490688323974608, 0.028629024505615233, 0.028375072479248045, 0.02844281578063965, 0.028294912338256838, 0.028276063919067382, 0.028395904541015624, 0.02830364799499512, 0.028328224182128905, 0.0284498233795166, 0.02837936019897461, 0.028239519119262695, 0.02841254425048828, 0.02866921615600586, 0.02868899154663086, 0.029253952026367186, 0.029099872589111328, 0.02871721649169922, 0.028919807434082033, 0.028645376205444335, 0.028588031768798827, 0.02857164764404297, 0.02919424057006836, 0.029081472396850584, 0.02876006317138672, 0.02868176078796387, 0.028612831115722655, 0.0285166072845459, 0.028661888122558595, 0.028618751525878908, 0.028641376495361328, 0.028471200942993165, 0.02851430320739746, 0.02884739112854004, 0.02885215950012207, 0.028936479568481447, 0.028941919326782226, 0.028930976867675783, 0.028676095962524413, 0.028491584777832032, 0.028354719161987306, 0.0283156795501709, 0.028254207611083985, 0.028252063751220705, 0.0283853759765625, 0.028294431686401368, 0.028334815979003905, 0.028254207611083985, 0.02821286392211914, 0.028268896102905273, 
0.030709632873535155, 0.028592287063598634, 0.02954745674133301, 0.02903366470336914, 0.029096799850463866, 0.028909536361694337, 0.028846080780029298, 0.028722463607788087, 0.028732128143310547, 0.02898044776916504, 0.028936895370483398, 0.028847328186035158, 0.028789344787597655, 0.028790271759033204, 0.028865312576293944, 0.028915712356567383, 0.02892799949645996, 0.02905404853820801, 0.028973983764648437, 0.029097984313964844, 0.028870176315307618, 0.03042937660217285, 0.0290982723236084, 0.02897667121887207, 0.028738016128540038, 0.02886649513244629, 0.028741695404052733, 0.029265920639038087, 0.030373888015747072, 0.028976991653442384, 0.028884864807128905, 0.028916000366210937, 0.028855327606201173, 0.028844255447387696, 0.029031167984008788, 0.02911027145385742, 0.028948480606079102, 0.029654048919677736, 0.028988384246826173, 0.028959808349609376, 0.028928640365600587, 0.02877471923828125, 0.028839935302734376, 0.028734975814819336, 0.02876025581359863, 0.028672319412231445, 0.028821504592895508, 0.02919628715515137, 0.028880863189697265, 0.028960800170898436, 0.028829696655273438, 0.0288372802734375, 0.029014303207397462, 0.029053247451782227, 0.0288985595703125, 0.028684480667114258, 0.028813888549804687, 0.029005823135375978, 0.028770303726196288, 0.028886688232421874, 0.029356351852416994, 0.029074495315551757, 0.029002944946289064, 0.028837663650512695, 0.02875596809387207, 0.029176959991455077, 0.0289102725982666, 0.028779775619506835, 0.028867328643798828, 0.028852224349975586, 0.028702720642089844, 0.028662879943847655, 0.02873027229309082, 0.028682239532470705, 0.02931622314453125, 0.028733888626098634, 0.028671743392944336, 0.028705184936523437, 0.02878499221801758, 0.028729280471801757, 0.028719104766845704, 0.028821504592895508, 0.028762111663818358, 0.029040639877319335, 0.028870655059814454, 0.028637184143066406, 0.028683296203613283, 0.028627935409545897, 0.03236207962036133, 0.029163936614990234, 0.029327360153198243, 0.0289300479888916, 0.028856512069702148, 0.029063999176025392, 0.028876991271972657, 0.028853183746337892, 0.02893948745727539, 0.028967039108276367, 0.029137439727783203, 0.028686336517333984, 0.02874083137512207, 0.02873219108581543, 0.028763328552246094, 0.029532991409301757, 0.028386976242065428, 0.028381280899047852, 0.028379392623901368, 0.028348159790039063, 0.02875315284729004, 0.02858880043029785, 0.028797183990478516, 0.028788288116455077, 0.028793119430541993, 0.028801183700561523, 0.029014015197753908, 0.02892720031738281, 0.028853023529052734, 0.028848127365112306, 0.028845535278320313, 0.028959007263183595, 0.028701120376586915, 0.028827072143554688, 0.028774784088134765, 0.028867807388305664, 0.028730144500732423, 0.02875801658630371, 0.02878873634338379, 0.028788415908813477, 0.02926019287109375, 0.028880895614624022, 0.028729120254516603, 0.028689952850341798, 0.028791263580322267, 0.028688608169555666, 0.02876006317138672, 0.028657663345336915, 0.028661760330200195, 0.02875961685180664, 0.02863542366027832, 0.028410015106201173, 0.02829516792297363, 0.02838937568664551, 0.02851020812988281, 0.028624895095825196, 0.028722368240356445, 0.028822336196899414, 0.02885139274597168, 0.028838720321655274, 0.02876825523376465, 0.028825439453125, 0.028763551712036133, 0.028822240829467775, 0.02870275115966797, 0.029476863861083984, 0.02872319984436035, 0.028741600036621094, 0.028785919189453123, 0.028705568313598634, 0.02876620864868164, 0.028850080490112305, 0.028966527938842773, 0.028754400253295898, 0.02874777603149414, 
0.028751840591430666, 0.028723232269287108, 0.028725248336791992, 0.02967724800109863, 0.030701887130737304, 0.02896281623840332, 0.028759391784667968, 0.028846752166748046, 0.02853273582458496, 0.028602367401123048, 0.028948320388793945, 0.028815488815307617, 0.028778528213500975, 0.028675104141235353, 0.028726112365722655, 0.029071487426757813, 0.02862460708618164, 0.028618816375732423, 0.028627168655395507, 0.028843488693237305, 0.02865206336975098, 0.028712352752685546, 0.02881760025024414, 0.028676511764526368, 0.028690431594848635, 0.028755680084228515, 0.028671712875366212, 0.028666431427001954, 0.02939072036743164, 0.02877903938293457, 0.028631040573120117, 0.028655359268188477, 0.028734848022460936, 0.028542112350463868, 0.02852412796020508, 0.028585792541503906, 0.028514495849609377, 0.028547199249267578, 0.028675680160522462, 0.028690847396850586, 0.028649023056030273, 0.028584384918212892, 0.02855526351928711, 0.028479488372802734, 0.02856345558166504, 0.0285467529296875, 0.028719327926635743, 0.028517663955688475, 0.028737375259399414, 0.028750591278076172, 0.029046079635620118, 0.02876883125305176, 0.02876652717590332, 0.029056800842285156, 0.028755264282226564, 0.02867910385131836, 0.0287825927734375, 0.02875187110900879, 0.02878054428100586, 0.02910380744934082, 0.029097471237182617, 0.033148929595947264, 0.030184255599975587, 0.0293287353515625, 0.029296384811401368, 0.029313919067382812, 0.030017248153686525, 0.02878054428100586, 0.02880143928527832, 0.02874972724914551, 0.028779903411865235, 0.028807680130004884, 0.028630912780761717, 0.02880460739135742, 0.028766687393188477, 0.028808544158935547, 0.028727872848510742, 0.02877894401550293, 0.02861248016357422, 0.028763391494750976, 0.028775232315063477, 0.028738784790039062, 0.028592927932739258, 0.02857779121398926, 0.028839807510375976, 0.028700799942016603, 0.028690431594848635, 0.028761632919311525, 0.02891823959350586, 0.028639232635498047, 0.028590080261230468, 0.029496448516845703, 0.029002111434936525, 0.02890729522705078, 0.02914179229736328, 0.028856576919555663, 0.028772031784057617, 0.02856755256652832, 0.028886911392211913, 0.028852256774902343, 0.028967008590698243, 0.029059328079223633, 0.02930240058898926, 0.028999807357788086, 0.028893503189086914, 0.028724128723144532, 0.028580640792846678, 0.028507808685302734, 0.02856585693359375, 0.02837299156188965, 0.028329984664916992, 0.02833612823486328, 0.028392704010009765, 0.028525312423706053, 0.028495199203491212, 0.028719743728637694, 0.028446752548217772, 0.028688480377197265, 0.028498111724853517, 0.02846870422363281, 0.028242176055908202, 0.028245023727416992, 0.02863532829284668, 0.02858255958557129, 0.028450431823730468, 0.028386911392211913, 0.028334911346435548, 0.028359039306640625, 0.028300287246704102, 0.029175968170166017, 0.028461631774902345, 0.02844198417663574, 0.02838387107849121, 0.028848127365112306, 0.02879283142089844, 0.028903423309326173, 0.029051935195922852, 0.028879615783691408, 0.028528863906860352, 0.028431392669677733, 0.028400224685668947, 0.028399999618530274, 0.028471296310424804, 0.028379135131835938, 0.02832383918762207, 0.028290143966674806, 0.02826684761047363, 0.028320320129394533, 0.028413951873779295, 0.030315967559814454, 0.030834335327148438, 0.028619680404663086, 0.028395647048950194, 0.028331775665283204]",tokens/s,34.82369426177943,, 
4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in <listcomp> self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = 
DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: DeciLMForCausalLM does not support an attention implementation through 
torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,4410.59328,4707.975168,0.0,4305.453056,4304.491008,s,1,10.421193359375,10.421193359375,0.0,10.421193359375,10.421193359375,10.421193359375,10.421193359375,[10.421193359375],,kWh,9.875442427084332e-05,1.0885916664281946e-05,3.108863598200173e-05,0.000140728976917127,,MB,4271.702016,4829.609984,0.0,4412.407808,4373.842432,s,10,1.9635379180908201,0.19635379180908202,0.0005258069885088617,0.1964731216430664,0.1968551834106445,0.19693533554077147,0.19699945724487306,"[0.19682847595214845, 0.1953652801513672, 0.19582838439941405, 0.19660870361328125, 0.19615023803710938, 0.19683737182617186, 0.19633753967285156, 0.1967821502685547, 0.19578428649902344, 0.19701548767089844]",tokens/s,1303.7690672605547,kWh,5.797203101552838e-06,6.393265815337422e-07,3.8559889235683555e-06,1.0292518606654937e-05,tokens/kWh,24872434.99705461,MB,4281.012224,4844.290048,0.0,4429.185024,4373.844992,s,10,20.827313964843754,2.082731396484375,0.29721883058390564,2.089891540527344,2.3863911132812503,2.3877281494140625,2.3887977783203125,"[2.370843505859375, 2.37403466796875, 2.386093994140625, 2.378898193359375, 2.389065185546875, 1.8089395751953126, 1.783486572265625, 1.78332177734375, 1.775119140625, 1.7775113525390625]",tokens/s,30.248739758925815,kWh,6.056855987594817e-05,6.680083376092295e-06,3.893257308503296e-05,0.00010618121633707344,tokens/kWh,593325.2808105514,,s,630,20.824298826217653,0.03305444258129786,0.004756487799541903,0.03701318359375,0.03809029388427734,0.03827253875732422,0.03943698825836183,"[0.03786547088623047, 0.03763814544677734, 0.03753779220581055, 0.03745177459716797, 0.037787647247314454, 0.037314239501953124, 0.03743388748168945, 0.03732601547241211, 0.03728140640258789, 0.03735446548461914, 0.037294078826904296, 0.03783683013916016, 0.03791254425048828, 0.03805759811401367, 0.03802355194091797, 0.037967201232910155, 0.03782624053955078, 0.03864675140380859, 0.03816812896728516, 0.038021568298339845, 0.03821526336669922, 0.03919692611694336, 0.03794291305541992, 0.03755062484741211, 0.03750883102416992, 0.037251232147216796, 0.037400703430175784, 0.03747443389892578, 0.03750428771972656, 0.037706336975097655, 0.03740585708618164, 0.037619873046875, 0.03742380905151367, 0.03741491317749023, 0.03749478530883789, 0.037455646514892575, 0.03750048065185547, 0.03780207824707031, 
0.037298465728759764, 0.037488929748535155, 0.03743743896484375, 0.03781596755981445, 0.03772041702270508, 0.037466110229492186, 0.03760108947753906, 0.037437633514404295, 0.03728339385986328, 0.03953504180908203, 0.039732673645019534, 0.037795841217041014, 0.0372374382019043, 0.03716080093383789, 0.037027168273925784, 0.03711660766601563, 0.03717529678344727, 0.03730979156494141, 0.0370797119140625, 0.037292030334472655, 0.03717529678344727, 0.037107681274414064, 0.03742723083496094, 0.037197662353515626, 0.03699919891357422, 0.03759462356567383, 0.03716339111328125, 0.037406177520751954, 0.03751103973388672, 0.037048286437988284, 0.03732342529296875, 0.03714678573608399, 0.037101566314697264, 0.03742323303222656, 0.0371671028137207, 0.0372017936706543, 0.03714572906494141, 0.0374257926940918, 0.037290271759033204, 0.03736163330078125, 0.037410655975341794, 0.037226657867431644, 0.03742281723022461, 0.0373455696105957, 0.037209407806396484, 0.03733369445800781, 0.03722256088256836, 0.03708671951293945, 0.037196128845214844, 0.037104736328125, 0.03716188812255859, 0.03739033508300781, 0.038136993408203125, 0.037731040954589845, 0.038056095123291014, 0.03779955291748047, 0.03779619216918945, 0.03809280014038086, 0.03817881774902344, 0.03790419387817383, 0.03824415969848633, 0.03805427169799805, 0.03787980651855469, 0.03783852767944336, 0.03782451248168945, 0.03813558578491211, 0.03821622467041016, 0.037953536987304685, 0.038182849884033206, 0.038260799407958984, 0.03827711868286133, 0.03814604949951172, 0.03805184173583984, 0.03781024169921875, 0.03765580749511719, 0.03762041473388672, 0.0376360969543457, 0.03781017684936523, 0.037522750854492186, 0.03754207992553711, 0.04001228713989258, 0.03809001541137695, 0.03794515228271484, 0.03779062271118164, 0.037978111267089845, 0.03769651031494141, 0.03752576065063477, 0.03782118225097656, 0.037974689483642576, 0.037806079864501956, 0.03781619262695313, 0.037602432250976564, 0.037663806915283204, 0.038295745849609375, 0.0383377914428711, 0.03789875030517578, 0.03797372817993164, 0.038018943786621094, 0.037775295257568356, 0.03768368148803711, 0.03836502456665039, 0.039542465209960936, 0.041674686431884767, 0.037959327697753904, 0.03765321731567383, 0.0377017936706543, 0.037676288604736326, 0.03810153579711914, 0.037928256988525394, 0.037553054809570316, 0.0377341423034668, 0.0375048942565918, 0.03885055923461914, 0.03757696151733399, 0.03793916702270508, 0.038058143615722656, 0.03823321533203125, 0.038088768005371094, 0.03808748626708985, 0.03768320083618164, 0.037713920593261716, 0.03814329528808594, 0.038129375457763674, 0.03881833648681641, 0.03828531265258789, 0.038950782775878906, 0.03822463989257813, 0.03785299301147461, 0.038438175201416014, 0.037862239837646486, 0.038801280975341794, 0.03747635269165039, 0.03747430419921875, 0.03733110427856445, 0.037257057189941406, 0.037416961669921874, 0.037492256164550784, 0.037302753448486325, 0.03721830368041992, 0.037230209350585936, 0.03723836898803711, 0.037280544281005856, 0.03751289749145508, 0.037079296112060546, 0.03715488052368164, 0.03721567916870117, 0.03708121490478516, 0.03734764862060547, 0.03731232070922851, 0.0372165756225586, 0.03719347381591797, 0.03834614562988281, 0.03819785690307617, 0.03789801788330078, 0.037402847290039065, 0.03768729782104492, 0.0376866569519043, 0.038163070678710935, 0.03853251266479492, 0.03847433471679688, 0.03804374313354492, 0.03762371063232422, 0.03751116943359375, 0.03751702499389648, 0.037424671173095704, 0.03738291168212891, 0.03730031967163086, 
0.03776835250854492, 0.03786419296264648, 0.037555999755859375, 0.03742041778564453, 0.037415775299072265, 0.0377262077331543, 0.03828326416015625, 0.038254398345947266, 0.03836348724365234, 0.03851772689819336, 0.0379153938293457, 0.03778575897216797, 0.037791713714599606, 0.03754179382324219, 0.03738428878784179, 0.03735551834106445, 0.03736371231079102, 0.037359169006347656, 0.03734163284301758, 0.037644256591796876, 0.03825398254394531, 0.03755286407470703, 0.03757660675048828, 0.03750003051757812, 0.03819510269165039, 0.03748124694824219, 0.03732092666625977, 0.03744764709472656, 0.037722110748291016, 0.037560417175292966, 0.03754998397827149, 0.03751107025146484, 0.03773564910888672, 0.03766563034057617, 0.03754576110839844, 0.037814529418945315, 0.03751523208618164, 0.04087811279296875, 0.037625694274902345, 0.037900062561035154, 0.0374329605102539, 0.03771775817871094, 0.03727052688598633, 0.037643871307373046, 0.037454238891601564, 0.03749068832397461, 0.03740671920776367, 0.03812457656860352, 0.03775382232666016, 0.037463871002197266, 0.03727788925170898, 0.0373737907409668, 0.03739254379272461, 0.03751321411132812, 0.037528705596923825, 0.03743420791625977, 0.03770479965209961, 0.03774496078491211, 0.03773308944702149, 0.03781798553466797, 0.038580703735351565, 0.039567169189453126, 0.037996543884277346, 0.037689342498779296, 0.037763072967529294, 0.03783814239501953, 0.03793142318725586, 0.038131839752197264, 0.03773468780517578, 0.03785820770263672, 0.03840918350219726, 0.03825254440307617, 0.038335742950439455, 0.03826748657226563, 0.03824873733520508, 0.03807424163818359, 0.03792099380493164, 0.03808540725708008, 0.03809516906738281, 0.03808121490478516, 0.038265918731689455, 0.038238849639892575, 0.03812313461303711, 0.03857888031005859, 0.03794124984741211, 0.03773583984375, 0.03831254577636719, 0.03827667236328125, 0.03876499176025391, 0.03787558364868164, 0.03792268753051758, 0.037818622589111325, 0.037773311614990236, 0.03792844772338867, 0.03793561553955078, 0.037863040924072264, 0.037882080078125, 0.03793116760253906, 0.037599071502685544, 0.037654689788818356, 0.037738494873046875, 0.0377704963684082, 0.03755219268798828, 0.037562175750732424, 0.03769232177734375, 0.03766032028198242, 0.03780230331420899, 0.037803680419921874, 0.037513633728027344, 0.03751222229003906, 0.03813792037963867, 0.028981311798095703, 0.02886854362487793, 0.028608352661132812, 0.029390335083007812, 0.02878704071044922, 0.028376991271972657, 0.028670656204223634, 0.028315359115600586, 0.028471200942993165, 0.028643423080444336, 0.028556480407714843, 0.02881577682495117, 0.02863350486755371, 0.02854911994934082, 0.028434431076049805, 0.029191871643066407, 0.028456703186035156, 0.028338720321655273, 0.02838479995727539, 0.02828748893737793, 0.02839756774902344, 0.0282541446685791, 0.028473407745361327, 0.028688383102416993, 0.028305408477783203, 0.02841788864135742, 0.029312288284301758, 0.02853923225402832, 0.028479839324951173, 0.028175615310668947, 0.028766624450683592, 0.028636863708496094, 0.028402528762817382, 0.02885203170776367, 0.0285861759185791, 0.028696575164794923, 0.028804447174072264, 0.02852931213378906, 0.02852249526977539, 0.02833203125, 0.028130495071411132, 0.028136255264282227, 0.028182079315185547, 0.0282293758392334, 0.02869264030456543, 0.028605056762695313, 0.028682336807250977, 0.028611711502075195, 0.028436672210693358, 0.029339935302734373, 0.028641183853149413, 0.028082496643066408, 0.02820089530944824, 0.028155967712402342, 0.028296192169189452, 
0.02810982322692871, 0.028357919692993165, 0.028402399063110352, 0.028276575088500976, 0.02830761528015137, 0.0288536319732666, 0.029827327728271485, 0.028917856216430664, 0.02845827293395996, 0.028403520584106445, 0.028291999816894533, 0.02863225555419922, 0.028111679077148437, 0.02815385627746582, 0.02804694366455078, 0.027911712646484375, 0.027971519470214843, 0.027943872451782228, 0.028023839950561524, 0.02800326347351074, 0.02806524848937988, 0.02802364730834961, 0.028178144454956054, 0.02899692726135254, 0.02858857536315918, 0.028417312622070312, 0.028232576370239258, 0.02830486488342285, 0.028355104446411133, 0.02826268768310547, 0.028237632751464844, 0.02819424057006836, 0.02813385581970215, 0.028712160110473634, 0.029105247497558592, 0.03048819160461426, 0.028235776901245117, 0.02820102310180664, 0.028184576034545897, 0.028321247100830078, 0.028160543441772462, 0.029286399841308593, 0.028583904266357422, 0.028223520278930665, 0.028224863052368165, 0.028114816665649415, 0.028211839675903322, 0.028680351257324217, 0.02834748840332031, 0.02810563278198242, 0.028073984146118162, 0.028137279510498048, 0.028082368850708007, 0.028213151931762694, 0.028282880783081055, 0.028204736709594728, 0.02807644844055176, 0.028198623657226564, 0.027959583282470703, 0.029050655364990234, 0.028053728103637696, 0.028069952011108398, 0.028001440048217772, 0.028230016708374023, 0.028166336059570314, 0.02814588737487793, 0.028279935836791992, 0.028033824920654298, 0.02799420738220215, 0.02790729522705078, 0.028239679336547852, 0.028242143630981445, 0.029321151733398436, 0.03182723236083984, 0.02815683174133301, 0.028481536865234375, 0.02840575981140137, 0.02811248016357422, 0.02816655921936035, 0.02808367919921875, 0.027922975540161134, 0.02800230407714844, 0.02795699119567871, 0.02809878349304199, 0.02798918342590332, 0.02797654342651367, 0.028056991577148437, 0.02817084884643555, 0.028124704360961914, 0.02812928009033203, 0.027894111633300783, 0.028104448318481447, 0.028040864944458007, 0.02808835220336914, 0.028680896759033202, 0.02937779235839844, 0.030400447845458985, 0.02867897605895996, 0.030002496719360353, 0.02848124885559082, 0.028387615203857422, 0.028428991317749022, 0.028173887252807617, 0.028092863082885743, 0.028004352569580077, 0.028735488891601563, 0.028133119583129883, 0.02803536033630371, 0.027973600387573242, 0.027981407165527345, 0.027939039230346678, 0.02840595245361328, 0.02827231979370117, 0.028141536712646485, 0.028023136138916015, 0.02812723159790039, 0.028003328323364256, 0.02788844871520996, 0.02800624084472656, 0.02793641662597656, 0.0279266242980957, 0.027965408325195312, 0.027972223281860352, 0.02790399932861328, 0.027963392257690428, 0.02788764762878418, 0.027897823333740236, 0.028028928756713867, 0.02786463928222656, 0.028138111114501953, 0.028294143676757814, 0.029105279922485353, 0.028184255599975585, 0.02822428894042969, 0.02850364875793457, 0.02816860771179199, 0.028108800888061523, 0.027951135635375976, 0.0280185604095459, 0.02798396873474121, 0.028461055755615236, 0.028227584838867188, 0.028159744262695314, 0.02806403160095215, 0.028056831359863282, 0.02805740737915039, 0.02814454460144043, 0.028280832290649413, 0.027967487335205078, 0.02799331283569336, 0.027939456939697266, 0.028022079467773436, 0.027911008834838866, 0.027926240921020508, 0.02803331184387207, 0.02802899169921875, 0.02797177505493164, 0.028251903533935547, 0.028304927825927733, 0.028100128173828124, 0.027960384368896484, 0.028060735702514647, 0.02823456001281738, 0.028012544631958007, 
0.028045536041259766, 0.02870249557495117, 0.02996633529663086, 0.028366464614868164, 0.02827017593383789, 0.028384159088134766, 0.028190591812133788, 0.028045183181762696, 0.028022911071777342, 0.028053279876708984, 0.027922655105590822, 0.028016096115112306, 0.028518335342407226, 0.028123680114746093, 0.028299583435058593, 0.028239679336547852, 0.02828486442565918, 0.028219392776489258, 0.02812668800354004, 0.0281646728515625, 0.02806492805480957, 0.028119871139526367, 0.028176128387451174, 0.027983264923095705, 0.027982816696166993, 0.0281409912109375, 0.028002048492431642, 0.028127552032470703, 0.028006784439086913, 0.028008447647094727, 0.028016319274902345, 0.029122880935668945, 0.028511871337890626, 0.02865760040283203, 0.028033439636230468, 0.028039327621459963, 0.028041088104248046, 0.027917631149291994, 0.028025535583496092, 0.028862079620361327, 0.0281298885345459, 0.028882080078125, 0.028179071426391603, 0.02813337516784668, 0.027914047241210938, 0.028233983993530273, 0.02805894470214844, 0.02806028747558594, 0.02814499282836914, 0.02820924758911133, 0.028109119415283202, 0.028084672927856446, 0.02851820755004883, 0.02822332763671875, 0.02814771270751953, 0.027959455490112306, 0.028092479705810545, 0.02817024040222168, 0.02817817687988281, 0.028242111206054688, 0.02817228889465332, 0.027925695419311523, 0.028266336441040037, 0.02812131118774414, 0.027976448059082032, 0.028032320022583008, 0.028529344558715822, 0.028733407974243164, 0.028162303924560546, 0.02892166328430176, 0.028196575164794922, 0.028007999420166015, 0.02799411201477051, 0.028050111770629882, 0.02816636848449707, 0.028213024139404297, 0.02872150421142578, 0.028657312393188476, 0.02830486488342285, 0.028105247497558595, 0.028090208053588868, 0.02826051139831543, 0.028151487350463866, 0.028041759490966798, 0.02804128074645996, 0.028003168106079102, 0.02805235290527344, 0.028135391235351564, 0.02811292839050293, 0.028004352569580077, 0.02804047966003418, 0.028090560913085937, 0.028129823684692384, 0.028917760848999025, 0.02812723159790039]",tokens/s,30.253119457104326,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = 
Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in <listcomp> self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: DeciLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in <listcomp> self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File 
""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,4399.464448,4707.975168,0.0,4305.453056,4304.491008,s,1,10.4375556640625,10.4375556640625,0.0,10.4375556640625,10.4375556640625,10.4375556640625,10.4375556640625,[10.4375556640625],,kWh,0.00010035908481667473,1.1062763752588356e-05,3.16041919499882e-05,0.0001430260405192513,,MB,4333.52704,4829.609984,0.0,4412.407808,4373.842432,s,10,1.971469039916992,0.1971469039916992,0.0012051284099117738,0.1968300094604492,0.19905128784179688,0.19922076568603517,0.1993563479614258,"[0.19614028930664062, 0.19901362609863282, 0.19662838745117187, 0.19564703369140626, 0.195801025390625, 0.19939024353027343, 0.19699551391601564, 0.19765420532226563, 0.1966645050048828, 0.19753421020507814]",tokens/s,1298.5240691924778,kWh,5.804839304901595e-06,6.400014898219934e-07,3.853085871137283e-06,1.0297926665860872e-05,tokens/kWh,24859372.988999557,MB,4342.403072,4846.3872,0.0,4429.185024,4373.844992,s,10,18.24494299316406,1.824494299316406,0.02397269952250489,1.810436096191406,1.8609276000976562,1.8610742126464843,1.8611915026855468,"[1.8047381591796876, 1.8015810546875, 1.807452392578125, 1.810733154296875, 1.848447509765625, 1.799188232421875, 1.86089501953125, 1.8612208251953124, 1.840547607421875, 1.8101390380859375]",tokens/s,34.530116111409384,kWh,5.2653107582178747e-05,5.806195810555981e-06,3.468183384566244e-05,9.31411372383972e-05,tokens/kWh,676392.8578491568,,s,630,18.241828424453722,0.02895528321341863,0.0006153523640659173,0.028787663459777832,0.02968755531311035,0.029953547477722168,0.031324677066802996,"[0.028867103576660155, 0.028528640747070313, 0.028407808303833007, 0.02839347267150879, 0.028429759979248046, 0.028910144805908203, 0.028788543701171874, 0.02867219161987305, 0.028676095962524413, 0.028669952392578125, 0.02856345558166504, 0.028636320114135742, 0.02854956817626953, 0.028741056442260743, 0.028617599487304687, 0.029266016006469726, 0.028897151947021485, 0.028678272247314455, 0.028712959289550782, 0.028485631942749022, 0.02855276870727539, 0.02860691261291504, 0.02909334373474121, 0.02856582450866699, 0.028478944778442383, 0.028762880325317382, 0.028475391387939454, 
0.028620128631591798, 0.02847577667236328, 0.0286395206451416, 0.028519487380981444, 0.028593088150024416, 0.028604415893554686, 0.028706111907958985, 0.028803775787353516, 0.028721151351928712, 0.028675968170166016, 0.028708992004394532, 0.028430335998535155, 0.028403711318969727, 0.028384319305419924, 0.028488000869750976, 0.0284268798828125, 0.028725248336791992, 0.028602367401123048, 0.02859401512145996, 0.02847145652770996, 0.028495136260986327, 0.028520448684692383, 0.028345056533813476, 0.02848089599609375, 0.028967552185058594, 0.028716543197631835, 0.028725120544433595, 0.02927804756164551, 0.029005983352661132, 0.028965503692626952, 0.028725248336791992, 0.028686336517333984, 0.028564767837524416, 0.02845529556274414, 0.028455263137817384, 0.028440576553344726, 0.029288127899169923, 0.02874809646606445, 0.028708864212036132, 0.028606464385986328, 0.028416000366210937, 0.02841379165649414, 0.02828303909301758, 0.02841747283935547, 0.028420223236083984, 0.028398015975952148, 0.02830681610107422, 0.02839206314086914, 0.02833203125, 0.02845827293395996, 0.028333824157714845, 0.028696863174438477, 0.02849247932434082, 0.028493824005126952, 0.02839094352722168, 0.028250656127929687, 0.028288192749023437, 0.028209856033325195, 0.028266559600830077, 0.02828451156616211, 0.028314016342163087, 0.029001728057861328, 0.028804384231567382, 0.02937446403503418, 0.02861129570007324, 0.02881331253051758, 0.028571359634399413, 0.028530975341796876, 0.028423967361450194, 0.028428064346313477, 0.028501632690429688, 0.02848627281188965, 0.02835465621948242, 0.029001792907714843, 0.028972223281860353, 0.028760927200317383, 0.028712959289550782, 0.028641279220581056, 0.03062579154968262, 0.028839935302734376, 0.028802175521850586, 0.028668800354003907, 0.028487136840820312, 0.02859257507324219, 0.028510303497314454, 0.028553216934204102, 0.028565216064453124, 0.028557600021362303, 0.02867196846008301, 0.02852854347229004, 0.028646560668945314, 0.028570592880249022, 0.028833696365356445, 0.028462207794189454, 0.02853167915344238, 0.028448768615722656, 0.028379135131835938, 0.0283951358795166, 0.028404096603393554, 0.029393632888793944, 0.028979103088378907, 0.028816543579101562, 0.028590496063232423, 0.02849990463256836, 0.028578208923339843, 0.02878678321838379, 0.030971904754638672, 0.03189555168151856, 0.028735488891601563, 0.02852454376220703, 0.028473344802856446, 0.028687583923339845, 0.02870147132873535, 0.028708864212036132, 0.02936953544616699, 0.02892678451538086, 0.028663808822631837, 0.02845891189575195, 0.028471391677856447, 0.028435455322265626, 0.028326400756835936, 0.028352224349975585, 0.028259103775024413, 0.028225215911865234, 0.02837500762939453, 0.028590431213378908, 0.028631040573120117, 0.028598079681396483, 0.028509952545166015, 0.028436735153198243, 0.02840390396118164, 0.028426015853881836, 0.028553247451782227, 0.02850009536743164, 0.029129888534545897, 0.028666784286499023, 0.028659711837768553, 0.028739551544189453, 0.028606464385986328, 0.028614688873291015, 0.028616416931152345, 0.02865974426269531, 0.02871446418762207, 0.028646175384521484, 0.028512256622314453, 0.028455936431884765, 0.028559711456298827, 0.028560031890869142, 0.030713855743408205, 0.028560735702514647, 0.028310176849365234, 0.02830335998535156, 0.02834828758239746, 0.028250240325927736, 0.028221439361572266, 0.028387327194213868, 0.028448768615722656, 0.02834668731689453, 0.028325567245483397, 0.028254079818725585, 0.028496000289916994, 0.028207103729248048, 0.02856959915161133, 0.029476127624511718, 
0.02849865531921387, 0.028464319229125977, 0.028369728088378905, 0.03296460723876953, 0.02868796730041504, 0.02857187271118164, 0.028368576049804688, 0.02827724838256836, 0.02829516792297363, 0.02829516792297363, 0.028474624633789063, 0.02848771286010742, 0.028278783798217775, 0.028773088455200196, 0.02838118362426758, 0.028317695617675782, 0.02842736053466797, 0.02873436737060547, 0.0286167049407959, 0.029022207260131837, 0.02897920036315918, 0.02920147132873535, 0.029088352203369142, 0.02934351921081543, 0.02920467185974121, 0.02914137649536133, 0.02889081573486328, 0.02876448059082031, 0.028833599090576173, 0.02863942337036133, 0.028722848892211914, 0.029094240188598634, 0.029022207260131837, 0.029134143829345704, 0.029608640670776367, 0.029034496307373047, 0.029246944427490235, 0.028813631057739257, 0.028860639572143555, 0.028778495788574218, 0.02859212875366211, 0.028415519714355467, 0.028385759353637696, 0.029047935485839844, 0.028754463195800783, 0.028599679946899412, 0.028382080078125, 0.028347551345825197, 0.028247072219848634, 0.028257600784301756, 0.02830396842956543, 0.028309471130371095, 0.028391456604003905, 0.028431936264038084, 0.028356832504272463, 0.02836092758178711, 0.0286529598236084, 0.028420703887939453, 0.02852659225463867, 0.02831974411010742, 0.028597888946533204, 0.029834112167358397, 0.029027904510498047, 0.029020095825195314, 0.02919375991821289, 0.02965193557739258, 0.031468767166137696, 0.030028575897216796, 0.029812736511230467, 0.029687456130981445, 0.030054271697998045, 0.02975382423400879, 0.029776927947998046, 0.030006240844726563, 0.029783071517944334, 0.02995043182373047, 0.029811199188232423, 0.02963039970397949, 0.02967900848388672, 0.029541023254394533, 0.02963046455383301, 0.029550592422485353, 0.02976358413696289, 0.029453920364379882, 0.029446559906005858, 0.029226144790649413, 0.02980950355529785, 0.029070911407470704, 0.029139392852783202, 0.028786687850952147, 0.02893417549133301, 0.02894339179992676, 0.02896335983276367, 0.028906944274902344, 0.02900681686401367, 0.02888256072998047, 0.028983680725097657, 0.02882476806640625, 0.028949312210083008, 0.02877568054199219, 0.02889731216430664, 0.028836576461791993, 0.0289300479888916, 0.029319168090820313, 0.029027488708496092, 0.02886716842651367, 0.029699583053588868, 0.030522111892700196, 0.02918383979797363, 0.029114528656005858, 0.029321216583251954, 0.02943180847167969, 0.029255680084228516, 0.029177631378173828, 0.030011615753173827, 0.02919625663757324, 0.029159456253051757, 0.02906096076965332, 0.028896575927734376, 0.028744543075561523, 0.028775903701782228, 0.028799423217773436, 0.028612192153930665, 0.02853673553466797, 0.02929280090332031, 0.029048831939697265, 0.02879542350769043, 0.029132799148559572, 0.028878847122192384, 0.028487680435180664, 0.02949692726135254, 0.028442047119140626, 0.028410848617553712, 0.028395423889160155, 0.028321535110473632, 0.028578079223632813, 0.028607648849487306, 0.029234079360961913, 0.028692480087280273, 0.028475040435791014, 0.02831385612487793, 0.02845257568359375, 0.028481311798095703, 0.028470943450927735, 0.028447168350219727, 0.028549631118774413, 0.028269760131835936, 0.02830828857421875, 0.028577728271484373, 0.028489791870117187, 0.028313600540161132, 0.028438528060913085, 0.02840166473388672, 0.028370943069458008, 0.028192768096923827, 0.028280960083007813, 0.028709760665893556, 0.028453535079956054, 0.028434335708618166, 0.02835091209411621, 0.028637184143066406, 0.028810688018798828, 0.028561119079589845, 0.028500415802001952, 
0.02844905662536621, 0.028668031692504883, 0.028342271804809572, 0.02827225685119629, 0.028712959289550782, 0.028762496948242188, 0.028540639877319335, 0.028377376556396484, 0.02836467170715332, 0.028272544860839844, 0.02844076728820801, 0.028405792236328126, 0.028359840393066407, 0.02845756721496582, 0.02857804870605469, 0.02835456085205078, 0.028319456100463866, 0.028344415664672853, 0.02846067237854004, 0.02865190315246582, 0.028844064712524414, 0.028870111465454103, 0.02889593505859375, 0.03002572822570801, 0.029845504760742186, 0.029560831069946288, 0.030674943923950194, 0.029752416610717772, 0.03034614372253418, 0.030155935287475587, 0.029735071182250977, 0.02954310417175293, 0.029616128921508788, 0.02975948715209961, 0.029509632110595704, 0.029714431762695313, 0.02995609664916992, 0.02918400001525879, 0.029091840744018556, 0.02899942398071289, 0.02890777587890625, 0.029052671432495118, 0.029228992462158203, 0.02899558448791504, 0.028993471145629883, 0.02875430488586426, 0.029044416427612303, 0.029024576187133787, 0.029029632568359377, 0.029026912689208983, 0.02911609649658203, 0.029105632781982423, 0.02897817611694336, 0.029083648681640626, 0.029076831817626953, 0.028875423431396485, 0.028833791732788085, 0.029009599685668946, 0.029105855941772462, 0.029160064697265627, 0.02920857620239258, 0.02902182388305664, 0.032626880645751956, 0.029483200073242188, 0.029570528030395508, 0.03084137535095215, 0.029885536193847657, 0.03079814338684082, 0.03156595230102539, 0.030149375915527344, 0.030166751861572267, 0.029891616821289064, 0.029710880279541017, 0.02962060737609863, 0.02966329574584961, 0.029710336685180663, 0.029349727630615233, 0.029336992263793944, 0.02938047981262207, 0.029502336502075194, 0.029241344451904298, 0.029390111923217773, 0.029129440307617188, 0.02913030433654785, 0.029110111236572266, 0.029166175842285157, 0.029566976547241212, 0.02931110382080078, 0.029126527786254883, 0.03221196746826172, 0.030569375991821288, 0.029421663284301756, 0.029598848342895508, 0.029195135116577148, 0.029607295989990234, 0.029358720779418944, 0.02933350372314453, 0.03061759948730469, 0.02963046455383301, 0.02974224090576172, 0.02947977638244629, 0.029831167221069335, 0.029808767318725587, 0.029765504837036133, 0.029855424880981446, 0.029720096588134765, 0.029661983489990235, 0.02963046455383301, 0.0295229434967041, 0.02949836730957031, 0.029239295959472656, 0.029249536514282228, 0.029231103897094726, 0.029375743865966798, 0.029111040115356444, 0.029707967758178713, 0.029358400344848632, 0.02928371238708496, 0.029257440567016603, 0.02933852767944336, 0.029171712875366212, 0.02951932716369629, 0.02935215950012207, 0.029140480041503908, 0.029088287353515624, 0.029221151351928713, 0.02902355194091797, 0.02893484878540039, 0.03022233581542969, 0.029242944717407227, 0.029087167739868164, 0.02917852783203125, 0.029169376373291016, 0.030253440856933593, 0.02912076759338379, 0.02954444885253906, 0.029336864471435548, 0.02968844795227051, 0.030347007751464844, 0.029686111450195313, 0.029619199752807617, 0.03000422477722168, 0.029527488708496093, 0.029329984664916993, 0.02935379219055176, 0.02931043243408203, 0.029309663772583008, 0.029241344451904298, 0.02959974479675293, 0.030079231262207032, 0.02937651252746582, 0.02980454444885254, 0.02930998420715332, 0.02921366310119629, 0.0291627197265625, 0.02928825569152832, 0.029318431854248046, 0.02906822395324707, 0.02919705581665039, 0.029097087860107423, 0.02922742462158203, 0.02929302406311035, 0.0293536319732666, 0.029485408782958984, 
0.029388799667358398, 0.029506591796875, 0.02938751983642578, 0.029274335861206056, 0.029001247406005858, 0.029182079315185547, 0.029307167053222657, 0.029292415618896485, 0.029345983505249022, 0.029814207077026367, 0.029536832809448244, 0.02967046356201172, 0.0294716796875, 0.029671424865722655, 0.029654304504394532, 0.02948579216003418, 0.029419519424438476, 0.029445280075073244, 0.029293407440185548, 0.029106176376342774, 0.029077152252197265, 0.02912086486816406, 0.029212032318115234, 0.029126399993896483, 0.029049087524414062, 0.029340288162231446, 0.028895231246948243, 0.028903423309326173, 0.02898851203918457, 0.028828128814697266, 0.02881171226501465, 0.02896691131591797, 0.028860416412353516, 0.028985343933105468, 0.02871244812011719, 0.028741952896118163, 0.02904447937011719, 0.029692352294921873, 0.028573055267333985, 0.028422176361083986, 0.028353120803833006, 0.02852889633178711, 0.02842336082458496, 0.029079776763916015, 0.0320904655456543, 0.028661760330200195, 0.0284998722076416, 0.028741024017333985, 0.02921939277648926, 0.028690080642700195, 0.02837539291381836, 0.028370336532592775, 0.028367456436157228, 0.028763839721679688, 0.02862486457824707, 0.028731103897094726, 0.029035135269165038, 0.02880512046813965, 0.02869411277770996, 0.028842048645019533, 0.028743583679199217, 0.029053375244140624, 0.029054975509643553, 0.02898294448852539, 0.029057376861572264, 0.029911039352416992, 0.02894643211364746, 0.02893414306640625, 0.028753919601440428, 0.028846080780029298, 0.028618751525878908, 0.028673919677734375, 0.02867795181274414, 0.028665695190429687, 0.02854550361633301, 0.028811264038085937, 0.02852854347229004, 0.028414047241210938, 0.02883350372314453, 0.02845017623901367, 0.028427167892456053, 0.028488895416259766, 0.028588863372802736, 0.02857164764404297, 0.02861568069458008, 0.028406368255615235, 0.028602239608764648, 0.028512800216674804, 0.03017728042602539, 0.028383232116699218, 0.028391616821289063, 0.028335264205932617, 0.02840028762817383, 0.028499488830566407, 0.028516159057617188, 0.028576416015625, 0.028604415893554686, 0.028669567108154298, 0.028641279220581056, 0.028799360275268554, 0.028767776489257813, 0.02865740776062012, 0.028692672729492188, 0.028656160354614258, 0.02871049690246582, 0.028723615646362305, 0.028628992080688476, 0.028724895477294923, 0.02898099136352539, 0.029399648666381836, 0.028675680160522462]",tokens/s,34.53601170568327,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,4415.11936,4707.975168,0.0,4305.453056,4304.491008,s,1,11.59986328125,11.59986328125,0.0,11.59986328125,11.59986328125,11.59986328125,11.59986328125,[11.59986328125],,kWh,0.0001269411279208043,1.399500317815278e-05,3.98894763559976e-05,0.00018082560745495468,,MB,4448.31744,4829.609984,0.0,4412.407808,4373.842432,s,10,1.9623368682861329,0.1962336868286133,0.0006252823265912817,0.19620695495605467,0.19687013854980467,0.19715223999023437,0.19737792114257813,"[0.19581907653808595, 0.19525216674804688, 0.19603631591796875, 0.1956294403076172, 0.19637759399414062, 0.195721923828125, 0.1968074493408203, 0.19667430114746093, 0.19743434143066407, 0.19658425903320312]",tokens/s,1304.567040131012,kWh,5.789630836110506e-06,6.382631315602204e-07,3.85367410690212e-06,1.0281568074572846e-05,tokens/kWh,24898925.74198957,MB,4458.610688,4844.290048,0.0,4429.185024,4373.844992,s,10,23.742114501953125,2.3742114501953124,0.015632395599483418,2.3700614013671872,2.3915708251953127,2.4010015991210936,2.4085462182617188,"[2.3574619140625, 2.366328369140625, 2.3652587890625, 2.3673447265625, 2.372778076171875, 2.376846923828125, 2.38947509765625, 2.410432373046875, 2.38122705078125, 2.354961181640625]",tokens/s,26.53512600776033,kWh,5.234218386930875e-05,5.771405713331326e-06,3.444805097889653e-05,9.256164056153662e-05,tokens/kWh,680627.5214851716,,s,630,23.738529830932595,0.03768020608084543,0.0005415232815325333,0.03756222343444825,0.03827065773010254,0.03846059131622315,0.03946921695709228,"[0.037668895721435544, 0.03738809585571289, 0.03710083389282227, 0.037264289855957033, 0.037571678161621096, 0.037583774566650394, 0.037575839996337894, 0.037561119079589846, 0.03716921615600586, 0.037408096313476566, 0.03709939193725586, 0.03718838500976562, 0.037166175842285154, 0.0370552978515625, 0.03710563278198242, 0.03704012680053711, 0.037391681671142575, 0.03764252853393555, 0.037517822265625, 0.03767001724243164, 0.03769232177734375, 0.03731990432739258, 0.03724889755249024, 0.03722243118286133, 0.03709632110595703, 0.03733036804199219, 0.03735017776489258, 0.03715868759155273, 0.03741491317749023, 0.037227710723876956, 0.03736406326293945, 0.03737238311767578, 0.03735327911376953, 0.03791622543334961, 0.03772227096557617, 0.03736937713623047, 0.037198143005371095, 0.03763407897949219, 0.03728854370117188, 0.03727155303955078, 0.03712988662719727, 0.03734767913818359, 0.037238784790039066, 0.03740467071533203, 0.03814118576049805, 0.03843292617797851, 0.03798483276367187, 0.037679134368896486, 0.03771187210083008, 0.03721420669555664, 0.03734732818603516, 0.037188640594482424, 0.03788899230957031, 0.03737395095825195, 0.03732400131225586, 0.03733379364013672, 0.037280895233154296, 0.03726835250854492, 0.0373205451965332, 0.037496990203857425, 0.037588672637939455, 0.03757497787475586, 0.03719168090820312, 0.037102783203125, 0.03719977569580078, 0.037122848510742185, 0.037152896881103514, 0.03725894546508789, 0.03720608139038086, 0.03722576141357422, 0.03753263854980469, 0.037335041046142575, 0.0372911376953125, 0.03717801666259766, 0.03827324676513672, 0.03902873611450195, 0.03895843124389648, 0.03764723205566406, 0.03756009674072266, 0.037443584442138675, 0.037562080383300785, 0.03782831954956055, 0.03777798461914063, 0.0373309440612793, 0.03740671920776367, 0.03734636688232422, 0.03727203369140625, 0.037412670135498045, 0.0373765754699707, 0.03728937530517578, 0.03732345581054688, 0.037566112518310546, 0.037574432373046876, 0.037552703857421876, 
0.037905567169189455, 0.03818521499633789, 0.037832416534423825, 0.037825408935546874, 0.03780352020263672, 0.03756697463989258, 0.03794739151000977, 0.03748147201538086, 0.037580833435058594, 0.03749753570556641, 0.037619998931884766, 0.03769343948364258, 0.0378238410949707, 0.03764028930664062, 0.0379991683959961, 0.03800083160400391, 0.03779155349731445, 0.037510753631591794, 0.037464481353759765, 0.03735318374633789, 0.037060897827148435, 0.0371486701965332, 0.03707699203491211, 0.0373043212890625, 0.03714771270751953, 0.037284351348876955, 0.037286334991455075, 0.037162494659423825, 0.037429759979248044, 0.03715212631225586, 0.0385968017578125, 0.03771612930297852, 0.0373752326965332, 0.03730508804321289, 0.03719887924194336, 0.037140766143798826, 0.03795024108886719, 0.037471935272216796, 0.03726972961425781, 0.03754377746582031, 0.03752758407592773, 0.037496192932128906, 0.03725183868408203, 0.037287105560302736, 0.037403457641601565, 0.03740262222290039, 0.03753945541381836, 0.03737235260009766, 0.037653728485107424, 0.037343807220458984, 0.037251232147216796, 0.03735686492919922, 0.03942854309082031, 0.03857843017578125, 0.037976062774658204, 0.04103299331665039, 0.03759990310668945, 0.037529632568359374, 0.037528705596923825, 0.03725353622436523, 0.03723929595947266, 0.037187583923339845, 0.03700297546386719, 0.03803305435180664, 0.03721484756469726, 0.03749824142456055, 0.037274238586425784, 0.03789376068115234, 0.03725683212280274, 0.0371781120300293, 0.037169151306152344, 0.037470176696777345, 0.03709955215454101, 0.03717299270629883, 0.037154560089111326, 0.037480960845947264, 0.03764223861694336, 0.037959007263183596, 0.03805446243286133, 0.03810915374755859, 0.038104705810546875, 0.03790240097045899, 0.03730681610107422, 0.03719145584106445, 0.037576927185058596, 0.03744153594970703, 0.037285888671875, 0.03729199981689453, 0.03709251022338867, 0.03722943878173828, 0.03777503967285156, 0.03719404983520508, 0.037310462951660156, 0.037086528778076173, 0.03700921630859375, 0.0372305908203125, 0.03715686416625977, 0.03706880187988281, 0.03727360153198242, 0.03706675338745117, 0.0373260498046875, 0.037370655059814455, 0.037566463470458986, 0.03762176132202148, 0.03751459121704102, 0.03710617446899414, 0.03709148788452148, 0.037238784790039066, 0.037383617401123045, 0.03701375961303711, 0.03746771240234375, 0.03849292755126953, 0.038309600830078124, 0.037756320953369144, 0.037206912994384764, 0.037337055206298826, 0.037283870697021486, 0.037185344696044925, 0.03714998245239258, 0.037364574432373045, 0.037248737335205076, 0.03715107345581055, 0.03769456100463867, 0.03942697525024414, 0.037631999969482424, 0.03739174270629883, 0.0372988166809082, 0.03768320083618164, 0.03737190246582031, 0.03756032180786133, 0.038059070587158204, 0.03855251312255859, 0.037375999450683595, 0.03730636978149414, 0.037119998931884765, 0.03712819290161133, 0.0372408332824707, 0.03718105697631836, 0.03783513641357422, 0.03765798568725586, 0.03755235290527344, 0.03784873580932617, 0.03778022384643555, 0.03765862274169922, 0.03750083160400391, 0.037717376708984375, 0.037472991943359374, 0.038174720764160154, 0.03733708953857422, 0.03800883102416992, 0.04069171142578125, 0.03749478530883789, 0.03736751937866211, 0.03741929626464844, 0.03726540756225586, 0.0374799690246582, 0.03812195205688477, 0.03763814544677734, 0.03730451202392578, 0.03769513702392578, 0.03785145568847656, 0.03734688186645508, 0.03712995147705078, 0.0373092155456543, 0.037007328033447265, 0.037179393768310545, 0.03713827133178711, 
0.03722387313842773, 0.037461761474609376, 0.037755104064941404, 0.037876480102539065, 0.03786137771606445, 0.0377262077331543, 0.038524063110351565, 0.037663585662841795, 0.037993568420410156, 0.03781315231323242, 0.03771596908569336, 0.03759308624267578, 0.037599231719970705, 0.03751891326904297, 0.03753414535522461, 0.03821491241455078, 0.0371712646484375, 0.03719852828979492, 0.037287296295166014, 0.038023807525634765, 0.038012928009033206, 0.03769110488891601, 0.037394718170166014, 0.0373021125793457, 0.037289344787597656, 0.03729843139648437, 0.03727993774414062, 0.037269855499267576, 0.037422431945800784, 0.04151772689819336, 0.038262462615966795, 0.038139678955078124, 0.038693408966064456, 0.037738494873046875, 0.03788083267211914, 0.03750300979614258, 0.037606369018554686, 0.03745526504516602, 0.03737456130981445, 0.037351425170898435, 0.03748803329467774, 0.03812393569946289, 0.03780969619750976, 0.037550750732421874, 0.03739955139160156, 0.037485118865966796, 0.03744982528686523, 0.037990558624267576, 0.037658817291259764, 0.03727974319458008, 0.03770127868652344, 0.03765871810913086, 0.03739263916015625, 0.03728793716430664, 0.038080318450927735, 0.03769683074951172, 0.03761651229858398, 0.03734511947631836, 0.03732905578613281, 0.037264511108398436, 0.038340991973876956, 0.03880553436279297, 0.03784460830688476, 0.03831280136108398, 0.03790028762817383, 0.03806745529174805, 0.03790310287475586, 0.03765212631225586, 0.03779183959960938, 0.03780019378662109, 0.03764223861694336, 0.037550048828125, 0.03735145568847656, 0.037449726104736326, 0.03737939071655273, 0.03787027359008789, 0.037754878997802735, 0.03759475326538086, 0.0375579833984375, 0.03741513442993164, 0.037781856536865235, 0.03802735900878906, 0.038088062286376956, 0.03784723281860351, 0.03877523040771484, 0.037459968566894535, 0.03740444946289063, 0.03725033569335937, 0.03733395385742187, 0.03748044967651367, 0.038055519104003906, 0.03794371032714844, 0.037475616455078124, 0.037933216094970704, 0.03746227264404297, 0.037845119476318356, 0.037535934448242186, 0.03748454284667969, 0.03727558517456055, 0.037332862854003904, 0.03717510223388672, 0.0373414077758789, 0.037230430603027345, 0.03725344085693359, 0.037171070098876956, 0.037120128631591795, 0.03705219268798828, 0.03707107162475586, 0.038449153900146485, 0.0382393913269043, 0.0383455696105957, 0.03825033569335937, 0.03818102264404297, 0.03817814254760742, 0.03801971054077148, 0.0382259521484375, 0.03786924743652344, 0.03875241470336914, 0.03793612670898437, 0.03762073516845703, 0.03777503967285156, 0.037833023071289065, 0.03757056045532227, 0.03762176132202148, 0.03778355026245117, 0.0375623664855957, 0.0375623664855957, 0.03716486358642578, 0.03789433670043945, 0.03723263931274414, 0.03741491317749023, 0.03743292617797851, 0.03791484832763672, 0.03730758285522461, 0.037311038970947265, 0.03727001571655274, 0.03717523193359375, 0.037518497467041015, 0.037673824310302736, 0.03776921463012695, 0.03798015975952149, 0.038438079833984375, 0.037833057403564456, 0.03772454452514649, 0.03830147171020508, 0.03811564636230469, 0.037894142150878905, 0.03782783889770508, 0.03787238311767578, 0.03783462524414063, 0.03782179260253906, 0.03792099380493164, 0.03786924743652344, 0.03836812973022461, 0.03811891174316406, 0.03903539276123047, 0.03821769714355469, 0.038147457122802736, 0.03828803253173828, 0.0381214714050293, 0.03810508728027344, 0.03797398376464844, 0.038115264892578125, 0.03804694366455078, 0.038030113220214844, 0.03797615814208984, 0.03793484878540039, 
0.03822780990600586, 0.038405601501464846, 0.038042110443115236, 0.038392257690429685, 0.038166526794433595, 0.03789209747314453, 0.038069793701171875, 0.03800931167602539, 0.037928958892822266, 0.038029312133789066, 0.03783065414428711, 0.03790195083618164, 0.03905779266357422, 0.038370689392089846, 0.03795846557617188, 0.03781017684936523, 0.03784089660644531, 0.038035457611083984, 0.037795841217041014, 0.037804031372070314, 0.038067649841308594, 0.03818143844604492, 0.03910854339599609, 0.03828521728515625, 0.03845955276489258, 0.03829759979248047, 0.03829721450805664, 0.037987968444824216, 0.037945953369140625, 0.0381684799194336, 0.03808691024780273, 0.037950782775878905, 0.03856428909301758, 0.040186111450195315, 0.038254177093505856, 0.03793552017211914, 0.03843475341796875, 0.037933120727539064, 0.03830374526977539, 0.03832627105712891, 0.03818086242675781, 0.03828736114501953, 0.03794524765014649, 0.03812361526489258, 0.03846960067749024, 0.037885982513427736, 0.03794716644287109, 0.03785363388061523, 0.038313758850097655, 0.03791782379150391, 0.03785408020019531, 0.0394769287109375, 0.03824399948120117, 0.03797663879394531, 0.03824371337890625, 0.038083038330078124, 0.03849763107299805, 0.03828822326660156, 0.03886905670166016, 0.03994214248657227, 0.03827097702026367, 0.038211166381835936, 0.038007198333740236, 0.038076416015625, 0.03945033645629883, 0.038959327697753905, 0.03790848159790039, 0.03796793746948242, 0.03802521514892578, 0.038043647766113284, 0.0381399040222168, 0.03794944000244141, 0.03808051300048828, 0.03804569625854492, 0.038115169525146486, 0.03793241500854492, 0.03827062225341797, 0.037660385131835936, 0.03794496154785156, 0.03802214431762695, 0.038338817596435544, 0.03784627151489258, 0.037902847290039066, 0.03787366485595703, 0.038215679168701173, 0.0380148811340332, 0.03792822265625, 0.037788478851318356, 0.04134707260131836, 0.038204639434814454, 0.038066654205322265, 0.03829996871948242, 0.03799039840698242, 0.03786675262451172, 0.03818368148803711, 0.03801839828491211, 0.037925537109375, 0.0379554557800293, 0.03809830474853516, 0.038443775177001954, 0.03802048110961914, 0.037972511291503905, 0.03780172729492187, 0.0375893440246582, 0.038670337677001954, 0.03830579376220703, 0.038029312133789066, 0.03748585510253906, 0.037316673278808596, 0.03726150512695312, 0.037308895111083984, 0.03846144104003906, 0.037807487487792966, 0.037383872985839846, 0.03833478546142578, 0.0373438720703125, 0.03738009643554688, 0.03728179168701172, 0.03716825485229492, 0.03724582290649414, 0.03728793716430664, 0.03728793716430664, 0.03766995239257812, 0.03729913711547852, 0.03725107192993164, 0.03782595062255859, 0.03814640045166016, 0.037609729766845706, 0.03734092712402344, 0.03728819274902344, 0.03726921463012695, 0.03718313598632812, 0.03727833557128906, 0.03719168090820312, 0.037289440155029295, 0.0372270393371582, 0.037154815673828126, 0.03742886352539063, 0.03753398513793945, 0.03782489776611328, 0.03734662246704101, 0.0373480339050293, 0.037168704986572265, 0.03773276901245117, 0.037101505279541015, 0.037255264282226565, 0.0370700798034668, 0.03774540710449219, 0.037548030853271484, 0.037418495178222655, 0.037160606384277345, 0.037286590576171875, 0.037488414764404294, 0.03724940872192383, 0.03807401657104492, 0.037235038757324215, 0.03719372940063476, 0.037125473022460935, 0.037339809417724606, 0.037246849060058596, 0.038430110931396484, 0.037354209899902346, 0.03722403335571289, 0.03708335876464844, 0.03716524887084961, 0.03701756668090821, 0.037084991455078126, 
0.037079166412353516, 0.03690630340576172, 0.03705311965942383, 0.03751689529418945, 0.03715532684326172, 0.03726540756225586, 0.03726505661010742, 0.037779808044433594, 0.03762128067016601, 0.03736950302124024, 0.03808729553222656, 0.03720534515380859, 0.037235553741455076, 0.03728998565673828, 0.03776102447509765, 0.03752345657348633, 0.0373306884765625, 0.03778995132446289, 0.03876233673095703, 0.03725881576538086, 0.03719638442993164, 0.03716700744628906, 0.03715081787109375, 0.03714591979980469, 0.038109886169433595, 0.03714252853393555, 0.03712790298461914, 0.03710800170898437, 0.03711795043945312, 0.037150527954101564, 0.0374189453125, 0.03727385711669922, 0.03728521728515625, 0.0370978889465332, 0.03761321640014648]",tokens/s,26.539132982830093,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ 
self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in 
load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise 
ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) 
ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", 
line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,4384.014336,4566.482944,0.0,4188.012544,4187.049984,s,1,10.3226669921875,10.3226669921875,0.0,10.3226669921875,10.3226669921875,10.3226669921875,10.3226669921875,[10.3226669921875],,kWh,9.785378997917177e-05,1.0785661533696069e-05,3.2396137028004035e-05,0.00014103558854087187,,MB,4337.463296,4962.844672,0.0,4555.014144,4514.269184,s,10,7.85943878173828,0.7859438781738282,0.003248193699247347,0.7875285034179688,0.7886953369140625,0.7889753234863282,0.7891993127441406,"[0.78, 0.7875231323242188, 0.7826694946289062, 0.7807692260742187, 0.7872844848632813, 0.7876341552734375, 0.788135986328125, 0.7892553100585937, 0.7875338745117187, 0.7886331176757813]",tokens/s,325.72300276048486,kWh,2.281208450769251e-05,2.5157658233373223e-06,1.5158153152154578e-05,4.048600348318441e-05,tokens/kWh,6323172.898661828,MB,4346.912768,4979.621888,0.0,4571.79136,4514.271744,s,10,466.6024882812499,46.660248828125,0.011068309855742515,46.662541015625,46.666687499999995,46.672259765625,46.676717578125,"[46.632265625, 46.65224609375, 46.66014453125, 46.6612578125, 46.6629609375, 46.663859375, 46.66212109375, 46.6643515625, 46.66544921875, 46.67783203125]",tokens/s,1.3501856844369426,kWh,0.0013613940065339734,0.000150172057835159,0.0009055239445040442,0.0024170900088731767,tokens/kWh,26064.39965773967,,s,630,466.5971835937501,0.7406304501488096,0.0003842440844463258,0.7406415100097656,0.741086279296875,0.7412395568847657,0.7414922277832031,"[0.7400195922851562, 0.7400505981445312, 0.7398154907226563, 0.739328857421875, 0.7396557006835938, 0.7402882080078125, 0.739842529296875, 0.7398804931640625, 0.7397766723632813, 0.7399896240234375, 0.739989501953125, 0.7401123046875, 0.7397409057617188, 0.7400776977539063, 0.7401336059570313, 0.7396638793945313, 0.7399824829101562, 0.7398463745117188, 0.740166259765625, 0.7398889770507813, 0.7399959716796874, 0.740632080078125, 0.7399937133789063, 0.740153564453125, 0.740021728515625, 0.7406412963867187, 0.7402557373046875, 0.7396383056640625, 0.7405346069335937, 0.74044873046875, 0.7398190307617187, 0.7404366455078125, 0.7408128051757813, 0.7404246826171875, 0.740150146484375, 0.7401944580078125, 0.7401615600585938, 0.7403519897460937, 0.739842041015625, 0.740030029296875, 0.7403851928710937, 0.7400409545898438, 0.7400709228515625, 0.740415771484375, 0.74038623046875, 0.7406367797851563, 0.7403157958984375, 0.7405259399414063, 0.7403190307617188, 0.7402066650390625, 0.7406757202148437, 0.74048681640625, 0.7396448974609375, 0.740488037109375, 0.7407861938476562, 0.7405541381835937, 0.739809814453125, 0.740874267578125, 0.740378662109375, 0.7405140380859375, 0.7401420288085937, 0.74058544921875, 0.740694580078125, 0.7398834228515625, 0.740550048828125, 0.7404017944335938, 0.740384765625, 0.7400115966796875, 0.7401517333984375, 0.7411056518554687, 0.7402147827148438, 0.7400131225585938, 0.7413422241210937, 0.7399955444335937, 0.7396536254882813, 0.7408514404296875, 0.7402305908203125, 0.740455078125, 0.7398401489257812, 0.7406387329101562, 0.7405711059570312, 0.7406399536132813, 0.7395582275390625, 0.74058544921875, 0.7408004760742187, 0.7407980346679688, 0.739641845703125, 0.7406991577148437, 0.7408834228515625, 0.7402168579101562, 0.739751220703125, 0.7406128540039062, 0.740921142578125, 0.7402518310546875, 0.74022705078125, 0.7406018676757813, 0.7409004516601563, 0.7403721313476562, 0.740874755859375, 0.7404523315429687, 0.7409985961914063, 0.7404183349609375, 0.7406793823242187, 0.7401761474609375, 
0.7410494384765625, 0.7407820434570312, 0.7404300537109375, 0.740877197265625, 0.7407839965820312, 0.7402352905273437, 0.7404441528320312, 0.7406465454101563, 0.7406863403320313, 0.740124755859375, 0.7403067016601562, 0.7408099365234375, 0.7403179931640625, 0.7408599243164062, 0.7406141357421875, 0.7409703369140626, 0.740460693359375, 0.7409868774414062, 0.7406669311523437, 0.7407661743164062, 0.7410028076171875, 0.74051953125, 0.740729248046875, 0.7405772705078125, 0.7405260620117188, 0.740441162109375, 0.7405618286132812, 0.7406755981445312, 0.739999755859375, 0.7403212890625, 0.7403572998046875, 0.7405899658203124, 0.739885498046875, 0.7405383911132812, 0.74066943359375, 0.7408038940429688, 0.740121337890625, 0.7403797607421875, 0.7404747314453125, 0.7406744995117187, 0.7401647338867188, 0.7403672485351562, 0.7405545654296875, 0.7406163940429688, 0.740849853515625, 0.74037841796875, 0.7402281494140625, 0.7402581176757812, 0.7403209838867187, 0.7402026977539062, 0.74078076171875, 0.7406192626953125, 0.7403645629882812, 0.74097021484375, 0.740475341796875, 0.7406515502929687, 0.7405623779296875, 0.7410548095703124, 0.740423828125, 0.7406408081054687, 0.7409862060546875, 0.7403770141601562, 0.7402435302734375, 0.7409392700195313, 0.7409180297851562, 0.7407205810546875, 0.7408023071289063, 0.7407756958007813, 0.7408173217773437, 0.7409131469726562, 0.7404827880859375, 0.7408663330078125, 0.7405254516601563, 0.7406817016601562, 0.7409219970703125, 0.7408558349609375, 0.7406814575195313, 0.7406544799804687, 0.7410221557617187, 0.7406570434570312, 0.7411595458984375, 0.7401569213867187, 0.74084814453125, 0.74292431640625, 0.7409152221679688, 0.741015625, 0.7407022094726563, 0.7403618774414062, 0.73979931640625, 0.7403438110351562, 0.7413043212890625, 0.740691650390625, 0.7397128295898437, 0.7405382690429687, 0.7412589721679688, 0.7402750244140625, 0.7407606201171875, 0.7405452270507813, 0.7407579956054687, 0.7404172973632812, 0.7403253784179687, 0.7409540405273437, 0.7409331665039063, 0.7402005004882812, 0.7401491088867187, 0.7404910888671875, 0.7404653930664062, 0.7410072631835938, 0.7403982543945312, 0.7402630615234375, 0.7406876831054687, 0.7406591186523438, 0.7409129028320313, 0.740784423828125, 0.7405977783203125, 0.7408927001953125, 0.740636474609375, 0.7404173583984375, 0.7406964721679687, 0.7409541015625, 0.7407647705078125, 0.7403014526367188, 0.7405775146484375, 0.7406959838867188, 0.7411856079101562, 0.7405547485351562, 0.7409561767578124, 0.74051953125, 0.7405894775390625, 0.7410322265625, 0.7404525146484375, 0.740384033203125, 0.74058154296875, 0.7407244262695313, 0.7407064208984375, 0.7408749389648438, 0.7411825561523437, 0.7406376342773437, 0.7403233032226563, 0.7412362670898438, 0.7411880493164062, 0.7403493041992187, 0.7403728637695313, 0.7409031982421875, 0.7412422485351563, 0.7403444213867187, 0.74039501953125, 0.7407388916015625, 0.7407108764648438, 0.7405159301757812, 0.7409315795898438, 0.7405479125976563, 0.7402473754882812, 0.7405596923828125, 0.7411990966796875, 0.7401375122070313, 0.74032568359375, 0.7403480834960937, 0.7403026733398438, 0.7407575073242187, 0.7404391479492187, 0.7401747436523437, 0.7403162841796875, 0.7408889770507813, 0.7405757446289063, 0.7406051635742188, 0.7404649658203125, 0.7399645385742187, 0.7404735107421875, 0.7410853271484374, 0.740697509765625, 0.740595458984375, 0.740115234375, 0.7407564697265625, 0.7409378051757812, 0.7409120483398437, 0.7404903564453125, 0.740961181640625, 0.7407882080078125, 0.740537353515625, 0.7407932739257812, 
0.7409603271484375, 0.7407615966796876, 0.7405029296875, 0.7403587646484375, 0.7408844604492187, 0.7408968505859375, 0.7407646484375, 0.7403816528320313, 0.7410360107421875, 0.740190185546875, 0.741265380859375, 0.7407677001953125, 0.7410004272460937, 0.74047119140625, 0.7410343017578125, 0.7408720703125, 0.7408909301757812, 0.7405621948242187, 0.7408171997070313, 0.740706787109375, 0.7409868774414062, 0.7408309936523437, 0.7405748291015625, 0.7406619262695312, 0.7407206420898438, 0.7407114868164062, 0.7407252197265625, 0.7413704833984375, 0.7406524047851563, 0.7408787841796876, 0.7406328125, 0.7405247802734375, 0.7409304809570313, 0.7407001342773437, 0.7402608032226563, 0.7400745239257812, 0.7405209350585937, 0.7406243896484375, 0.74045849609375, 0.7405711059570312, 0.7408148193359375, 0.7404664916992187, 0.7404846801757813, 0.7408441772460937, 0.7406868896484375, 0.7407542724609375, 0.7403439331054688, 0.7405230712890625, 0.740754150390625, 0.74096630859375, 0.740278564453125, 0.7405875244140625, 0.740833251953125, 0.7408721923828125, 0.7404459228515625, 0.7403925170898438, 0.74037841796875, 0.7408239135742187, 0.7405977783203125, 0.7402426147460938, 0.7409152221679688, 0.7409365234375, 0.7402453002929688, 0.7410056762695313, 0.7410706787109375, 0.74068994140625, 0.740447509765625, 0.7406742553710938, 0.7407472534179688, 0.74175927734375, 0.7406363525390625, 0.7406817626953125, 0.74133642578125, 0.7406576538085937, 0.7406693725585938, 0.7409738159179687, 0.7410184936523437, 0.7404336547851562, 0.7407271728515625, 0.7404195556640625, 0.7414859008789062, 0.7402501831054688, 0.7405077514648437, 0.7404359741210937, 0.741105224609375, 0.7406533203125, 0.7404771118164063, 0.74070654296875, 0.7406445922851562, 0.7408680419921875, 0.7405767822265625, 0.7410755004882813, 0.74109130859375, 0.7405181274414062, 0.7403685913085938, 0.7405951538085938, 0.7404239501953125, 0.7404251098632812, 0.7405381469726563, 0.740688232421875, 0.7403665161132813, 0.7406198120117188, 0.740737548828125, 0.7408297119140625, 0.7404478759765625, 0.7403399047851562, 0.7411488037109375, 0.7406000366210937, 0.740068115234375, 0.7401787719726562, 0.7406900024414063, 0.7399046020507812, 0.7412335815429687, 0.740421630859375, 0.7405916137695312, 0.7405240478515625, 0.7406632690429688, 0.7404830932617188, 0.7408414916992188, 0.7406366577148438, 0.7406956176757813, 0.7402152099609375, 0.7408025512695312, 0.741148681640625, 0.7409144897460938, 0.7405612182617187, 0.74076318359375, 0.7404671630859375, 0.7405343017578125, 0.7407120361328124, 0.7408722534179687, 0.7407449951171875, 0.7402197875976563, 0.7407444458007812, 0.7409302978515625, 0.7409007568359375, 0.74053857421875, 0.741087158203125, 0.7408059692382812, 0.7411715087890625, 0.74035205078125, 0.7410333251953125, 0.7403152465820313, 0.740950927734375, 0.7404436645507813, 0.7409464111328125, 0.7409313354492187, 0.7408642578125, 0.7404564208984376, 0.7408292236328125, 0.7408162841796875, 0.7409790649414062, 0.7411129760742188, 0.7406704711914063, 0.7402434692382812, 0.7409337768554688, 0.7404783935546875, 0.7400045166015625, 0.740599609375, 0.7409668579101563, 0.7405609130859375, 0.74033740234375, 0.7406492309570313, 0.7410005493164062, 0.7402352294921875, 0.7402005004882812, 0.7404649047851563, 0.7413662109375, 0.74050537109375, 0.7403521728515625, 0.7408446655273437, 0.7406520385742188, 0.7407183227539063, 0.7406736450195313, 0.740123779296875, 0.74053515625, 0.7404544067382812, 0.7410208129882813, 0.7403468017578125, 0.7404906005859375, 0.7409271850585938, 
0.7403507080078126, 0.7404238891601562, 0.7406466064453125, 0.7406123657226562, 0.7408271484375, 0.7403028564453125, 0.7402880249023438, 0.7413002319335937, 0.740756103515625, 0.7403969116210938, 0.7409766235351563, 0.741285888671875, 0.7410333862304688, 0.740450927734375, 0.74140283203125, 0.7407388305664062, 0.7408353271484375, 0.7404111328125, 0.7407590942382812, 0.7405862426757812, 0.7411541137695312, 0.7404346313476563, 0.7409149780273437, 0.7412155151367188, 0.7401030883789063, 0.7410667724609376, 0.7412072143554688, 0.7405166625976562, 0.74062353515625, 0.741194580078125, 0.7408453979492188, 0.7408536987304688, 0.7405017700195312, 0.74096630859375, 0.7404544067382812, 0.741702880859375, 0.7403590087890625, 0.7408206787109375, 0.7402210083007813, 0.7412155151367188, 0.74056689453125, 0.7406417236328126, 0.7403038940429687, 0.7407339477539062, 0.7400856323242188, 0.7402881469726562, 0.7404854736328125, 0.7403048706054688, 0.7405787353515625, 0.740346435546875, 0.7411138305664062, 0.7401697387695313, 0.7402978515625, 0.7403488159179688, 0.740921142578125, 0.7409185180664063, 0.7402034912109375, 0.7403060302734376, 0.7403733520507813, 0.7407513427734375, 0.7413466796875, 0.7407335815429688, 0.740063232421875, 0.7410543212890625, 0.7406931762695312, 0.741095458984375, 0.7402077026367188, 0.7405111694335937, 0.740576904296875, 0.7409139404296875, 0.74109130859375, 0.7403963012695313, 0.7408911743164063, 0.7407228393554688, 0.740656494140625, 0.7409578857421875, 0.7406044311523438, 0.7414297485351562, 0.741580810546875, 0.740294677734375, 0.7404707641601562, 0.7409067993164062, 0.7414436645507813, 0.740173828125, 0.740737060546875, 0.7412010498046875, 0.7408501586914062, 0.7404363403320312, 0.7409541015625, 0.7410216674804687, 0.7404930419921875, 0.74058984375, 0.7407490844726563, 0.7411278686523437, 0.740725341796875, 0.7408616333007813, 0.7412691650390625, 0.7407438354492187, 0.7408267211914062, 0.741392822265625, 0.740957275390625, 0.7410277709960937, 0.7412838745117187, 0.7407637329101563, 0.7409290771484375, 0.7401904907226563, 0.7408988037109375, 0.740469970703125, 0.7405146484375, 0.7407565307617188, 0.74098583984375, 0.7407430419921875, 0.7400154418945313, 0.7405875854492188, 0.7409999389648437, 0.7410193481445313, 0.7408694458007813, 0.7402786865234375, 0.740968505859375, 0.7406876220703125, 0.7410165405273438, 0.7406650390625, 0.7407493896484375, 0.7404891967773437, 0.7407401733398438, 0.7408728637695312, 0.740468994140625, 0.7417009887695313, 0.7409425048828125, 0.7408291625976563, 0.740797607421875, 0.7413209228515625, 0.740708984375, 0.7406807861328125, 0.7412581787109375, 0.7409722900390625, 0.7409625244140625, 0.7412992553710938, 0.7411414184570313, 0.7412796630859375, 0.7408763427734375, 0.7417642211914063, 0.74086083984375, 0.7408189697265625, 0.7406930541992187, 0.7413052978515625, 0.7410216674804687, 0.7411827392578125, 0.741086181640625, 0.7409776611328125, 0.7408381958007813, 0.7414948120117187, 0.7412696533203125, 0.740348876953125, 0.740801025390625, 0.7411981811523437, 0.7411402587890625, 0.7410465087890625, 0.740831298828125, 0.7412284545898438, 0.7412449951171876, 0.740482177734375, 0.7411937255859375, 0.7407173461914063]",tokens/s,1.350201034536289,, 
4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise 
ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1011, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 777, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 418, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 342, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 1015, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 840, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 467, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 397, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 252, in _flash_attention_forward attn_output_unpad = flash_attn_varlen_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 1124, in flash_attn_varlen_func return FlashAttnVarlenFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 620, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_varlen_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 90, in _flash_attn_varlen_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.varlen_fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, 
worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' 
object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in 
_wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 2.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 2.12 MiB is free. Process 60000 has 14.74 GiB memory in use. Of the allocated memory 14.26 GiB is allocated by PyTorch, and 386.46 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( 
File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in 
launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained 
self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in 
from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 98.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 96.12 MiB is free. Process 153641 has 14.64 GiB memory in use. Of the allocated memory 14.24 GiB is allocated by PyTorch, and 312.03 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, 
in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1284, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1079, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 801, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 563, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, 
prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", 
line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 16.12 MiB is free. Process 23072 has 14.72 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 187.67 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", 
line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in 
launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1556, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmphzxp6bgi/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in 
benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track 
self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = 
Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File 
""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise 
ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] 
File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1001, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 734, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 556, in forward 
attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 1033, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 808, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 549, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 382, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, 
prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1011, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 777, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 418, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 342, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1011, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 777, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 418, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 342, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File 
""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, 
q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = 
decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File 
""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, 
prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1011, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 777, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 418, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 342, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3846, in from_pretrained hf_quantizer.preprocess_model( File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 182, in preprocess_model return self._process_model_before_weight_loading(model, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_gptq.py"", line 76, in _process_model_before_weight_loading model = self.optimum_quantizer.convert_model(model) File ""/usr/local/lib/python3.10/dist-packages/optimum/gptq/quantizer.py"", line 218, in convert_model self.block_name_to_quantize = get_block_name_with_pattern(model) File ""/usr/local/lib/python3.10/dist-packages/optimum/gptq/utils.py"", line 77, in get_block_name_with_pattern raise ValueError(""Block pattern could not be match. Pass `block_name_to_quantize` argument in `quantize_model`"") ValueError: Block pattern could not be match. 
Pass `block_name_to_quantize` argument in `quantize_model` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1011, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 777, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", 
line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 418, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 342, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in 
run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", 
line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1556, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpto96a_aa/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", 
line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File 
""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,4385.583104,4566.482944,0.0,4188.012544,4187.049984,s,1,10.2750205078125,10.2750205078125,0.0,10.2750205078125,10.2750205078125,10.2750205078125,10.2750205078125,[10.2750205078125],,kWh,9.746523692499522e-05,1.0743721955271504e-05,3.126280278799992e-05,0.00013947176166826666,,MB,4391.743488,4962.844672,0.0,4555.014144,4514.269184,s,10,7.850749145507812,0.7850749145507813,0.0029818719380852246,0.7838614196777344,0.789292431640625,0.7898505920410157,0.7902971203613282,"[0.7818594360351563, 0.7877095947265625, 0.7823174438476562, 0.7825433959960938, 0.7844056396484375, 0.7904087524414063, 0.7833171997070313, 0.7865393676757813, 0.7824799194335937, 0.7891683959960938]",tokens/s,326.08353069908344,kWh,2.281054614999978e-05,2.5155927895983764e-06,1.517390102799981e-05,4.0500039967597965e-05,tokens/kWh,6320981.4164334815,MB,4399.607808,4979.621888,0.0,4571.79136,4514.271744,s,10,466.8263046875,46.68263046875,0.015457591777333574,46.686087890625004,46.693671093750005,46.69438828125,46.69496203125,"[46.64066015625, 46.670609375, 46.685453125, 46.69510546875, 46.69034375, 46.69351171875, 46.68563671875, 46.69313671875, 46.6865390625, 46.68530859375]",tokens/s,1.3495383479337797,kWh,0.0013616506224445842,0.00015019915050282654,0.0009056978078910003,0.0024175475808384107,tokens/kWh,26059.466419333705,,s,630,466.81600244140645,0.7409777816530257,0.0005184305986670322,0.7410007019042969,0.7414539123535155,0.7416112060546874,0.7419606964111328,"[0.7400110473632813, 0.7402894897460938, 0.7400440063476562, 0.7403200073242188, 0.7396430053710937, 0.7401265869140625, 0.7395269165039062, 0.7397890625, 0.7398823852539063, 0.73982421875, 0.740220947265625, 0.7401492309570312, 0.7397346801757813, 0.7396583862304688, 0.7403153076171874, 0.7400753784179688, 0.7398667602539063, 0.7393792114257812, 0.7404309692382812, 0.74035400390625, 0.7401881103515625, 0.7396765747070313, 0.7399342041015625, 0.7405410766601562, 0.7398911743164063, 0.7407472534179688, 0.7407882080078125, 0.7400386352539062, 0.73987890625, 0.7409293212890625, 0.7406143798828125, 0.7407343139648438, 0.7397560424804688, 0.7399384155273437, 0.740581298828125, 0.7402973022460938, 0.740220947265625, 0.740360107421875, 0.7411057739257813, 0.740449462890625, 0.7406700439453126, 
0.7400977172851563, 0.7408890991210938, 0.740763671875, 0.7403888549804688, 0.7405721435546875, 0.7406007080078125, 0.7408599853515625, 0.740433349609375, 0.7407367553710937, 0.7409295654296875, 0.7406866455078125, 0.7404031982421875, 0.7403448486328125, 0.7409674072265625, 0.7407156982421875, 0.7408065795898438, 0.740592529296875, 0.7401984252929688, 0.7403945922851562, 0.7403984985351563, 0.741126953125, 0.7402785034179687, 0.7403516235351563, 0.7404179077148437, 0.7406781005859375, 0.7401555786132813, 0.7410413208007812, 0.74025439453125, 0.7404482421875, 0.7400786743164063, 0.74094482421875, 0.7405525512695312, 0.7404586791992187, 0.7402434692382812, 0.7404423217773437, 0.74058935546875, 0.740398681640625, 0.7412412719726562, 0.7406448364257813, 0.7404994506835938, 0.7403804931640625, 0.7406102294921875, 0.7411128540039063, 0.7417538452148438, 0.7404564819335937, 0.7408516845703125, 0.7410953979492187, 0.740627685546875, 0.7406206665039062, 0.740828857421875, 0.741005859375, 0.7405958862304688, 0.7401922607421875, 0.7412469482421875, 0.74111181640625, 0.7408843994140625, 0.7404359741210937, 0.7411138916015625, 0.7408823852539063, 0.7407124633789063, 0.74094384765625, 0.7413555297851563, 0.7404906616210938, 0.7410133666992188, 0.7409793701171875, 0.740929443359375, 0.7408640747070312, 0.7405813598632812, 0.7402782592773437, 0.7413414916992187, 0.7409857788085937, 0.7406785888671875, 0.7406057739257812, 0.7411691284179688, 0.7411315307617188, 0.74105859375, 0.7411226806640625, 0.7413324584960937, 0.7406558227539063, 0.740982666015625, 0.74103369140625, 0.7415252685546875, 0.7407310180664063, 0.7412965698242188, 0.7410989990234375, 0.740869873046875, 0.740468994140625, 0.7411036376953125, 0.740706298828125, 0.7408719482421875, 0.740638916015625, 0.7408571166992187, 0.7408074340820312, 0.7406796875, 0.7405250854492188, 0.7408786010742188, 0.741176025390625, 0.740675048828125, 0.7408501586914062, 0.74096435546875, 0.741158935546875, 0.741158935546875, 0.7413923950195312, 0.740759521484375, 0.7410171508789063, 0.7406097412109375, 0.7411217651367188, 0.7407523803710937, 0.7410889892578125, 0.7411448974609375, 0.740943359375, 0.7407252197265625, 0.7412610473632812, 0.7411159057617187, 0.7411837768554688, 0.7409470825195312, 0.7410205688476562, 0.7412899780273438, 0.740505615234375, 0.7411202392578125, 0.7413648071289063, 0.7410611572265625, 0.7406178588867187, 0.7403484497070313, 0.7415132446289062, 0.7412777099609374, 0.7411558837890625, 0.7409234619140626, 0.7417967529296875, 0.7408416748046875, 0.7407122802734375, 0.7415451049804688, 0.74143994140625, 0.7403668823242188, 0.7410130004882812, 0.7412227783203125, 0.7412770385742188, 0.7410735473632812, 0.7407284545898437, 0.7417527465820313, 0.7415567626953125, 0.7406766967773437, 0.7416492309570313, 0.7416729736328125, 0.7409459228515625, 0.7413637084960938, 0.7415271606445313, 0.7411019287109375, 0.7406522827148437, 0.7408670654296875, 0.7413903198242188, 0.7409971313476562, 0.7406673583984374, 0.7407609252929688, 0.7414934692382813, 0.7408578491210938, 0.7410231323242188, 0.7411513671875, 0.7412428588867187, 0.74096435546875, 0.7406705322265625, 0.7410676879882813, 0.7414188232421876, 0.741134521484375, 0.74067333984375, 0.740796630859375, 0.7407656860351562, 0.7411671142578125, 0.7410687255859375, 0.740404541015625, 0.7409191284179687, 0.7406130981445312, 0.7412982788085938, 0.7409271240234375, 0.7409646606445313, 0.7411522827148438, 0.7412577514648437, 0.7407513427734375, 0.7421317138671875, 0.7406898193359375, 0.741083251953125, 
0.741148681640625, 0.7414967041015625, 0.741117919921875, 0.741251220703125, 0.7408323364257813, 0.7416058349609375, 0.7406943969726563, 0.7411171264648437, 0.7410963134765625, 0.74192236328125, 0.74168505859375, 0.7406979370117187, 0.7414682006835938, 0.7410567016601562, 0.7410836181640625, 0.7407222290039063, 0.7419583129882813, 0.7410271606445312, 0.7411206665039063, 0.74096435546875, 0.7410503540039063, 0.7414824829101563, 0.7414549560546875, 0.7413988037109375, 0.7414192504882813, 0.741542236328125, 0.7408271484375, 0.7461724243164063, 0.7407713623046875, 0.741369873046875, 0.741060791015625, 0.7408067016601563, 0.741339599609375, 0.7412830810546875, 0.7406840209960938, 0.740358642578125, 0.7411712036132813, 0.741201904296875, 0.7406981201171875, 0.7412203369140625, 0.7410372924804688, 0.7412518920898438, 0.74081689453125, 0.74126953125, 0.740822021484375, 0.7409920043945313, 0.7408836669921876, 0.741301025390625, 0.7412136840820313, 0.7412188110351563, 0.7408919067382812, 0.7412188720703125, 0.7414063720703125, 0.7408522338867187, 0.741738525390625, 0.7410501098632812, 0.7414287719726562, 0.7408514404296875, 0.7411904907226563, 0.7410075073242187, 0.7409111328125, 0.7416375732421875, 0.741001708984375, 0.7411466674804688, 0.7413488159179688, 0.7409137573242187, 0.7413770751953125, 0.7412225952148438, 0.7407398071289063, 0.7414620361328125, 0.7413800659179688, 0.7403970336914063, 0.7408448486328125, 0.7415630493164063, 0.7409124755859375, 0.7413480224609375, 0.7406469116210938, 0.741533203125, 0.7410672607421875, 0.7411220703125, 0.7410269165039063, 0.7413400268554687, 0.7415316772460937, 0.740600830078125, 0.74132373046875, 0.7415449829101562, 0.7412333984375, 0.740911376953125, 0.7413792114257812, 0.7409547119140625, 0.7410911254882813, 0.7410734252929687, 0.741003173828125, 0.74100146484375, 0.7405606689453125, 0.7409389038085937, 0.7408321533203125, 0.741001220703125, 0.7408599243164062, 0.7409613647460938, 0.7415075073242188, 0.7406945190429688, 0.7410227661132812, 0.7412909545898437, 0.7417645263671875, 0.7411779174804688, 0.7408681030273437, 0.7413229370117187, 0.7407265625, 0.7409848022460938, 0.7410198364257813, 0.7413305053710938, 0.7408888549804687, 0.7409436645507812, 0.7412243041992187, 0.741204345703125, 0.7410892944335937, 0.7410191650390625, 0.7411611938476562, 0.74098095703125, 0.7409533081054688, 0.7411494750976563, 0.7408394165039063, 0.7410585327148438, 0.7416481323242188, 0.7412449951171876, 0.740628662109375, 0.74061962890625, 0.7409568481445312, 0.7417200927734375, 0.7409229736328125, 0.7412781372070313, 0.7406406860351562, 0.7419451293945313, 0.7410582275390625, 0.7408953247070312, 0.7413330078125, 0.7406510009765624, 0.7417876586914063, 0.7407821044921875, 0.741222412109375, 0.7412777099609374, 0.7410585327148438, 0.741105712890625, 0.7408578491210938, 0.740893798828125, 0.7417701416015625, 0.7412296752929688, 0.740856689453125, 0.7409622802734375, 0.74039501953125, 0.741365234375, 0.7414215698242187, 0.7406849365234375, 0.7411270141601562, 0.7410870361328125, 0.7406585083007813, 0.7405780029296875, 0.74112939453125, 0.7410717163085937, 0.7406264038085938, 0.7405916137695312, 0.7411134033203125, 0.740692626953125, 0.74117236328125, 0.7411555786132813, 0.7407922973632812, 0.74039501953125, 0.7411712036132813, 0.7411712036132813, 0.7413074951171875, 0.7409837036132813, 0.7411322631835937, 0.7407247314453125, 0.7409061889648437, 0.741374755859375, 0.7408959350585937, 0.7425195922851563, 0.740439453125, 0.7409522094726563, 0.7411123046875, 0.7409574584960937, 
0.741372802734375, 0.7410932006835937, 0.7408209838867188, 0.7407821044921875, 0.7407656860351562, 0.74102685546875, 0.7413851928710937, 0.7412572021484375, 0.741064697265625, 0.741011474609375, 0.7408129272460937, 0.7410192260742188, 0.74131640625, 0.7405298461914063, 0.7413767700195313, 0.74063671875, 0.7410964965820312, 0.7412127075195313, 0.7412821655273437, 0.7412142333984375, 0.7414312744140625, 0.7404844360351562, 0.7407822265625, 0.7410919189453125, 0.7412132568359375, 0.7407432250976562, 0.741016357421875, 0.7410842895507812, 0.741087646484375, 0.7406998291015625, 0.7413154907226562, 0.7415840454101562, 0.7410225830078125, 0.7411015625, 0.7414537963867187, 0.740874267578125, 0.7414203491210938, 0.7403264770507813, 0.7410634155273438, 0.7409185180664063, 0.7408353271484375, 0.7404669189453125, 0.7404203491210938, 0.7403519897460937, 0.7420149536132813, 0.7408612670898438, 0.7405206298828125, 0.7407544555664063, 0.7414341430664062, 0.7410803833007813, 0.7410902099609376, 0.7408519287109375, 0.7409415893554687, 0.7404564208984376, 0.7404268188476563, 0.740907958984375, 0.741411865234375, 0.7406228637695312, 0.741372314453125, 0.7408510131835937, 0.7408688354492188, 0.741296142578125, 0.7404994506835938, 0.74149267578125, 0.74127978515625, 0.7410421752929688, 0.7401585693359375, 0.7415073852539062, 0.7416324462890626, 0.7409112548828125, 0.7406940307617188, 0.7410747680664063, 0.7413629760742187, 0.7412233276367187, 0.74068994140625, 0.7412100830078125, 0.7406735229492187, 0.7413104858398437, 0.7413859252929688, 0.7409503173828125, 0.7408230590820313, 0.7416156005859375, 0.741074951171875, 0.7407103881835938, 0.741961669921875, 0.7415742797851562, 0.7415607299804687, 0.7408815307617187, 0.741004150390625, 0.7419085083007813, 0.7408836059570313, 0.7411778564453125, 0.7420112915039062, 0.7413792114257812, 0.7412273559570313, 0.7409903564453125, 0.7410182495117188, 0.74169140625, 0.746708984375, 0.7411466064453125, 0.741240478515625, 0.7410732421875, 0.7407437744140625, 0.740569091796875, 0.7410682983398438, 0.7411778564453125, 0.7406277465820312, 0.7407330322265625, 0.7405321044921875, 0.741503662109375, 0.7409111328125, 0.7410657348632812, 0.74077490234375, 0.7412572021484375, 0.740464599609375, 0.7408700561523438, 0.7406626586914062, 0.7411368408203125, 0.741069091796875, 0.74134326171875, 0.7410131225585938, 0.7410650634765625, 0.7405731811523437, 0.7413833618164063, 0.7407764282226562, 0.7408101196289063, 0.7409360961914062, 0.7406024169921875, 0.7416668090820312, 0.7409581909179688, 0.740361572265625, 0.7412241821289063, 0.7413707275390625, 0.7408877563476562, 0.7406815795898437, 0.74096728515625, 0.7412142944335938, 0.7409121704101562, 0.7410260620117187, 0.74164501953125, 0.7411445922851563, 0.741074951171875, 0.7407349853515625, 0.7418388671875, 0.7409418334960938, 0.7413053588867188, 0.7412003173828124, 0.7406268310546875, 0.741662841796875, 0.7410089721679688, 0.7414379272460937, 0.7410524291992188, 0.741074951171875, 0.7409862670898437, 0.741231201171875, 0.7412080688476562, 0.7415582885742188, 0.7415643920898437, 0.7411831665039063, 0.740969970703125, 0.7412069091796875, 0.7410032348632812, 0.7411319580078125, 0.7406467895507812, 0.7410001831054688, 0.7411732177734375, 0.7405772705078125, 0.7407882080078125, 0.74119580078125, 0.7415930786132813, 0.7405238647460938, 0.74088671875, 0.7413165893554687, 0.740890625, 0.7410768432617187, 0.7412532958984375, 0.7409111328125, 0.7414476928710938, 0.740789794921875, 0.740991455078125, 0.7406918334960938, 0.7410219116210938, 
0.7410482788085937, 0.74030078125, 0.74102783203125, 0.7408057861328124, 0.7415857543945312, 0.7404413452148437, 0.7403440551757813, 0.7411328125, 0.7417835693359375, 0.741223876953125, 0.7406965942382813, 0.7409930419921875, 0.7410515747070312, 0.7409672241210937, 0.7407218627929687, 0.7408848876953125, 0.7415095825195313, 0.7411261596679688, 0.7403001098632812, 0.740924072265625, 0.7412849731445312, 0.74093994140625, 0.7412880249023438, 0.7411759643554687, 0.7412305908203125, 0.7412200317382812, 0.7410457763671875, 0.74151953125, 0.7411492919921875, 0.7410728759765625, 0.741060302734375, 0.7409923706054687, 0.7410328979492188, 0.7411978149414062, 0.7412183227539062, 0.7411544189453125, 0.7412242431640625, 0.7409833984375, 0.7409845581054687, 0.7409912109375, 0.7409848022460938, 0.7414599609375, 0.7411077270507812, 0.74080419921875]",tokens/s,1.349568131137656,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: 
Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1011, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 777, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 418, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 342, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File 
""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, 
prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 1015, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 840, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 467, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 397, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 252, in _flash_attention_forward attn_output_unpad = flash_attn_varlen_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 1124, in flash_attn_varlen_func return FlashAttnVarlenFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 620, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_varlen_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 90, in _flash_attn_varlen_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.varlen_fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, 
worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' 
object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in 
_wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 2.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 2.12 MiB is free. Process 59428 has 14.74 GiB memory in use. Of the allocated memory 14.26 GiB is allocated by PyTorch, and 386.46 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( 
File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in 
launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained 
self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in 
from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 98.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 96.12 MiB is free. Process 153068 has 14.64 GiB memory in use. Of the allocated memory 14.24 GiB is allocated by PyTorch, and 312.03 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, 
in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1284, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1079, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 801, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 563, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, 
prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", 
line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 16.12 MiB is free. Process 22469 has 14.72 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 187.67 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", 
line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in 
launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1556, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmphs_wxucg/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in 
benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track 
self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = 
Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File 
""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise 
ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] 
File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1001, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 734, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 556, in forward 
attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 1033, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 808, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 549, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 382, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, 
prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1011, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 777, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 418, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 342, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1011, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 777, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 418, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 342, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File 
""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, 
q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = 
decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File 
""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, 
prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1011, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 777, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 418, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 342, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3846, in from_pretrained hf_quantizer.preprocess_model( File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 182, in preprocess_model return self._process_model_before_weight_loading(model, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_gptq.py"", line 76, in _process_model_before_weight_loading model = self.optimum_quantizer.convert_model(model) File ""/usr/local/lib/python3.10/dist-packages/optimum/gptq/quantizer.py"", line 218, in convert_model self.block_name_to_quantize = get_block_name_with_pattern(model) File ""/usr/local/lib/python3.10/dist-packages/optimum/gptq/utils.py"", line 77, in get_block_name_with_pattern raise ValueError(""Block pattern could not be match. Pass `block_name_to_quantize` argument in `quantize_model`"") ValueError: Block pattern could not be match. 
Pass `block_name_to_quantize` argument in `quantize_model` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1011, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 777, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", 
line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 418, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 342, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in 
run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", 
line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1556, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp8afbzdc6/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = 
launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,3162.034176,4423.876608,0.0,4028.628992,3944.723968,s,1,10.139919921875,10.139919921875,0.0,10.139919921875,10.139919921875,10.139919921875,10.139919921875,[10.139919921875],,kWh,9.448301945834221e-05,1.0414976070960834e-05,2.917669000800094e-05,0.00013407468553730398,,MB,2943.471616,4763.615232,0.0,4353.687552,4305.05728,s,10,1.0864285354614256,0.10864285354614256,0.0009323365593873592,0.10896449661254883,0.10917128753662109,0.10925636291503907,0.10932442321777344,"[0.108942626953125, 0.10898636627197265, 0.10888729858398437, 0.10912060546875, 0.10934143829345704, 0.10915238189697266, 0.10807360076904297, 0.10881046295166015, 0.10909606170654297, 0.10601769256591796]",tokens/s,2356.3445881994644,kWh,3.251938411296191e-06,3.586319725762157e-07,2.1665819801776804e-06,5.777152364050087e-06,tokens/kWh,44312488.89903443,MB,2945.818624,4763.615232,0.0,4353.687552,4305.05984,s,10,21.81330712890625,2.1813307128906247,0.008862422337077453,2.181543701171875,2.1906317626953125,2.1919728637695313,2.1930457446289062,"[2.188095703125, 2.190167236328125, 2.19331396484375, 2.174505126953125, 2.1765244140625, 2.18593408203125, 2.170438720703125, 2.1771533203125, 2.1668408203125, 2.190333740234375]",tokens/s,28.881452788291124,kWh,6.324622699662207e-05,6.975845056279136e-06,3.424028047862247e-05,0.0001044623525315237,tokens/kWh,603088.0836326985,,s,630,21.81070582580564,0.03462016797746931,0.0005132230684201481,0.034509664535522455,0.0350792552947998,0.0353736967086792,0.03716842910766602,"[0.03538739013671875, 0.03503308868408203, 0.03500851058959961, 0.034581729888916016, 0.034584415435791015, 0.034508766174316405, 0.034425216674804686, 0.034523521423339844, 0.03444329452514648, 0.03458272171020508, 0.03487948989868164, 0.03455385589599609, 0.034490367889404294, 0.03480985641479492, 0.03461939239501953, 0.03500646209716797, 0.03496259307861328, 0.03493465423583984, 0.03535696029663086, 0.03476755142211914, 0.03455980682373047, 0.03481411361694336, 0.03457846450805664, 0.03494911956787109, 0.03481100845336914, 0.03465468978881836, 0.03455340957641601, 0.03461356735229492, 0.03443075180053711, 0.03442953491210937, 0.03432259368896484, 0.03476684951782227, 0.03417497634887695, 0.034326526641845705, 0.03474051284790039, 0.034445022583007814, 0.03461503982543945, 0.03466870498657226, 0.03456156921386719, 0.034665023803710934, 0.03441664123535156, 0.03457987213134766, 0.034259552001953124, 0.03418838500976563, 0.0344334716796875, 0.034353633880615235, 0.03444057464599609, 0.03480384063720703, 0.034728446960449216, 0.037367809295654295, 0.034961406707763674, 0.03476275253295898, 0.03463987350463867, 0.034697216033935545, 0.03470950317382813, 0.03469107055664063, 0.034442272186279294, 0.03448112106323242, 0.03439206314086914, 
0.034598209381103515, 0.03485356903076172, 0.03717529678344727, 0.03470131301879883, 0.03522969436645508, 0.03618815994262695, 0.0348711051940918, 0.034547039031982425, 0.035461982727050784, 0.036808704376220705, 0.03485628890991211, 0.03467913436889648, 0.03490028762817383, 0.03580723190307617, 0.03491984176635742, 0.034877857208251956, 0.03518278503417969, 0.03449651336669922, 0.03465011215209961, 0.03480166244506836, 0.03527679824829102, 0.034560001373291016, 0.03447795104980469, 0.03453305435180664, 0.03462393569946289, 0.03460675048828125, 0.034551647186279295, 0.03454617691040039, 0.03439811325073242, 0.03451500701904297, 0.034612319946289063, 0.03459900665283203, 0.035543903350830075, 0.035125152587890625, 0.03527811050415039, 0.03485555267333985, 0.0345797119140625, 0.03456915283203125, 0.03445555114746094, 0.03445555114746094, 0.034465473175048826, 0.0344290885925293, 0.03450896072387695, 0.03436921691894531, 0.03450681686401367, 0.03479283142089844, 0.0345337905883789, 0.034466270446777345, 0.03457369613647461, 0.03440435028076172, 0.03450646209716797, 0.03443382263183594, 0.03447814559936523, 0.03442489624023438, 0.03464601516723633, 0.03480575942993164, 0.034988033294677735, 0.034799617767333986, 0.03470950317382813, 0.03457024002075195, 0.034336769104003906, 0.03439820861816406, 0.03457331085205078, 0.034457759857177736, 0.03474313735961914, 0.0348322868347168, 0.03470479965209961, 0.035405601501464844, 0.03495062255859375, 0.03476534271240234, 0.03467673492431641, 0.03451084899902344, 0.03451254272460937, 0.034285919189453125, 0.03433881759643555, 0.03480780792236328, 0.03479347229003906, 0.0348671989440918, 0.0348221435546875, 0.034754558563232424, 0.034590721130371094, 0.034648063659667966, 0.03437088012695313, 0.03443513488769531, 0.03448691177368164, 0.03458796691894531, 0.03470099258422851, 0.03478192138671875, 0.03465859222412109, 0.03509849548339844, 0.034899456024169925, 0.03473436737060547, 0.03448796844482422, 0.034662113189697266, 0.03426553726196289, 0.03439260864257813, 0.03489484786987305, 0.034423809051513675, 0.03466236877441406, 0.03502054214477539, 0.03528054428100586, 0.03485299301147461, 0.035001056671142575, 0.03490793609619141, 0.03477043151855469, 0.0346319694519043, 0.03445939254760742, 0.034443744659423826, 0.03430131149291992, 0.0340814094543457, 0.034412384033203125, 0.034861217498779296, 0.03501875305175781, 0.03528927993774414, 0.03508614349365234, 0.03504742431640625, 0.03521331024169922, 0.03507183837890625, 0.03508348846435547, 0.03534944152832031, 0.03530281448364258, 0.03534646224975586, 0.03532790374755859, 0.035154590606689455, 0.035168254852294925, 0.03540377426147461, 0.03496956634521484, 0.03473590469360351, 0.035028831481933594, 0.03512483215332031, 0.035402751922607424, 0.034958335876464845, 0.0347586555480957, 0.034907615661621094, 0.034514591217041014, 0.03465286254882813, 0.03465753555297851, 0.03452998352050781, 0.034382080078125, 0.034215614318847655, 0.034271808624267576, 0.03417625427246094, 0.03409356689453125, 0.03415407943725586, 0.03432080078125, 0.03439779281616211, 0.034636192321777344, 0.03439206314086914, 0.034340545654296874, 0.03420393753051758, 0.034358814239501954, 0.0344246711730957, 0.034375457763671874, 0.03425491333007812, 0.03466665649414063, 0.034382495880126956, 0.03459827041625976, 0.03473881530761719, 0.03456950378417969, 0.034681312561035155, 0.03521356964111328, 0.03593011093139648, 0.034830337524414064, 0.03462960052490234, 0.03442076873779297, 0.03430806350708008, 0.03432825469970703, 
0.03442723083496094, 0.034457023620605466, 0.03437420654296875, 0.03429580688476563, 0.03421388626098633, 0.03410528182983399, 0.03410480117797852, 0.03428003311157227, 0.03431628799438476, 0.03419340896606445, 0.034129920959472655, 0.034305759429931644, 0.03464015960693359, 0.03699097442626953, 0.03503513717651367, 0.03465216064453125, 0.03462348937988281, 0.034258846282958985, 0.03424470520019531, 0.0341071662902832, 0.03416086578369141, 0.034286880493164064, 0.03406460952758789, 0.03412223815917969, 0.03442483139038086, 0.03474374389648437, 0.03446169662475586, 0.034123870849609376, 0.03405408096313477, 0.03405337524414063, 0.034000926971435544, 0.03715161514282227, 0.03461939239501953, 0.034738174438476564, 0.03495727920532227, 0.03478940963745117, 0.034686817169189456, 0.03463753509521485, 0.03480825424194336, 0.03475843048095703, 0.03445126342773437, 0.03434947204589844, 0.03459481430053711, 0.034151649475097655, 0.0343785285949707, 0.034163871765136716, 0.03462643051147461, 0.03454492950439453, 0.03486995315551758, 0.03440639877319336, 0.03420528030395508, 0.0343392333984375, 0.03420569610595703, 0.03414371109008789, 0.0342817268371582, 0.03405033493041992, 0.034269184112548826, 0.034716926574707034, 0.03448684692382813, 0.03436761474609375, 0.03440166473388672, 0.03427734375, 0.03451561737060547, 0.03450067138671875, 0.03446934509277344, 0.03465827178955078, 0.03459331130981445, 0.03539971160888672, 0.03482419204711914, 0.03490611267089844, 0.03496345520019531, 0.03491020965576172, 0.0347393913269043, 0.03496364974975586, 0.034672286987304686, 0.03454665756225586, 0.03490371322631836, 0.034436607360839845, 0.03442134475708008, 0.03540518569946289, 0.03430694580078125, 0.03446076965332031, 0.034442142486572264, 0.03417497634887695, 0.0343633918762207, 0.03428281784057617, 0.03401388931274414, 0.03418080139160156, 0.03410966491699219, 0.035161598205566406, 0.0349005126953125, 0.03469292831420898, 0.03443059158325195, 0.034965534210205075, 0.034675521850585936, 0.034418689727783204, 0.0344370231628418, 0.03464371109008789, 0.03407206344604492, 0.03417993545532227, 0.034063423156738284, 0.03438278579711914, 0.035097984313964846, 0.03460492706298828, 0.0343109130859375, 0.034393505096435545, 0.03465071868896484, 0.03419456100463867, 0.03722943878173828, 0.03759638214111328, 0.03425513458251953, 0.034242111206054686, 0.03434108734130859, 0.03450508880615234, 0.03655100631713867, 0.03470489501953125, 0.03449478530883789, 0.03580742263793945, 0.035037185668945314, 0.03465011215209961, 0.03425075149536133, 0.034661983489990236, 0.035546913146972656, 0.03507878494262695, 0.035026943206787106, 0.03523583984375, 0.03490339279174805, 0.034546337127685546, 0.03462736129760742, 0.03440576171875, 0.03429430389404297, 0.03507436752319336, 0.03422822570800781, 0.03430115127563477, 0.03435708618164063, 0.03416096115112305, 0.03426572799682617, 0.03452694320678711, 0.03469692611694336, 0.034337024688720706, 0.03436316680908203, 0.03471971130371094, 0.03423494338989258, 0.03422208023071289, 0.03426038360595703, 0.03411004638671875, 0.034327934265136716, 0.034500606536865236, 0.03432447814941406, 0.034810497283935544, 0.03475001525878906, 0.034834495544433595, 0.03526022338867187, 0.034554847717285155, 0.03434659194946289, 0.034300193786621094, 0.034144382476806644, 0.03457747268676758, 0.034872257232666015, 0.034576385498046876, 0.03452320098876953, 0.03423839950561523, 0.034103294372558594, 0.034207199096679686, 0.03395199966430664, 0.03404748916625976, 0.03407747268676758, 0.0339128303527832, 
0.033914302825927736, 0.03436758422851562, 0.03526294326782226, 0.034846240997314454, 0.034691551208496096, 0.034610561370849606, 0.034374271392822266, 0.03436048126220703, 0.03418812942504883, 0.03469107055664063, 0.03437286376953125, 0.034105182647705075, 0.034255775451660156, 0.03461324691772461, 0.03479142379760742, 0.034479934692382815, 0.03427552032470703, 0.03419955062866211, 0.03521283340454102, 0.03430380630493164, 0.03440019226074219, 0.03425558471679688, 0.03420159912109375, 0.03383295822143555, 0.0341190414428711, 0.03439475250244141, 0.03479075241088867, 0.03478915023803711, 0.03463257598876953, 0.034334720611572264, 0.0346333122253418, 0.034671009063720705, 0.034455169677734376, 0.03461977767944336, 0.03432243347167969, 0.03423788833618164, 0.034253376007080075, 0.03528444671630859, 0.034896385192871096, 0.03472796630859375, 0.03426303863525391, 0.03448831939697266, 0.03466035079956055, 0.03435724639892578, 0.03428953552246094, 0.034375808715820313, 0.03428704071044922, 0.03538739013671875, 0.0346478385925293, 0.03445987319946289, 0.034533374786376955, 0.034240543365478514, 0.034676704406738285, 0.03415260696411133, 0.03426838302612305, 0.03423091125488281, 0.0341682243347168, 0.034324417114257814, 0.034571006774902345, 0.03437942504882813, 0.034590110778808594, 0.03449932861328125, 0.034164833068847655, 0.037119998931884765, 0.034484222412109376, 0.034332481384277344, 0.034144161224365234, 0.0343059196472168, 0.03420585632324219, 0.03451110458374024, 0.034869247436523435, 0.034743648529052734, 0.0345258560180664, 0.03438777542114258, 0.034264671325683595, 0.034313823699951174, 0.03429478454589844, 0.03442227172851563, 0.03442329788208008, 0.034070526123046875, 0.034181121826171876, 0.0343078384399414, 0.034873054504394534, 0.03471404647827148, 0.03460300827026367, 0.03437932968139648, 0.03432463836669922, 0.03433478546142578, 0.03424492645263672, 0.03417497634887695, 0.034080768585205076, 0.034533374786376955, 0.03428102493286133, 0.03427372741699219, 0.03471769714355469, 0.034769054412841796, 0.03438985443115234, 0.034375679016113284, 0.03410739135742188, 0.03474431991577148, 0.034375137329101565, 0.03459945678710938, 0.03543215942382812, 0.03740409469604492, 0.0350731201171875, 0.03491814422607422, 0.034776287078857424, 0.034506752014160154, 0.034498687744140624, 0.03420598220825195, 0.034304000854492187, 0.03426303863525391, 0.033974273681640625, 0.034166145324707034, 0.03424505615234375, 0.034444671630859375, 0.03430688095092774, 0.034227745056152344, 0.036382560729980466, 0.035078239440917966, 0.03435984039306641, 0.034076576232910154, 0.034343006134033204, 0.03406380844116211, 0.0340906867980957, 0.03396217727661133, 0.03464860916137695, 0.03468828964233398, 0.03446636962890625, 0.034426433563232425, 0.03444403076171875, 0.0346861457824707, 0.03450124740600586, 0.034191551208496096, 0.034248767852783205, 0.034186176300048825, 0.0342108154296875, 0.03424665451049805, 0.034519264221191406, 0.03483363342285156, 0.03455779266357422, 0.034461536407470704, 0.034366336822509766, 0.03419443130493164, 0.03432115173339844, 0.03420800018310547, 0.03411264038085938, 0.03432742309570312, 0.03422003173828125, 0.034070560455322266, 0.03454528045654297, 0.034783584594726566, 0.03443270492553711, 0.03435961532592773, 0.034399551391601564, 0.03446956634521484, 0.034452255249023435, 0.03435542297363281, 0.034404319763183595, 0.03434409713745117, 0.034214336395263674, 0.03435974502563476, 0.03442483139038086, 0.03465420913696289, 0.034725822448730466, 0.034389472961425784, 
0.034279647827148436, 0.03436991882324219, 0.03427900695800781, 0.03428803253173828, 0.034088958740234376, 0.034426048278808595, 0.03413670349121094, 0.0355302734375, 0.034889217376708984, 0.034751487731933595, 0.03433062362670898, 0.034617088317871095, 0.03422233581542969, 0.034455360412597655, 0.03429804611206055, 0.03434239959716797, 0.034319038391113284, 0.03494073486328125, 0.03477612686157226, 0.03458758544921875, 0.03472515106201172, 0.03469590377807617, 0.03427081680297851, 0.03440201568603515, 0.034493118286132815, 0.03430809783935547, 0.034106689453125, 0.03428422546386719, 0.034625408172607425, 0.0350079345703125, 0.034570945739746096, 0.034457599639892575, 0.034471519470214845, 0.03428803253173828, 0.03433039855957031, 0.03435951995849609, 0.03451036834716797, 0.03448403167724609, 0.034151073455810546, 0.03410124969482422, 0.03423027038574219, 0.03497574234008789, 0.034664447784423826, 0.03458598327636719, 0.034447391510009764, 0.034482112884521486, 0.03431827163696289, 0.03467702484130859, 0.03444364929199219, 0.03444035339355469, 0.03432745742797851, 0.03461251068115234, 0.03448400115966797, 0.03810604858398438, 0.03489996719360351, 0.034705406188964845, 0.034699264526367186, 0.035168254852294925, 0.034813697814941404, 0.03469952011108399, 0.03594649505615234, 0.03774259185791016, 0.034887680053710936, 0.03483011245727539, 0.03527702331542969, 0.03587071990966797, 0.03531491088867188, 0.03511785507202148, 0.03482150268554687, 0.03575462341308594]",tokens/s,28.88489740000097,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track 
self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: DeciLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. 
If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch 
benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in 
load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File 
""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report 
= launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 
'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,8213.446656,11251.089408,0.0,10848.567296,10616.027648,s,1,14.78121484375,14.78121484375,0.0,14.78121484375,14.78121484375,14.78121484375,14.78121484375,[14.78121484375],,kWh,0.0002216113200000071,2.4437422025175433e-05,6.652866433399726e-05,0.0003125774063591798,,MB,4051.382272,11672.61696,0.0,11255.414784,11070.470656,s,10,3.7072559814453125,0.37072559814453127,0.0016514955874063807,0.37090721130371096,0.3719229248046875,0.37241461791992186,0.3728079724121094,"[0.36627883911132814, 0.3705299377441406, 0.37290631103515626, 0.37088775634765625, 0.37029959106445315, 0.37181365966796875, 0.3712012939453125, 0.3709266662597656, 0.37174774169921876, 0.3706641845703125]",tokens/s,690.5376949454559,kWh,1.0849438628086383e-05,1.1964937800458096e-06,7.175890514370403e-06,1.9221822922502597e-05,tokens/kWh,13318195.731597653,MB,4055.629824,11674.714112,0.0,11257.511936,11070.473216,s,10,28.617343505859377,2.861734350585938,0.004188017939021872,2.8611508789062503,2.8643912841796872,2.868191125488281,2.871230998535156,"[2.85651708984375, 2.860181884765625, 2.859625732421875, 2.8588779296875, 2.857465087890625, 2.86347802734375, 2.862119873046875, 2.8635400390625, 2.863546875, 2.871990966796875]",tokens/s,22.01462200259811,kWh,8.376885722149708e-05,9.239831935810024e-06,5.579215986002944e-05,0.00014880084901733656,tokens/kWh,423384.6810421086,,s,630,28.614166778564442,0.045419312346927705,0.0004494754846813241,0.045390863418579105,0.04581254425048828,0.04596101360321045,0.04755982044219971,"[0.04830428695678711, 0.04540620803833008, 0.04493068695068359, 0.04485718536376953, 0.04479619216918945, 0.044742912292480466, 0.04469721603393555, 0.044810558319091795, 0.04508790588378906, 0.04485123062133789, 0.04474969482421875, 0.04497817611694336, 0.04544112014770508, 0.045104736328125, 0.04486380767822266, 0.04478275299072266, 0.04503577423095703, 0.0453392333984375, 0.04504576110839844, 0.04490358352661133, 0.045284191131591794, 0.04603084945678711, 0.04566339111328125, 0.04521609497070313, 0.045160961151123044, 0.04541219329833984, 0.04595119857788086, 0.04471388626098633, 0.04493046569824219, 0.04514879989624023, 0.045453311920166016, 0.04497817611694336, 0.045010433197021485, 0.04525270462036133, 0.04545577621459961, 0.0451641616821289, 0.04518739318847656, 0.04590188980102539, 0.04550409698486328, 0.045259166717529296, 0.04530147171020508, 0.045880897521972656, 0.04572246551513672, 0.04556592178344727, 0.04542051315307617, 0.04537897491455078, 0.04573238372802734, 0.0452935676574707, 0.04517679977416992, 0.04546694564819336, 0.04595990371704101, 0.045281280517578126, 0.045367294311523435, 0.04532015991210937, 0.04519670486450195, 0.04533110427856445, 0.04555513763427734, 
0.04578732681274414, 0.045805919647216795, 0.04524863815307617, 0.04544063949584961, 0.04572390365600586, 0.045780990600585936, 0.04852131271362305, 0.04556579208374024, 0.04495171356201172, 0.04478771209716797, 0.044987838745117185, 0.04501497650146485, 0.0446673583984375, 0.04485062408447266, 0.04492790222167969, 0.045076286315917966, 0.04497315216064453, 0.04473334503173828, 0.04503327941894531, 0.044998302459716796, 0.04537343978881836, 0.04535145568847656, 0.04560067367553711, 0.045570144653320314, 0.0453570556640625, 0.04501913452148437, 0.04542009735107422, 0.04616236877441406, 0.045848575592041016, 0.045464736938476566, 0.0451572151184082, 0.045195262908935545, 0.04518307113647461, 0.04511449432373047, 0.04525279998779297, 0.045174751281738285, 0.04525878524780273, 0.044941921234130856, 0.04503756713867187, 0.04516454315185547, 0.04554326248168945, 0.04543401718139648, 0.045663230895996096, 0.04554342269897461, 0.04534886550903321, 0.04566377639770508, 0.0456135368347168, 0.045723648071289064, 0.04587216186523437, 0.04577788925170898, 0.04577487945556641, 0.04566742324829102, 0.045461727142333985, 0.04508902359008789, 0.04537705612182617, 0.04538252639770508, 0.04531216049194336, 0.04522991943359375, 0.04546559906005859, 0.04544326400756836, 0.04534457778930664, 0.04507638549804688, 0.04575187301635742, 0.04556451034545898, 0.045432769775390625, 0.04547993469238281, 0.045485729217529296, 0.0457874870300293, 0.04583395385742187, 0.04750140762329102, 0.04535385513305664, 0.04499216079711914, 0.0449760627746582, 0.044872097015380856, 0.044875774383544925, 0.044622879028320316, 0.044981216430664064, 0.04487977600097656, 0.04479939270019531, 0.04492489624023437, 0.04531840133666992, 0.045181407928466796, 0.04492083358764649, 0.04523212814331055, 0.04553113555908203, 0.04565606307983398, 0.04558643341064453, 0.045338623046875, 0.04547788619995117, 0.04602265548706055, 0.04574425506591797, 0.045246337890625, 0.04512102508544922, 0.045476448059082034, 0.045393310546875, 0.045070846557617186, 0.04501708984375, 0.04520307159423828, 0.044990848541259766, 0.0450334701538086, 0.045039264678955075, 0.04523379135131836, 0.04524303817749024, 0.04530380630493164, 0.04539398574829102, 0.045601951599121095, 0.04558729553222656, 0.04522393417358399, 0.04531814575195312, 0.04614553451538086, 0.045906974792480466, 0.04572835159301758, 0.04569935989379883, 0.04566819381713867, 0.045559902191162106, 0.04525475311279297, 0.04526454544067383, 0.04568310546875, 0.0454774398803711, 0.0452182731628418, 0.045264289855957034, 0.04542316818237305, 0.045297664642333986, 0.04536064147949219, 0.045427200317382815, 0.04573593521118164, 0.04583155059814453, 0.045582977294921875, 0.04548172760009766, 0.04537164688110352, 0.04590387344360351, 0.04576425552368164, 0.04764640045166016, 0.045093185424804685, 0.04489731216430664, 0.04479647827148438, 0.04505641555786133, 0.044744705200195314, 0.04465011215209961, 0.04489459228515625, 0.044994110107421874, 0.04483321762084961, 0.044655616760253904, 0.04495872116088867, 0.045363166809082034, 0.045131072998046876, 0.04508086395263672, 0.045297664642333986, 0.04568431854248047, 0.04513631820678711, 0.04511276626586914, 0.0453392333984375, 0.045541374206542966, 0.04551718521118164, 0.04516659164428711, 0.04519472122192383, 0.045453857421875, 0.045230079650878906, 0.04513801574707031, 0.04502518463134766, 0.04522358322143555, 0.045451614379882814, 0.045182430267333984, 0.04502399826049805, 0.04518608093261719, 0.04546022415161133, 0.045331489562988284, 0.045122528076171876, 
0.045185089111328125, 0.04811148834228516, 0.04522921752929687, 0.045362239837646486, 0.04595833587646484, 0.04567292785644531, 0.04534700775146484, 0.045360576629638674, 0.045781566619873044, 0.045571937561035156, 0.045170848846435546, 0.04516454315185547, 0.04538982391357422, 0.04519475173950195, 0.04544681549072266, 0.04566470336914062, 0.04566672134399414, 0.04543814468383789, 0.04572153472900391, 0.04568972778320313, 0.045486080169677735, 0.04532633590698242, 0.04539788818359375, 0.04543091201782227, 0.045856769561767576, 0.045852127075195315, 0.04548662567138672, 0.04678601455688477, 0.04475913619995117, 0.04471139144897461, 0.04467366409301758, 0.04503919982910156, 0.04473523330688477, 0.04476054382324219, 0.04502582550048828, 0.04538140869140625, 0.04501478576660156, 0.04484524917602539, 0.045209888458251954, 0.04528451156616211, 0.04505072021484375, 0.04505535888671875, 0.045120094299316404, 0.04513180923461914, 0.04506623840332031, 0.045151935577392575, 0.04548230361938477, 0.04576870346069336, 0.045486080169677735, 0.04511235046386719, 0.045017440795898436, 0.04498495864868164, 0.04555311965942383, 0.045158977508544924, 0.04508169555664063, 0.04502531051635742, 0.04541526412963867, 0.04523379135131836, 0.045369728088378906, 0.0452935676574707, 0.04517027282714844, 0.045470016479492184, 0.04556198501586914, 0.045292606353759764, 0.045636512756347655, 0.04529328155517578, 0.045185310363769535, 0.04567801666259766, 0.04591788864135742, 0.04563443374633789, 0.045535232543945314, 0.045416255950927735, 0.045967552185058595, 0.045418495178222655, 0.04553468704223633, 0.04574262237548828, 0.045526302337646485, 0.04521420669555664, 0.045389503479003904, 0.045781505584716796, 0.045717281341552736, 0.04528358459472656, 0.045426334381103516, 0.04573603057861328, 0.045590782165527345, 0.04550041580200195, 0.04544716644287109, 0.04576169586181641, 0.04576752090454102, 0.04577280044555664, 0.04681523132324219, 0.045842048645019534, 0.044943550109863284, 0.0450684814453125, 0.044886016845703126, 0.04481539154052734, 0.044865535736083983, 0.04500976181030274, 0.04484505462646484, 0.045154430389404296, 0.04534272003173828, 0.0452628173828125, 0.04495724868774414, 0.045144542694091794, 0.04514806365966797, 0.04558227157592774, 0.04557020950317383, 0.04513564682006836, 0.045168094635009766, 0.04581043243408203, 0.04557209777832031, 0.04554751968383789, 0.04542649459838867, 0.04544633483886719, 0.045456382751464845, 0.04524364852905274, 0.045179393768310545, 0.04518118286132813, 0.04547772979736328, 0.045088897705078124, 0.04529359817504883, 0.04545491027832031, 0.045467552185058595, 0.04535919952392578, 0.04523628616333008, 0.04649148941040039, 0.04526953506469727, 0.04531609725952149, 0.04555788803100586, 0.04598278427124024, 0.04567942428588867, 0.04544655990600586, 0.045666431427001955, 0.04565449523925781, 0.04542259216308594, 0.0456703987121582, 0.045676544189453126, 0.04547795104980469, 0.04536313629150391, 0.04526620864868164, 0.04572438430786133, 0.045841407775878903, 0.045290496826171874, 0.0453570556640625, 0.04573084640502929, 0.04575664138793945, 0.04549299240112305, 0.04548339080810547, 0.04551129531860352, 0.04577679824829101, 0.04575651168823242, 0.045902976989746096, 0.04579008102416992, 0.04781260681152344, 0.04546355056762695, 0.04528537750244141, 0.04482457733154297, 0.04470495986938477, 0.04492780685424805, 0.04492710494995117, 0.04504358291625977, 0.04470579147338867, 0.04485887908935547, 0.04506265640258789, 0.04529151916503906, 0.04488191986083984, 0.04529558563232422, 
0.04509027099609375, 0.04512825775146485, 0.045266529083251954, 0.045582752227783206, 0.04554905700683594, 0.04549052810668945, 0.045407936096191405, 0.04519164657592773, 0.045409950256347656, 0.04557398223876953, 0.04536371231079102, 0.04499456024169922, 0.04519116973876953, 0.04511334228515625, 0.045303359985351566, 0.045255104064941404, 0.045153377532958984, 0.045391902923583985, 0.045491073608398436, 0.04524236679077148, 0.04521993637084961, 0.04557609558105469, 0.04532633590698242, 0.04545273590087891, 0.04564019012451172, 0.04592851257324219, 0.045709022521972655, 0.045682945251464845, 0.045625377655029296, 0.04585647964477539, 0.04572598266601562, 0.04539507293701172, 0.045603710174560545, 0.0454447021484375, 0.04526326370239258, 0.04524358367919922, 0.045439647674560546, 0.04570272064208984, 0.045606815338134765, 0.04538643264770508, 0.04542390441894531, 0.04566425704956055, 0.045609695434570316, 0.045717601776123044, 0.04575328063964844, 0.04584908676147461, 0.04578684616088867, 0.045763328552246095, 0.04615718460083008, 0.04758367919921875, 0.045778942108154294, 0.04489011383056641, 0.04472217559814453, 0.044717281341552735, 0.04495439910888672, 0.04493856048583984, 0.04480790328979492, 0.04466787338256836, 0.04515404891967773, 0.04529587173461914, 0.045216896057128905, 0.044985214233398435, 0.045006847381591795, 0.04565401458740234, 0.04573295974731445, 0.045321247100830075, 0.04523187255859375, 0.04531577682495117, 0.04539836883544922, 0.0450601921081543, 0.04533657455444336, 0.04578236770629883, 0.04548255920410156, 0.04542438507080078, 0.04542031860351563, 0.04520198440551758, 0.04498636627197266, 0.04498339080810547, 0.04501724624633789, 0.045404769897460937, 0.04522588729858398, 0.045254913330078125, 0.045466976165771486, 0.045478721618652344, 0.04527907180786133, 0.045901630401611326, 0.04586105728149414, 0.04568473434448242, 0.04595916748046875, 0.045604705810546875, 0.04551446533203125, 0.045625377655029296, 0.045580703735351565, 0.04565366363525391, 0.0456376953125, 0.0457562255859375, 0.045347297668457034, 0.045279232025146485, 0.04522598266601562, 0.04564287948608398, 0.045385761260986326, 0.04546441650390625, 0.0456888313293457, 0.04560486221313476, 0.04540143966674805, 0.04566697692871094, 0.04596700668334961, 0.045767009735107424, 0.046088191986083986, 0.04610166549682617, 0.04579414367675781, 0.045854305267333986, 0.04833100891113281, 0.04544515228271485, 0.045192577362060546, 0.045154945373535156, 0.0448526725769043, 0.04479238510131836, 0.044776737213134764, 0.04490518569946289, 0.04517881774902344, 0.04498233413696289, 0.04500275039672851, 0.04518262481689453, 0.04525833511352539, 0.04509894561767578, 0.045370174407958985, 0.045230079650878906, 0.04554956817626953, 0.04545536041259766, 0.045123584747314455, 0.04517174530029297, 0.045781982421875, 0.04576051330566406, 0.04542668914794922, 0.04532223892211914, 0.045428737640380856, 0.045497566223144534, 0.045380382537841796, 0.04549836730957031, 0.04526694488525391, 0.04500300979614258, 0.04496063995361328, 0.0449986572265625, 0.04541846466064453, 0.04528425598144531, 0.0453221435546875, 0.0454771842956543, 0.04589852905273437, 0.04563270568847656, 0.04526918411254883, 0.045392513275146484, 0.04586454391479492, 0.045513118743896484, 0.04559830474853516, 0.045539871215820316, 0.045594497680664064, 0.04534230422973633, 0.04560240173339844, 0.04563027191162109, 0.04541439819335937, 0.045776031494140626, 0.04516540908813477, 0.04527632141113281, 0.045624160766601564, 0.04558233642578125, 0.0452784309387207, 
0.04540444946289063, 0.045703678131103515, 0.046045185089111325, 0.04572774505615235, 0.04566444778442383, 0.045758113861083985, 0.04578524780273437, 0.046268383026123044, 0.046811134338378906, 0.04522188949584961, 0.045080352783203125, 0.04491846466064453, 0.045068832397460935, 0.04489731216430664, 0.04498732757568359, 0.045348831176757816, 0.04520294570922852, 0.04496559906005859, 0.04538851165771484, 0.045674625396728515, 0.045532958984375, 0.04520982360839844, 0.04527004623413086, 0.04551676940917969, 0.045369823455810546, 0.04522185516357422, 0.04580614471435547, 0.04616191864013672, 0.04594623947143555, 0.04557888031005859, 0.045649921417236325, 0.04531228637695312, 0.04554665756225586, 0.045075103759765624, 0.04520924758911133, 0.04553855895996094, 0.045342880249023436, 0.045099327087402344, 0.04544527816772461, 0.04565030288696289, 0.045765918731689455, 0.04544790267944336, 0.04532611083984375, 0.045588577270507816, 0.04580774307250977, 0.04545721435546875, 0.045571422576904295, 0.04586172866821289, 0.04584646224975586, 0.045809726715087894, 0.04618854522705078, 0.046015777587890626, 0.04588598251342774, 0.046145408630371094, 0.045676864624023435, 0.045414241790771484, 0.045698814392089844, 0.045541408538818356, 0.045676769256591795, 0.04573404693603516, 0.04576665496826172, 0.04573523330688477, 0.045717823028564454, 0.045468032836914064, 0.045581630706787106, 0.04596192169189453, 0.04592844772338867, 0.04575027084350586, 0.045991519927978515, 0.04607567977905273, 0.04611340713500976]",tokens/s,22.017066052468383,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 2.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 2.12 MiB is free. Process 58305 has 14.74 GiB memory in use. Of the allocated memory 14.26 GiB is allocated by PyTorch, and 386.46 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", 
line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1284, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1079, in forward layer_outputs = decoder_layer( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 816, in forward hidden_states = self.mlp(hidden_states) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 734, in forward final_hidden_states.index_add_(0, top_x, current_hidden_states.to(hidden_states.dtype)) RuntimeError: CUDA error: invalid configuration argument CUDA kernel errors might be asynchronously reported at some other API call, so the stacktrace below might be incorrect. For debugging consider passing CUDA_LAUNCH_BLOCKING=1 Compile with `TORCH_USE_CUDA_DSA` to enable device-side assertions. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1584.623616,1596.915712,0.0,1201.668096,1189.151232,s,1,8.3438662109375,8.3438662109375,0.0,8.3438662109375,8.3438662109375,8.3438662109375,8.3438662109375,[8.3438662109375],,kWh,3.5593928066667024e-05,3.918479009886834e-06,1.0983897675998638e-05,5.04963047525525e-05,,MB,1721.929728,1794.048,0.0,1384.12032,1351.367168,s,10,0.41199757003784176,0.04119975700378418,0.00014483756846233586,0.041238176345825196,0.04129707107543945,0.04141133575439453,0.04150274749755859,"[0.04152560043334961, 0.041245952606201175, 0.04123302459716797, 0.04099168014526367, 0.041271678924560545, 0.04107120132446289, 0.04107059097290039, 0.04125964736938476, 0.041243328094482425, 0.04108486557006836]",tokens/s,6213.628880784091,kWh,1.24244321833335e-06,1.370178162340395e-07,8.233245357276679e-07,2.2027855702950574e-06,tokens/kWh,116216486.7303491,MB,1731.1744,1835.99104,0.0,1426.06336,1407.548416,s,10,10.576904418945313,1.0576904418945312,0.005352611792629986,1.0581462402343749,1.062176708984375,1.0641183227539062,1.0656716137695312,"[1.0660599365234376, 1.0612806396484376, 1.057637939453125, 1.04605126953125, 1.058654541015625, 1.055569580078125, 1.0571251220703124, 1.0617452392578124, 1.051779052734375, 
1.0610010986328124]",tokens/s,59.5637414356838,kWh,3.0963543785417386e-05,3.414185807765365e-06,1.5384993986472118e-05,4.976272357965487e-05,tokens/kWh,1266007.8763405364,,s,630,10.574133043289184,0.016784338163951085,0.00033400692043555826,0.016730128288269044,0.017058774185180665,0.01727484941482544,0.01835901706695557,"[0.017328544616699217, 0.017398239135742188, 0.019746816635131836, 0.017338048934936522, 0.017328447341918945, 0.017270784378051757, 0.017137664794921875, 0.017006591796875, 0.017165567398071287, 0.017231744766235353, 0.017054559707641602, 0.018394336700439454, 0.01705196762084961, 0.016991968154907226, 0.016888799667358397, 0.01684432029724121, 0.016972192764282225, 0.016801664352416992, 0.016859136581420898, 0.016817760467529298, 0.01684115219116211, 0.01670550346374512, 0.016723968505859374, 0.01664169692993164, 0.016574560165405275, 0.016689407348632813, 0.016590368270874022, 0.016595424652099608, 0.016736255645751954, 0.016760576248168946, 0.016652671813964844, 0.01663167953491211, 0.01663801574707031, 0.01651456069946289, 0.016541280746459962, 0.016528480529785155, 0.016549055099487304, 0.016645952224731444, 0.016554752349853517, 0.01653555107116699, 0.016547840118408205, 0.016520736694335937, 0.016498687744140626, 0.01650326347351074, 0.016541696548461913, 0.01648227119445801, 0.016515104293823243, 0.016457696914672852, 0.016565439224243163, 0.018501760482788086, 0.01876652717590332, 0.016746623992919922, 0.016576416015625, 0.016617151260375978, 0.016764863967895508, 0.016619903564453125, 0.01661302375793457, 0.016699743270874024, 0.017123327255249024, 0.017155519485473635, 0.016878143310546875, 0.01692006492614746, 0.0168985595703125, 0.016728384017944336, 0.016622976303100587, 0.01651161575317383, 0.01637990379333496, 0.016433151245117187, 0.01636672019958496, 0.01643814468383789, 0.01662156867980957, 0.016702720642089844, 0.01674515151977539, 0.01691372871398926, 0.0167063045501709, 0.016736480712890626, 0.01679952049255371, 0.016730112075805666, 0.01668016052246094, 0.016747295379638674, 0.016592256546020506, 0.016581087112426757, 0.01652956771850586, 0.01678473663330078, 0.01681475257873535, 0.016930816650390625, 0.016777215957641603, 0.017083423614501953, 0.01678428840637207, 0.016756383895874024, 0.01665884780883789, 0.016875520706176757, 0.01662156867980957, 0.016691200256347655, 0.016614463806152342, 0.016645055770874023, 0.017143808364868163, 0.01717647933959961, 0.01663542366027832, 0.016659008026123048, 0.016740447998046876, 0.016750495910644533, 0.01670924758911133, 0.0169168643951416, 0.016764928817749023, 0.016730112075805666, 0.016912128448486326, 0.01686070442199707, 0.01667555236816406, 0.01684819221496582, 0.01668908882141113, 0.016730880737304686, 0.01666633605957031, 0.016752927780151368, 0.016631200790405275, 0.01797590446472168, 0.016836063385009765, 0.016726560592651367, 0.019449567794799803, 0.018272544860839842, 0.016942399978637696, 0.017070783615112304, 0.01716223907470703, 0.017334112167358397, 0.016857536315917968, 0.016805599212646485, 0.017108991622924806, 0.016888896942138673, 0.016927679061889647, 0.016903615951538085, 0.01703993606567383, 0.017079296112060546, 0.016873855590820313, 0.016760799407958986, 0.016554079055786132, 0.016622047424316406, 0.01680803108215332, 0.016699392318725585, 0.01674822425842285, 0.016685375213623045, 0.016904447555541994, 0.016625408172607423, 0.016751808166503908, 0.016812864303588866, 0.016680448532104493, 0.016653888702392577, 0.016587455749511718, 0.016648448944091798, 0.01679769515991211, 
0.016662080764770507, 0.016693695068359375, 0.0166297607421875, 0.01663385581970215, 0.016570367813110352, 0.016699392318725585, 0.016598304748535155, 0.016769376754760743, 0.016673152923583984, 0.016644096374511717, 0.016539392471313478, 0.016615680694580078, 0.01653555107116699, 0.017320032119750976, 0.01705743980407715, 0.016765024185180662, 0.016692480087280272, 0.016739519119262695, 0.016801504135131835, 0.016760671615600586, 0.0166810245513916, 0.016773216247558592, 0.016748544692993163, 0.017016576766967773, 0.017344768524169923, 0.016951295852661134, 0.016928768157958983, 0.016832063674926758, 0.01681171226501465, 0.016934783935546874, 0.016954496383666993, 0.01690188789367676, 0.017088064193725584, 0.01713190460205078, 0.01683990478515625, 0.016741216659545897, 0.01664204788208008, 0.016516319274902345, 0.016431903839111327, 0.016578527450561525, 0.016645023345947266, 0.016582847595214844, 0.016561983108520507, 0.01684889602661133, 0.016512672424316407, 0.016559616088867187, 0.016452287673950194, 0.01645756721496582, 0.01655958366394043, 0.016626527786254883, 0.016451583862304688, 0.016530815124511718, 0.016783872604370118, 0.016490623474121092, 0.016556032180786134, 0.01645952033996582, 0.016435136795043947, 0.016539520263671875, 0.016550336837768555, 0.016581888198852538, 0.016547935485839844, 0.016520095825195313, 0.01647385597229004, 0.016523103713989257, 0.016455743789672853, 0.016530784606933593, 0.01644825553894043, 0.016472063064575194, 0.016457727432250976, 0.01657241630554199, 0.01646905517578125, 0.01728595161437988, 0.016627840042114258, 0.016590560913085937, 0.016470176696777344, 0.016766176223754883, 0.0165948486328125, 0.016559104919433593, 0.016447488784790038, 0.016590719223022462, 0.016558271408081054, 0.01665017509460449, 0.016562175750732423, 0.016570367813110352, 0.016554079055786132, 0.01651702308654785, 0.0164454402923584, 0.016453632354736326, 0.016493791580200194, 0.01653430366516113, 0.016523263931274415, 0.016559232711791994, 0.0165382080078125, 0.016419103622436523, 0.016434656143188477, 0.016575008392333983, 0.01659903907775879, 0.016850944519042968, 0.01692576026916504, 0.016927391052246093, 0.01716409683227539, 0.017289695739746095, 0.017037120819091797, 0.017079456329345703, 0.016754400253295897, 0.016631200790405275, 0.01657494354248047, 0.016573856353759766, 0.01667363166809082, 0.0166759033203125, 0.016702272415161132, 0.01671548843383789, 0.016795936584472655, 0.016744447708129884, 0.01671891212463379, 0.01672492790222168, 0.01665433692932129, 0.016670719146728515, 0.016861183166503906, 0.016855039596557618, 0.01683660888671875, 0.01676288032531738, 0.016762432098388673, 0.01680019187927246, 0.01683251190185547, 0.01688800048828125, 0.016942047119140626, 0.01675734329223633, 0.017033472061157226, 0.016856639862060548, 0.016818239212036134, 0.016914815902709962, 0.017098623275756834, 0.016900224685668944, 0.01684480094909668, 0.01678745651245117, 0.016787391662597656, 0.016863296508789063, 0.01679155158996582, 0.01699839973449707, 0.016766752243041992, 0.01661302375793457, 0.016728639602661133, 0.016898048400878905, 0.016931039810180664, 0.01682377624511719, 0.017117504119873048, 0.016855039596557618, 0.016854944229125975, 0.016803936004638673, 0.016902143478393555, 0.016943103790283204, 0.01705369567871094, 0.016817983627319337, 0.016791872024536133, 0.016781183242797853, 0.016744447708129884, 0.016693248748779296, 0.01676892852783203, 0.01670751953125, 0.01673846435546875, 0.01676697540283203, 0.016703487396240235, 0.016596607208251953, 
0.016658176422119142, 0.01666227149963379, 0.01702230453491211, 0.0168799991607666, 0.01670172882080078, 0.01679555130004883, 0.016953407287597658, 0.016898080825805663, 0.016985952377319338, 0.016903392791748045, 0.016772031784057617, 0.016693248748779296, 0.01660927963256836, 0.0165928955078125, 0.016532512664794923, 0.016659423828125, 0.01661747169494629, 0.016900096893310547, 0.016664575576782227, 0.016639488220214844, 0.016574464797973632, 0.016961887359619142, 0.016873632431030273, 0.016631807327270508, 0.016727584838867188, 0.016798240661621094, 0.016813568115234375, 0.01678585624694824, 0.01679897689819336, 0.01671855926513672, 0.016750272750854493, 0.01678927993774414, 0.01684867286682129, 0.01686387252807617, 0.016807903289794923, 0.016863040924072266, 0.016690784454345704, 0.016626495361328125, 0.016631391525268553, 0.01653798484802246, 0.016498687744140626, 0.016488447189331054, 0.01646329689025879, 0.0164913272857666, 0.016444320678710937, 0.016633760452270507, 0.016689407348632813, 0.01655171203613281, 0.016547935485839844, 0.01662211227416992, 0.016566560745239257, 0.016674623489379883, 0.016979232788085937, 0.017308576583862305, 0.016637727737426757, 0.01656787109375, 0.016578943252563476, 0.016628000259399416, 0.016543807983398436, 0.016602880477905275, 0.01651321601867676, 0.016502784729003905, 0.016477888107299804, 0.01911020851135254, 0.017278175354003906, 0.016667776107788086, 0.016669536590576173, 0.01653334426879883, 0.01648044776916504, 0.016506879806518555, 0.016483999252319335, 0.0165382080078125, 0.016617216110229493, 0.016639999389648438, 0.01660723114013672, 0.01659903907775879, 0.016506879806518555, 0.01652115249633789, 0.01674025535583496, 0.01664620780944824, 0.01660323143005371, 0.01649446487426758, 0.016496768951416017, 0.01658233642578125, 0.016546112060546875, 0.01665023994445801, 0.016553216934204102, 0.016547584533691408, 0.016550943374633788, 0.01783724784851074, 0.017103616714477538, 0.0185262393951416, 0.016793600082397463, 0.016610912322998047, 0.016648607254028322, 0.01658060836791992, 0.016700576782226563, 0.016797664642333985, 0.016843648910522462, 0.01681203269958496, 0.016699392318725585, 0.016742399215698242, 0.016920576095581053, 0.01680384063720703, 0.01670966339111328, 0.016764543533325196, 0.016926752090454102, 0.017097024917602538, 0.017333343505859376, 0.016874399185180664, 0.01700864028930664, 0.016750175476074217, 0.01687183952331543, 0.016852447509765625, 0.01687606430053711, 0.01675596809387207, 0.01668118476867676, 0.016637760162353514, 0.016626399993896486, 0.016785408020019533, 0.017003807067871093, 0.01672643280029297, 0.01680732727050781, 0.016868255615234376, 0.016965375900268555, 0.016799936294555663, 0.016928831100463868, 0.01697996711730957, 0.016926752090454102, 0.016794143676757814, 0.01716806411743164, 0.017211103439331056, 0.017242303848266603, 0.01677676773071289, 0.01679657554626465, 0.01689948844909668, 0.01687196731567383, 0.016846847534179688, 0.016836063385009765, 0.016726335525512694, 0.016909664154052734, 0.016792512893676757, 0.016822208404541017, 0.016723775863647462, 0.016558271408081054, 0.016999679565429686, 0.01686809539794922, 0.01678745651245117, 0.01687481689453125, 0.016824287414550783, 0.01672435188293457, 0.01664601516723633, 0.016613439559936525, 0.016730144500732423, 0.01661929512023926, 0.016646751403808592, 0.016504480361938478, 0.01646134376525879, 0.01653446388244629, 0.01653708839416504, 0.016662912368774416, 0.016721920013427736, 0.016674783706665038, 0.016619455337524413, 
0.016899391174316405, 0.0174021110534668, 0.016900640487670898, 0.017296672821044922, 0.017191648483276367, 0.01679769515991211, 0.016891904830932617, 0.01687731170654297, 0.016957696914672853, 0.016941055297851563, 0.016852991104125976, 0.016649375915527342, 0.016585151672363282, 0.016784000396728515, 0.016728992462158202, 0.0168723201751709, 0.017110015869140623, 0.017101472854614257, 0.016959840774536133, 0.01683456039428711, 0.016924671173095703, 0.017020256042480468, 0.01702355194091797, 0.016892000198364256, 0.016748544692993163, 0.01686028861999512, 0.017437568664550783, 0.01700454330444336, 0.016893888473510744, 0.01685264015197754, 0.01695568084716797, 0.01704560089111328, 0.016857120513916017, 0.016803647994995115, 0.016771263122558593, 0.016724319458007814, 0.01665603256225586, 0.016633216857910155, 0.016680864334106444, 0.017133440017700195, 0.016792352676391602, 0.016750656127929687, 0.01661747169494629, 0.016672767639160157, 0.01676643180847168, 0.016740896224975585, 0.016767999649047852, 0.016731136322021483, 0.017012224197387696, 0.01703558349609375, 0.01695120048522949, 0.017004352569580078, 0.017011167526245118, 0.01684480094909668, 0.01672208023071289, 0.016693088531494142, 0.016574304580688478, 0.01651113510131836, 0.016521472930908204, 0.016649984359741212, 0.016590368270874022, 0.01681046485900879, 0.01650908851623535, 0.016848735809326172, 0.016594944000244142, 0.016547840118408205, 0.016518592834472656, 0.0165545597076416, 0.016637535095214845, 0.016559871673583984, 0.017420448303222657, 0.016605247497558595, 0.016597087860107423, 0.016499040603637695, 0.01647542381286621, 0.01646870422363281, 0.016441343307495117, 0.016597024917602538, 0.01649660873413086, 0.016488544464111327, 0.016578079223632813, 0.016577024459838868, 0.01661939239501953, 0.01639833641052246, 0.0163918399810791, 0.016468320846557617, 0.01642905616760254, 0.016451040267944337, 0.016513792037963868, 0.016477983474731447, 0.016530048370361327, 0.016579999923706054, 0.01673916816711426, 0.016836639404296874, 0.016770687103271485, 0.016836896896362304, 0.016791072845458985, 0.016822816848754883, 0.01683977508544922, 0.01680476760864258, 0.016692672729492188, 0.016716352462768556, 0.016789535522460937, 0.016601055145263673, 0.01658470344543457, 0.016422592163085937, 0.016568639755249023, 0.01692451286315918, 0.016436672210693358, 0.017430944442749022, 0.0164150390625, 0.016357152938842774, 0.016462047576904296, 0.0166748161315918, 0.0167587833404541, 0.01715814399719238, 0.01707827186584473, 0.017126848220825195, 0.017044031143188476, 0.017405120849609376, 0.017578784942626952, 0.01694108772277832, 0.01699839973449707, 0.016914560317993165, 0.016905567169189454, 0.016779808044433592, 0.016989343643188475, 0.017310560226440428, 0.016952352523803713, 0.016819168090820312, 0.016731487274169923, 0.016722591400146484, 0.016719104766845704, 0.016609727859497072, 0.01663724708557129, 0.016616191864013672, 0.01661516761779785, 0.01656268882751465, 0.01658880043029785, 0.016766656875610353, 0.016654560089111328, 0.016663904190063476, 0.016601856231689454, 0.016633087158203125, 0.016589183807373047, 0.01680175971984863, 0.018142656326293947, 0.018082239151000976, 0.01688425636291504, 0.01669113540649414, 0.016951072692871095, 0.016978336334228517, 0.016882816314697267]",tokens/s,59.57935250302398,, 
4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1583.558656,1596.915712,0.0,1201.668096,1189.151232,s,1,8.3514072265625,8.3514072265625,0.0,8.3514072265625,8.3514072265625,8.3514072265625,8.3514072265625,[8.3514072265625],,kWh,3.551230208750174e-05,3.909955984242548e-06,1.0968342108001738e-05,5.039060017974603e-05,,MB,1587.183616,1794.048,0.0,1384.12032,1351.367168,s,10,0.41280178833007813,0.041280178833007816,0.0002324199835202943,0.04122892761230469,0.04140379981994628,0.04166441097259521,0.041872899894714356,"[0.04192502212524414, 0.04125302505493164, 0.04117756652832031, 0.04118697738647461, 0.04127635192871094, 0.041204830169677735, 0.0410423698425293, 0.041086944580078125, 0.04130281448364258, 0.04134588623046875]",tokens/s,6201.523521387975,kWh,1.2442479582092324e-06,1.372182518467848e-07,8.22242974578723e-07,2.20370918463474e-06,tokens/kWh,116167778.30076134,MB,1592.111104,1835.99104,0.0,1426.06336,1407.548416,s,10,10.592636230468752,1.0592636230468753,0.004145041345814589,1.0582247924804689,1.0626305419921875,1.0657195678710938,1.0681907885742188,"[1.0590074462890624, 1.05448046875, 1.061748046875, 1.06880859375, 1.0547740478515626, 1.057442138671875, 1.061944091796875, 1.0614659423828126, 1.057037109375, 1.0559283447265626]",tokens/s,59.47527945761627,kWh,3.0436203563040093e-05,3.3566527589631704e-06,1.536185330602108e-05,4.915470962802435e-05,tokens/kWh,1281667.6260881033,,s,630,10.589003307342528,0.016807941757686556,0.00032476302278186965,0.016749680519104006,0.016978409004211426,0.017119202995300294,0.017825050716400153,"[0.017150848388671876, 0.017287168502807617, 0.016746496200561522, 0.016875520706176757, 0.01684217643737793, 0.016746112823486328, 0.01671673583984375, 0.01660313606262207, 0.016697216033935546, 0.01676710319519043, 0.016852991104125976, 0.01664963150024414, 0.016642656326293945, 0.016740352630615234, 0.01662761688232422, 0.01687126350402832, 0.016965856552124025, 0.016768224716186525, 0.0168437442779541, 0.01672707176208496, 0.01672025680541992, 0.016713823318481445, 0.01672540855407715, 0.01683888053894043, 0.01675132751464844, 0.016748544692993163, 0.016778783798217775, 0.016869855880737306, 0.016906240463256835, 0.016884895324707033, 0.016795967102050783, 0.016860864639282228, 0.01676153564453125, 0.016793760299682617, 0.01678745651245117, 0.016789344787597655, 0.017066144943237306, 0.017964927673339844, 0.016765056610107423, 0.01677644729614258, 0.01664896011352539, 0.016655359268188476, 0.0167490234375, 0.016868896484375, 0.016741439819335936, 0.016914304733276368, 0.01686124801635742, 0.017123327255249024, 0.016726015090942382, 0.016764928817749023, 0.016697343826293946, 0.016793535232543944, 0.01669536018371582, 0.016729663848876954, 0.016681407928466795, 0.016717119216918944, 0.016652095794677736, 0.016714624404907227, 0.01670528030395508, 0.01675699234008789, 0.01669468879699707, 
0.016810527801513674, 0.016659616470336914, 0.016749183654785157, 0.016668607711791992, 0.01669945526123047, 0.016674144744873047, 0.016882335662841797, 0.016611072540283205, 0.016762752532958985, 0.016673152923583984, 0.016633024215698244, 0.016607072830200194, 0.016788448333740234, 0.01660054397583008, 0.016798112869262697, 0.016689279556274413, 0.016690431594848634, 0.0166878719329834, 0.016842752456665038, 0.016728063583374024, 0.01683660888671875, 0.016707231521606445, 0.016718175888061522, 0.0168787841796875, 0.01697260856628418, 0.016783327102661134, 0.01677110481262207, 0.016748544692993163, 0.0167956485748291, 0.016639999389648438, 0.01669526481628418, 0.01668239974975586, 0.016749055862426757, 0.01680499267578125, 0.017029375076293946, 0.017089279174804687, 0.01679270362854004, 0.016719968795776367, 0.01685772705078125, 0.016692703247070312, 0.016610208511352538, 0.016727712631225584, 0.0165449275970459, 0.01688675117492676, 0.016852319717407225, 0.016724544525146483, 0.016691295623779297, 0.0166748161315918, 0.016713727951049806, 0.01661952018737793, 0.016750591278076172, 0.01667036819458008, 0.016617183685302735, 0.016656511306762694, 0.016629535675048827, 0.01650524711608887, 0.01678995132446289, 0.016786367416381835, 0.016861568450927733, 0.016788127899169922, 0.016697248458862304, 0.016739519119262695, 0.016595775604248048, 0.016594911575317384, 0.016773151397705077, 0.01666454315185547, 0.016793407440185548, 0.01705923271179199, 0.016935712814331056, 0.01679769515991211, 0.01662512016296387, 0.01689244842529297, 0.016794784545898438, 0.016609119415283202, 0.016607967376708985, 0.016785696029663087, 0.016838623046875, 0.016666591644287108, 0.01674799919128418, 0.01674710464477539, 0.016951295852661134, 0.017088512420654296, 0.01672563171386719, 0.016713216781616212, 0.01674025535583496, 0.016724960327148437, 0.01664780807495117, 0.01675712013244629, 0.016729663848876954, 0.01662816047668457, 0.016697343826293946, 0.016961536407470702, 0.016728063583374024, 0.016752639770507814, 0.01668057632446289, 0.016719968795776367, 0.016828447341918944, 0.01676860809326172, 0.016793695449829102, 0.016886335372924804, 0.016971967697143556, 0.017194911956787108, 0.01717033576965332, 0.016977088928222656, 0.01704140853881836, 0.0169268798828125, 0.016884479522705078, 0.016854944229125975, 0.016809120178222656, 0.016867647171020506, 0.016760671615600586, 0.01700524711608887, 0.017864704132080078, 0.017680383682250975, 0.017104032516479493, 0.017005407333374023, 0.01711916732788086, 0.01694918441772461, 0.01677939224243164, 0.016744447708129884, 0.01671507263183594, 0.01667910385131836, 0.01684940719604492, 0.016678911209106445, 0.01672985649108887, 0.01665795135498047, 0.016636640548706054, 0.016772224426269532, 0.016833536148071288, 0.016675840377807616, 0.016683008193969725, 0.016719871520996094, 0.01668819236755371, 0.016677824020385743, 0.016683008193969725, 0.016769023895263673, 0.016721920013427736, 0.016963584899902344, 0.017119232177734374, 0.016730112075805666, 0.01660416030883789, 0.0168089599609375, 0.016657888412475588, 0.016544288635253906, 0.0166014404296875, 0.016544511795043945, 0.01658358383178711, 0.016699199676513673, 0.01747577667236328, 0.020911552429199218, 0.021582239151000975, 0.01703651237487793, 0.016823232650756834, 0.016908031463623047, 0.016689407348632813, 0.016858272552490235, 0.016670751571655273, 0.016933439254760742, 0.016679168701171875, 0.016742399215698242, 0.016693248748779296, 0.01678745651245117, 0.016702592849731444, 0.0171276798248291, 
0.0171812801361084, 0.016777023315429688, 0.0167174072265625, 0.016748512268066406, 0.016695968627929686, 0.016877407073974608, 0.016871583938598632, 0.01666864013671875, 0.01665363121032715, 0.016890592575073242, 0.016731584548950195, 0.01673263931274414, 0.016752735137939453, 0.016708992004394532, 0.01670412826538086, 0.016852991104125976, 0.016746496200561522, 0.01667024040222168, 0.016656448364257812, 0.016756160736083985, 0.016673215866088865, 0.016828128814697266, 0.01674671936035156, 0.01690809631347656, 0.016742687225341796, 0.0191779842376709, 0.01718003273010254, 0.01734294319152832, 0.016876575469970703, 0.01684124755859375, 0.016887231826782225, 0.016709856033325195, 0.01688991928100586, 0.01694371223449707, 0.016648191452026367, 0.016611263275146483, 0.016643295288085936, 0.016622432708740233, 0.01663795280456543, 0.01666662406921387, 0.01683865547180176, 0.016602655410766602, 0.016743104934692384, 0.016647680282592774, 0.016760671615600586, 0.016683584213256837, 0.01685286331176758, 0.016611551284790037, 0.016763776779174806, 0.016698432922363282, 0.01693270492553711, 0.016570367813110352, 0.016774560928344725, 0.016681568145751953, 0.01665843200683594, 0.016583744049072265, 0.016813119888305663, 0.01696460723876953, 0.016740447998046876, 0.01677187156677246, 0.016670719146728515, 0.016655359268188476, 0.01660211181640625, 0.016645824432373047, 0.01678163146972656, 0.016603328704833983, 0.01672502326965332, 0.016577184677124022, 0.01676915168762207, 0.016704704284667967, 0.01694921684265137, 0.016802656173706056, 0.016879039764404295, 0.016646080017089844, 0.016617311477661132, 0.016681695938110353, 0.016738367080688477, 0.01660678482055664, 0.016839040756225585, 0.01662099266052246, 0.01686300849914551, 0.01670639991760254, 0.01682841682434082, 0.016660480499267577, 0.016683008193969725, 0.016717824935913086, 0.01679120063781738, 0.016662879943847655, 0.01677107238769531, 0.016732160568237304, 0.01710095977783203, 0.017058431625366213, 0.016781312942504883, 0.016774431228637695, 0.01671446418762207, 0.01663702392578125, 0.016760992050170898, 0.016763647079467772, 0.017102848052978514, 0.016842752456665038, 0.01677836799621582, 0.0168143367767334, 0.01681878471374512, 0.016817535400390625, 0.016865951538085937, 0.01686499214172363, 0.016711519241333007, 0.017183168411254883, 0.016670719146728515, 0.01690323257446289, 0.016747264862060546, 0.016810176849365234, 0.01676803207397461, 0.016712671279907228, 0.01697996711730957, 0.016957536697387695, 0.016762176513671876, 0.01680182456970215, 0.016696895599365234, 0.016747520446777343, 0.016631807327270508, 0.016710975646972655, 0.01673664093017578, 0.016744768142700196, 0.016693248748779296, 0.016648191452026367, 0.016622751235961915, 0.01667568016052246, 0.016692703247070312, 0.016910879135131836, 0.01663795280456543, 0.016627328872680664, 0.016654720306396486, 0.01676892852783203, 0.016601184844970703, 0.016717824935913086, 0.01657241630554199, 0.016764928817749023, 0.016726015090942382, 0.01682636833190918, 0.01667430305480957, 0.01674006462097168, 0.01675254440307617, 0.01664899253845215, 0.016706911087036133, 0.01673664093017578, 0.016615680694580078, 0.01696780776977539, 0.016760639190673828, 0.01669548797607422, 0.016828479766845702, 0.017229759216308593, 0.016897247314453124, 0.01677587127685547, 0.016596960067749023, 0.016689344406127928, 0.01670515251159668, 0.016693695068359375, 0.016680831909179687, 0.01669340705871582, 0.01664614486694336, 0.016777215957641603, 0.01682841682434082, 0.016846847534179688, 
0.016632863998413086, 0.016759456634521483, 0.016813535690307618, 0.016672767639160157, 0.016622432708740233, 0.01671548843383789, 0.01761065673828125, 0.016715263366699217, 0.016663103103637694, 0.0168656005859375, 0.0167521915435791, 0.016884159088134766, 0.016635967254638673, 0.01665836715698242, 0.016793600082397463, 0.016844287872314453, 0.01672038459777832, 0.016852991104125976, 0.016737823486328126, 0.01661952018737793, 0.01662828826904297, 0.016746463775634764, 0.01668835258483887, 0.017320415496826173, 0.016793855667114256, 0.016687103271484375, 0.01721958351135254, 0.01683443260192871, 0.0166843204498291, 0.016620384216308594, 0.01677516746520996, 0.016807903289794923, 0.016643392562866212, 0.016706079483032227, 0.016982175827026366, 0.016803871154785155, 0.016752639770507814, 0.016772928237915038, 0.016718015670776368, 0.0168571834564209, 0.017033119201660157, 0.017881088256835938, 0.018191904067993165, 0.0169783992767334, 0.01695692825317383, 0.016949535369873047, 0.016873695373535155, 0.016829504013061523, 0.016686016082763672, 0.01683033561706543, 0.017127552032470704, 0.017394943237304686, 0.017176448822021486, 0.017006528854370116, 0.01684294319152832, 0.01690006446838379, 0.01672400093078613, 0.0169998722076416, 0.016795423507690428, 0.016676927566528322, 0.016777952194213866, 0.016774784088134764, 0.01687923240661621, 0.016631776809692383, 0.017727968215942382, 0.01667308807373047, 0.01681203269958496, 0.017625087738037108, 0.01705366325378418, 0.016821823120117187, 0.016922592163085937, 0.016652351379394532, 0.01666502380371094, 0.016693248748779296, 0.016700864791870117, 0.016702016830444335, 0.01684889602661133, 0.017160192489624023, 0.01699190330505371, 0.016968032836914063, 0.017003711700439454, 0.016975744247436524, 0.017044416427612306, 0.017059743881225584, 0.016730207443237305, 0.01676233673095703, 0.016636447906494142, 0.016961088180541994, 0.016913888931274414, 0.016587232589721678, 0.016717727661132813, 0.016880224227905274, 0.01683456039428711, 0.01679974365234375, 0.016664575576782227, 0.016773311614990235, 0.01680899238586426, 0.016783456802368164, 0.016667327880859374, 0.016664575576782227, 0.01677926445007324, 0.016685056686401366, 0.016662687301635743, 0.016770591735839845, 0.01677343940734863, 0.016828191757202147, 0.016908607482910155, 0.017033119201660157, 0.016707584381103514, 0.016715776443481444, 0.016714048385620118, 0.01665158462524414, 0.01660713577270508, 0.016664768218994142, 0.017252191543579102, 0.01720966339111328, 0.016949247360229493, 0.01682841682434082, 0.016717824935913086, 0.01666819190979004, 0.016654367446899413, 0.016667072296142577, 0.016772159576416014, 0.01681056022644043, 0.016847232818603515, 0.016743488311767578, 0.016679040908813475, 0.017023807525634767, 0.01680588722229004, 0.016726015090942382, 0.01677107238769531, 0.016639135360717774, 0.016784223556518554, 0.016730112075805666, 0.01674425506591797, 0.016766176223754883, 0.01685321617126465, 0.01687424087524414, 0.01676288032531738, 0.016732160568237304, 0.0167956485748291, 0.01671513557434082, 0.016804479598999025, 0.016713247299194336, 0.01679408073425293, 0.016701440811157226, 0.016728063583374024, 0.016709632873535156, 0.01677107238769531, 0.01660723114013672, 0.016742399215698242, 0.016676864624023437, 0.016877567291259766, 0.016859136581420898, 0.016898048400878905, 0.016685056686401366, 0.01665433692932129, 0.01675382423400879, 0.016724191665649413, 0.01686140823364258, 0.016789920806884767, 0.016644096374511717, 0.016770143508911133, 0.016892831802368165, 
0.01674985694885254, 0.017081056594848633, 0.01682431983947754, 0.01664614486694336, 0.016633216857910155, 0.01665292739868164, 0.01663385581970215, 0.016573728561401366, 0.01682896041870117, 0.01662585639953613, 0.0169716796875, 0.016633567810058595, 0.016820512771606445, 0.01674665641784668, 0.016906240463256835, 0.016701440811157226, 0.01684000015258789, 0.016886463165283205, 0.01701478385925293, 0.016760831832885743, 0.016752639770507814, 0.016764928817749023, 0.017043455123901367, 0.016883712768554687, 0.01684480094909668, 0.016752511978149413, 0.01679372787475586, 0.01666646385192871, 0.016623008728027345, 0.01678976058959961, 0.016808448791503908, 0.01679769515991211, 0.016678911209106445, 0.01667635154724121, 0.0170644474029541, 0.016901567459106447, 0.01697849655151367, 0.016856992721557617, 0.01664771270751953, 0.016756767272949218, 0.01664259147644043, 0.016928096771240235, 0.01677788734436035, 0.01668611145019531, 0.016563167572021486, 0.016688671112060547, 0.016616992950439453, 0.01663702392578125, 0.01674950408935547, 0.016690080642700195, 0.016643552780151366, 0.01669174385070801, 0.01666662406921387, 0.01667433547973633, 0.016650880813598633, 0.01671561622619629, 0.016606239318847655, 0.016683584213256837, 0.016916223526000976, 0.01692972755432129, 0.016776512145996094, 0.01671824073791504, 0.01665433692932129, 0.016695295333862305, 0.016760831832885743, 0.016801599502563477, 0.016659744262695314, 0.016705535888671876, 0.01676585578918457, 0.016900096893310547, 0.016668031692504883, 0.016681215286254884, 0.016628095626831055, 0.01664204788208008, 0.01660259246826172, 0.016660192489624023, 0.016614208221435545]",tokens/s,59.49568450537277,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, 
in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 16.12 MiB is free. Process 21332 has 14.72 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 187.67 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,6765.789184,7762.542592,0.0,7367.294976,7351.94368,s,1,12.5058095703125,12.5058095703125,0.0,12.5058095703125,12.5058095703125,12.5058095703125,12.5058095703125,[12.5058095703125],,kWh,0.00015767173855417126,1.738523505041432e-05,5.054559599200134e-05,0.00022560256959658692,,MB,1635.319808,8404.271104,0.0,7994.343424,7863.794176,s,10,2.9980195617675776,0.29980195617675787,0.0004853329511504965,0.2998737182617187,0.3003250793457031,0.3003877288818359,0.3004378485107422,"[0.2996583557128906, 0.30008908081054686, 0.29907073974609377, 0.29927716064453125, 0.29923199462890626, 0.30045037841796873, 0.30015191650390627, 0.30027667236328126, 0.3003111572265625, 0.2995021057128906]",tokens/s,853.8970301083259,kWh,8.76525808872551e-06,9.666528461919464e-07,5.797406598705881e-06,1.5529317533623337e-05,tokens/kWh,16484948.514042618,MB,1641.824256,8676.900864,0.0,8266.973184,8120.408064,s,10,24.660447021484373,2.4660447021484373,0.0022546624102290007,2.4662266845703122,2.4684953125,2.4685719970703124,2.4686333447265625,"[2.465872314453125, 2.465596923828125, 2.4603427734375, 2.46468408203125, 2.467619873046875, 2.46687158203125, 2.468648681640625, 2.46575146484375, 2.4665810546875, 2.468478271484375]",tokens/s,25.546982155316936,kWh,7.217162492544173e-05,7.960560736861764e-06,4.812663654049395e-05,0.00012825882220279745,tokens/kWh,491194.2813601317,,s,630,24.654574352264408,0.03913424500359429,0.0003454978796092168,0.0391464958190918,0.03957495574951172,0.03966784496307373,0.03983591388702393,"[0.03908633422851562, 0.03847494506835938, 0.03833103942871094, 0.03829171371459961, 0.038526878356933594, 0.03899097442626953, 0.03889823913574219, 0.03885702514648438, 0.038631393432617185, 0.03855363082885742, 0.03917004776000976, 0.03863142395019531, 0.03844710540771484, 0.0385986557006836, 0.03871539306640625, 0.038866943359375, 0.03882393646240234, 0.038727680206298826, 0.03872972869873047, 0.03907379150390625, 0.03919772720336914, 0.03914236831665039, 0.03912483215332031, 0.03905142211914062, 0.03930521774291992, 0.039206912994384766, 0.039188255310058595, 0.03881596755981445, 0.038830078125, 0.0387665901184082, 0.039073055267333984, 0.03901513671875, 0.03908943939208984, 0.03895574569702148, 0.039374526977539064, 0.03924166488647461, 0.03908415985107422, 0.03921529769897461, 0.03938924789428711, 0.03960627365112305, 0.03947315216064453, 0.03955055999755859, 0.039526622772216795, 0.03944451141357422, 0.03933552169799805, 0.03915568161010742, 0.03917168045043945, 0.03952742385864258, 0.039373855590820316, 0.03941020965576172, 0.039637439727783205, 0.03946495819091797, 0.039247966766357424, 0.03930908966064453, 
0.03944979095458984, 0.03929980850219727, 0.03959215927124023, 0.03966918563842774, 0.03949539184570312, 0.039905982971191405, 0.039701793670654295, 0.03977865600585938, 0.03989763259887695, 0.03919830322265625, 0.03895702362060547, 0.038859455108642575, 0.03898137664794922, 0.03891225433349609, 0.03890291213989258, 0.038277568817138674, 0.03859632110595703, 0.03843468856811524, 0.03857705688476563, 0.03897951889038086, 0.03896054458618164, 0.038754913330078126, 0.03876249694824219, 0.03929644775390625, 0.039166526794433595, 0.03902668762207031, 0.038771743774414065, 0.03885564804077148, 0.038776832580566405, 0.038809600830078124, 0.038819839477539066, 0.03923747253417969, 0.039166175842285156, 0.03884806442260742, 0.03942643356323242, 0.03916156768798828, 0.03917238235473633, 0.03909836959838867, 0.03903039932250976, 0.039541118621826174, 0.03944160079956055, 0.03918316650390625, 0.03896319961547851, 0.0388218879699707, 0.03886899185180664, 0.03897100830078125, 0.039123329162597656, 0.039251518249511716, 0.039039615631103516, 0.03924332809448242, 0.039198974609375, 0.03910860824584961, 0.03946086502075195, 0.03938230514526367, 0.039402206420898436, 0.0392540168762207, 0.03923763275146484, 0.03910806274414062, 0.03962246322631836, 0.039778144836425784, 0.03965017700195313, 0.03947724914550781, 0.03935635375976562, 0.03935785675048828, 0.039365280151367185, 0.03947724914550781, 0.039370750427246096, 0.03933184051513672, 0.0393072624206543, 0.03922739028930664, 0.03951113510131836, 0.039443359375, 0.03890985488891602, 0.03852912139892578, 0.038545406341552735, 0.03855974578857422, 0.0385654411315918, 0.038521278381347654, 0.0384224967956543, 0.03846352005004883, 0.03845657730102539, 0.03877964782714844, 0.03866419219970703, 0.0385629768371582, 0.03856083297729492, 0.039030017852783205, 0.038930976867675784, 0.03887891387939453, 0.038844734191894534, 0.03880755233764648, 0.03870515060424805, 0.038578174591064454, 0.03853107070922852, 0.0388955192565918, 0.03902268981933594, 0.038834175109863284, 0.03908748626708984, 0.03920355224609375, 0.03910851287841797, 0.038946495056152344, 0.038963264465332034, 0.0388139533996582, 0.03936793518066406, 0.039242496490478514, 0.03906755065917969, 0.03894895935058594, 0.03896934509277344, 0.03911270523071289, 0.03892784118652344, 0.03896368026733398, 0.03908169555664062, 0.03919625473022461, 0.03929065704345703, 0.039297855377197266, 0.03925609588623047, 0.039250049591064456, 0.039257953643798825, 0.03962860870361328, 0.03947555160522461, 0.03938508987426758, 0.039413761138916016, 0.039452671051025394, 0.0394947509765625, 0.03954166412353516, 0.03939123153686523, 0.03936880111694336, 0.039245246887207035, 0.039252574920654294, 0.039474880218505856, 0.03960639953613281, 0.039446590423583984, 0.03951424026489258, 0.0395359992980957, 0.03946547317504883, 0.03937212753295898, 0.03901440048217773, 0.038747135162353515, 0.038492225646972654, 0.03894524765014649, 0.039061920166015625, 0.03886678314208984, 0.03855791854858399, 0.0385167350769043, 0.038553600311279294, 0.03877068710327149, 0.03877273559570313, 0.03873382568359375, 0.038836223602294925, 0.03881951904296875, 0.03884054565429688, 0.038757823944091795, 0.03915433502197266, 0.03899135971069336, 0.038992385864257816, 0.03890716934204102, 0.03875094223022461, 0.03882393646240234, 0.03887513732910156, 0.03874540710449219, 0.038957439422607425, 0.03901472091674805, 0.03894831848144531, 0.03887760162353516, 0.03935801696777344, 0.03922371292114258, 0.03917536163330078, 0.039203422546386715, 
0.03935475158691406, 0.03919257736206055, 0.03926012802124024, 0.039423103332519534, 0.039265182495117186, 0.03904307174682617, 0.03892019271850586, 0.03905036926269531, 0.039120960235595706, 0.039050048828125, 0.03911385726928711, 0.03934067153930664, 0.039272705078125, 0.039284320831298826, 0.0395145263671875, 0.03934400177001953, 0.03943027114868164, 0.039460159301757815, 0.03928543853759765, 0.03971859359741211, 0.03963935852050781, 0.039413761138916016, 0.03942102432250977, 0.039424671173095706, 0.039551231384277345, 0.039624385833740235, 0.039548961639404294, 0.03943164825439453, 0.03959276962280273, 0.0395142707824707, 0.03946684646606445, 0.03901033782958984, 0.03861577606201172, 0.03848732757568359, 0.038486560821533206, 0.038798656463623044, 0.03878579330444336, 0.03866195297241211, 0.0386761589050293, 0.038765182495117186, 0.03880956649780273, 0.03876457595825195, 0.03870684814453125, 0.03892870330810547, 0.038895294189453124, 0.03881350326538086, 0.03877737426757812, 0.038959102630615236, 0.038958942413330075, 0.03873603057861328, 0.03901792144775391, 0.03902521514892578, 0.0393994255065918, 0.039016448974609375, 0.039124385833740234, 0.03902912139892578, 0.0390043830871582, 0.038735870361328126, 0.038856254577636716, 0.03909062576293945, 0.03914956665039063, 0.03905945587158203, 0.0391734733581543, 0.03916422271728515, 0.039063583374023436, 0.039512382507324216, 0.03952640151977539, 0.03931545639038086, 0.0393359375, 0.03922518539428711, 0.03938524627685547, 0.03928617477416992, 0.03904982376098633, 0.03920896148681641, 0.03936275100708008, 0.03937849426269531, 0.03938451385498047, 0.039405536651611325, 0.039410526275634766, 0.03946022415161133, 0.03937139129638672, 0.039684097290039064, 0.03947520065307617, 0.03959807968139648, 0.03953664016723633, 0.03949158477783203, 0.03956252670288086, 0.03959676742553711, 0.03955279922485352, 0.03957753753662109, 0.0394666862487793, 0.03974614334106445, 0.03972463989257812, 0.0400654411315918, 0.03949135971069336, 0.03877068710327149, 0.03853952026367188, 0.03837952041625976, 0.03858790588378906, 0.03865983963012695, 0.03862579345703125, 0.038604225158691406, 0.038472511291503905, 0.03874816131591797, 0.03878448104858399, 0.03890230560302734, 0.03886489486694336, 0.03895296096801758, 0.03865727996826172, 0.03863808059692383, 0.038965503692626954, 0.03907583999633789, 0.038873153686523436, 0.03879740905761719, 0.03873484802246094, 0.039008960723876954, 0.03924595260620117, 0.03937283325195313, 0.039231487274169925, 0.03914342498779297, 0.0390041618347168, 0.03895500946044922, 0.0393809928894043, 0.03894076919555664, 0.03896105575561523, 0.03904512023925781, 0.03896115112304688, 0.03902668762207031, 0.03917824172973633, 0.039153568267822264, 0.03915935897827148, 0.0393240966796875, 0.039346271514892575, 0.039327743530273435, 0.039362560272216796, 0.039589088439941404, 0.039658432006835935, 0.03944755172729492, 0.03941462326049805, 0.039399265289306644, 0.039349536895751956, 0.03926713562011719, 0.03971263885498047, 0.03972713470458984, 0.039562751770019534, 0.039737888336181644, 0.0395470085144043, 0.039346176147460936, 0.03926425552368164, 0.03931545639038086, 0.039329792022705076, 0.03933184051513672, 0.03946086502075195, 0.039550975799560545, 0.039637054443359375, 0.0394936637878418, 0.03943734359741211, 0.038967262268066405, 0.03844409561157226, 0.038675422668457034, 0.038801406860351564, 0.03873750305175781, 0.03901603317260742, 0.03892102432250977, 0.03878908920288086, 0.03875433731079102, 0.038596607208251955, 0.038604286193847655, 
0.038631519317626956, 0.03864617538452148, 0.0388485107421875, 0.038917537689208984, 0.038741695404052735, 0.03872604751586914, 0.039135295867919924, 0.039258560180664065, 0.03897923278808594, 0.039035232543945315, 0.03896319961547851, 0.03898303985595703, 0.038824577331542966, 0.03889561462402344, 0.038828033447265625, 0.0389939193725586, 0.03902054214477539, 0.03918950271606445, 0.03943936157226562, 0.03926835250854492, 0.03915161514282227, 0.039210945129394534, 0.03958585739135742, 0.03938508987426758, 0.03918438339233398, 0.03915375900268555, 0.03917404937744141, 0.03946086502075195, 0.039241695404052736, 0.03900617599487305, 0.039110721588134764, 0.03909836959838867, 0.0390978889465332, 0.03927702331542969, 0.039354366302490236, 0.03925299072265625, 0.03979270553588867, 0.03973388671875, 0.03953190231323242, 0.039494590759277345, 0.03949552154541015, 0.03936819076538086, 0.03980579376220703, 0.039804737091064454, 0.039653377532958986, 0.03972259140014649, 0.03957564926147461, 0.03978604888916016, 0.03972787094116211, 0.03974092864990234, 0.03990784072875977, 0.0398287353515625, 0.038836223602294925, 0.03855769729614258, 0.03857920074462891, 0.03857926559448242, 0.03845929718017578, 0.03849628829956055, 0.038991870880126955, 0.03892633438110352, 0.038866943359375, 0.03908169555664062, 0.03920515060424805, 0.039008255004882815, 0.038729217529296874, 0.03857049560546875, 0.03867766571044922, 0.03879100799560547, 0.038832897186279296, 0.038840576171875, 0.03879100799560547, 0.03859881591796875, 0.03896115112304688, 0.038948863983154294, 0.038868896484375, 0.03919267272949219, 0.03923763275146484, 0.03917004776000976, 0.038967201232910156, 0.039038143157958984, 0.03898780822753906, 0.03888217544555664, 0.03883318328857422, 0.03905436706542969, 0.03907372665405273, 0.03896310424804687, 0.03932579040527344, 0.0394420166015625, 0.03948118209838867, 0.03928326416015625, 0.039316608428955076, 0.03961494445800781, 0.039597793579101564, 0.039527103424072264, 0.03927449417114258, 0.03925596618652344, 0.039209056854248046, 0.03916377639770508, 0.03912307357788086, 0.039180030822753904, 0.03932928085327148, 0.03935308837890625, 0.03930252838134766, 0.03923212814331055, 0.039378944396972655, 0.03937279891967774, 0.03932947158813477, 0.039522624969482424, 0.03953251266479492, 0.039507999420166015, 0.03958736038208008, 0.03966620635986328, 0.039851966857910155, 0.03983769607543945, 0.03962265777587891, 0.038776832580566405, 0.03861276626586914, 0.03865827178955078, 0.0387209587097168, 0.038591041564941406, 0.03849324798583984, 0.03844944000244141, 0.03865628814697265, 0.038711681365966796, 0.038587455749511716, 0.03879199981689453, 0.038801345825195316, 0.03873196792602539, 0.03877273559570313, 0.039106559753417966, 0.03906150436401367, 0.039016448974609375, 0.03899347305297852, 0.039239776611328124, 0.03908339309692383, 0.0388271369934082, 0.03894051361083985, 0.03888329696655273, 0.03881129455566406, 0.039161407470703125, 0.03909846496582031, 0.03899260711669922, 0.038997791290283204, 0.038964542388916015, 0.03905014419555664, 0.03933996963500976, 0.03930527877807617, 0.03916377639770508, 0.03909235382080078, 0.03899369430541992, 0.03901871871948242, 0.03913900756835938, 0.039113025665283206, 0.03903398513793945, 0.03906233596801758, 0.039465023040771485, 0.03954483032226563, 0.03953987121582031, 0.0396317138671875, 0.03934793472290039, 0.039366943359375, 0.03929817581176758, 0.039433216094970705, 0.039363872528076174, 0.03964364624023437, 0.03957487869262695, 0.03942211151123047, 
0.039420127868652344, 0.03935171127319336, 0.03979702377319336, 0.03962265777587891, 0.0396192626953125, 0.03957350540161133, 0.03949702453613281, 0.03953529739379883, 0.03953664016723633, 0.039593982696533206, 0.03954867172241211, 0.03917619323730469, 0.03869696044921875, 0.03838771057128906, 0.03904307174682617, 0.03878911972045898, 0.038899711608886715, 0.03872870254516601, 0.03873689651489258, 0.038735870361328126, 0.03878815841674805, 0.03876927947998047, 0.03884268951416016, 0.03889299011230469, 0.03895552062988281, 0.03900831985473633, 0.03900131225585937, 0.03912783813476563, 0.03897139358520508, 0.038860801696777345, 0.038793182373046874, 0.038846752166748044, 0.039161441802978515, 0.03918044662475586, 0.03889766311645508, 0.038876480102539065, 0.038769344329833984, 0.03886297607421875, 0.03880467224121094, 0.03898643112182617, 0.0389956169128418, 0.039036991119384766, 0.038803775787353514, 0.03935990524291992, 0.03929964828491211, 0.03949977493286133, 0.03919647979736328, 0.039184574127197266, 0.03951395034790039, 0.039591712951660155, 0.03950758361816406, 0.039524513244628905, 0.03971535873413086, 0.03946092987060547, 0.03930918502807617, 0.03919257736206055, 0.03938111877441406, 0.03942399978637695, 0.039376609802246096, 0.039385215759277344, 0.039333343505859375, 0.03920966339111328, 0.03952844619750977, 0.039495681762695314, 0.039472862243652346, 0.03946246337890625, 0.03936966323852539, 0.03958147048950195, 0.03954617691040039, 0.03978092956542969, 0.03971903991699219, 0.03956531143188476, 0.039831550598144534, 0.03990323257446289]",tokens/s,25.55306739425163,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File 
""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,8219.754496,11243.814912,0.0,10848.567296,10616.027648,s,1,14.2756416015625,14.2756416015625,0.0,14.2756416015625,14.2756416015625,14.2756416015625,14.2756416015625,[14.2756416015625],,kWh,0.00021442627039165626,2.3645609914202468e-05,6.747199842199736e-05,0.0003055438787278561,,MB,4062.838784,11665.342464,0.0,11255.414784,11070.470656,s,10,3.731231964111328,0.3731231964111328,0.0011575441375760197,0.3732413330078125,0.37406481323242186,0.3748029296875,0.37539342285156246,"[0.37119436645507814, 0.37189498901367185, 0.3733327331542969, 0.37314993286132814, 0.3737827453613281, 0.3720987243652344, 0.3734735412597656, 0.3728630981445312, 0.3739007873535156, 0.3755410461425781]",tokens/s,686.1004688594102,kWh,1.091028810925901e-05,1.2032047072948179e-06,7.232546938296628e-06,1.9346039754850457e-05,tokens/kWh,13232682.411696969,MB,4067.155968,11667.439616,0.0,11257.511936,11070.473216,s,10,28.777669921875,2.8777669921875004,0.004039830674810794,2.8787237548828126,2.8814861328125,2.8832177978515627,2.8846031298828128,"[2.87059619140625, 2.87385693359375, 2.873241455078125, 2.87652734375, 2.8801279296875, 2.878455810546875, 2.881101318359375, 2.87982177734375, 2.87899169921875, 2.884949462890625]",tokens/s,21.89197394056956,kWh,8.428800495532387e-05,9.29711071260248e-06,5.61396148705021e-05,0.0001497247305384285,tokens/kWh,420772.17152733734,,s,630,28.774752719879153,0.04567421066647485,0.0004152863056896368,0.04566843032836914,0.045996459197998045,0.04609201831817627,0.04765810729980469,"[0.0477325439453125, 0.04525689697265625, 0.04514815902709961, 0.045197311401367186, 0.04511334228515625, 0.04492697525024414, 0.04501619338989258, 0.04558732986450195, 0.04525875091552734, 0.04495974349975586, 0.045055999755859374, 0.04554499053955078, 0.04532681655883789, 0.04513587188720703, 0.04541219329833984, 0.045509822845458986, 0.045292510986328124, 0.04523622512817383, 0.045270687103271486, 0.04557830429077148, 0.045416736602783205, 0.0454901123046875, 0.045709217071533206, 0.04562335968017578, 0.04540985488891602, 0.04540060806274414, 0.04520959854125976, 0.04558848190307617, 0.04547375869750977, 0.04528047943115234, 0.04534764862060547, 0.04553062438964844, 0.045482494354248046, 0.04548825454711914, 0.04541977691650391, 0.04564035034179687, 0.04565923309326172, 0.045625438690185545, 0.045778976440429685, 0.045738529205322266, 0.045666526794433594, 0.045951038360595706, 0.045868545532226565, 0.045916606903076175, 0.045668319702148436, 0.04589324951171875, 0.04563190460205078, 0.04556729507446289, 0.04560351943969727, 0.045568000793457034, 0.04591820907592774, 0.04558028793334961, 0.04563763046264648, 0.04591408157348633, 0.04581584167480469, 0.0456190071105957, 0.04587948989868164, 0.04596332931518555, 0.04575839996337891, 0.04563558578491211, 0.04558848190307617, 0.04590796661376953, 0.04584175872802734, 0.0480522575378418, 0.045537246704101565, 0.044889888763427734, 0.04483913421630859, 0.0452006721496582, 0.04505059051513672, 0.04500889587402344, 0.04507209777832031, 0.04545264053344727, 0.04529446411132813, 0.045168704986572265, 0.04543283081054687, 0.04544688034057617, 0.04538531112670898, 0.04545753479003906, 0.04558905410766602, 0.04584774398803711, 0.04581232070922851, 0.045314273834228515, 0.045292766571044925, 0.04594972610473633, 0.04574316787719727, 0.04578345489501953, 0.045566497802734376, 0.045501983642578125, 0.045287296295166014, 0.0453125114440918, 0.04537558364868164, 0.045635551452636716, 0.04546563339233398, 
0.04535500717163086, 0.04526079940795898, 0.045520896911621096, 0.0455530891418457, 0.04543094253540039, 0.04569494247436524, 0.045801025390625, 0.04575651168823242, 0.04574492645263672, 0.04566220855712891, 0.045889022827148435, 0.046002689361572265, 0.04598191833496094, 0.04584220886230469, 0.04592156982421875, 0.045515487670898434, 0.04579315185546875, 0.04564534378051758, 0.045736000061035155, 0.04557244873046875, 0.04574982452392578, 0.04575689697265625, 0.04566851043701172, 0.045471969604492186, 0.04548992156982422, 0.04567862319946289, 0.04580352020263672, 0.0458526725769043, 0.046241119384765626, 0.045920639038085936, 0.045777183532714844, 0.04561414337158203, 0.04609868621826172, 0.04836604690551758, 0.04594255828857422, 0.04524486541748047, 0.04480924987792969, 0.04500783920288086, 0.045197311401367186, 0.04501913452148437, 0.045072383880615234, 0.04503504180908203, 0.045009471893310546, 0.04537334442138672, 0.04514815902709961, 0.045139198303222654, 0.04550457763671875, 0.04567315292358398, 0.04530944061279297, 0.04547430419921875, 0.04551472091674805, 0.04539779281616211, 0.04534435272216797, 0.04559939193725586, 0.04592025756835937, 0.04605747222900391, 0.0456703987121582, 0.04545238494873047, 0.0455115852355957, 0.045313919067382816, 0.04506841659545899, 0.04528537750244141, 0.045445121765136716, 0.04532428741455078, 0.04533033752441406, 0.04536323165893555, 0.04554153442382813, 0.04559862518310547, 0.04551475143432617, 0.04588899230957031, 0.04593699264526367, 0.04588972854614258, 0.045453311920166016, 0.045742080688476565, 0.0461475830078125, 0.04592435073852539, 0.046134590148925785, 0.04595167922973633, 0.045864959716796876, 0.045638912200927736, 0.04581657409667969, 0.04562124633789062, 0.04546355056762695, 0.04545705413818359, 0.045580608367919925, 0.04576204681396484, 0.0456110725402832, 0.04556604766845703, 0.04591811370849609, 0.04564015960693359, 0.04564275360107422, 0.0459048957824707, 0.04627251052856445, 0.04580108642578125, 0.04570150375366211, 0.04603084945678711, 0.04742211151123047, 0.045461505889892576, 0.04511334228515625, 0.04483686447143555, 0.044886016845703126, 0.045156097412109374, 0.04538803100585938, 0.045218910217285156, 0.04511222457885742, 0.04541439819335937, 0.045700992584228516, 0.04534489440917969, 0.04515020751953125, 0.04522335815429687, 0.04556240081787109, 0.04565200042724609, 0.045754432678222656, 0.04584236907958984, 0.045699073791503904, 0.0457105598449707, 0.04594150543212891, 0.045760223388671875, 0.045559585571289064, 0.045343265533447266, 0.04520345687866211, 0.045379585266113284, 0.04530995178222656, 0.04530995178222656, 0.045442497253417966, 0.04568300628662109, 0.04565769577026367, 0.045684993743896486, 0.04582646560668945, 0.04578003311157226, 0.04563040161132813, 0.04567244720458984, 0.04595817565917969, 0.04600931167602539, 0.04583187103271484, 0.045682079315185545, 0.04593936157226562, 0.04602249526977539, 0.04592262268066406, 0.045826145172119144, 0.04565606307983398, 0.04550764846801758, 0.04552201461791992, 0.045547359466552736, 0.04579328155517578, 0.045690879821777344, 0.045547489166259766, 0.04581788635253906, 0.04597715377807617, 0.04570111846923828, 0.04568313598632812, 0.04591180801391601, 0.04599123382568359, 0.0457081298828125, 0.04592854309082031, 0.04598332977294922, 0.04605788803100586, 0.046047233581542966, 0.046137344360351565, 0.04845094299316406, 0.04585327911376953, 0.04522208023071289, 0.04488816070556641, 0.04491872024536133, 0.0451541748046875, 0.04512294387817383, 0.04530044937133789, 
0.04519289779663086, 0.04542828750610352, 0.045628223419189456, 0.04560844802856445, 0.045443550109863284, 0.045625343322753906, 0.04559667205810547, 0.045446750640869144, 0.04576707077026367, 0.04563353729248047, 0.04559667205810547, 0.04542252731323242, 0.045674560546875, 0.045932350158691404, 0.04574227142333984, 0.04533657455444336, 0.04531520080566406, 0.04557699203491211, 0.045537376403808595, 0.0453570556640625, 0.045547359466552736, 0.04550876617431641, 0.045784351348876956, 0.04563017654418945, 0.045774848937988284, 0.04591129684448242, 0.04579983901977539, 0.045840736389160155, 0.04594883346557617, 0.04575609588623047, 0.045605281829833984, 0.04577254486083984, 0.045964672088623044, 0.04593907165527344, 0.04591606521606445, 0.04585276794433594, 0.045752254486083985, 0.04567510223388672, 0.04575129699707031, 0.04576764678955078, 0.04569494247436524, 0.04578003311157226, 0.0459186897277832, 0.04595267105102539, 0.0457957763671875, 0.04606556701660156, 0.04571184158325195, 0.0458045768737793, 0.04575065612792969, 0.04590655899047852, 0.04616387176513672, 0.04593638229370117, 0.0457743034362793, 0.04608918380737305, 0.046206878662109374, 0.04830179214477539, 0.04582428741455078, 0.045090816497802735, 0.045006847381591795, 0.045225311279296875, 0.04516476821899414, 0.04521315383911133, 0.04525564956665039, 0.045561569213867184, 0.04551046371459961, 0.045335006713867185, 0.04528486251831055, 0.04526515197753906, 0.04534636688232422, 0.04546630477905273, 0.04525056076049805, 0.046088191986083986, 0.04558848190307617, 0.04563132858276367, 0.04533036804199219, 0.04567062377929688, 0.04603875350952148, 0.04568681716918945, 0.04587545776367188, 0.04551891326904297, 0.04535433578491211, 0.04540060806274414, 0.04529568099975586, 0.04561641693115234, 0.04542867279052734, 0.04541708755493164, 0.04569664001464844, 0.045711296081542965, 0.0457344970703125, 0.04571750259399414, 0.045614974975585934, 0.04582320022583008, 0.04569295883178711, 0.04561337661743164, 0.04579385757446289, 0.04595465469360351, 0.045942943572998045, 0.04606351852416992, 0.045950431823730466, 0.04605759811401367, 0.04566502380371094, 0.045767807006835935, 0.045638526916503906, 0.04577648162841797, 0.04562505722045898, 0.04581356811523438, 0.045908863067626954, 0.0458342399597168, 0.04563497543334961, 0.04588140869140625, 0.04582620620727539, 0.04574860763549805, 0.045851905822753905, 0.04605763244628906, 0.04595315170288086, 0.04599827194213867, 0.04583251190185547, 0.04596246337890625, 0.04879795074462891, 0.04600428771972656, 0.04512550354003906, 0.04509254455566406, 0.04501542282104492, 0.04497593688964844, 0.04499110412597656, 0.045049823760986325, 0.04576668930053711, 0.04559798431396484, 0.04549296188354492, 0.04565737533569336, 0.04549043273925781, 0.04529558563232422, 0.04520505523681641, 0.04546636962890625, 0.04568252944946289, 0.04557612609863281, 0.04537184143066406, 0.04535228729248047, 0.04593231964111328, 0.045920639038085936, 0.04594736099243164, 0.04581292724609375, 0.045537120819091795, 0.0454128303527832, 0.04554598236083984, 0.045440799713134764, 0.0455, 0.04542652893066406, 0.04557852935791016, 0.04563772964477539, 0.04575068664550781, 0.04573788833618164, 0.045623294830322264, 0.045782432556152344, 0.045800128936767576, 0.04552054214477539, 0.046077919006347653, 0.045857311248779294, 0.04587299346923828, 0.04571088027954102, 0.04618492889404297, 0.04592355346679688, 0.04586511993408203, 0.045763198852539065, 0.04573183822631836, 0.04594217681884766, 0.04583046340942383, 0.04587747192382813, 
0.045684799194335934, 0.04566835021972656, 0.04595097732543945, 0.046134815216064454, 0.04593916702270508, 0.04589158248901367, 0.04589363098144531, 0.04592639923095703, 0.04571446228027344, 0.04584684753417969, 0.046262943267822265, 0.046165729522705076, 0.04615536117553711, 0.047457313537597655, 0.04550124740600586, 0.0452402229309082, 0.045407806396484375, 0.04519379043579102, 0.045115520477294925, 0.045281024932861326, 0.04544307327270508, 0.04547343826293945, 0.04537200164794922, 0.04547993469238281, 0.04560688018798828, 0.04551068878173828, 0.04521984100341797, 0.04549836730957031, 0.045708576202392576, 0.04555388641357422, 0.04559920120239258, 0.04552092742919922, 0.045676544189453126, 0.04592355346679688, 0.045533824920654296, 0.04554975891113281, 0.04575379180908203, 0.04573443222045898, 0.04558607864379883, 0.045609310150146486, 0.04559836959838867, 0.04544944000244141, 0.045647457122802736, 0.04573443222045898, 0.045764606475830076, 0.0457891845703125, 0.04564787292480469, 0.04560076904296875, 0.04581990432739258, 0.04554342269897461, 0.045913150787353516, 0.045790145874023434, 0.045856769561767576, 0.04604524612426758, 0.04593388748168945, 0.045723552703857424, 0.04562931060791016, 0.04605177688598633, 0.0460681266784668, 0.04580966567993164, 0.045551616668701174, 0.0457523193359375, 0.045864959716796876, 0.04571136093139649, 0.04576169586181641, 0.04593340682983398, 0.04579452896118164, 0.0457053108215332, 0.04580767822265625, 0.046029441833496096, 0.04604927825927734, 0.045795360565185544, 0.045995903015136716, 0.04600636672973633, 0.04589977645874024, 0.045939743041992186, 0.04760575866699219, 0.045616191864013673, 0.04523619079589844, 0.0449705924987793, 0.04478188705444336, 0.045047840118408206, 0.0452525749206543, 0.04536463928222656, 0.0452960319519043, 0.04542819213867187, 0.04552169418334961, 0.04553932952880859, 0.045192798614501956, 0.045467456817626956, 0.04576537704467774, 0.04562723159790039, 0.04550857543945312, 0.04561318588256836, 0.04573174285888672, 0.04584447860717773, 0.045682689666748044, 0.046044960021972656, 0.04591843032836914, 0.04558438491821289, 0.045363201141357425, 0.04560486221313476, 0.04559667205810547, 0.04516454315185547, 0.04539311981201172, 0.045478687286376954, 0.04557136154174805, 0.04557440185546875, 0.045644256591796876, 0.04552908706665039, 0.045897727966308595, 0.04566220855712891, 0.04569091033935547, 0.046047199249267576, 0.0457891845703125, 0.04555571365356445, 0.045780990600585936, 0.045946750640869144, 0.04598723220825195, 0.04613808059692383, 0.04597132873535156, 0.045967071533203126, 0.04598748779296875, 0.0458493423461914, 0.045676544189453126, 0.04586700820922852, 0.04562124633789062, 0.04579663848876953, 0.04565884780883789, 0.04564582443237305, 0.04571955108642578, 0.045972606658935544, 0.04611065673828125, 0.04594537734985352, 0.04597983932495117, 0.04610054397583008, 0.045961376190185546, 0.0457154541015625, 0.046063617706298826, 0.04767948913574219, 0.045876480102539065, 0.04532275390625, 0.04503308868408203, 0.04509302520751953, 0.04543503952026367, 0.04541676712036133, 0.04523782348632813, 0.04544732666015625, 0.04568707275390625, 0.04567244720458984, 0.04557619094848633, 0.04550451278686524, 0.04534864044189453, 0.045813407897949215, 0.045732414245605466, 0.046094337463378904, 0.045757854461669925, 0.04581232070922851, 0.045674495697021485, 0.046034015655517575, 0.04604169464111328, 0.04577312088012695, 0.045631103515625, 0.04540454483032227, 0.04566835021972656, 0.045588096618652346, 0.04561753463745117, 
0.04551459121704102, 0.045805824279785155, 0.04539587020874023, 0.04555939102172851, 0.04574044799804688, 0.04571263885498047, 0.04553305435180664, 0.045728641510009764, 0.04589494323730469, 0.045927040100097655, 0.04574627304077149, 0.0461475830078125, 0.04589683151245117, 0.04599625778198242, 0.045949600219726563, 0.04602880096435547, 0.04594233703613281, 0.045841121673583986, 0.045851905822753905, 0.04606000137329101, 0.04585827255249023, 0.045607295989990235, 0.045905406951904294, 0.045970081329345706, 0.04572979354858398, 0.04597350311279297, 0.04614924621582031, 0.04588787078857422, 0.04601651382446289, 0.04595302581787109, 0.04627046585083008, 0.04598524856567383, 0.045792896270751955, 0.04625065612792969, 0.04609868621826172]",tokens/s,21.894193362251276,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = 
DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated 
process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) 
File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,4975.235072,7338.917888,0.0,6943.670272,6539.1744,s,1,11.454900390625,11.454900390625,0.0,11.454900390625,11.454900390625,11.454900390625,11.454900390625,[11.454900390625],,kWh,0.00013009809213333862,1.4343599980473967e-05,4.150808876199341e-05,0.000185949780875806,,MB,5064.54016,7634.61632,0.0,7224.68864,6917.39904,s,10,1.997946273803711,0.1997946273803711,0.00045300925581402235,0.19987035369873046,0.20022368927001952,0.20027427444458007,0.20031474258422852,"[0.19856562805175781, 0.1997248992919922, 0.19992588806152345, 0.20032485961914062, 0.19982698059082032, 0.19996701049804688, 0.19991372680664063, 0.19982464599609376, 0.19966018676757813, 0.20021244812011718]",tokens/s,1281.3157358462124,kWh,5.8498406213332145e-06,6.45020581985509e-07,3.872714209280092e-06,1.0367575412598817e-05,tokens/kWh,24692369.219606094,MB,5068.857344,7636.713472,0.0,7226.785792,6917.4016,s,10,18.880694335937502,1.8880694335937502,0.010499768530445765,1.8859650268554686,1.8976787353515625,1.9055944946289063,1.9119271020507813,"[1.91351025390625, 1.895919677734375, 1.8854495849609374, 1.89361572265625, 1.88930224609375, 1.88648046875, 1.8767947998046874, 1.880902099609375, 1.876064697265625, 1.88265478515625]",tokens/s,33.36741693873294,kWh,5.5153869485750264e-05,6.083324457757696e-06,3.659748483351961e-05,9.783467877702756e-05,tokens/kWh,643943.4440581304,,s,630,18.878338212966945,0.0299656162110586,0.0006856709627015122,0.02980006408691406,0.030400528717041016,0.030849382019042967,0.03327409896850587,"[0.030861824035644532, 0.030451583862304687, 0.03032486343383789, 0.03016703987121582, 0.029781984329223632, 0.030013471603393554, 0.03058278465270996, 0.03886899185180664, 0.031029247283935548, 0.030678079605102538, 0.030370752334594728, 0.030089216232299806, 0.030066015243530274, 0.034293663024902346, 0.03078390312194824, 0.030098880767822266, 0.03010243225097656, 0.029872127532958984, 0.029851839065551757, 0.029764896392822267, 0.029610496520996094, 0.029790239334106447, 0.02987775993347168, 0.03008799934387207, 0.030015167236328126, 0.02981888008117676, 0.029759328842163087, 0.03001692771911621, 0.029969120025634767, 0.03016435241699219, 0.029794975280761717, 0.02984979248046875, 0.030721023559570314, 0.02986191940307617, 0.03232438278198242, 0.03017081642150879, 0.030112064361572266, 0.029941728591918945, 0.030117919921875, 0.03122208023071289, 0.030279359817504882, 0.03154051208496094, 0.030308448791503906, 0.029794944763183593, 0.02997657585144043, 0.02969379234313965, 0.029880479812622072, 0.033753089904785157, 0.02998271942138672, 0.029650367736816407, 0.029880895614624023, 0.029817951202392577, 0.02981340789794922, 0.029634880065917968, 0.029757343292236327, 0.029998464584350584, 0.029837055206298826, 0.02992630386352539, 0.029756511688232422, 0.02974604797363281, 0.029570335388183593, 0.029770656585693358, 0.029640544891357423, 0.03107459259033203, 0.030399520874023436, 0.03091760063171387, 0.02980988883972168, 0.029723392486572266, 0.029596704483032227, 0.02965116882324219, 0.029739999771118165, 0.03291091156005859, 0.03019139289855957, 0.029810239791870117, 0.029913631439208985, 0.03311040115356445, 0.02971238327026367, 0.029869279861450194, 0.02955913543701172, 0.029892416000366212, 0.02954876708984375, 0.029661600112915038, 0.029618175506591796, 0.02976950454711914, 0.029865663528442384, 0.029700639724731446, 0.029871103286743163, 0.02974208068847656, 0.029655040740966795, 0.029841407775878907, 0.030093311309814453, 
0.02967884826660156, 0.029558847427368164, 0.03000595283508301, 0.030541215896606445, 0.029909280776977538, 0.02993548774719238, 0.030134719848632814, 0.030530912399291992, 0.030296735763549805, 0.02987615966796875, 0.029996448516845704, 0.029931968688964843, 0.02979862403869629, 0.030011199951171876, 0.029849599838256836, 0.029860031127929686, 0.029715679168701173, 0.029702943801879884, 0.033931262969970705, 0.02998886489868164, 0.030223487854003906, 0.029889408111572265, 0.029767679214477538, 0.02973695945739746, 0.029521024703979493, 0.029892608642578124, 0.029834112167358397, 0.029870080947875976, 0.029844863891601563, 0.030227071762084962, 0.030070783615112305, 0.029855743408203125, 0.029708032608032228, 0.030226015090942384, 0.03050294494628906, 0.031162271499633788, 0.030193920135498046, 0.03017545509338379, 0.030168703079223633, 0.029969184875488282, 0.029743104934692382, 0.02998476791381836, 0.029804447174072265, 0.029849695205688476, 0.029768735885620116, 0.029815776824951172, 0.0343633918762207, 0.03083417510986328, 0.02981235122680664, 0.02983526420593262, 0.02956540870666504, 0.029684127807617186, 0.029696128845214845, 0.02968947219848633, 0.030079231262207032, 0.029884416580200194, 0.029878271102905272, 0.029851648330688478, 0.02975334358215332, 0.029784063339233398, 0.029572511672973634, 0.029631071090698242, 0.02960588836669922, 0.02954444885253906, 0.029542400360107423, 0.029792255401611328, 0.029613344192504883, 0.03027836799621582, 0.03022233581542969, 0.02967897605895996, 0.029706880569458007, 0.029658271789550782, 0.029598175048828126, 0.029616256713867188, 0.029827327728271485, 0.029617887496948242, 0.029535808563232423, 0.029548799514770508, 0.02951603126525879, 0.02972694396972656, 0.03334096145629883, 0.02995350456237793, 0.029604768753051756, 0.030033184051513673, 0.029591808319091795, 0.02947539138793945, 0.029466495513916016, 0.0295644474029541, 0.029655008316040038, 0.029743520736694336, 0.029673696517944336, 0.029756799697875976, 0.02965328025817871, 0.029485408782958984, 0.02955264091491699, 0.029792255401611328, 0.029865407943725587, 0.029841983795166015, 0.03007619285583496, 0.02976963233947754, 0.029856607437133788, 0.02969094467163086, 0.02988729667663574, 0.029671552658081055, 0.029609983444213867, 0.02981888008117676, 0.031327903747558596, 0.030644575119018556, 0.034277023315429686, 0.030075231552124024, 0.02991923141479492, 0.029566976547241212, 0.02943791961669922, 0.029585439682006835, 0.029493247985839844, 0.029629919052124025, 0.029825504302978516, 0.029897056579589843, 0.029812448501586913, 0.029853599548339844, 0.02964080047607422, 0.029569055557250975, 0.029906143188476564, 0.03101568031311035, 0.029775871276855468, 0.0297574405670166, 0.029839359283447265, 0.029627391815185547, 0.03092572784423828, 0.03095350456237793, 0.03012118339538574, 0.029922143936157226, 0.030963903427124025, 0.030211904525756835, 0.029953088760375977, 0.029922239303588866, 0.030031360626220704, 0.029956672668457033, 0.0299085750579834, 0.02974959945678711, 0.029689855575561523, 0.02974515151977539, 0.03235430526733398, 0.030428192138671876, 0.029799392700195312, 0.030703296661376955, 0.029643072128295898, 0.02978803253173828, 0.02970035171508789, 0.02979414367675781, 0.029613920211791992, 0.02976582336425781, 0.029859872817993165, 0.029738975524902345, 0.029631872177124024, 0.029931264877319334, 0.029992000579833984, 0.030194879531860352, 0.029938304901123047, 0.02973695945739746, 0.029853696823120116, 0.030394367218017578, 0.02985318374633789, 0.029823007583618163, 
0.02994223976135254, 0.029724672317504884, 0.029511648178100584, 0.029576288223266602, 0.029627328872680665, 0.02959974479675293, 0.029822240829467772, 0.02993020820617676, 0.029664800643920897, 0.02965305519104004, 0.029622400283813476, 0.029737247467041015, 0.029503488540649415, 0.029542400360107423, 0.029636480331420897, 0.029525856018066406, 0.029800735473632812, 0.029732223510742187, 0.03025391960144043, 0.029535135269165038, 0.029772672653198242, 0.02998886489868164, 0.03010083198547363, 0.03015747261047363, 0.031243392944335938, 0.03040959930419922, 0.030328832626342773, 0.029898752212524415, 0.03006163215637207, 0.02974611282348633, 0.030076927185058593, 0.030013439178466796, 0.029829120635986327, 0.029829120635986327, 0.030121503829956056, 0.029913536071777345, 0.029750656127929688, 0.029593759536743165, 0.029693887710571288, 0.0297825927734375, 0.029882368087768556, 0.030492671966552733, 0.030234016418457032, 0.030144672393798828, 0.030091712951660157, 0.02996214485168457, 0.029911136627197264, 0.0302336311340332, 0.030188127517700194, 0.02986969566345215, 0.02985241508483887, 0.030424224853515626, 0.030243679046630858, 0.029916799545288086, 0.02989299201965332, 0.030373695373535157, 0.030562496185302733, 0.03095369529724121, 0.030701568603515625, 0.03076483154296875, 0.03082035255432129, 0.030614912033081056, 0.030304672241210938, 0.03002934455871582, 0.029854368209838868, 0.029539648056030272, 0.02958515167236328, 0.029700639724731446, 0.02954662322998047, 0.02987654495239258, 0.029702144622802733, 0.02959987258911133, 0.029671295166015625, 0.0299683837890625, 0.02959929656982422, 0.029654527664184572, 0.0296964168548584, 0.031095327377319334, 0.030271263122558595, 0.031910112380981445, 0.030261247634887696, 0.030036127090454102, 0.029941247940063476, 0.030393888473510742, 0.03092134475708008, 0.030120128631591796, 0.030057567596435547, 0.03017184066772461, 0.029761760711669923, 0.030002208709716798, 0.029715423583984376, 0.02975948715209961, 0.029734432220458986, 0.02963484764099121, 0.029747360229492186, 0.030574623107910155, 0.029693952560424806, 0.029918943405151367, 0.029480960845947264, 0.0297161922454834, 0.029831743240356444, 0.029468896865844727, 0.030084543228149414, 0.029981311798095704, 0.029849023818969728, 0.029740928649902344, 0.02959401512145996, 0.02961577606201172, 0.029704160690307617, 0.029751487731933594, 0.0298272647857666, 0.029915136337280275, 0.02960383987426758, 0.029661216735839845, 0.02962633514404297, 0.02981216049194336, 0.029889087677001953, 0.030676448822021484, 0.03002627182006836, 0.029738304138183593, 0.02975174331665039, 0.02970035171508789, 0.02970992088317871, 0.03064028739929199, 0.03038719940185547, 0.02999135971069336, 0.029710655212402345, 0.02980784034729004, 0.029792224884033203, 0.029666208267211915, 0.02965648078918457, 0.029997440338134767, 0.029734752655029298, 0.029701791763305663, 0.029807231903076173, 0.029554176330566406, 0.029600351333618165, 0.02975334358215332, 0.029700096130371095, 0.02975276756286621, 0.029678144454956056, 0.029642751693725586, 0.029742271423339843, 0.029735744476318358, 0.030286880493164064, 0.0298505916595459, 0.029574560165405273, 0.029616767883300782, 0.03035887908935547, 0.029605791091918944, 0.029938016891479492, 0.029828575134277342, 0.029815647125244142, 0.029597759246826172, 0.029798559188842774, 0.02957910346984863, 0.029480991363525392, 0.029652927398681642, 0.029746912002563478, 0.029675519943237305, 0.0295053768157959, 0.029608160018920898, 0.02957855987548828, 0.02954745674133301, 
0.029741056442260744, 0.030066688537597655, 0.02995315170288086, 0.02996940803527832, 0.02978803253173828, 0.029679616928100585, 0.0296092472076416, 0.029754079818725587, 0.029857791900634766, 0.02958131217956543, 0.02971772766113281, 0.029737695693969727, 0.030076608657836915, 0.030384511947631837, 0.030074880599975585, 0.02997657585144043, 0.029751424789428712, 0.029624191284179688, 0.02958892822265625, 0.029524511337280273, 0.029663263320922853, 0.029749216079711913, 0.030654464721679688, 0.03000934410095215, 0.029889888763427734, 0.029778079986572267, 0.02967398452758789, 0.029519840240478514, 0.029576223373413087, 0.029617151260375976, 0.029853696823120116, 0.029738208770751954, 0.029621023178100586, 0.02956819152832031, 0.02973369598388672, 0.029938976287841798, 0.029799135208129882, 0.029818336486816407, 0.030083616256713866, 0.02994175910949707, 0.03007855987548828, 0.030493087768554687, 0.029888511657714844, 0.029784063339233398, 0.03298099136352539, 0.029698047637939453, 0.029648895263671874, 0.029816831588745117, 0.029941791534423827, 0.029954015731811525, 0.029857215881347657, 0.029931615829467774, 0.029879840850830078, 0.029763872146606446, 0.02976425552368164, 0.029837312698364257, 0.029773759841918945, 0.02972438430786133, 0.02974086380004883, 0.02962486457824707, 0.029710336685180663, 0.029865983963012696, 0.02974742317199707, 0.029746976852416993, 0.029722623825073242, 0.029942943572998048, 0.02983203125, 0.029676799774169923, 0.02984832000732422, 0.029792224884033203, 0.029890592575073243, 0.030058496475219725, 0.029691328048706056, 0.029704896926879883, 0.02982080078125, 0.02979840087890625, 0.02980659294128418, 0.03000934410095215, 0.029621696472167967, 0.029739391326904296, 0.029536191940307616, 0.029554943084716796, 0.029486879348754883, 0.029442272186279296, 0.02963043212890625, 0.030341119766235353, 0.03016294479370117, 0.029848672866821288, 0.029780031204223633, 0.029522207260131834, 0.02957574462890625, 0.02959916877746582, 0.029643295288085937, 0.029574687957763673, 0.02968832015991211, 0.02971161651611328, 0.02953696060180664, 0.029515775680541992, 0.02965510368347168, 0.029624319076538085, 0.02954966354370117, 0.031160448074340822, 0.03062044715881348, 0.030048255920410157, 0.029834592819213867, 0.029903039932250977, 0.02966691207885742, 0.029655040740966795, 0.029526464462280272, 0.02956742477416992, 0.02964905548095703, 0.02981203269958496, 0.029766176223754885, 0.03027334403991699, 0.029617471694946287, 0.029678623199462892, 0.02988630485534668, 0.02966934394836426, 0.029538335800170897, 0.02970419120788574, 0.02961305618286133, 0.029640832901000978, 0.02957401657104492, 0.029467744827270506, 0.02979631996154785, 0.029722976684570312, 0.02969046401977539, 0.029695968627929687, 0.029855775833129882, 0.029998783111572266, 0.029475103378295897, 0.029655071258544923, 0.02959974479675293, 0.029599231719970705, 0.030714656829833983, 0.02981990432739258, 0.030044160842895507, 0.02970083236694336, 0.02979151916503906, 0.029798208236694337, 0.029705087661743165, 0.029632543563842772, 0.02976358413696289, 0.029740543365478517, 0.02964694404602051, 0.02965135955810547, 0.029724672317504884, 0.029788000106811523, 0.030671072006225587, 0.03011849594116211, 0.029857215881347657, 0.029796287536621092, 0.029710975646972657, 0.03000934410095215, 0.029698047637939453, 0.02979430389404297, 0.02978348731994629, 0.029637184143066406, 0.029691904067993165, 0.03036275291442871, 0.030026239395141603, 0.031056255340576173, 0.030431232452392577, 0.030060543060302734, 
0.029851648330688478, 0.030015487670898438, 0.030284799575805665, 0.02995439910888672, 0.030118335723876954, 0.03005836868286133, 0.029866336822509765, 0.030104831695556642, 0.029897823333740234, 0.029734560012817383, 0.0296910400390625, 0.029644800186157227, 0.029594240188598634, 0.029616352081298827, 0.02940108871459961, 0.029642784118652343, 0.02956812858581543, 0.029436159133911132, 0.02969798469543457, 0.02962499237060547, 0.02938265609741211, 0.02945996856689453, 0.029624959945678712, 0.02989459228515625, 0.02980963134765625, 0.029989856719970703, 0.029902912139892577, 0.029699583053588868, 0.029589056015014648, 0.029803199768066405, 0.03133443260192871, 0.030503072738647462, 0.030381664276123047, 0.030456192016601564, 0.030111040115356445, 0.029788095474243163, 0.02958620834350586, 0.029655168533325196, 0.029529024124145507, 0.02984441566467285, 0.02974515151977539, 0.029675647735595702, 0.029603712081909178, 0.029603071212768554, 0.02955673599243164, 0.029688192367553712, 0.02970252799987793]",tokens/s,33.371581380360766,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,11136.868352,12227.3792,0.0,11848.9088,11814.752256,s,1,16.2327216796875,16.2327216796875,0.0,16.2327216796875,16.2327216796875,16.2327216796875,16.2327216796875,[16.2327216796875],,kWh,0.00026652513839583964,2.939236722192133e-05,8.68925695139966e-05,0.00038281007513175756,,MB,2067.243008,14033.027072,0.0,13625.196544,13297.870848,s,10,22.9517958984375,2.29517958984375,0.0008820631523208208,2.2952384033203126,2.296043969726562,2.2965340698242187,2.296926149902344,"[2.294716552734375, 
2.293971923828125, 2.294431884765625, 2.29522900390625, 2.295479248046875, 2.29407421875, 2.295247802734375, 2.29568603515625, 2.29593505859375, 2.297024169921875]",tokens/s,111.53811280511947,kWh,6.69229592179181e-05,7.379358171590439e-06,4.43492299238013e-05,0.00011865154731330984,tokens/kWh,2157578.2684401874,MB,2071.445504,14335.01696,0.0,13927.186432,13689.870848,s,10,1357.6977968750002,135.76977968750003,0.03905213328064623,135.753359375,135.8288109375,135.83175703125,135.83411390625,"[135.726765625, 135.7214375, 135.73778125, 135.75059375, 135.75296875, 135.75375, 135.787078125, 135.8045625, 135.82815625, 135.834703125]",tokens/s,0.4640207868423038,kWh,0.003959727415000008,0.0004367888125291182,0.0026341422462009987,0.007030658473730125,tokens/kWh,8960.753851918405,,s,630,1357.6917192382805,2.155066221013145,0.0009203916848711222,2.154985595703125,2.1563291015625,2.156659765625,2.1572001464843753,"[2.15377978515625, 2.15362353515625, 2.154102783203125, 2.152888427734375, 2.153879638671875, 2.153713623046875, 2.1530703125, 2.153484619140625, 2.15326904296875, 2.15319775390625, 2.154849609375, 2.15480126953125, 2.154350830078125, 2.153640380859375, 2.15429931640625, 2.153489501953125, 2.155323974609375, 2.1542197265625, 2.154045654296875, 2.15464697265625, 2.154449462890625, 2.15431298828125, 2.154271484375, 2.15437890625, 2.1539443359375, 2.15463818359375, 2.1542236328125, 2.154859619140625, 2.154393798828125, 2.15407470703125, 2.154638671875, 2.154382080078125, 2.154458251953125, 2.1549345703125, 2.15419140625, 2.154756103515625, 2.15472314453125, 2.154986572265625, 2.153968017578125, 2.154627685546875, 2.154684326171875, 2.154903564453125, 2.154151611328125, 2.154827880859375, 2.15438232421875, 2.153913330078125, 2.155095458984375, 2.154193603515625, 2.15475, 2.155017822265625, 2.1539208984375, 2.154256103515625, 2.1548037109375, 2.154446044921875, 2.155332275390625, 2.15518212890625, 2.15475537109375, 2.154677001953125, 2.15482568359375, 2.15515771484375, 2.154954833984375, 2.154704833984375, 2.1553427734375, 2.15356201171875, 2.153287109375, 2.15346142578125, 2.15304443359375, 2.15363134765625, 2.15324658203125, 2.153400390625, 2.1534482421875, 2.15419482421875, 2.153303955078125, 2.1533408203125, 2.154169921875, 2.15413916015625, 2.153862060546875, 2.153770751953125, 2.154093994140625, 2.1539375, 2.153967529296875, 2.154238037109375, 2.15394921875, 2.15406396484375, 2.154262451171875, 2.154094482421875, 2.15416015625, 2.153773193359375, 2.154221435546875, 2.154911865234375, 2.154006591796875, 2.154006591796875, 2.15403466796875, 2.15473388671875, 2.153947265625, 2.154082275390625, 2.1540576171875, 2.1540595703125, 2.153810302734375, 2.15459765625, 2.154572021484375, 2.1547216796875, 2.15456787109375, 2.154612548828125, 2.154059814453125, 2.154459228515625, 2.15434033203125, 2.154974853515625, 2.1545126953125, 2.15436083984375, 2.15488427734375, 2.15504296875, 2.154566162109375, 2.1549423828125, 2.15516357421875, 2.15512060546875, 2.154883056640625, 2.155005859375, 2.155716064453125, 2.154611328125, 2.155560302734375, 2.154902099609375, 2.155333251953125, 2.154781005859375, 2.15556298828125, 2.154715087890625, 2.154047607421875, 2.15368701171875, 2.15332666015625, 2.153343017578125, 2.154083740234375, 2.15368115234375, 2.15378759765625, 2.15322802734375, 2.15338232421875, 2.15393408203125, 2.1533740234375, 2.15382666015625, 2.1550546875, 2.15336181640625, 2.15402294921875, 2.153701416015625, 2.15465087890625, 2.1540625, 2.154532958984375, 2.154282958984375, 2.154406005859375, 
2.154145751953125, 2.153818115234375, 2.15392041015625, 2.15444287109375, 2.153777099609375, 2.1542412109375, 2.154507080078125, 2.15466943359375, 2.154838134765625, 2.154627685546875, 2.15440966796875, 2.154647216796875, 2.15475244140625, 2.154928466796875, 2.154638427734375, 2.153850830078125, 2.15507666015625, 2.15435986328125, 2.155682861328125, 2.154911865234375, 2.155358154296875, 2.15447509765625, 2.15534765625, 2.154611083984375, 2.155585205078125, 2.1549453125, 2.15463720703125, 2.155443359375, 2.155148193359375, 2.155347412109375, 2.154822021484375, 2.154760009765625, 2.15597216796875, 2.155488037109375, 2.155116455078125, 2.1555029296875, 2.155637451171875, 2.155179931640625, 2.155326904296875, 2.15504736328125, 2.156221923828125, 2.155205322265625, 2.15432421875, 2.153321044921875, 2.15322216796875, 2.15401416015625, 2.154091064453125, 2.15307666015625, 2.154629150390625, 2.15393896484375, 2.15340771484375, 2.153814697265625, 2.153936279296875, 2.154150390625, 2.153486572265625, 2.154694091796875, 2.15483154296875, 2.154114013671875, 2.153946044921875, 2.1546708984375, 2.15391015625, 2.1548994140625, 2.154569091796875, 2.154937255859375, 2.154496826171875, 2.154492919921875, 2.154501708984375, 2.15464599609375, 2.154235107421875, 2.154486572265625, 2.15464697265625, 2.154468017578125, 2.155227294921875, 2.154882568359375, 2.154663330078125, 2.153976806640625, 2.15427001953125, 2.1545458984375, 2.154760009765625, 2.155342041015625, 2.1553828125, 2.154933837890625, 2.154500732421875, 2.15493212890625, 2.15534375, 2.1550771484375, 2.155051513671875, 2.155622314453125, 2.155462646484375, 2.155150390625, 2.15542578125, 2.15520458984375, 2.15525, 2.15550732421875, 2.15552490234375, 2.1555712890625, 2.1557412109375, 2.155888671875, 2.15494384765625, 2.155469482421875, 2.1559990234375, 2.156082763671875, 2.1556552734375, 2.156632568359375, 2.156023681640625, 2.15436474609375, 2.153816650390625, 2.154111083984375, 2.15410400390625, 2.154985595703125, 2.15435107421875, 2.153399658203125, 2.154334228515625, 2.153712158203125, 2.1545986328125, 2.15471875, 2.1541240234375, 2.154567626953125, 2.1542216796875, 2.15448486328125, 2.1534501953125, 2.1555380859375, 2.154695068359375, 2.1541083984375, 2.154921875, 2.154289306640625, 2.154621337890625, 2.15501416015625, 2.1543505859375, 2.154255859375, 2.15504052734375, 2.154267578125, 2.154643310546875, 2.154623291015625, 2.1547373046875, 2.1549189453125, 2.15486279296875, 2.154537109375, 2.15472119140625, 2.15438818359375, 2.154477783203125, 2.155399169921875, 2.15554248046875, 2.155152587890625, 2.155150146484375, 2.155085693359375, 2.154963134765625, 2.154284423828125, 2.1551826171875, 2.15504296875, 2.1550439453125, 2.15484326171875, 2.15489306640625, 2.155017822265625, 2.155342529296875, 2.15538623046875, 2.15503662109375, 2.154445068359375, 2.15603759765625, 2.15510107421875, 2.156349365234375, 2.15521826171875, 2.155336181640625, 2.15549951171875, 2.1556796875, 2.155181396484375, 2.156218505859375, 2.155602783203125, 2.154482177734375, 2.153758544921875, 2.15450244140625, 2.15416357421875, 2.153860107421875, 2.153955078125, 2.154624267578125, 2.15406884765625, 2.1538037109375, 2.15387744140625, 2.154406005859375, 2.154027099609375, 2.154689697265625, 2.15476708984375, 2.15424169921875, 2.15487939453125, 2.154475341796875, 2.1543466796875, 2.15468994140625, 2.154701416015625, 2.15486669921875, 2.1554482421875, 2.15520458984375, 2.15374853515625, 2.153969482421875, 2.154931640625, 2.154985595703125, 2.15489794921875, 2.154056884765625, 
2.15442724609375, 2.15461181640625, 2.15497216796875, 2.15499462890625, 2.154851318359375, 2.154536376953125, 2.154569580078125, 2.1551787109375, 2.15495703125, 2.154661376953125, 2.154598876953125, 2.1551669921875, 2.154668701171875, 2.155399169921875, 2.15532958984375, 2.15520263671875, 2.15521484375, 2.15503466796875, 2.1550322265625, 2.154961181640625, 2.1554052734375, 2.154875, 2.1551328125, 2.1555322265625, 2.15577392578125, 2.1547763671875, 2.155614013671875, 2.1548134765625, 2.15576953125, 2.1561640625, 2.155378662109375, 2.155300048828125, 2.15560888671875, 2.156122314453125, 2.15542138671875, 2.15410693359375, 2.15370751953125, 2.15404345703125, 2.15413134765625, 2.15398388671875, 2.154730712890625, 2.154179443359375, 2.1545166015625, 2.15442236328125, 2.153889404296875, 2.1549775390625, 2.154921875, 2.15444189453125, 2.15519091796875, 2.154968505859375, 2.155385498046875, 2.154739013671875, 2.1551982421875, 2.15461572265625, 2.154502197265625, 2.155098388671875, 2.15466259765625, 2.1547666015625, 2.15472607421875, 2.1551494140625, 2.154901611328125, 2.15481689453125, 2.155205322265625, 2.15503466796875, 2.156209228515625, 2.155299560546875, 2.1552314453125, 2.155640869140625, 2.155310791015625, 2.1551865234375, 2.155093994140625, 2.155556884765625, 2.155173828125, 2.1561201171875, 2.1563681640625, 2.15586328125, 2.15506396484375, 2.155530029296875, 2.155470703125, 2.15595166015625, 2.156271484375, 2.156151123046875, 2.155916748046875, 2.155580322265625, 2.15549951171875, 2.156786865234375, 2.156016357421875, 2.15598291015625, 2.1558330078125, 2.15690283203125, 2.156203125, 2.1568740234375, 2.156804443359375, 2.156328857421875, 2.15601171875, 2.15711328125, 2.156693603515625, 2.155583251953125, 2.154637451171875, 2.154427734375, 2.15435888671875, 2.15477880859375, 2.154145263671875, 2.15453759765625, 2.1538388671875, 2.154702880859375, 2.154565185546875, 2.154799560546875, 2.155702392578125, 2.155001708984375, 2.15463134765625, 2.155059326171875, 2.15557080078125, 2.155644287109375, 2.155435791015625, 2.155279296875, 2.155478271484375, 2.155039306640625, 2.1556474609375, 2.155759521484375, 2.1556796875, 2.155052978515625, 2.155063232421875, 2.156125244140625, 2.155266845703125, 2.15598583984375, 2.15598583984375, 2.155769775390625, 2.155423828125, 2.156295166015625, 2.155389892578125, 2.154956787109375, 2.1549384765625, 2.155702392578125, 2.156359130859375, 2.15553466796875, 2.156149169921875, 2.15556884765625, 2.15610107421875, 2.156392822265625, 2.15614208984375, 2.155922119140625, 2.155926513671875, 2.15628515625, 2.1563779296875, 2.156031982421875, 2.156570556640625, 2.155749267578125, 2.156455810546875, 2.156539306640625, 2.156162109375, 2.156419921875, 2.156111083984375, 2.1556201171875, 2.157068115234375, 2.15618359375, 2.15617529296875, 2.156353515625, 2.156662841796875, 2.156812255859375, 2.15483154296875, 2.154446533203125, 2.1550927734375, 2.155013427734375, 2.155321044921875, 2.153851806640625, 2.15472314453125, 2.154208740234375, 2.155258544921875, 2.15474609375, 2.155627685546875, 2.1557392578125, 2.1559658203125, 2.155125244140625, 2.15587890625, 2.15556103515625, 2.155802734375, 2.156337158203125, 2.155599853515625, 2.155122802734375, 2.154838134765625, 2.15573291015625, 2.155853271484375, 2.15604833984375, 2.155375244140625, 2.155175048828125, 2.156044677734375, 2.155989501953125, 2.156078857421875, 2.156032470703125, 2.156312255859375, 2.1562412109375, 2.155767333984375, 2.15622900390625, 2.155845703125, 2.156160888671875, 2.155933349609375, 2.156331298828125, 
2.156830810546875, 2.155802734375, 2.156553466796875, 2.15624560546875, 2.156656005859375, 2.156861083984375, 2.155530029296875, 2.155895751953125, 2.156756591796875, 2.15710546875, 2.156838623046875, 2.15633251953125, 2.156612060546875, 2.156882080078125, 2.156545166015625, 2.15689306640625, 2.1569375, 2.156795654296875, 2.156451904296875, 2.157452392578125, 2.15695458984375, 2.15725048828125, 2.15652978515625, 2.15766015625, 2.1569189453125, 2.15613720703125, 2.154751953125, 2.15535205078125, 2.155133056640625, 2.155610107421875, 2.15499560546875, 2.155558837890625, 2.155183837890625, 2.15576611328125, 2.155431884765625, 2.15549072265625, 2.155479736328125, 2.15575341796875, 2.15545654296875, 2.156015380859375, 2.15538916015625, 2.156316650390625, 2.156632080078125, 2.156115966796875, 2.15632275390625, 2.1554462890625, 2.156004638671875, 2.156026611328125, 2.156030029296875, 2.156322998046875, 2.155793212890625, 2.15531201171875, 2.15627783203125, 2.1556279296875, 2.15658349609375, 2.156557373046875, 2.15524658203125, 2.156424560546875, 2.156294921875, 2.155815185546875, 2.15559130859375, 2.15583447265625, 2.156342041015625, 2.156015625, 2.156478271484375, 2.156675048828125, 2.1556572265625, 2.156151123046875, 2.156275634765625, 2.156451904296875, 2.1573857421875, 2.156472412109375, 2.155664794921875, 2.156966552734375, 2.156527587890625, 2.156632080078125, 2.156265625, 2.1570087890625, 2.15602685546875, 2.156317138671875, 2.157116943359375, 2.1564873046875, 2.15671337890625, 2.156304931640625, 2.157250732421875, 2.15592333984375, 2.157657470703125, 2.157234130859375]",tokens/s,0.4640228640073425,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return 
next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,3563.528192,4495.179776,0.0,4116.709376,3980.386816,s,1,10.4573623046875,10.4573623046875,0.0,10.4573623046875,10.4573623046875,10.4573623046875,10.4573623046875,[10.4573623046875],,kWh,9.463678974167351e-05,1.0428446453185679e-05,2.954335696799676e-05,0.00013460859316285595,,MB,3465.621504,4826.529792,0.0,4418.699264,4245.764608,s,10,6.514956481933594,0.6514956481933594,0.0008382393185001823,0.6516083984375001,0.6524576538085937,0.6526914428710937,0.6528784741210938,"[0.6507066650390625, 0.6516373901367187, 0.6515794067382813, 0.6514669799804688, 0.6501195068359376, 0.6517879638671875, 0.6524057006835937, 0.6503561401367187, 0.6529252319335938, 0.6519714965820312]",tokens/s,392.94199540688413,kWh,1.9035242236718612e-05,2.0992695936843544e-06,1.2672371248999888e-05,3.380688307940286e-05,tokens/kWh,7572422.438316127,MB,3469.758464,4837.015552,0.0,4429.185024,4245.767168,s,10,384.47273437499996,38.447273437499994,0.025700547755004608,38.459927734375,38.465746484375,38.4658712890625,38.4659711328125,"[38.38426171875, 38.41635546875, 38.44120703125, 38.4502265625, 38.4564609375, 38.46393359375, 38.46571875, 38.46339453125, 38.46599609375, 38.4651796875]",tokens/s,1.638607744250395,kWh,0.0011221179002866139,0.00012377796577120082,0.0007458437355633994,0.001991739601621214,tokens/kWh,31630.640847187035,,s,630,384.46834374999986,0.6102672123015873,0.0005672002210965215,0.6103543701171874,0.6108875366210937,0.6110109985351563,0.6112549932861329,"[0.6090960083007813, 0.6084502563476563, 0.608760498046875, 0.6085468139648438, 0.6086820068359375, 0.608964599609375, 0.6090198364257813, 0.6083390502929688, 0.60896337890625, 0.6084527587890625, 0.6092669677734375, 0.6092006225585938, 0.6084193115234375, 0.6095037231445313, 0.6087141723632813, 0.6089382934570312, 0.6088442993164063, 0.6087393188476562, 0.6091402587890625, 0.6090695190429688, 0.608712646484375, 0.6101893310546875, 0.6080819091796875, 0.6089801025390625, 0.6094119873046875, 0.608943603515625, 0.610287109375, 0.6082645874023438, 0.6091060180664063, 0.6097963256835938, 0.6085675659179688, 0.6099081420898438, 0.6093010864257813, 0.608392578125, 0.6096492309570313, 0.6088048706054687, 0.610361328125, 0.6084198608398438, 0.6096354370117187, 0.6103089599609375, 0.6092575073242188, 0.6096132202148438, 0.6098903198242187, 0.6089487915039062, 0.6100578002929687, 0.60937255859375, 0.6100594482421875, 0.6092112426757812, 0.6090424194335937, 0.6103367919921875, 
0.6088253173828125, 0.6102559814453125, 0.6092584228515625, 0.609912841796875, 0.6095703735351562, 0.6097042846679688, 0.6093067626953125, 0.60988330078125, 0.6095225219726562, 0.6101954345703124, 0.6098411865234376, 0.6093475341796875, 0.6101852416992187, 0.6092809448242188, 0.6101094360351562, 0.6082826538085937, 0.6092533569335937, 0.6097572021484375, 0.6091590576171875, 0.6098058471679687, 0.609083740234375, 0.610011474609375, 0.609449462890625, 0.609554931640625, 0.6091915283203125, 0.6089580688476562, 0.6100978393554688, 0.60969384765625, 0.60918115234375, 0.6101787109375, 0.6088201293945312, 0.6105702514648438, 0.6092473754882812, 0.6092838134765625, 0.6100452270507812, 0.6097061767578125, 0.6102218627929688, 0.6087933959960937, 0.6097158813476562, 0.6104061889648438, 0.6090422973632813, 0.6100711059570313, 0.6098965454101563, 0.6100003051757813, 0.6095570678710938, 0.6095827026367188, 0.610037353515625, 0.60992333984375, 0.6097310791015625, 0.61005419921875, 0.6099200439453125, 0.6091724243164063, 0.6103319091796875, 0.6097229614257812, 0.609804443359375, 0.6102564697265624, 0.609752685546875, 0.6098746948242187, 0.6103369750976563, 0.6098450927734375, 0.6097754516601562, 0.6103055419921875, 0.6095953979492188, 0.6103026123046875, 0.6103894653320312, 0.6097640991210938, 0.6101337890625, 0.6101913452148438, 0.6103736572265624, 0.6095872192382813, 0.6099100952148437, 0.6101613159179687, 0.610305908203125, 0.6104202270507812, 0.6102186279296875, 0.6095318603515625, 0.6104039916992188, 0.6099992065429688, 0.6098433227539063, 0.6099674682617188, 0.6096532592773437, 0.6100801391601562, 0.6097854614257813, 0.6095120239257813, 0.6103056640625, 0.6099771728515625, 0.609578369140625, 0.6103779907226563, 0.6103719482421875, 0.6095155029296875, 0.6104514770507813, 0.6100208740234375, 0.6097679443359375, 0.6097694702148437, 0.6106480712890625, 0.6100399169921875, 0.6097529296875, 0.610355224609375, 0.609977783203125, 0.6100938720703125, 0.610328369140625, 0.6099795532226563, 0.6102411499023438, 0.609832763671875, 0.6103208618164062, 0.610081787109375, 0.6100912475585938, 0.6101895751953125, 0.610353515625, 0.6101071166992188, 0.6107816772460938, 0.6100471801757813, 0.6102208862304688, 0.6105856323242187, 0.6097660522460937, 0.6105784301757813, 0.6104160766601563, 0.6101141967773438, 0.6105151977539063, 0.60994970703125, 0.6100787353515625, 0.6105149536132812, 0.61008056640625, 0.6100582275390625, 0.6106473999023437, 0.6098645629882813, 0.6100541381835938, 0.6104146118164062, 0.6102568969726563, 0.610663818359375, 0.610531982421875, 0.6101392822265626, 0.6105437622070312, 0.6101736450195312, 0.6103326416015625, 0.61028759765625, 0.610763916015625, 0.6102324829101563, 0.6103866577148438, 0.6103729248046875, 0.6099683227539062, 0.6100995483398437, 0.610337890625, 0.6098373413085938, 0.6101339111328125, 0.61000927734375, 0.6096589965820313, 0.6109815063476562, 0.6097310180664063, 0.61061767578125, 0.610209716796875, 0.6096117553710938, 0.6103079223632812, 0.6105637817382813, 0.610521484375, 0.6099417114257812, 0.6102159423828125, 0.610492431640625, 0.6102261962890625, 0.610174072265625, 0.6103909301757813, 0.6102794189453125, 0.609967529296875, 0.6100049438476562, 0.6103330688476563, 0.6102509765625, 0.6103941040039063, 0.6103749389648437, 0.60979638671875, 0.6105010375976563, 0.610197509765625, 0.6100459594726563, 0.6109686889648438, 0.6098619384765624, 0.6103475952148437, 0.6103693237304687, 0.6103880615234375, 0.6099354858398438, 0.6102138671875, 0.6106185913085938, 0.6098746948242187, 
0.6102774047851562, 0.6105184326171875, 0.6099586181640625, 0.6109733276367187, 0.6100340576171875, 0.6104267578125, 0.61057421875, 0.6102254638671875, 0.6105209350585937, 0.6104053955078125, 0.61082421875, 0.610802734375, 0.6102413940429687, 0.6108724975585937, 0.6108927001953125, 0.6102405395507813, 0.6106473999023437, 0.6105316772460937, 0.6103616943359375, 0.6100801391601562, 0.6112569580078125, 0.6104248046875, 0.6101829833984375, 0.6103634643554687, 0.6095133666992187, 0.6109862060546875, 0.610718994140625, 0.6098419189453125, 0.6104116821289063, 0.6095, 0.6103265380859375, 0.6106603393554687, 0.6099819946289062, 0.6097412719726563, 0.6111395874023438, 0.6095827026367188, 0.6108819580078125, 0.6099450073242187, 0.6100506591796875, 0.6106132202148438, 0.6102151489257812, 0.6099361572265625, 0.6103777465820313, 0.6101986083984375, 0.6105732421875, 0.61054931640625, 0.610621826171875, 0.6099002075195312, 0.61038818359375, 0.6103051147460937, 0.6107286376953125, 0.610433349609375, 0.6107156372070313, 0.6105252075195312, 0.6105189819335938, 0.61015576171875, 0.6110543823242187, 0.610620849609375, 0.6102533569335937, 0.6102498168945313, 0.6105712280273438, 0.6102937622070312, 0.6101273193359374, 0.610498291015625, 0.6101920166015625, 0.6104590454101563, 0.610794189453125, 0.6101744384765625, 0.6108883056640625, 0.610123779296875, 0.6103654174804688, 0.6105947875976563, 0.6106603393554687, 0.6108671264648438, 0.6105887451171875, 0.6101355590820312, 0.610887451171875, 0.610244873046875, 0.6103985595703125, 0.6108070068359375, 0.6105148315429687, 0.6110955200195313, 0.6108405151367188, 0.6107074584960938, 0.6110088500976563, 0.6105499267578125, 0.6099988403320312, 0.6107484130859375, 0.61009716796875, 0.6108262329101563, 0.6096029052734375, 0.610570556640625, 0.610397705078125, 0.609651611328125, 0.6106480712890625, 0.61031005859375, 0.6104925537109375, 0.6104390869140625, 0.6105394897460937, 0.6101621704101563, 0.6107075805664063, 0.6103699340820312, 0.6103582763671875, 0.6105354614257813, 0.610065185546875, 0.610385986328125, 0.6104017333984375, 0.6105086669921875, 0.6109661865234375, 0.610418701171875, 0.6102097778320312, 0.6107197265625, 0.6104658203125, 0.6108712768554687, 0.610318359375, 0.6100889892578125, 0.6107152709960938, 0.6109371948242187, 0.6102835083007813, 0.6110003051757813, 0.6104874267578125, 0.6102968139648437, 0.61108154296875, 0.6101611938476562, 0.6107177124023437, 0.6106275634765626, 0.6112010498046875, 0.6103121337890625, 0.6109384765625, 0.6103938598632812, 0.6107551879882812, 0.610673828125, 0.61046630859375, 0.610532958984375, 0.6106406860351562, 0.6106869506835938, 0.6105858154296875, 0.6110704956054688, 0.610211669921875, 0.610906005859375, 0.6102861328125, 0.6105409545898437, 0.6107675170898438, 0.61068603515625, 0.6106018676757813, 0.6112501831054687, 0.6102958374023437, 0.6106951904296875, 0.6100008544921875, 0.6104107666015625, 0.610318115234375, 0.6100132446289063, 0.6111651611328125, 0.6099130249023438, 0.610789794921875, 0.6106354370117187, 0.61014697265625, 0.6103236694335937, 0.610946044921875, 0.6106171264648438, 0.610324462890625, 0.6105066528320312, 0.6101213989257812, 0.6107918701171875, 0.6104920043945312, 0.61086083984375, 0.6102144775390625, 0.6106419067382812, 0.6106988525390625, 0.610336181640625, 0.6100687255859375, 0.6110331420898437, 0.6101918334960937, 0.6103512573242188, 0.6108096923828125, 0.6102547607421875, 0.6108285522460938, 0.6110157470703125, 0.6107221069335937, 0.6106929931640624, 0.6105497436523437, 0.610037841796875, 
0.61115380859375, 0.6100897216796874, 0.6103983154296875, 0.610609130859375, 0.6102425537109375, 0.6104266967773437, 0.6110127563476563, 0.6101115112304687, 0.6108079833984374, 0.6104503784179688, 0.6108599243164062, 0.610407958984375, 0.6111585083007812, 0.6106439819335937, 0.6110303955078125, 0.610271240234375, 0.6106846313476563, 0.6108927001953125, 0.6101299438476563, 0.6107689208984375, 0.6109710693359375, 0.6105823364257813, 0.61077783203125, 0.6106378173828125, 0.6107095336914062, 0.610710693359375, 0.6108168334960937, 0.6104063720703125, 0.6110336303710937, 0.610076171875, 0.6098011474609375, 0.6101842651367188, 0.6108245239257812, 0.6096748657226563, 0.610658935546875, 0.6100380859375, 0.6109224853515625, 0.6098063354492187, 0.61053076171875, 0.610688720703125, 0.6106028442382813, 0.610356201171875, 0.6102913818359375, 0.6101486206054687, 0.6109524536132812, 0.610075439453125, 0.610819580078125, 0.6102649536132813, 0.6107093505859374, 0.6104130859375, 0.6104392700195312, 0.61022021484375, 0.6107393798828125, 0.6105497436523437, 0.6100529174804687, 0.6108324584960938, 0.61034228515625, 0.61023291015625, 0.6107208251953125, 0.6103804931640625, 0.6105541381835937, 0.61051904296875, 0.6109224853515625, 0.6100930786132812, 0.610405517578125, 0.6113425903320312, 0.61042724609375, 0.6109452514648438, 0.6101565551757813, 0.6107545776367187, 0.6108323974609375, 0.610150390625, 0.6103222045898438, 0.6109002075195312, 0.610995849609375, 0.6104231567382813, 0.6113660278320312, 0.6103663330078125, 0.6107053833007813, 0.6102097778320312, 0.610680419921875, 0.6104946899414062, 0.6103143920898437, 0.6110802001953125, 0.6103775024414062, 0.6107463989257812, 0.6105205078125, 0.6111751708984375, 0.6102006225585938, 0.61135498046875, 0.6102411499023438, 0.6107234497070313, 0.6103710327148437, 0.61014111328125, 0.6103770141601562, 0.6103634033203125, 0.61069384765625, 0.6104965209960938, 0.6104432373046875, 0.61039208984375, 0.61032373046875, 0.6102998657226563, 0.6108250122070312, 0.610334716796875, 0.6104146118164062, 0.6105001831054687, 0.6102429809570312, 0.6105613403320312, 0.6103067626953125, 0.6107095336914062, 0.6102866821289062, 0.6101697387695313, 0.6107027587890625, 0.61000927734375, 0.6103187866210937, 0.610703369140625, 0.610620849609375, 0.6109596557617187, 0.6103853149414062, 0.6109967651367187, 0.6100850830078125, 0.6110531616210938, 0.6100833740234375, 0.6104219360351563, 0.6108773803710937, 0.6101287231445313, 0.6106746826171875, 0.6106624145507813, 0.610909912109375, 0.6104019775390624, 0.61080029296875, 0.6104452514648437, 0.6104496459960937, 0.6108167724609375, 0.6103964233398438, 0.6106337890625, 0.6113442993164062, 0.6102157592773437, 0.610970458984375, 0.6105879516601562, 0.6110172729492187, 0.6101951293945312, 0.6109410400390625, 0.6104273681640625, 0.6108549194335937, 0.6107973022460937, 0.610593017578125, 0.6105426025390625, 0.6108866577148437, 0.6106618041992188, 0.610755126953125, 0.6107421875, 0.6102978515625, 0.6112400512695313, 0.6103338623046874, 0.6110248413085938, 0.6092952880859375, 0.6105497436523437, 0.6100065307617187, 0.6108591918945312, 0.6095916137695313, 0.6105489501953125, 0.6105137329101562, 0.6101810913085938, 0.6112948608398437, 0.60995849609375, 0.6097098388671875, 0.61123583984375, 0.6098510131835938, 0.6106725463867188, 0.6104515991210937, 0.6106319580078124, 0.6102650756835938, 0.61079736328125, 0.6099724731445313, 0.6109937744140626, 0.6100443115234375, 0.6105042724609375, 0.61080810546875, 0.6102652587890625, 0.6105252075195312, 0.61050830078125, 
0.6104171142578125, 0.610428955078125, 0.610639892578125, 0.6108561401367187, 0.6106427612304688, 0.6103059692382813, 0.6109490966796876, 0.6110658569335937, 0.6107730102539063, 0.6106644287109375, 0.6104304809570312, 0.610922607421875, 0.6101253662109375, 0.6109519653320312, 0.6105042114257813, 0.610482666015625, 0.6106760864257812, 0.6104493408203125, 0.6108674926757812, 0.6101047973632813, 0.6105118408203125, 0.610850830078125, 0.6100556030273437, 0.6108903198242187, 0.6104965209960938, 0.6106492309570313, 0.61102685546875, 0.6107894287109376, 0.6108814697265625, 0.6110543823242187, 0.6104352416992187, 0.6114283447265625, 0.61051904296875, 0.6107484130859375, 0.6108098754882813]",tokens/s,1.6386264571359783,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,5180.567552,5584.584704,0.0,5182.062592,5181.108736,s,1,11.058919921875,11.058919921875,0.0,11.058919921875,11.058919921875,11.058919921875,11.058919921875,[11.058919921875],,kWh,0.00011530209433749784,1.2711294180273348e-05,3.4602805459992e-05,0.0001626161939777632,,MB,5213.7984,5739.773952,0.0,5322.571776,5283.621376,s,10,2.440563400268555,0.24405634002685547,0.0007691004744948949,0.24409329986572265,0.2450234130859375,0.24511565704345703,0.24518945220947266,"[0.2422626495361328, 0.2437012481689453, 0.2441497344970703, 0.24379766845703124, 0.244036865234375, 0.24375567626953126, 0.24446499633789062, 0.24520790100097656, 0.24500291442871094, 0.2441837463378906]",tokens/s,1048.9381262204877,kWh,7.149432002743991e-06,7.884526903470215e-07,4.752178598487769e-06,1.269006329157878e-05,tokens/kWh,20173264.239736572,MB,5221.220352,5756.551168,0.0,5339.348992,5283.623936,s,10,24.758136962890624,2.4758136962890624,0.010295311844426745,2.4763955078124997,2.4855232666015623,2.486075109863281,2.4865165844726564,"[2.485400634765625, 2.4732705078125, 2.4768505859375, 2.47284716796875, 2.448720947265625, 2.4759404296875, 2.473294189453125, 2.486626953125, 2.481483154296875, 2.483702392578125]",tokens/s,25.446179611345226,kWh,7.269821247642271e-05,8.01754476626919e-06,4.8101252573112665e-05,0.00012881700981580457,tokens/kWh,489065.8468946275,,s,630,24.755305088043233,0.03929413506038606,0.0005142077816655496,0.03921575927734375,0.03971001434326172,0.03999087085723877,0.0413761264038086,"[0.0412760009765625, 0.03926406478881836, 0.039380958557128906, 0.039801055908203126, 0.041425086975097655, 0.040357311248779296, 0.03984159851074219, 0.03956777572631836, 0.039565471649169924, 0.03940556716918946, 0.04016080093383789, 0.0394532470703125, 0.03915539169311524, 0.03952252960205078, 0.039196670532226564, 0.039526142120361325, 0.03941996765136719, 0.03915539169311524, 0.03924979019165039, 0.0392869758605957, 0.03913363265991211, 0.03859561538696289, 0.03840304183959961, 0.0388724479675293, 0.03911129760742187, 0.039378944396972655, 0.03923763275146484, 0.03892428970336914, 
0.03914473724365235, 0.03924860763549805, 0.0391352653503418, 0.03909603118896485, 0.03930752182006836, 0.039196670532226564, 0.03940304183959961, 0.039551456451416014, 0.04173110580444336, 0.039384033203125, 0.04032438278198242, 0.039190784454345706, 0.03946748733520508, 0.03984195327758789, 0.03941676712036133, 0.03935302352905273, 0.03948384094238281, 0.03955209732055664, 0.039430816650390624, 0.03929471969604492, 0.03948799896240234, 0.039223072052001956, 0.03922739028930664, 0.03928473663330078, 0.039052894592285156, 0.039207103729248044, 0.039510238647460935, 0.03923353576660156, 0.03931366348266602, 0.039124351501464844, 0.0391541748046875, 0.03949667358398438, 0.03908291244506836, 0.03941203308105469, 0.03909190368652344, 0.03971286392211914, 0.039221118927001954, 0.03924790573120117, 0.03912499237060547, 0.03936796951293945, 0.039158496856689456, 0.039425247192382815, 0.03931596755981445, 0.03898777770996094, 0.03899446487426758, 0.03910015869140625, 0.038989761352539065, 0.03891177749633789, 0.03921744155883789, 0.03936870574951172, 0.03921331024169922, 0.038921985626220706, 0.038752254486083985, 0.038817790985107424, 0.03903241729736328, 0.03917660903930664, 0.03891814422607422, 0.0392437744140625, 0.039665088653564454, 0.03916243362426758, 0.0392171516418457, 0.03899907302856445, 0.038588863372802734, 0.03906614303588867, 0.038919326782226565, 0.03914652633666992, 0.03902649688720703, 0.03939059066772461, 0.039641822814941406, 0.03910553741455078, 0.03968297576904297, 0.0389304313659668, 0.03900368118286133, 0.03933590316772461, 0.03919830322265625, 0.03942623901367188, 0.03913596725463867, 0.04057292938232422, 0.0395797119140625, 0.03918175888061524, 0.03922691345214844, 0.03955187225341797, 0.0392457275390625, 0.03929427337646484, 0.0392938232421875, 0.039569408416748046, 0.0394936637878418, 0.03957731246948242, 0.0393034896850586, 0.039158912658691404, 0.03960710525512695, 0.039185855865478514, 0.03982767868041992, 0.039378814697265625, 0.03906198501586914, 0.039214366912841796, 0.03961532974243164, 0.039180160522460934, 0.03958047866821289, 0.039188480377197264, 0.03970268630981445, 0.03923072052001953, 0.03947375869750976, 0.03930316925048828, 0.03897292709350586, 0.03890431976318359, 0.039139328002929685, 0.03921327972412109, 0.03903055953979492, 0.03944019317626953, 0.03943206405639649, 0.039356735229492186, 0.039204158782958985, 0.03913593673706055, 0.03904025650024414, 0.03898028945922852, 0.03891584014892578, 0.039127361297607424, 0.03944422531127929, 0.03962496185302734, 0.039309120178222655, 0.039184574127197266, 0.039661441802978516, 0.039649406433105466, 0.03946707153320313, 0.0390819206237793, 0.04118947219848633, 0.03964508819580078, 0.039839744567871094, 0.03930112075805664, 0.03920272064208984, 0.03913737487792969, 0.039122943878173826, 0.03927852630615234, 0.03942111968994141, 0.039242622375488284, 0.03920076751708984, 0.039436286926269534, 0.039640510559082034, 0.039141952514648436, 0.03916352081298828, 0.03894262313842774, 0.03924425506591797, 0.03922041702270508, 0.03911148834228516, 0.039894977569580076, 0.03904547119140625, 0.039300830841064456, 0.039169151306152346, 0.039150463104248044, 0.03896934509277344, 0.03922668838500976, 0.039121185302734375, 0.03923820877075195, 0.03942588806152344, 0.03883011245727539, 0.0394730224609375, 0.039376895904541014, 0.03943180847167969, 0.03942038345336914, 0.039204864501953124, 0.039495552062988284, 0.039204734802246094, 0.03881833648681641, 0.038973857879638675, 0.038960254669189454, 0.03871014404296875, 
0.03902182388305664, 0.038647743225097654, 0.0389947509765625, 0.039143264770507814, 0.03885481643676758, 0.03922857666015625, 0.0396247673034668, 0.03929987335205078, 0.03878464126586914, 0.03891187286376953, 0.039442943572998046, 0.0391736946105957, 0.03881017684936523, 0.038952705383300784, 0.039112831115722654, 0.040359935760498046, 0.038752254486083985, 0.03864985656738281, 0.038981632232666014, 0.03871855926513672, 0.03856009674072266, 0.03956374359130859, 0.039114494323730466, 0.038977504730224606, 0.038699390411376954, 0.040738815307617186, 0.044181503295898435, 0.039096446990966795, 0.03908185577392578, 0.039395584106445315, 0.03907148742675781, 0.039428096771240234, 0.03952595138549805, 0.039333694458007815, 0.03905513763427734, 0.03927443313598633, 0.03911977767944336, 0.03907968139648438, 0.03902694320678711, 0.039038528442382814, 0.03883871841430664, 0.0388403205871582, 0.03877840042114258, 0.039166431427001956, 0.038973377227783206, 0.040373470306396486, 0.040711006164550784, 0.03930931091308594, 0.03879081726074219, 0.03898185729980469, 0.0389879035949707, 0.03991961669921875, 0.03931891250610352, 0.03915635299682617, 0.03912041473388672, 0.03916348648071289, 0.03915225601196289, 0.039600128173828124, 0.03906140899658203, 0.03876873779296875, 0.03845478439331055, 0.03861872100830078, 0.038728065490722656, 0.038521472930908206, 0.03842614364624024, 0.03829990386962891, 0.03853529739379883, 0.03862326431274414, 0.03905737686157226, 0.03928678512573242, 0.03890585708618164, 0.04112588882446289, 0.0389582405090332, 0.03916067123413086, 0.039059070587158205, 0.038789630889892575, 0.039249984741210935, 0.038700607299804686, 0.038821342468261716, 0.039201568603515625, 0.038844417572021485, 0.0387869758605957, 0.03879110336303711, 0.03895721435546875, 0.0395898551940918, 0.03867171096801758, 0.038924991607666014, 0.03894892883300781, 0.03876038360595703, 0.03858227157592774, 0.038727680206298826, 0.03861679840087891, 0.038535457611083984, 0.03880931091308594, 0.03873001480102539, 0.038580062866210935, 0.039274593353271485, 0.03873388671875, 0.038581760406494144, 0.03900057601928711, 0.03843638229370117, 0.03863113784790039, 0.03928940963745117, 0.039405311584472656, 0.038981311798095705, 0.039386974334716794, 0.03877775955200195, 0.03901996612548828, 0.039064449310302736, 0.0390447998046875, 0.03856524658203125, 0.038650497436523434, 0.038424190521240235, 0.03848396682739258, 0.03862089538574219, 0.03870355224609375, 0.03866755294799805, 0.03870835113525391, 0.03879276657104492, 0.038420734405517576, 0.03999884796142578, 0.04037273788452148, 0.039406753540039065, 0.039220191955566405, 0.039271839141845705, 0.03955152130126953, 0.03924505615234375, 0.03920876693725586, 0.03970969772338867, 0.03973529434204102, 0.03930486297607422, 0.04010019302368164, 0.03914678573608398, 0.039095073699951174, 0.039163841247558596, 0.03909353637695313, 0.0392138557434082, 0.03917203140258789, 0.03893145751953125, 0.038919166564941404, 0.03920076751708984, 0.039428096771240234, 0.038762592315673826, 0.03909212875366211, 0.039282913208007815, 0.03907900619506836, 0.03884921646118164, 0.03935174560546875, 0.039107135772705075, 0.039337982177734376, 0.03946905517578125, 0.039368350982666014, 0.03899427032470703, 0.03921017456054687, 0.03947993469238281, 0.042234046936035156, 0.039139328002929685, 0.038911006927490235, 0.03918127822875977, 0.039137279510498044, 0.03879116821289062, 0.038940414428710934, 0.039212352752685545, 0.03895305633544922, 0.03885551834106445, 0.03923535919189453, 
0.03922726440429687, 0.03927500915527344, 0.039077728271484376, 0.03953184127807617, 0.039178081512451175, 0.039014560699462894, 0.03943075180053711, 0.039432479858398435, 0.03911660766601562, 0.039726207733154294, 0.039172000885009765, 0.03911164855957031, 0.039051265716552735, 0.039481441497802736, 0.03917324829101562, 0.03896966552734375, 0.03922582244873047, 0.039636993408203126, 0.039210334777832034, 0.03910335922241211, 0.03940230560302734, 0.039185375213623044, 0.039341793060302735, 0.04251881790161133, 0.03920896148681641, 0.03898291015625, 0.039828224182128905, 0.03906745529174805, 0.03918048095703125, 0.03897958374023437, 0.03872550582885742, 0.0390382080078125, 0.039088287353515626, 0.03912480163574219, 0.03892540740966797, 0.0391657600402832, 0.039051265716552735, 0.0396082878112793, 0.03902012634277344, 0.038984127044677734, 0.03883827209472656, 0.0389095344543457, 0.039043487548828124, 0.03909222412109375, 0.03887104034423828, 0.03938860702514648, 0.03914931106567383, 0.03887567901611328, 0.03938127899169922, 0.039591552734375, 0.039145854949951174, 0.03943577575683594, 0.03897971343994141, 0.039022975921630856, 0.039684097290039064, 0.03933766555786133, 0.03940998458862305, 0.03903078460693359, 0.03916799926757813, 0.03950511932373047, 0.039502304077148435, 0.03936902236938476, 0.03898275375366211, 0.038755008697509766, 0.03893062210083008, 0.039028766632080075, 0.039301151275634764, 0.03926217651367187, 0.039226497650146484, 0.038917182922363285, 0.038948673248291016, 0.039171966552734375, 0.039442558288574216, 0.039757823944091795, 0.039702529907226565, 0.03966748809814453, 0.039134815216064454, 0.03922294235229492, 0.03921123123168945, 0.03922576141357422, 0.040189697265625, 0.039479393005371094, 0.03927872085571289, 0.03893475341796875, 0.03899311828613281, 0.039103008270263674, 0.0392806396484375, 0.03947724914550781, 0.03920182418823242, 0.03923452758789062, 0.03987251281738281, 0.04126924896240235, 0.039106559753417966, 0.039378944396972655, 0.03957104110717773, 0.03924822235107422, 0.03956947326660156, 0.03895296096801758, 0.03930521774291992, 0.03990323257446289, 0.03971686553955078, 0.03972406387329101, 0.039543777465820315, 0.039725055694580076, 0.0393043212890625, 0.039224224090576174, 0.03993596649169922, 0.03948044967651367, 0.039435104370117186, 0.03973251342773437, 0.04047334289550781, 0.03964495849609375, 0.039636257171630856, 0.03957766342163086, 0.0395447998046875, 0.039812000274658206, 0.039263553619384765, 0.03916624069213867, 0.04170191955566406, 0.03973011016845703, 0.03914377593994141, 0.03937318420410156, 0.038887008666992184, 0.03899811172485351, 0.03984848022460938, 0.03934566497802734, 0.039020832061767576, 0.038744640350341794, 0.03879196929931641, 0.03898863983154297, 0.03911478424072266, 0.03929702377319336, 0.03934207916259766, 0.0392355842590332, 0.03904499053955078, 0.03858848190307617, 0.03885615921020508, 0.03938159942626953, 0.039346176147460936, 0.03934822463989258, 0.039213054656982424, 0.04110038375854492, 0.03960105514526367, 0.03994617462158203, 0.039566398620605465, 0.039107391357421875, 0.03903807830810547, 0.03914854431152344, 0.039222335815429686, 0.03953871917724609, 0.04079644775390625, 0.03949631881713867, 0.039280769348144534, 0.03933171081542969, 0.03909142303466797, 0.03944502258300781, 0.03941596984863281, 0.03946710586547852, 0.03918643188476562, 0.03942412948608399, 0.03976793670654297, 0.039441505432128904, 0.03958025741577149, 0.039907646179199216, 0.03929679870605469, 0.03918467330932617, 0.03928057479858398, 
0.03941785430908203, 0.03976297760009766, 0.039527393341064455, 0.039005374908447264, 0.03915433502197266, 0.03923369598388672, 0.03918643188476562, 0.03942588806152344, 0.03936886215209961, 0.039561088562011716, 0.03941616058349609, 0.039497440338134765, 0.03928684616088867, 0.0391596794128418, 0.040044353485107424, 0.0392457275390625, 0.0390742073059082, 0.039175552368164064, 0.03931107330322266, 0.03904940795898437, 0.03927308654785156, 0.03939952087402344, 0.03917420959472656, 0.038981441497802735, 0.03937254333496094, 0.039239166259765625, 0.03926310348510742, 0.03949939346313477, 0.0393383674621582, 0.039293952941894535, 0.039247039794921876, 0.03938079833984375, 0.039351390838623046, 0.03938396835327149, 0.039366241455078124, 0.03952646255493164, 0.039211647033691406, 0.03941142272949219, 0.039617729187011716, 0.0404797134399414, 0.039255455017089845, 0.03899356842041016, 0.039248001098632815, 0.03914425659179688, 0.03906150436401367, 0.039271678924560544, 0.03952217483520508, 0.03930976104736328, 0.039116287231445314, 0.03915980911254883, 0.03905795288085938, 0.03904751968383789, 0.03890796661376953, 0.03926947021484375, 0.039011199951171874, 0.039124160766601565, 0.0391627197265625, 0.03888300704956055, 0.039747486114501955, 0.03910284805297851, 0.039400894165039065, 0.03897398376464844, 0.039007423400878906, 0.0393260498046875, 0.0392320327758789, 0.03913318252563477, 0.03916799926757813, 0.03902620697021485, 0.038991905212402346, 0.03925030517578125, 0.039207134246826175, 0.03907683181762695, 0.03905625534057617, 0.038983680725097655, 0.039110111236572265, 0.03942646408081055, 0.039011489868164065, 0.03916873550415039, 0.03923574447631836, 0.03927452850341797, 0.039182464599609376, 0.039224575042724606, 0.039510719299316405, 0.03910860824584961, 0.03968000030517578, 0.03970659255981445, 0.04000732803344727, 0.039790977478027345, 0.04062822341918945, 0.03998112106323242, 0.03959782409667969, 0.04067929458618164, 0.041417022705078126, 0.03974553680419922, 0.04123017501831055, 0.03938028717041016, 0.03965407943725586, 0.039565471649169924, 0.03946495819091797, 0.03976396942138672, 0.03959603118896484, 0.03953571319580078]",tokens/s,25.449090518552698,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,873.938944,655.294464,0.0,260.046848,258.555392,s,1,7.4411923828125,7.4411923828125,0.0,7.4411923828125,7.4411923828125,7.4411923828125,7.4411923828125,[7.4411923828125],,kWh,1.4626477424997118e-05,1.6062564338542084e-06,4.513892499995176e-06,2.07466263588465e-05,,MB,1319.145472,751.763456,0.0,341.835776,317.950464,s,18,0.19939472007751463,0.011077484448750811,0.0007115572758054694,0.010883920192718506,0.01117632646560669,0.01184773926734924,0.013511051759719844,"[0.010726367950439453, 0.010890496253967286, 0.010988991737365722, 0.010856703758239746, 0.01148083209991455, 0.011015359878540038, 0.010823936462402343, 0.010953760147094727, 
0.010739520072937011, 0.010793663978576661, 0.010769696235656738, 0.011025983810424805, 0.010902239799499511, 0.010877344131469726, 0.01104582405090332, 0.010825599670410156, 0.010751520156860352, 0.0139268798828125]",tokens/s,23109.939913196507,kWh,3.150733660412048e-07,3.474662900105478e-08,1.616532970691219e-07,5.114732921113814e-07,tokens/kWh,500514892.8563252,MB,1333.055488,779.026432,0.0,369.098752,317.953024,s,18,10.063167541503903,0.5590648634168837,0.0031527165675258478,0.5581517944335938,0.5640977416992188,0.5648694519042968,0.5651240368652344,"[0.5586320190429688, 0.559837646484375, 0.5648132934570312, 0.5637910766601563, 0.56099658203125, 0.5651876831054687, 0.5630997924804687, 0.5564115600585937, 0.5565952758789062, 0.5572677612304687, 0.5553729248046875, 0.5580917358398437, 0.5545128784179687, 0.5589156494140625, 0.55782763671875, 0.5582118530273438, 0.5575255126953125, 0.55607666015625]",tokens/s,112.68817649343515,kWh,1.6115394765672077e-05,1.777027075129568e-06,6.047225126819507e-06,2.393964696762115e-05,tokens/kWh,2631617.754648126,,s,1134,10.054431803703311,0.008866341978574345,0.00013876498947876364,0.008839359760284424,0.008989321804046631,0.009089022159576415,0.00937534260749817,"[0.008696415901184081, 0.00890287971496582, 0.008814208030700684, 0.00887337589263916, 0.008897279739379883, 0.008929280281066895, 0.00889241600036621, 0.008814911842346191, 0.00882259178161621, 0.008838272094726563, 0.009082719802856446, 0.008922016143798828, 0.008884448051452637, 0.008914719581604004, 0.00890060806274414, 0.008834912300109863, 0.008976575851440429, 0.008871904373168945, 0.008902463912963867, 0.008875359535217285, 0.008836159706115723, 0.008844927787780761, 0.008833184242248535, 0.008888319969177246, 0.00882688045501709, 0.008797375679016114, 0.008774463653564452, 0.008787839889526368, 0.00891654396057129, 0.008857184410095215, 0.00884335994720459, 0.008941439628601075, 0.00886620807647705, 0.008852160453796387, 0.008825056076049805, 0.008850527763366698, 0.008800864219665527, 0.008890368461608887, 0.008830975532531739, 0.008824831962585449, 0.008785152435302734, 0.00880297565460205, 0.008880224227905274, 0.008826144218444824, 0.008827615737915039, 0.008820735931396484, 0.008817919731140137, 0.008876799583435058, 0.00883471965789795, 0.00881481647491455, 0.008794143676757812, 0.008809951782226563, 0.008894207954406739, 0.008811391830444337, 0.00905136013031006, 0.008822976112365722, 0.008869664192199707, 0.008956735610961913, 0.008833024024963379, 0.008894463539123536, 0.008887616157531739, 0.008861568450927735, 0.008835424423217773, 0.00864463996887207, 0.008844544410705566, 0.008923423767089845, 0.00891539192199707, 0.00886137580871582, 0.008851648330688477, 0.008812704086303712, 0.00882588768005371, 0.009081567764282227, 0.008823007583618163, 0.008828384399414062, 0.008818976402282715, 0.008928735733032227, 0.008883999824523927, 0.008854528427124024, 0.00884547233581543, 0.008795519828796387, 0.008854240417480468, 0.008789183616638184, 0.009019968032836913, 0.008924351692199708, 0.008846143722534179, 0.008855648040771484, 0.008789248466491699, 0.00878275203704834, 0.008724224090576172, 0.008804351806640624, 0.008854592323303222, 0.008811327934265136, 0.008792192459106445, 0.00880844783782959, 0.008902463912963867, 0.008840895652770997, 0.008845919609069825, 0.008777631759643554, 0.008747008323669434, 0.008820735931396484, 0.008762592315673829, 0.008765727996826171, 0.008800640106201171, 0.008779359817504882, 0.008828991889953614, 0.008943967819213867, 0.00875327968597412, 
0.008773280143737792, 0.008825471878051758, 0.008971487998962403, 0.0090251522064209, 0.009048959732055665, 0.009054207801818847, 0.008970239639282226, 0.008893952369689942, 0.008879743576049804, 0.008866687774658204, 0.009093119621276855, 0.008986623764038085, 0.008951807975769043, 0.008972288131713867, 0.009048383712768554, 0.009010304450988769, 0.009102080345153809, 0.009015232086181641, 0.009069472312927247, 0.009036352157592773, 0.00890499210357666, 0.008979840278625488, 0.00883148765563965, 0.008892448425292968, 0.008890175819396972, 0.008944767951965332, 0.008866687774658204, 0.008934816360473634, 0.008869695663452148, 0.0088439359664917, 0.0088919677734375, 0.008968768119812011, 0.008919103622436524, 0.008842687606811523, 0.008860159873962402, 0.008830975532531739, 0.008879648208618165, 0.008872415542602539, 0.008818079948425293, 0.008783647537231445, 0.008784704208374024, 0.008879199981689453, 0.008892671585083008, 0.009140928268432618, 0.008806367874145508, 0.008824383735656739, 0.008878527641296386, 0.008873984336853028, 0.009246496200561524, 0.00918950366973877, 0.009011296272277832, 0.008970080375671386, 0.008999072074890136, 0.009164799690246582, 0.00911359977722168, 0.00927948760986328, 0.00930799961090088, 0.00924454402923584, 0.00912332820892334, 0.009059103965759277, 0.009127936363220214, 0.008980480194091797, 0.009166303634643555, 0.008900511741638183, 0.009051936149597168, 0.008858688354492187, 0.008837984085083008, 0.008901535987854004, 0.00894364833831787, 0.008996864318847657, 0.008888319969177246, 0.008937184333801269, 0.00890499210357666, 0.008953856468200684, 0.0089683837890625, 0.008858528137207031, 0.00911036777496338, 0.009080287933349609, 0.008854111671447755, 0.00881065559387207, 0.008826720237731933, 0.008921088218688965, 0.008660927772521972, 0.008849472045898437, 0.008801504135131836, 0.008815391540527344, 0.008850527763366698, 0.008889151573181151, 0.008833120346069336, 0.009223584175109864, 0.008841535568237305, 0.00893564796447754, 0.008924896240234376, 0.00901360034942627, 0.008869888305664063, 0.008970239639282226, 0.008904704093933105, 0.00908681583404541, 0.008882335662841797, 0.008865311622619628, 0.008970463752746582, 0.008945919990539552, 0.008898176193237305, 0.008923328399658204, 0.008933664321899414, 0.008928511619567871, 0.00894223976135254, 0.008912896156311035, 0.009047167778015137, 0.008901503562927246, 0.008908703804016113, 0.008929375648498536, 0.00900476837158203, 0.009005151748657226, 0.008982720375061036, 0.00887615966796875, 0.008824383735656739, 0.008961343765258789, 0.008887295722961425, 0.008938495635986327, 0.008920063972473144, 0.009129983901977539, 0.009041343688964844, 0.009002976417541504, 0.00898960018157959, 0.009161919593811034, 0.008932191848754883, 0.008873824119567872, 0.008865823745727539, 0.008906368255615235, 0.008888480186462402, 0.008931327819824218, 0.008931327819824218, 0.009047776222229005, 0.009058591842651367, 0.008966143608093263, 0.008860735893249512, 0.008909760475158691, 0.009124896049499513, 0.009132512092590332, 0.008905216217041016, 0.009025535583496093, 0.009060352325439454, 0.008881312370300293, 0.008835455894470216, 0.008610112190246582, 0.008862751960754395, 0.008855999946594239, 0.0088721923828125, 0.008881664276123047, 0.008894911766052246, 0.008886591911315918, 0.009281503677368163, 0.008832063674926758, 0.008765727996826171, 0.008897215843200683, 0.008869088172912598, 0.008942208290100097, 0.008964287757873534, 0.008867839813232421, 0.008835040092468261, 0.008800479888916015, 0.0088220796585083, 
0.009029120445251464, 0.009063360214233398, 0.008986111640930175, 0.008921152114868163, 0.00890931224822998, 0.008851455688476563, 0.008818047523498535, 0.008752832412719727, 0.008813504219055177, 0.008757247924804687, 0.008844927787780761, 0.008800928115844726, 0.008863871574401855, 0.00876643180847168, 0.008747648239135743, 0.00881980800628662, 0.008874912261962891, 0.008779775619506837, 0.008857600212097168, 0.008992639541625977, 0.008945088386535644, 0.008954015731811524, 0.008911231994628906, 0.009695391654968261, 0.008935423851013183, 0.008964096069335938, 0.00892080020904541, 0.008885760307312012, 0.00885206413269043, 0.008826175689697265, 0.008816639900207519, 0.008870911598205567, 0.008902784347534179, 0.008931072235107421, 0.008888480186462402, 0.008830816268920898, 0.008738816261291504, 0.008994815826416015, 0.008809887886047363, 0.00885206413269043, 0.008876031875610351, 0.008976384162902832, 0.008904704093933105, 0.009381888389587402, 0.008841216087341308, 0.008732704162597657, 0.009003392219543457, 0.008978240013122559, 0.009071999549865723, 0.008950143814086915, 0.008972543716430664, 0.008873984336853028, 0.008843263626098634, 0.00884768009185791, 0.008969311714172363, 0.00904368019104004, 0.008938336372375488, 0.008890175819396972, 0.008972512245178223, 0.008988672256469727, 0.008986623764038085, 0.009901920318603515, 0.009130144119262695, 0.008978431701660156, 0.008885631561279297, 0.00889510440826416, 0.009031935691833496, 0.00886143970489502, 0.008861696243286133, 0.00887615966796875, 0.010073984146118164, 0.008867839813232421, 0.00887993621826172, 0.008902463912963867, 0.008865856170654296, 0.00888044834136963, 0.008859295845031739, 0.008950112342834472, 0.009028863906860352, 0.008896415710449218, 0.008811552047729493, 0.008777536392211915, 0.008837216377258301, 0.00878816032409668, 0.008865119934082032, 0.01015436840057373, 0.010173983573913574, 0.00917955207824707, 0.008904864311218261, 0.008853407859802246, 0.008812543869018554, 0.008903008460998535, 0.008727392196655274, 0.008933343887329102, 0.008815648078918457, 0.00880620765686035, 0.00878819179534912, 0.008779552459716797, 0.008841312408447266, 0.008770912170410157, 0.008935999870300294, 0.008761343955993652, 0.00875216007232666, 0.008903072357177735, 0.008790592193603515, 0.008736767768859864, 0.008779328346252441, 0.008786368370056152, 0.008630463600158692, 0.008860159873962402, 0.008972384452819825, 0.008847104072570802, 0.00886614418029785, 0.00919961643218994, 0.00891651153564453, 0.009375712394714356, 0.009331071853637695, 0.008909184455871582, 0.008920831680297852, 0.008959391593933105, 0.008946271896362304, 0.009013248443603515, 0.008830911636352539, 0.008806719779968261, 0.008791808128356934, 0.008867839813232421, 0.00881049633026123, 0.00909721565246582, 0.009187328338623046, 0.009062623977661133, 0.009024831771850586, 0.009107616424560547, 0.009240896224975585, 0.009114975929260255, 0.008986495971679687, 0.008944128036499023, 0.008945152282714844, 0.008964799880981445, 0.008900704383850098, 0.008937472343444825, 0.008847104072570802, 0.009066752433776856, 0.008882176399230958, 0.008816896438598633, 0.00878985595703125, 0.008843168258666993, 0.008861536026000977, 0.00880832004547119, 0.009095071792602539, 0.008870271682739258, 0.008849408149719238, 0.008889375686645508, 0.008815263748168945, 0.008830880165100098, 0.008839327812194825, 0.008911104202270507, 0.008871935844421386, 0.00898252773284912, 0.00882688045501709, 0.008835071563720704, 0.008953248023986816, 0.008790623664855958, 
0.008804351806640624, 0.00890880012512207, 0.008912639617919922, 0.008887999534606934, 0.008995391845703126, 0.008855551719665527, 0.008896512031555176, 0.008839167594909669, 0.008886272430419923, 0.008570816040039063, 0.008754048347473144, 0.00875820827484131, 0.0088406400680542, 0.008876640319824219, 0.008798175811767579, 0.008783072471618652, 0.008784671783447266, 0.008843263626098634, 0.008832544326782226, 0.008906368255615235, 0.008801119804382324, 0.008916383743286132, 0.008843680381774903, 0.008783424377441406, 0.008764320373535157, 0.0088570556640625, 0.008782079696655273, 0.008806303977966308, 0.008804415702819824, 0.00876483154296875, 0.008759200096130371, 0.008909536361694335, 0.008908927917480469, 0.008790047645568847, 0.00873686408996582, 0.008828672409057617, 0.008906784057617188, 0.008861120223999024, 0.008777503967285155, 0.008738592147827148, 0.00874953556060791, 0.008783967971801757, 0.008858016014099121, 0.008812704086303712, 0.008800095558166503, 0.00880793571472168, 0.008786432266235352, 0.008892671585083008, 0.009213312149047852, 0.008842656135559082, 0.008907744407653809, 0.008864831924438477, 0.008817600250244141, 0.008762687683105468, 0.008850111961364746, 0.00873846435546875, 0.008929280281066895, 0.008813247680664063, 0.008815744400024413, 0.008996416091918945, 0.008857664108276367, 0.008794015884399414, 0.008846464157104493, 0.008774815559387207, 0.008858336448669433, 0.008853471755981446, 0.008777759552001952, 0.00876905632019043, 0.008755680084228515, 0.008853504180908203, 0.008831232070922852, 0.008848480224609375, 0.008577568054199219, 0.008760767936706543, 0.008821727752685547, 0.008799263954162598, 0.008934368133544921, 0.009063712120056152, 0.00890339183807373, 0.008826848030090331, 0.008939552307128906, 0.008970239639282226, 0.008859519958496093, 0.008847647666931153, 0.008849023818969726, 0.008849120140075684, 0.008822784423828126, 0.008770048141479492, 0.008783871650695801, 0.008840736389160156, 0.008827712059020996, 0.008863200187683105, 0.008845503807067872, 0.009011103630065917, 0.008814687728881837, 0.00880031967163086, 0.008890111923217774, 0.008757439613342285, 0.008820735931396484, 0.008773632049560547, 0.008839167594909669, 0.008771583557128907, 0.008755200386047364, 0.008740863800048827, 0.00886518383026123, 0.00885001564025879, 0.008832832336425782, 0.00876153564453125, 0.008847423553466797, 0.008990655899047852, 0.008824319839477538, 0.008827391624450684, 0.008742176055908202, 0.008733344078063966, 0.008767552375793457, 0.008871935844421386, 0.00877785587310791, 0.008779647827148438, 0.008773504257202148, 0.008834367752075195, 0.008773759841918945, 0.008778240203857422, 0.008749343872070313, 0.008885791778564453, 0.008815072059631347, 0.008717951774597167, 0.008747296333312988, 0.008828479766845703, 0.00875766372680664, 0.008846464157104493, 0.009016223907470703, 0.008859135627746583, 0.008773695945739747, 0.008867679595947265, 0.00889254379272461, 0.00863920021057129, 0.008769344329833985, 0.008857919692993163, 0.008896512031555176, 0.00894156837463379, 0.008899999618530273, 0.009499072074890137, 0.008808128356933594, 0.008757344245910645, 0.008849791526794434, 0.008762368202209473, 0.008765439987182617, 0.00886844825744629, 0.008785951614379882, 0.008777600288391113, 0.008788479804992675, 0.008761311531066894, 0.008732928276062011, 0.008785696029663085, 0.008835136413574219, 0.008828351974487305, 0.008825247764587402, 0.008885600090026856, 0.008751487731933594, 0.009046112060546875, 0.008783935546875, 0.008712703704833985, 
0.008728287696838378, 0.00872047996520996, 0.008795295715332031, 0.008835840225219726, 0.008801695823669433, 0.008720928192138672, 0.008720735549926757, 0.008759008407592774, 0.00874931240081787, 0.008945343971252441, 0.0088373441696167, 0.00878112030029297, 0.008938112258911134, 0.008949664115905762, 0.008801312446594238, 0.008770527839660645, 0.008891807556152344, 0.008792672157287598, 0.008763199806213378, 0.008794303894042968, 0.008757247924804687, 0.008792287826538086, 0.008924960136413574, 0.008841440200805665, 0.008828703880310058, 0.008828927993774414, 0.008843296051025391, 0.008871904373168945, 0.009116800308227539, 0.008915455818176269, 0.008954239845275878, 0.008984512329101562, 0.008844511985778809, 0.008819552421569824, 0.008830975532531739, 0.008928576469421386, 0.008539392471313476, 0.008749823570251464, 0.00880844783782959, 0.008757023811340333, 0.008847071647644042, 0.008870400428771973, 0.008867839813232421, 0.008812543869018554, 0.008755200386047364, 0.008787967681884766, 0.008971424102783203, 0.008829119682312012, 0.008770303726196289, 0.008722335815429687, 0.00871833610534668, 0.008798208236694336, 0.008830975532531739, 0.008730624198913574, 0.00870809555053711, 0.008744959831237792, 0.008793248176574706, 0.008810463905334473, 0.009030207633972168, 0.008826848030090331, 0.008778079986572266, 0.008894463539123536, 0.008752960205078125, 0.008726719856262208, 0.0087073917388916, 0.008811200141906738, 0.008825984001159668, 0.008813535690307617, 0.008815903663635254, 0.008796287536621093, 0.008794816017150878, 0.008882271766662597, 0.008862527847290039, 0.008805120468139648, 0.008767647743225098, 0.008742912292480469, 0.008901727676391602, 0.008842047691345214, 0.008734496116638183, 0.008737024307250976, 0.008753215789794922, 0.008816703796386719, 0.008912832260131835, 0.008795392036437989, 0.008844032287597656, 0.008763168334960937, 0.008839391708374023, 0.008873791694641113, 0.008906944274902344, 0.008930720329284669, 0.008785663604736328, 0.008827743530273437, 0.008775775909423827, 0.008730143547058105, 0.008912991523742676, 0.008868127822875977, 0.008863743782043456, 0.008822784423828126, 0.008804351806640624, 0.009036288261413575, 0.008863776206970216, 0.008851072311401368, 0.00883897590637207, 0.00878441619873047, 0.008890624046325684, 0.008850527763366698, 0.00881116771697998, 0.008814592361450196, 0.008930848121643067, 0.008802783966064454, 0.008787103652954101, 0.00877455997467041, 0.008758336067199708, 0.008917344093322755, 0.008733216285705567, 0.008816287994384766, 0.008773759841918945, 0.008700127601623536, 0.008742912292480469, 0.00887507152557373, 0.008868800163269044, 0.008837151527404784, 0.008726495742797852, 0.008720383644104004, 0.008726655960083008, 0.009335968017578125, 0.008852191925048828, 0.009571807861328125, 0.00894825553894043, 0.008861696243286133, 0.008806528091430664, 0.00880339241027832, 0.008902815818786622, 0.008870559692382813, 0.00881980800628662, 0.008767775535583496, 0.008821632385253906, 0.008794048309326171, 0.008863519668579102, 0.009127967834472657, 0.00897433567047119, 0.008904704093933105, 0.008919136047363281, 0.0088306884765625, 0.008873472213745117, 0.008800959587097168, 0.00880246353149414, 0.008867903709411621, 0.008821855545043946, 0.00878985595703125, 0.00889737606048584, 0.008783935546875, 0.008896448135375977, 0.008779392242431641, 0.008753536224365234, 0.008722784042358399, 0.0087325439453125, 0.008810272216796875, 0.008820480346679687, 0.008768863677978515, 0.008825632095336914, 0.008798336029052735, 
0.008509440422058106, 0.008810272216796875, 0.008795552253723145, 0.008790399551391602, 0.008730143547058105, 0.008757247924804687, 0.008852448463439942, 0.008802240371704101, 0.008777503967285155, 0.008746527671813964, 0.008747424125671387, 0.008808192253112792, 0.008816384315490722, 0.008801055908203124, 0.00871014404296875, 0.008714240074157715, 0.00882595157623291, 0.008733599662780762, 0.008760479927062989, 0.008766304016113282, 0.008740863800048827, 0.008840479850769044, 0.008774368286132813, 0.008797823905944824, 0.008886655807495118, 0.008748096466064453, 0.008790431976318359, 0.008753664016723632, 0.008749088287353516, 0.008722432136535644, 0.008814592361450196, 0.008847359657287598, 0.008859583854675293, 0.008883616447448731, 0.00882960033416748, 0.008766752243041992, 0.008829248428344727, 0.008864159584045411, 0.008763392448425293, 0.008754336357116699, 0.008733087539672852, 0.008787584304809571, 0.008844096183776856, 0.008914143562316894, 0.008769920349121094, 0.008786175727844238, 0.00880246353149414, 0.008771039962768555, 0.008755040168762208, 0.008786751747131347, 0.008742848396301269, 0.00881657600402832, 0.00878003215789795, 0.008761119842529298, 0.00871228790283203, 0.008798080444335938, 0.008832415580749512, 0.008821344375610352, 0.009197376251220702, 0.00874720001220703, 0.008817791938781738, 0.00884006404876709, 0.008943455696105957, 0.008640512466430664, 0.00880844783782959, 0.00894156837463379, 0.008839167594909669, 0.008965408325195313, 0.008848095893859863, 0.008749055862426757, 0.008927231788635253, 0.008881759643554688, 0.008740384101867676, 0.00885043239593506, 0.008740351676940919, 0.008756832122802734, 0.008844063758850098, 0.00881049633026123, 0.008834112167358398, 0.008965056419372558, 0.00880454444885254, 0.008935232162475586, 0.008867584228515624, 0.008849663734436035, 0.008807583808898926, 0.008853952407836914, 0.008790207862854003, 0.008757696151733399, 0.009234208106994629, 0.008841183662414551, 0.009639967918395996, 0.009012895584106445, 0.008845024108886719, 0.00890124797821045, 0.008851455688476563, 0.008785920143127441, 0.008766464233398438, 0.008796416282653808, 0.00885427188873291, 0.008900575637817383, 0.008753087997436524, 0.008732128143310546, 0.008752896308898925, 0.008816864013671875, 0.008876704216003417, 0.009280896186828614, 0.008807040214538575, 0.008794112205505371, 0.008874143600463867, 0.008858688354492187, 0.008899552345275879, 0.008793696403503418, 0.008771807670593261, 0.008820735931396484, 0.008758432388305664, 0.008858464241027831, 0.009440640449523925, 0.008871904373168945, 0.00880463981628418, 0.008740544319152832, 0.008766143798828126, 0.008745247840881348, 0.00883683204650879, 0.008879679679870606, 0.008825568199157715, 0.008845024108886719, 0.008599648475646973, 0.008887136459350586, 0.008775039672851563, 0.008849920272827149, 0.008818816184997559, 0.008841183662414551, 0.00886787223815918, 0.00883414363861084, 0.008862624168395996, 0.008796256065368652, 0.008818592071533203, 0.008947711944580078, 0.00887980842590332, 0.008816127777099609, 0.008866463661193848, 0.008833087921142579, 0.008877984046936035, 0.008818592071533203, 0.008771871566772461, 0.008828415870666503, 0.008894335746765137, 0.008822848320007324, 0.008979007720947266, 0.008754207611083984, 0.00876028823852539, 0.008857248306274414, 0.008870240211486816, 0.008853407859802246, 0.008785887718200684, 0.008789407730102538, 0.008847807884216309, 0.008787327766418458, 0.008984767913818359, 0.008788736343383789, 0.008764800071716308, 0.008834783554077148, 
0.008805120468139648, 0.00875113582611084, 0.008767200469970702, 0.008792448043823241, 0.008843263626098634, 0.008822784423828126, 0.009174495697021485, 0.008815072059631347, 0.008947903633117676, 0.008968064308166504, 0.008839167594909669, 0.008789119720458984, 0.008801088333129883, 0.008878144264221191, 0.008823904037475586, 0.008833951950073242, 0.008828512191772461, 0.008822463989257812, 0.0088951997756958, 0.008970239639282226, 0.008886272430419923, 0.008983584403991699, 0.009073599815368653, 0.008810527801513672, 0.008812000274658203, 0.008790464401245117, 0.008831071853637695, 0.008551487922668457, 0.008846272468566894, 0.008812543869018554, 0.008817888259887696, 0.008800224304199218, 0.008841312408447266, 0.008819135665893554, 0.008833184242248535, 0.008832480430603027, 0.008993184089660645, 0.008876288414001465, 0.008816415786743165, 0.009124064445495605, 0.008947839736938476, 0.008978464126586915, 0.008940544128417969, 0.008813407897949219, 0.008752703666687012, 0.00883148765563965, 0.008868895530700683, 0.008844191551208497, 0.00878326416015625, 0.008734368324279785, 0.008739775657653808, 0.008872032165527344, 0.008826208114624024, 0.008905280113220214, 0.00891113567352295, 0.008803839683532716, 0.008798591613769531, 0.008848896026611328, 0.00874672031402588, 0.008736384391784669, 0.00880742359161377, 0.008839167594909669, 0.008808639526367188, 0.00878163242340088, 0.00915065574645996, 0.008877535820007325, 0.008784223556518554, 0.008730112075805664, 0.008794912338256836, 0.008828160285949706, 0.008864224433898926, 0.008730624198913574, 0.008801600456237794, 0.008786144256591797, 0.008819168090820312, 0.008798208236694336, 0.009343168258666993, 0.008835968017578124, 0.00907260799407959, 0.009374591827392578, 0.008831071853637695, 0.008866080284118652, 0.008912608146667481, 0.008919072151184083, 0.008803872108459473, 0.008769856452941895, 0.008740127563476563, 0.008739839553833008, 0.00888815975189209, 0.008801664352416992, 0.008638688087463379, 0.008779904365539552, 0.008810015678405761, 0.008840864181518554, 0.008870719909667969, 0.00879753589630127, 0.00882755184173584, 0.008829119682312012, 0.008814687728881837, 0.00894761562347412, 0.008859295845031739, 0.00883523178100586, 0.008859647750854491, 0.008820063591003418, 0.008827520370483398, 0.008820832252502442, 0.00884553623199463, 0.008838272094726563, 0.00886240005493164, 0.008859552383422852, 0.008845312118530273, 0.008785823822021484, 0.00890384006500244, 0.008821696281433105, 0.008822784423828126, 0.008737024307250976, 0.008772768020629883, 0.008743616104125976, 0.008932255744934081, 0.008796223640441895, 0.00875001621246338, 0.008773183822631836, 0.008718784332275391, 0.008828927993774414, 0.008780927658081054, 0.008731648445129395, 0.008760640144348145, 0.008747584342956543, 0.008764543533325196, 0.008775615692138672, 0.008885184288024902, 0.00880624008178711, 0.00879849624633789, 0.008777600288391113, 0.00880838394165039, 0.008773152351379394, 0.008731264114379883, 0.008828831672668456, 0.008813632011413575, 0.00882579231262207, 0.008789248466491699, 0.008773951530456544, 0.008763872146606445, 0.008826592445373534, 0.008936032295227051, 0.008917887687683106, 0.008903712272644042, 0.00886143970489502, 0.008935423851013183, 0.009125887870788574, 0.009605119705200196, 0.009017696380615234, 0.009166175842285157, 0.0087193603515625, 0.008868864059448242, 0.00880620765686035, 0.008820927619934082, 0.008824864387512206, 0.008843232154846191, 0.00892518424987793, 0.008889439582824708, 0.008778335571289063, 
0.008844960212707519, 0.008774016380310059, 0.008775360107421875, 0.008723039627075196, 0.008768575668334961, 0.008801216125488282, 0.009195232391357422, 0.008799967765808105, 0.008796704292297364, 0.008781855583190917, 0.008905823707580567, 0.008811424255371094, 0.008771583557128907, 0.008822336196899414, 0.008902079582214355, 0.008829216003417968, 0.008770272254943848, 0.008764960289001466, 0.00876147174835205, 0.008742976188659667, 0.008816767692565917, 0.008761216163635253, 0.008745247840881348, 0.008800224304199218, 0.008785663604736328, 0.008821023941040038, 0.008783840179443359, 0.008863648414611817, 0.008811871528625488, 0.008796192169189454, 0.008825599670410156, 0.008804351806640624, 0.008793215751647949, 0.008770432472229004, 0.008859423637390137, 0.008881759643554688, 0.008894559860229492, 0.008737312316894532, 0.008769248008728027, 0.008778335571289063, 0.008863264083862305, 0.008815936088562012, 0.008842016220092773, 0.008832768440246582, 0.008895872116088867, 0.008864704132080079, 0.008781824111938476, 0.008865983963012695, 0.00885091209411621, 0.008851936340332031, 0.008806400299072266, 0.00878707218170166, 0.008847135543823242, 0.00875823974609375]",tokens/s,112.78608499610277,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,4293.066752,4878.958592,0.0,4483.710976,4465.672704,s,1,10.4649501953125,10.4649501953125,0.0,10.4649501953125,10.4649501953125,10.4649501953125,10.4649501953125,[10.4649501953125],,kWh,0.0001039129356708372,1.1454931439208603e-05,3.2453081518002413e-05,0.00014782094862804822,,MB,2150.473728,5302.583296,0.0,4892.655616,4837.669376,s,10,1.8069629058837893,0.18069629058837888,0.00038065052024590545,0.18078084564208985,0.18096441345214842,0.1810408920288086,0.18110207489013672,"[0.18040179443359375, 0.17968453979492188, 0.18072915649414062, 0.1807381134033203, 0.18081753540039064, 0.18074415588378906, 0.18085792541503906, 0.18111737060546876, 0.18092489624023436, 0.18094741821289062]",tokens/s,1416.7418665121404,kWh,5.291679808333072e-06,5.835758006887894e-07,3.5194075774285953e-06,9.394663186450456e-06,tokens/kWh,27249513.358735252,MB,2154.483712,5470.355456,0.0,5060.427776,5014.227968,s,10,15.805345214843749,1.5805345214843751,0.0018606053167179034,1.580326416015625,1.5817849731445313,1.583616668701172,1.5850820251464843,"[1.5804212646484375, 1.5793768310546874, 1.578682373046875, 1.57875439453125, 1.580556884765625, 1.5811287841796875, 1.5802315673828125, 1.5793668212890626, 1.5854483642578125, 1.5813779296875]",tokens/s,39.85993291739867,kWh,4.620386784125182e-05,5.096016547766361e-06,3.064478642057143e-05,8.194467080958962e-05,tokens/kWh,768811.4355403255,,s,630,15.80259872817993,0.02508349004473005,0.00030434212382670045,0.02506243133544922,0.0253934139251709,0.025437215518951416,0.025531290798187257,"[0.025378976821899414, 0.02512076759338379, 0.024822816848754883, 0.024773056030273438, 
0.024729824066162108, 0.02475449562072754, 0.02472492790222168, 0.024762752532958985, 0.02472979164123535, 0.024735807418823242, 0.024715200424194336, 0.02478665542602539, 0.02487286376953125, 0.0248590087890625, 0.024809728622436522, 0.02471443176269531, 0.024785087585449218, 0.02492608070373535, 0.024797439575195312, 0.02476643180847168, 0.024733983993530273, 0.02481113624572754, 0.024938880920410158, 0.024821983337402344, 0.024854207992553713, 0.024904960632324218, 0.024951648712158204, 0.024877056121826172, 0.02660883140563965, 0.0250130558013916, 0.024995840072631836, 0.024965343475341798, 0.02494438362121582, 0.024997119903564454, 0.02495158386230469, 0.02497331237792969, 0.02511052894592285, 0.02531443214416504, 0.025232255935668944, 0.025182207107543944, 0.025568607330322266, 0.02527299118041992, 0.025204320907592774, 0.02526025581359863, 0.025254079818725586, 0.025157663345336916, 0.02517398452758789, 0.0252127685546875, 0.02525609588623047, 0.025364320755004884, 0.025487520217895507, 0.025272319793701172, 0.025306848526000975, 0.025264415740966797, 0.025956064224243163, 0.025404735565185545, 0.02520124816894531, 0.025223552703857424, 0.02531942367553711, 0.025268192291259765, 0.025228960037231445, 0.025341312408447267, 0.02541472053527832, 0.025241567611694337, 0.024945856094360352, 0.024897567749023436, 0.024748863220214842, 0.024667903900146483, 0.02466022491455078, 0.024772607803344726, 0.024819135665893555, 0.024816192626953126, 0.024796703338623046, 0.024764896392822266, 0.02493440055847168, 0.02511193656921387, 0.024814207077026366, 0.0247459831237793, 0.024774816513061522, 0.024821599960327147, 0.024816831588745116, 0.024885248184204102, 0.0248855037689209, 0.024975423812866212, 0.02500249671936035, 0.024913919448852538, 0.024903615951538085, 0.024917728424072267, 0.024973344802856446, 0.024973119735717773, 0.024923648834228516, 0.024953855514526366, 0.025001983642578125, 0.02494054412841797, 0.024975231170654297, 0.025273759841918944, 0.025061439514160157, 0.02496575927734375, 0.025009567260742188, 0.025055776596069335, 0.02515065574645996, 0.025077856063842774, 0.025155872344970704, 0.02520323181152344, 0.025324735641479492, 0.02512361526489258, 0.025122848510742188, 0.025198591232299804, 0.025158912658691406, 0.025101055145263673, 0.02533580780029297, 0.025401344299316408, 0.025380800247192383, 0.025312448501586916, 0.02529974365234375, 0.025434207916259766, 0.025302080154418944, 0.02525062370300293, 0.02532364845275879, 0.025358335494995117, 0.02531839942932129, 0.025357311248779296, 0.02539107131958008, 0.025418975830078124, 0.025383743286132812, 0.02545254325866699, 0.024966751098632813, 0.024760704040527343, 0.02467840003967285, 0.024513824462890625, 0.024592735290527343, 0.024573312759399415, 0.02459075164794922, 0.024592927932739258, 0.0247807674407959, 0.024873151779174804, 0.024837343215942383, 0.024754880905151367, 0.02472563171386719, 0.024885120391845702, 0.024907167434692384, 0.024883808135986327, 0.024856576919555663, 0.024833951950073242, 0.02487715148925781, 0.024868831634521485, 0.02485865592956543, 0.024861696243286133, 0.02490671920776367, 0.02498966407775879, 0.025004095077514648, 0.02513100814819336, 0.02510211181640625, 0.025188575744628905, 0.0251146240234375, 0.025048095703125, 0.025091039657592774, 0.025102527618408203, 0.025066688537597658, 0.025172000885009767, 0.025217727661132814, 0.02516694450378418, 0.025074495315551757, 0.025147359848022462, 0.02509008026123047, 0.025082944869995117, 0.02503715133666992, 0.02510710334777832, 
0.025407424926757814, 0.025047040939331053, 0.02502000045776367, 0.025257408142089845, 0.02537980842590332, 0.025324607849121095, 0.02524880027770996, 0.025276544570922852, 0.025280288696289063, 0.025384960174560548, 0.02524569511413574, 0.025341535568237306, 0.025313695907592772, 0.025329343795776366, 0.025327840805053712, 0.025438079833984373, 0.025393375396728514, 0.025405439376831054, 0.025364479064941405, 0.02533184051513672, 0.025382783889770506, 0.025343679428100587, 0.024797119140625, 0.024867263793945313, 0.024775999069213867, 0.024736127853393554, 0.02468176078796387, 0.024713279724121094, 0.02471388816833496, 0.024757440567016602, 0.02466694450378418, 0.02462918472290039, 0.024659296035766602, 0.02474671936035156, 0.024793088912963866, 0.024771808624267578, 0.024777055740356446, 0.024880735397338868, 0.024873823165893556, 0.024896831512451173, 0.024916416168212892, 0.02489779281616211, 0.024923231124877928, 0.024877920150756835, 0.02491935920715332, 0.02497203254699707, 0.024981727600097658, 0.02493417549133301, 0.024993759155273437, 0.025053440093994142, 0.025032672882080078, 0.025003360748291015, 0.025002464294433594, 0.02509823989868164, 0.025096288681030275, 0.025103328704833984, 0.025267135620117186, 0.025264127731323242, 0.025302911758422853, 0.02534003257751465, 0.025204736709594725, 0.025181631088256835, 0.025241952896118164, 0.025413568496704102, 0.025181888580322265, 0.025207231521606446, 0.025184415817260743, 0.025153535842895508, 0.025223167419433593, 0.025296543121337892, 0.025239871978759765, 0.025118751525878905, 0.025165824890136718, 0.025154592514038086, 0.025210880279541017, 0.02527507209777832, 0.025329631805419923, 0.025337503433227538, 0.025407392501831053, 0.025328384399414063, 0.025370624542236327, 0.025447967529296876, 0.025432640075683594, 0.025331615447998047, 0.025241600036621094, 0.024887296676635744, 0.024879104614257814, 0.024840192794799806, 0.024758399963378905, 0.024823680877685547, 0.024785120010375975, 0.024743711471557617, 0.024919456481933593, 0.024916479110717774, 0.02481315231323242, 0.02479871940612793, 0.02476972770690918, 0.024794784545898438, 0.024768672943115234, 0.024802911758422853, 0.024751775741577147, 0.024813823699951172, 0.02484230422973633, 0.02475667190551758, 0.024737184524536132, 0.02498620796203613, 0.02507980728149414, 0.025030656814575194, 0.02489958381652832, 0.024887487411499022, 0.025031648635864257, 0.025060192108154297, 0.02502230453491211, 0.02502057647705078, 0.025036800384521486, 0.025038240432739257, 0.025024639129638673, 0.025037279129028322, 0.025093599319458006, 0.025098751068115235, 0.025091615676879883, 0.025108991622924806, 0.02540083122253418, 0.02540185546875, 0.025255935668945313, 0.025244895935058593, 0.02532841682434082, 0.025280511856079102, 0.02528665542602539, 0.02527177619934082, 0.025343679428100587, 0.02545136070251465, 0.025395200729370116, 0.025401344299316408, 0.02539491271972656, 0.02531068801879883, 0.025279296875, 0.025470975875854493, 0.025362432479858397, 0.025282400131225586, 0.0252642879486084, 0.025185407638549803, 0.025182432174682617, 0.025275039672851562, 0.02532966423034668, 0.02540345573425293, 0.025481151580810546, 0.02501180839538574, 0.024852575302124022, 0.024852800369262695, 0.024819711685180663, 0.024760351181030274, 0.02470908737182617, 0.024861791610717773, 0.024851360321044923, 0.024833696365356445, 0.024809919357299804, 0.02477609634399414, 0.024888992309570312, 0.02486966323852539, 0.024864288330078126, 0.024848928451538087, 0.024792383193969727, 0.024890047073364258, 
0.025016319274902343, 0.024922111511230468, 0.024897695541381836, 0.02497724723815918, 0.024946687698364257, 0.0249036808013916, 0.02495052719116211, 0.02502681541442871, 0.024989696502685548, 0.0249036808013916, 0.024921791076660156, 0.025003744125366212, 0.0251312313079834, 0.02527680015563965, 0.024801279067993166, 0.024919071197509766, 0.025037792205810545, 0.025038848876953124, 0.025032543182373048, 0.02505686378479004, 0.024994304656982422, 0.02533510398864746, 0.025035520553588868, 0.02525814437866211, 0.025255775451660155, 0.02532761573791504, 0.02537676811218262, 0.025300991058349608, 0.025280511856079102, 0.025308767318725587, 0.025239967346191407, 0.02536038398742676, 0.025397247314453125, 0.025319232940673828, 0.025303232192993165, 0.02534809684753418, 0.025327072143554688, 0.025241439819335937, 0.02543996810913086, 0.025471744537353517, 0.025450719833374023, 0.025450496673583983, 0.025450496673583983, 0.025436159133911132, 0.025645055770874024, 0.025417728424072264, 0.02519584083557129, 0.024853376388549803, 0.02471299171447754, 0.024602527618408202, 0.02470911979675293, 0.02488047981262207, 0.024758047103881835, 0.02469171142578125, 0.024673887252807617, 0.024852767944335937, 0.024847488403320312, 0.024800224304199217, 0.024755584716796876, 0.024848928451538087, 0.025014015197753908, 0.024940128326416015, 0.024796831130981446, 0.024888288497924804, 0.024899616241455078, 0.024936447143554686, 0.024830015182495117, 0.024905664443969727, 0.024987680435180664, 0.025010143280029297, 0.024954879760742187, 0.024832000732421877, 0.024995199203491213, 0.025078239440917968, 0.025075872421264647, 0.025032703399658202, 0.024997888565063478, 0.025370655059814454, 0.025220928192138673, 0.025079967498779297, 0.025176063537597656, 0.025167423248291014, 0.02510995292663574, 0.025068544387817384, 0.025223167419433593, 0.025217023849487305, 0.025190399169921874, 0.025183359146118165, 0.025145952224731444, 0.025112287521362305, 0.02513363265991211, 0.025145343780517578, 0.025177663803100585, 0.02513350486755371, 0.025264127731323242, 0.025455936431884766, 0.025160383224487305, 0.025343040466308593, 0.025334720611572267, 0.025290752410888673, 0.025438207626342774, 0.025323392868041993, 0.025313024520874024, 0.025512319564819336, 0.025490463256835936, 0.025475423812866212, 0.02551158332824707, 0.025393760681152344, 0.02542777633666992, 0.025227327346801758, 0.024924095153808595, 0.02494054412841797, 0.024767616271972655, 0.02473664093017578, 0.024749216079711915, 0.02478780746459961, 0.024753183364868165, 0.02473878479003906, 0.02473369598388672, 0.024725439071655274, 0.024729408264160157, 0.024796831130981446, 0.024789600372314452, 0.02478291130065918, 0.024776351928710937, 0.024803615570068358, 0.024903520584106446, 0.024850656509399414, 0.024923648834228516, 0.02490208053588867, 0.02487727928161621, 0.024864543914794923, 0.02492006492614746, 0.025056991577148437, 0.025069984436035156, 0.02515977668762207, 0.025044767379760743, 0.025036575317382813, 0.025045215606689455, 0.0251146240234375, 0.024978464126586913, 0.02498454475402832, 0.025012224197387696, 0.0251312313079834, 0.024981279373168946, 0.02513920021057129, 0.025206560134887694, 0.025180383682250975, 0.02515932846069336, 0.025123104095458985, 0.025366336822509765, 0.025260128021240235, 0.025128416061401367, 0.02529158401489258, 0.02526585578918457, 0.025292512893676757, 0.025264608383178712, 0.025208255767822266, 0.025213504791259767, 0.025250848770141603, 0.02517091178894043, 0.02521244812011719, 0.0252127685546875, 
0.025248224258422852, 0.025308992385864256, 0.025443967819213868, 0.02550364875793457, 0.025434944152832033, 0.02531942367553711, 0.025421024322509766, 0.02540355110168457, 0.025449087142944336, 0.025401344299316408, 0.025222272872924806, 0.025035392761230468, 0.02483331108093262, 0.024686880111694336, 0.02468524742126465, 0.024731679916381834, 0.02470275115966797, 0.024680639266967775, 0.024814912796020508, 0.0248624324798584, 0.024946815490722658, 0.02492297554016113, 0.024803327560424804, 0.02482579231262207, 0.02485641670227051, 0.02493667221069336, 0.02484947204589844, 0.02482681655883789, 0.024825664520263673, 0.024903871536254882, 0.024928255081176756, 0.02506342315673828, 0.025024255752563476, 0.025147647857666017, 0.025051103591918946, 0.025002208709716797, 0.02502022361755371, 0.025142911911010743, 0.02499827194213867, 0.024870912551879884, 0.024867967605590822, 0.0250984001159668, 0.02512131118774414, 0.025196735382080077, 0.025206783294677734, 0.025247743606567383, 0.025118719100952147, 0.02515497589111328, 0.025328224182128906, 0.025316383361816405, 0.025227840423583985, 0.025268672943115234, 0.02524291229248047, 0.02968150329589844, 0.025079839706420897, 0.02509609603881836, 0.02511555290222168, 0.025191871643066407, 0.02524790382385254, 0.02539094352722168, 0.02521343994140625, 0.02525539207458496, 0.025338239669799804, 0.02535238456726074, 0.02541366386413574, 0.02538265609741211, 0.025343391418457033, 0.02537353515625, 0.025319520950317382, 0.025340896606445312, 0.025539039611816406, 0.02551036834716797, 0.024875072479248046, 0.02473382377624512, 0.02472332763671875, 0.024762367248535155, 0.024809471130371095, 0.0247576961517334, 0.02487763214111328, 0.02490777587890625, 0.02486800003051758, 0.024957792282104492, 0.02487500762939453, 0.024829952239990235, 0.02485043144226074, 0.024903039932250976, 0.024824352264404298, 0.024780895233154295, 0.02478188705444336, 0.024804288864135743, 0.024774528503417968, 0.024946815490722658, 0.025026559829711914, 0.024949951171875, 0.024905824661254884, 0.024996448516845703, 0.024950912475585937, 0.024999584197998047, 0.025039199829101563, 0.02517100715637207, 0.02521183967590332, 0.025417728424072264, 0.025054336547851563, 0.025008319854736328, 0.025010879516601563, 0.025101472854614258, 0.025121631622314452, 0.025034751892089844, 0.025059328079223633, 0.025057279586791992, 0.025198591232299804, 0.025165824890136718, 0.02524470329284668, 0.02535113525390625, 0.025320608139038085, 0.025254751205444338, 0.025285984039306642, 0.025256607055664064, 0.025266176223754884, 0.025273504257202147, 0.025215391159057618, 0.025223264694213866, 0.02532796859741211, 0.025425504684448243, 0.025409055709838868, 0.02545724868774414, 0.025284704208374024, 0.025243839263916015, 0.02543939208984375, 0.025381727218627928, 0.025603328704833984, 0.02538390350341797, 0.025435583114624023, 0.025416032791137695, 0.025475103378295897]",tokens/s,39.86686056114015,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,2226.302976,2551.119872,0.0,2155.872256,2032.413184,s,1,8.8302705078125,8.8302705078125,0.0,8.8302705078125,8.8302705078125,8.8302705078125,8.8302705078125,[8.8302705078125],,kWh,4.982181362083793e-05,5.4883185529186004e-06,1.5603901372000795e-05,7.091403354575732e-05,,MB,2278.109184,2827.943936,0.0,2418.016256,2279.563776,s,10,0.7768848495483398,0.07768848495483398,0.00018375020280323443,0.07763300704956055,0.07796125717163085,0.07802150688171387,0.07806970664978027,"[0.07808175659179688, 0.07794786834716796, 0.0775453109741211, 0.07749890899658203, 0.0775704345703125, 0.07762175750732422, 0.07768287658691406, 0.07777859497070312, 0.0775130844116211, 0.07764425659179687]",tokens/s,3295.211641066647,kWh,2.320956485185219e-06,2.5596015964888385e-07,1.5457397727461e-06,4.1226564175802024e-06,tokens/kWh,62095885.29093566,MB,2280.640512,2911.830016,0.0,2501.902336,2389.801984,s,10,13.968810546875003,1.3968810546875001,0.007208082407808386,1.3988131713867187,1.4043124389648438,1.4047796325683592,1.4051533874511717,"[1.4042086181640625, 1.399265380859375, 1.403767578125, 1.4041328125, 1.405246826171875, 1.3888050537109375, 1.389106201171875, 1.3983609619140625, 1.38671240234375, 1.3892047119140625]",tokens/s,45.10047565509713,kWh,4.043888521106479e-05,4.46003448440396e-06,2.2972029841453642e-05,6.787094953692238e-05,tokens/kWh,928232.1881429913,,s,630,13.961103496551505,0.022160481740557958,0.00038663754401263264,0.022070960044860838,0.022473354148864748,0.022643639183044434,0.023520151882171634,"[0.023783424377441405, 0.02206924819946289, 0.02187264060974121, 0.021822751998901366, 0.021869279861450194, 0.022290143966674805, 0.02199171257019043, 0.02190540885925293, 0.02216671943664551, 0.022102272033691406, 0.021979711532592774, 0.021938175201416017, 0.021950464248657226, 0.021906784057617187, 0.022032064437866213, 0.021890016555786134, 0.022204160690307617, 0.021960384368896486, 0.022028608322143553, 0.022403327941894532, 0.026284032821655274, 0.02223411178588867, 0.022187007904052734, 0.02222800064086914, 0.022481887817382813, 0.022255008697509765, 0.022736608505249025, 0.02326412773132324, 0.02292531204223633, 0.022677152633666993, 0.02248534393310547, 0.02237225532531738, 0.022280031204223633, 0.022093952178955076, 0.02204431915283203, 0.022130367279052734, 0.022099903106689453, 0.022219615936279295, 0.02222230339050293, 0.022047264099121094, 0.022208192825317382, 0.02295020866394043, 0.022237152099609376, 0.022106143951416017, 0.02213478469848633, 0.022255392074584962, 0.022191648483276368, 0.022223039627075194, 0.022204927444458008, 0.02227609634399414, 0.02227609634399414, 0.022468288421630858, 0.022290143966674805, 0.022190656661987305, 0.022165504455566407, 0.022073375701904298, 0.022046144485473634, 0.021994047164916992, 0.022001663208007814, 0.022024192810058595, 0.02209721565246582, 0.02201055908203125, 0.02207459259033203, 0.022956031799316406, 0.022390783309936522, 0.022339263916015626, 0.022362783432006837, 0.022232736587524414, 0.022413312911987306, 0.02225152015686035, 0.022179840087890625, 0.022124544143676757, 0.02200371170043945, 0.022013952255249023, 0.021951520919799804, 0.022024639129638673, 0.021946880340576173, 0.022091808319091796, 0.02214297676086426, 0.022788095474243163, 0.022384288787841797, 0.022202367782592772, 0.02214672088623047, 0.022090431213378905, 0.022108224868774413, 0.022372064590454103, 0.022130016326904298, 0.022012800216674806, 0.022026239395141603, 0.02191155242919922, 
0.021977088928222657, 0.021936128616333008, 0.02191974449157715, 0.021935712814331054, 0.022019872665405272, 0.022013599395751954, 0.02231983947753906, 0.02264463996887207, 0.022671808242797853, 0.022517663955688477, 0.022202367782592772, 0.02224127960205078, 0.02215116882324219, 0.022138496398925782, 0.021980575561523438, 0.0220927677154541, 0.022255615234375, 0.021946048736572264, 0.021981151580810546, 0.023011680603027343, 0.021925888061523437, 0.02206719970703125, 0.022070816040039062, 0.02244041633605957, 0.022356063842773437, 0.022564512252807617, 0.02239923286437988, 0.022300384521484376, 0.022060384750366212, 0.022098880767822265, 0.02201190376281738, 0.022140928268432617, 0.02204182434082031, 0.022016799926757813, 0.022460416793823244, 0.022029727935791017, 0.022417856216430665, 0.022187295913696288, 0.022159584045410158, 0.022195775985717772, 0.02268992042541504, 0.02261075210571289, 0.022517440795898437, 0.02212076759338379, 0.02230067253112793, 0.022034431457519533, 0.022125696182250975, 0.022194591522216797, 0.02234976005554199, 0.022587423324584962, 0.022409727096557617, 0.0224768009185791, 0.022624256134033204, 0.02253824043273926, 0.022335487365722655, 0.022218751907348632, 0.02218121528625488, 0.022192575454711913, 0.02226924705505371, 0.02247488021850586, 0.022344480514526366, 0.022465599060058593, 0.022282943725585938, 0.022114559173583983, 0.022482656478881837, 0.022022432327270507, 0.02224083137512207, 0.022006111145019533, 0.022171743392944337, 0.02198019218444824, 0.022115135192871095, 0.021938047409057616, 0.022119712829589844, 0.022047264099121094, 0.022360544204711914, 0.022161407470703123, 0.022609920501708985, 0.022616352081298828, 0.022410112380981444, 0.02231587219238281, 0.022378496170043945, 0.022316608428955078, 0.022342079162597655, 0.022161407470703123, 0.02215936088562012, 0.02206675148010254, 0.02205740737915039, 0.0220446720123291, 0.022149120330810547, 0.022108160018920898, 0.02197427177429199, 0.021858335494995117, 0.02206332778930664, 0.022101760864257813, 0.022333696365356447, 0.022225151062011717, 0.02246272087097168, 0.022642688751220705, 0.022451839447021484, 0.02286185646057129, 0.022526464462280273, 0.022526975631713866, 0.02248940849304199, 0.022270656585693358, 0.022564863204956053, 0.022386655807495118, 0.02280841636657715, 0.022431936264038086, 0.022246719360351563, 0.022305471420288086, 0.022205440521240235, 0.022340543746948244, 0.022180959701538085, 0.02376550483703613, 0.023262847900390626, 0.02228646469116211, 0.021996192932128907, 0.022412384033203125, 0.02189411163330078, 0.022038463592529298, 0.021999711990356444, 0.02195452880859375, 0.02191564750671387, 0.022226367950439453, 0.021967008590698243, 0.022002080917358398, 0.02208563232421875, 0.02207561683654785, 0.022126367568969726, 0.02230067253112793, 0.02228620719909668, 0.022414783477783203, 0.022188735961914063, 0.022013952255249023, 0.02199078369140625, 0.02212723159790039, 0.022036575317382814, 0.022103967666625975, 0.021997568130493163, 0.02199295997619629, 0.022026752471923827, 0.02205286407470703, 0.02224742317199707, 0.022097728729248048, 0.022209823608398436, 0.02228316879272461, 0.02206822395324707, 0.02216969680786133, 0.02210908889770508, 0.022402431488037108, 0.022558719635009765, 0.022485376358032227, 0.022151424407958985, 0.022284063339233398, 0.022378719329833985, 0.022391807556152343, 0.022477535247802733, 0.022448415756225585, 0.022201471328735352, 0.022197248458862305, 0.02216643142700195, 0.022315999984741212, 0.022747007369995118, 0.02616524887084961, 
0.023795360565185546, 0.02264441680908203, 0.022417184829711913, 0.02247318458557129, 0.02233996772766113, 0.022361440658569334, 0.022499488830566405, 0.022378751754760742, 0.02226521682739258, 0.022413503646850585, 0.02239356803894043, 0.022169599533081053, 0.02214476776123047, 0.0220797119140625, 0.02235372734069824, 0.022227071762084962, 0.022204511642456053, 0.022390432357788086, 0.022331743240356444, 0.022304479598999023, 0.022317344665527344, 0.0224399356842041, 0.022208511352539064, 0.022054912567138672, 0.02196803283691406, 0.021950592041015626, 0.021945056915283204, 0.021884927749633788, 0.023011327743530274, 0.02221612739562988, 0.023364128112792967, 0.022064735412597656, 0.02207379150390625, 0.021972415924072265, 0.021997312545776367, 0.022100799560546874, 0.022216352462768554, 0.022128671646118165, 0.022100032806396483, 0.022062463760375975, 0.021975103378295897, 0.022045215606689452, 0.02185651206970215, 0.021985055923461914, 0.021829887390136717, 0.022368255615234374, 0.0218787841796875, 0.02186355209350586, 0.02255961608886719, 0.022003679275512694, 0.02201398468017578, 0.02211840057373047, 0.022023775100708007, 0.02219664001464844, 0.022153215408325197, 0.02226371192932129, 0.02212873649597168, 0.02208697509765625, 0.021917472839355467, 0.021971872329711914, 0.02201190376281738, 0.022270175933837892, 0.021952512741088868, 0.021895040512084962, 0.021894752502441408, 0.02195510482788086, 0.022030336380004883, 0.02217932891845703, 0.02202470397949219, 0.02245427131652832, 0.021999616622924805, 0.02200364875793457, 0.021844032287597657, 0.021956672668457033, 0.021931615829467774, 0.02199177551269531, 0.021792768478393554, 0.02194819259643555, 0.021913728713989257, 0.022000831604003908, 0.021960639953613283, 0.022019039154052733, 0.022161407470703123, 0.02208563232421875, 0.021952512741088868, 0.021982688903808594, 0.021960927963256837, 0.02221878433227539, 0.02193027114868164, 0.022330944061279296, 0.022352319717407226, 0.022394880294799805, 0.022101984024047852, 0.02232499122619629, 0.022266143798828124, 0.022078880310058592, 0.022106719970703126, 0.022040576934814454, 0.022150943756103516, 0.02190332794189453, 0.021901567459106444, 0.021936128616333008, 0.02193164825439453, 0.021896928787231446, 0.0219204158782959, 0.022072608947753907, 0.022180736541748045, 0.022256799697875976, 0.02226041603088379, 0.021984607696533202, 0.021944095611572265, 0.021764991760253906, 0.02198944091796875, 0.021882816314697264, 0.021944320678710938, 0.022191104888916017, 0.022064128875732423, 0.021880416870117186, 0.021882623672485352, 0.021803680419921874, 0.02190438461303711, 0.02194054412841797, 0.021946687698364258, 0.02204265594482422, 0.022526111602783203, 0.022200159072875977, 0.021932031631469725, 0.021966848373413086, 0.021796096801757814, 0.022024799346923828, 0.021940223693847655, 0.021850271224975584, 0.021798912048339843, 0.02184726333618164, 0.021809247970581053, 0.021907903671264647, 0.021797056198120116, 0.021876800537109376, 0.021932031631469725, 0.022038528442382813, 0.02212819290161133, 0.02217731285095215, 0.022080415725708007, 0.021979135513305666, 0.021986400604248047, 0.02193292808532715, 0.021880863189697265, 0.02189516830444336, 0.02190336036682129, 0.02231062316894531, 0.022022144317626953, 0.022063392639160156, 0.022591487884521484, 0.021917695999145507, 0.021827583312988282, 0.021893119812011717, 0.02207043266296387, 0.02235887908935547, 0.02243699264526367, 0.022244224548339842, 0.022192127227783204, 0.021985279083251954, 0.0220546875, 0.021899488449096678, 
0.02188649559020996, 0.02187104034423828, 0.02181532859802246, 0.022063104629516602, 0.02198271942138672, 0.022039039611816406, 0.02196201515197754, 0.021859039306640626, 0.02192355155944824, 0.0219072322845459, 0.022428064346313475, 0.023104896545410158, 0.02225430488586426, 0.022007648468017577, 0.02212236785888672, 0.022281503677368163, 0.02213337516784668, 0.02192617607116699, 0.021944416046142577, 0.021935680389404296, 0.021944671630859374, 0.021905183792114258, 0.021936447143554687, 0.02201260757446289, 0.02197817611694336, 0.021865407943725587, 0.02205286407470703, 0.02327756881713867, 0.025399295806884766, 0.022755327224731444, 0.022487039566040038, 0.022203968048095702, 0.022081024169921876, 0.02219718360900879, 0.022993919372558593, 0.022952959060668944, 0.022512800216674806, 0.02230672073364258, 0.022178752899169922, 0.022054336547851563, 0.0218855037689209, 0.022255008697509765, 0.021869152069091798, 0.021954559326171876, 0.021952384948730468, 0.021932159423828125, 0.02209721565246582, 0.022176448822021484, 0.022215808868408203, 0.021989856719970703, 0.022091936111450196, 0.021879039764404296, 0.02231500816345215, 0.02186444854736328, 0.021843967437744142, 0.02187017631530762, 0.021782943725585938, 0.021960704803466798, 0.021817344665527344, 0.021790143966674804, 0.022001216888427735, 0.021969919204711915, 0.021872095108032227, 0.02188751983642578, 0.021948415756225585, 0.022321151733398437, 0.02215116882324219, 0.022005664825439454, 0.021999711990356444, 0.02185603141784668, 0.021992992401123047, 0.021907136917114257, 0.023540512084960937, 0.023470304489135743, 0.022000959396362305, 0.021928831100463866, 0.021897024154663085, 0.021939456939697264, 0.0218505916595459, 0.022026752471923827, 0.022066879272460937, 0.022227039337158205, 0.02202614402770996, 0.021893375396728514, 0.021900224685668945, 0.021986207962036132, 0.02213039970397949, 0.021889888763427734, 0.021872671127319335, 0.021997535705566406, 0.02195644760131836, 0.022024383544921877, 0.022117631912231445, 0.021919551849365233, 0.02228665542602539, 0.021860128402709962, 0.02198819160461426, 0.021932031631469725, 0.02210767936706543, 0.022071104049682617, 0.022041088104248048, 0.02190118408203125, 0.02204425621032715, 0.022022560119628908, 0.0219015998840332, 0.02185775947570801, 0.0219116153717041, 0.021898880004882812, 0.02186467170715332, 0.02188966369628906, 0.021942176818847657, 0.021868640899658204, 0.02194144058227539, 0.02181407928466797, 0.022176895141601562, 0.02192438316345215, 0.02202412796020508, 0.022116767883300782, 0.022091007232666014, 0.022033151626586915, 0.021977088928222657, 0.021999040603637696, 0.02243168067932129, 0.022260351181030272, 0.02224742317199707, 0.02208563232421875, 0.02206719970703125, 0.02194540786743164, 0.021895328521728517, 0.02183225631713867, 0.02192131233215332, 0.021875295639038086, 0.021940319061279297, 0.02185139274597168, 0.02220515251159668, 0.02194175910949707, 0.022032224655151367, 0.02204537582397461, 0.022138879776000975, 0.022507455825805663, 0.02196665573120117, 0.022015840530395507, 0.021889440536499022, 0.021900896072387696, 0.021878559112548827, 0.021895200729370116, 0.021919519424438476, 0.02189593505859375, 0.02188889694213867, 0.022648895263671875, 0.022239231109619142, 0.021970815658569336, 0.022139007568359376, 0.021929983139038087, 0.022150976181030273, 0.02201580810546875, 0.022094207763671873, 0.021954559326171876, 0.02189676856994629, 0.02192019271850586, 0.02187468719482422, 0.021829023361206054, 0.021936159133911132, 0.022133312225341796, 
0.021935935974121093, 0.0219150390625, 0.02194099235534668, 0.021970975875854493, 0.022107999801635744, 0.022081695556640624, 0.02206719970703125, 0.021932031631469725, 0.02191564750671387, 0.021940223693847655, 0.02190540885925293, 0.021843360900878905, 0.021852767944335938, 0.02191564750671387, 0.021945856094360353, 0.021965152740478517, 0.02187280082702637, 0.022231039047241212, 0.021972991943359374, 0.021893119812011717, 0.021897279739379882, 0.021968832015991212, 0.02192959976196289, 0.022032768249511718, 0.021997568130493163, 0.02220185661315918, 0.021995359420776368, 0.021978784561157226, 0.02211327934265137, 0.021995519638061522, 0.02187398338317871, 0.021826240539550783, 0.021803007125854493, 0.021834911346435545, 0.02179692840576172, 0.021790655136108398, 0.021902175903320314, 0.022269952774047853, 0.021979135513305666, 0.022200319290161134, 0.022423839569091795, 0.022502304077148438, 0.02258188819885254, 0.02243708801269531, 0.02234220886230469, 0.0224051513671875, 0.02214950370788574, 0.022185983657836913]",tokens/s,45.12537280134154,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1169.977344,1101.98784,0.0,706.740224,681.6384,s,1,8.240458984375,8.240458984375,0.0,8.240458984375,8.240458984375,8.240458984375,8.240458984375,[8.240458984375],,kWh,3.042966298750116e-05,3.348905752097596e-06,8.997784976011447e-06,4.2776353715610206e-05,,MB,1477.947392,1406.07488,0.0,996.1472,949.238272,s,10,0.2649172801971435,0.026491728019714354,0.00029793763482712345,0.026321727752685545,0.02689329662322998,0.026940312099456788,0.02697792448043823,"[0.026882848739624023, 0.026987327575683593, 0.026252416610717772, 0.02670947265625, 0.026139328002929688, 0.026283327102661132, 0.026788543701171875, 0.026230560302734376, 0.026358240127563475, 0.026285215377807616]",tokens/s,9663.39378878918,kWh,7.905608014752041e-07,8.718526049061943e-08,4.875897294107915e-07,1.365335791376615e-06,tokens/kWh,187499662.43973225,MB,1498.300416,1414.463488,0.0,1004.535808,949.240832,s,10,12.595625854492187,1.2595625854492187,0.007672994935525482,1.2575475463867187,1.2665170166015625,1.2722932983398436,1.2769143237304688,"[1.278069580078125, 1.26454345703125, 1.250636962890625, 1.25344140625, 1.259962646484375, 1.2652333984375, 1.255451171875, 1.254578369140625, 1.2596439208984376, 1.25406494140625]",tokens/s,50.01736374817077,kWh,3.732229134477349e-05,4.116098889717311e-06,1.5007144888589593e-05,5.6445535123080395e-05,tokens/kWh,1116120.165441385,,s,630,12.593511472702021,0.019989700750320676,0.0003206107088227993,0.019910863876342774,0.02026016883850098,0.02048119831085205,0.02131405834197999,"[0.02076630401611328, 0.02052751922607422, 0.020439136505126954, 0.020158016204833984, 0.020348800659179687, 0.02035145568847656, 0.020225791931152343, 0.020098335266113283, 0.020228767395019533, 0.020021568298339842, 0.020414464950561522, 0.020178239822387697, 0.01999942398071289, 0.019785728454589844, 0.01985955238342285, 0.02021798324584961, 0.020110143661499023, 0.019719135284423827, 0.019830976486206055, 0.020127552032470703, 0.020385631561279295, 0.020480159759521485, 0.020315168380737304, 0.0209946231842041, 0.020470176696777344, 0.02031001663208008, 0.020108768463134766, 0.02002998352050781, 0.01989836883544922, 0.019933183670043944, 0.020107040405273436, 0.020168928146362303, 0.019895776748657227, 0.019812896728515626, 0.01998624038696289, 0.02004412841796875, 0.020193119049072266, 0.02015827178955078, 0.020273344039916992, 0.020234304428100584, 0.019994239807128906, 0.020224319458007813, 0.021409408569335937, 0.019943807601928713, 0.020174047470092774, 0.02036796760559082, 0.02034294319152832, 0.019965343475341797, 0.02000048065185547, 0.019989408493041993, 0.020133888244628906, 0.020100608825683593, 0.02318751907348633, 0.02056550407409668, 0.02014508819580078, 0.020105215072631837, 0.02022400093078613, 0.02007859230041504, 0.020330656051635743, 0.021389152526855467, 0.020841567993164063, 0.02059561538696289, 0.020523008346557618, 0.020568384170532226, 0.020301408767700195, 0.020048704147338867, 0.020000448226928712, 0.020154687881469728, 0.020146175384521483, 0.02004755210876465, 0.020164352416992187, 0.02005459213256836, 0.020008960723876954, 0.019984064102172853, 0.020611391067504883, 0.019893312454223634, 0.019960128784179687, 0.01991334342956543, 0.019918848037719726, 0.020000768661499024, 0.020059776306152344, 0.019853696823120118, 0.01985536003112793, 0.01999977684020996, 0.02113020706176758, 0.020808736801147462, 0.020384735107421875, 0.020258207321166993, 0.020103103637695314, 0.020191200256347658, 0.020443359375, 
0.020122079849243163, 0.02029497528076172, 0.02003424072265625, 0.02003321647644043, 0.019908287048339843, 0.019873823165893555, 0.019794143676757813, 0.0199048957824707, 0.01995955276489258, 0.020133312225341798, 0.02018771171569824, 0.019949855804443358, 0.02037881660461426, 0.01994211196899414, 0.019732736587524415, 0.01975276756286621, 0.020083808898925783, 0.020108192443847657, 0.019970048904418947, 0.01989593505859375, 0.01991436767578125, 0.019864320755004883, 0.01991231918334961, 0.01977494430541992, 0.019865888595581055, 0.019937919616699218, 0.01995699119567871, 0.019800832748413086, 0.019918848037719726, 0.020229408264160156, 0.020214048385620118, 0.02011123275756836, 0.02001158332824707, 0.01998147201538086, 0.019925695419311523, 0.0198089599609375, 0.019951520919799806, 0.019779680252075195, 0.019796255111694337, 0.019748512268066405, 0.019847007751464845, 0.0198305606842041, 0.019767744064331055, 0.019793376922607422, 0.019814144134521483, 0.019888927459716797, 0.019799711227416993, 0.019751264572143556, 0.0199267520904541, 0.019900703430175783, 0.019939136505126954, 0.0199354248046875, 0.01993280029296875, 0.020251007080078126, 0.020122751235961914, 0.01995395278930664, 0.019790016174316406, 0.0198701114654541, 0.019916000366210936, 0.019927295684814453, 0.02000092887878418, 0.0203001594543457, 0.019891584396362304, 0.01987443161010742, 0.019810464859008788, 0.019715967178344725, 0.02018876838684082, 0.019917184829711915, 0.019707807540893553, 0.01983292770385742, 0.019742719650268553, 0.019705312728881836, 0.019691904067993163, 0.01977471923828125, 0.019614463806152345, 0.019675296783447267, 0.01963827133178711, 0.019779584884643556, 0.01989360046386719, 0.019791616439819335, 0.019762239456176757, 0.019731327056884764, 0.01976176071166992, 0.019874176025390624, 0.019736576080322265, 0.019761152267456054, 0.019961856842041017, 0.019762432098388672, 0.019886751174926758, 0.019861215591430663, 0.020029823303222657, 0.01985945510864258, 0.019779584884643556, 0.01982464027404785, 0.019741792678833008, 0.019766176223754883, 0.019775487899780272, 0.019936607360839843, 0.01998585510253906, 0.019958208084106446, 0.019869823455810547, 0.019900415420532228, 0.01987583923339844, 0.01984716796875, 0.01984214401245117, 0.019798944473266602, 0.019808256149291992, 0.019655967712402345, 0.019673728942871095, 0.019730527877807616, 0.019900415420532228, 0.01988761520385742, 0.019734655380249023, 0.0197857608795166, 0.01996556854248047, 0.01993187141418457, 0.019795967102050782, 0.019769344329833984, 0.01982259178161621, 0.01979372787475586, 0.019959999084472657, 0.01978748893737793, 0.01977987289428711, 0.019775775909423827, 0.01981996726989746, 0.019800352096557616, 0.019900703430175783, 0.019914464950561525, 0.02009516716003418, 0.019945247650146485, 0.019948896408081056, 0.020085151672363282, 0.019900703430175783, 0.019932735443115236, 0.020009408950805663, 0.019982559204101562, 0.019877664566040038, 0.01969270324707031, 0.020119808197021485, 0.019993183135986328, 0.02025187110900879, 0.02015648078918457, 0.01995644760131836, 0.019885663986206056, 0.019807968139648437, 0.019991231918334962, 0.019853311538696287, 0.019944511413574218, 0.0197291202545166, 0.019765472412109374, 0.01996143913269043, 0.02023200035095215, 0.02006275177001953, 0.02001296043395996, 0.01992310333251953, 0.019804159164428712, 0.019832639694213866, 0.019763391494750978, 0.01984716796875, 0.019900415420532228, 0.019877023696899414, 0.019928543090820313, 0.019927391052246092, 0.020050111770629882, 0.020622976303100587, 
0.01988595199584961, 0.019819007873535157, 0.019767295837402343, 0.01984511947631836, 0.0200581111907959, 0.019926496505737305, 0.019868192672729493, 0.021798912048339843, 0.02038118362426758, 0.02036787223815918, 0.020105215072631837, 0.019947200775146483, 0.019878208160400392, 0.019767488479614258, 0.019787296295166016, 0.019812320709228514, 0.01991628837585449, 0.01989846420288086, 0.019870431900024414, 0.019901567459106446, 0.020454336166381835, 0.019989631652832032, 0.019947904586791992, 0.019806720733642577, 0.02017683219909668, 0.02019327926635742, 0.020313535690307617, 0.020238367080688477, 0.02020102310180664, 0.020214752197265626, 0.020024511337280275, 0.020144384384155275, 0.01987180709838867, 0.019810815811157227, 0.019868959426879884, 0.01993974494934082, 0.019802431106567382, 0.019752960205078125, 0.019715232849121092, 0.019770048141479493, 0.019822111129760744, 0.019763839721679687, 0.019765247344970704, 0.01975654411315918, 0.01992755126953125, 0.01992428779602051, 0.019853599548339845, 0.01991107177734375, 0.02001203155517578, 0.019892448425292968, 0.01989459228515625, 0.02007206344604492, 0.020089696884155274, 0.019832223892211915, 0.020075103759765626, 0.02001919937133789, 0.019997760772705077, 0.01995622444152832, 0.01983459281921387, 0.019830175399780273, 0.019931711196899415, 0.019814367294311522, 0.019861888885498048, 0.020291584014892578, 0.020150272369384766, 0.02233568000793457, 0.021124000549316405, 0.019907487869262695, 0.01988921546936035, 0.020192192077636718, 0.02002943992614746, 0.019804159164428712, 0.019920064926147462, 0.020043712615966797, 0.01991062355041504, 0.019921823501586913, 0.01992038345336914, 0.01989059257507324, 0.01985955238342285, 0.01983283233642578, 0.020092927932739257, 0.01982259178161621, 0.01985536003112793, 0.019783679962158202, 0.01984921646118164, 0.02003126335144043, 0.019990751266479492, 0.019889503479003905, 0.019986783981323242, 0.01999839973449707, 0.02006687927246094, 0.020104480743408204, 0.020111520767211913, 0.019968544006347656, 0.020172191619873048, 0.020203487396240234, 0.020175392150878907, 0.020695295333862305, 0.020547391891479493, 0.020977216720581053, 0.02008438491821289, 0.019859615325927733, 0.019792640686035156, 0.01972809600830078, 0.019800352096557616, 0.02021990394592285, 0.020176511764526367, 0.0201582088470459, 0.019831167221069337, 0.019880191802978515, 0.020068351745605468, 0.019859039306640625, 0.0198701114654541, 0.019826751708984375, 0.0199964485168457, 0.02026630401611328, 0.020052448272705078, 0.020093120574951173, 0.019932704925537108, 0.020294303894042968, 0.020191232681274415, 0.020247840881347658, 0.020064287185668946, 0.020150976181030275, 0.02025062370300293, 0.02046566390991211, 0.020112991333007812, 0.019851680755615234, 0.01984921646118164, 0.019935232162475586, 0.02002943992614746, 0.019802112579345704, 0.019715103149414062, 0.019921215057373046, 0.019899040222167968, 0.019931007385253906, 0.019931264877319336, 0.019773088455200195, 0.0198372802734375, 0.019910655975341796, 0.022226783752441408, 0.019900575637817382, 0.019740671157836915, 0.01982259178161621, 0.01968332862854004, 0.019910655975341796, 0.019881824493408203, 0.019773120880126952, 0.01980259132385254, 0.020684799194335936, 0.019795967102050782, 0.01973151969909668, 0.019759296417236328, 0.019755199432373048, 0.019929183959960937, 0.019933664321899414, 0.019937280654907227, 0.02008883285522461, 0.01986764717102051, 0.01984921646118164, 0.019844383239746095, 0.019710016250610352, 0.019800512313842774, 0.019828319549560547, 
0.019839071273803712, 0.01983951950073242, 0.01988403129577637, 0.019998720169067383, 0.019932191848754884, 0.020022239685058594, 0.019887840270996094, 0.019826175689697266, 0.01985206413269043, 0.019763200759887696, 0.019742176055908202, 0.019886623382568358, 0.019793920516967774, 0.019933183670043944, 0.01983692741394043, 0.019687231063842774, 0.0196231689453125, 0.019718719482421876, 0.019734912872314454, 0.019748096466064454, 0.01998691177368164, 0.019900224685668946, 0.019924768447875975, 0.020724128723144532, 0.020008703231811524, 0.019941024780273438, 0.019805952072143553, 0.019729183197021483, 0.019809951782226564, 0.019837120056152343, 0.019788000106811525, 0.02048204803466797, 0.019982336044311523, 0.019873760223388673, 0.0198123836517334, 0.019973119735717772, 0.020351999282836913, 0.019848800659179686, 0.01970012855529785, 0.019740671157836915, 0.019802112579345704, 0.019955711364746095, 0.019930335998535158, 0.01985820770263672, 0.01978982353210449, 0.019726335525512697, 0.0198922233581543, 0.01983014488220215, 0.01990060806274414, 0.019765695571899413, 0.019826719284057617, 0.020092384338378906, 0.02013433647155762, 0.019900224685668946, 0.01988140869140625, 0.019893056869506837, 0.019808256149291992, 0.019827871322631835, 0.019831647872924806, 0.01981644821166992, 0.01978278350830078, 0.01990131187438965, 0.02005734443664551, 0.020161279678344725, 0.019932159423828123, 0.019962879180908204, 0.01982259178161621, 0.019978368759155273, 0.0198286075592041, 0.019826528549194335, 0.01984118461608887, 0.019930496215820312, 0.02007094383239746, 0.019998815536499022, 0.019933183670043944, 0.019840160369873048, 0.019851711273193358, 0.01982512092590332, 0.01978156852722168, 0.01970979118347168, 0.01992310333251953, 0.019823871612548827, 0.019864320755004883, 0.02002943992614746, 0.0199332160949707, 0.01987740707397461, 0.019855424880981444, 0.01987228775024414, 0.019834943771362305, 0.019810304641723633, 0.01972617530822754, 0.019855520248413087, 0.01983807945251465, 0.019816320419311525, 0.019893247604370116, 0.019673088073730468, 0.019703359603881837, 0.019660383224487304, 0.019634784698486327, 0.01966924858093262, 0.01983692741394043, 0.019744768142700195, 0.019832544326782227, 0.019832128524780272, 0.019843584060668946, 0.019740224838256836, 0.02064886474609375, 0.020162431716918946, 0.020026624679565428, 0.019853408813476563, 0.019851999282836916, 0.019828800201416016, 0.02078950309753418, 0.020059104919433593, 0.019829248428344725, 0.01971545600891113, 0.019755552291870117, 0.01977587127685547, 0.019812288284301757, 0.01993075180053711, 0.019984832763671877, 0.01987107276916504, 0.02101705551147461, 0.02064134407043457, 0.019874399185180663, 0.02002742385864258, 0.020006208419799804, 0.019874496459960936, 0.02004924774169922, 0.020154560089111328, 0.02098627281188965, 0.020230207443237305, 0.02161664009094238, 0.02024630355834961, 0.02003376007080078, 0.019895904541015624, 0.019831199645996094, 0.020331647872924803, 0.01991155242919922, 0.019976192474365235, 0.019992576599121094, 0.019974143981933593, 0.01983036804199219, 0.019829151153564453, 0.019918687820434572, 0.019843231201171874, 0.019971872329711916, 0.019947519302368166, 0.019852319717407228, 0.019720640182495117, 0.01974892807006836, 0.019973728179931642, 0.019950464248657228, 0.01977872085571289, 0.01981923294067383, 0.020088703155517577, 0.020008863449096678, 0.020135263442993163, 0.019893247604370116, 0.019916351318359376, 0.020001440048217772, 0.02042822456359863, 0.02080995178222656, 0.019763328552246093, 
0.019793920516967774, 0.019795967102050782, 0.019898656845092774, 0.01988934326171875, 0.019885759353637695, 0.019841888427734374, 0.01987993621826172, 0.01994704055786133, 0.02016876792907715, 0.019800256729125977, 0.01972879981994629, 0.019797855377197266, 0.01976300811767578, 0.019911872863769532, 0.020279935836791992, 0.019860063552856445, 0.019754079818725585, 0.02025948715209961, 0.019875104904174806, 0.019987167358398436, 0.019871295928955077, 0.019932640075683593, 0.01981500816345215, 0.01999910354614258, 0.020165792465209963, 0.01991894340515137, 0.01986636734008789, 0.019916799545288084, 0.01989753532409668, 0.020126527786254882, 0.019961727142333983, 0.019949087142944334, 0.01979372787475586, 0.019820640563964844, 0.019744831085205077, 0.019679391860961914, 0.019701759338378907, 0.019668895721435545, 0.019730112075805665, 0.019673599243164062, 0.019655040740966797, 0.019920896530151368, 0.01981158447265625, 0.0198023681640625, 0.019742303848266602, 0.019753887176513673]",tokens/s,50.025761390347874,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1845.997568,2760.835072,0.0,2365.587456,2314.318336,s,1,8.89519921875,8.89519921875,0.0,8.89519921875,8.89519921875,8.89519921875,8.89519921875,[8.89519921875],,kWh,5.4660155666647367e-05,6.017403879746309e-06,1.6371401986001177e-05,7.704896153239485e-05,,MB,1897.926656,3092.185088,0.0,2682.257408,2607.60832,s,10,0.48081442642211913,0.04808144264221191,0.00017205538202914872,0.04805052757263184,0.048120225143432616,0.04835307178497314,0.048539349098205564,"[0.04858591842651367, 0.04797695922851562, 0.048004062652587894, 0.0480684814453125, 0.04806121444702149, 0.048005023956298826, 0.048048126220703126, 0.04805516815185547, 0.04805292892456055, 0.04795654296875]",tokens/s,5324.299478802475,kWh,1.4727333594430756e-06,1.624139553476128e-07,9.766018421407165e-07,2.611749156931405e-06,tokens/kWh,98018601.56462322,MB,1902.24384,3092.185088,0.0,2682.257408,2607.61088,s,10,13.363730224609373,1.3363730224609376,0.007735199172256561,1.3361744995117188,1.3469821655273437,1.3478011413574218,1.3484563220214845,"[1.3357327880859375, 1.3290960693359375, 1.3468001708984374, 1.336394287109375, 1.340294921875, 1.3486201171875, 1.323949951171875, 1.340486328125, 1.32640087890625, 1.3359547119140625]",tokens/s,47.142526032129254,kWh,3.889775208722348e-05,4.289722245383801e-06,1.924219211065894e-05,6.242966644326624e-05,tokens/kWh,1009135.6175553503,,s,630,13.36166525077821,0.021208992461552695,0.00041553953410139157,0.021115599632263185,0.021474857711791993,0.021689506912231444,0.02319046390533448,"[0.02129305648803711, 0.021180543899536133, 0.021196672439575195, 0.02103091239929199, 0.02104115104675293, 0.021098495483398438, 0.02106582450866699, 0.021067680358886717, 0.021098207473754883, 0.021122848510742188, 0.021251712799072266, 0.021353343963623046, 0.021423744201660155, 0.021078336715698243, 
0.02110451126098633, 0.021038656234741212, 0.021176959991455076, 0.02127257537841797, 0.02119785690307617, 0.02130646324157715, 0.02111680030822754, 0.02102783966064453, 0.02122444725036621, 0.021110240936279296, 0.020978048324584962, 0.02158198356628418, 0.02154889678955078, 0.02133967971801758, 0.02140755271911621, 0.021250911712646484, 0.021047136306762696, 0.02124812889099121, 0.021020671844482423, 0.021082111358642578, 0.02101180839538574, 0.021189279556274414, 0.021307392120361326, 0.021180416107177736, 0.021315584182739256, 0.021372224807739256, 0.021383712768554688, 0.02140380859375, 0.021399423599243163, 0.02138140869140625, 0.02134000015258789, 0.02157129669189453, 0.021334239959716797, 0.021293119430541994, 0.021174272537231444, 0.021140607833862304, 0.021111135482788087, 0.02106617546081543, 0.02106787109375, 0.020996095657348633, 0.020959232330322267, 0.021084096908569334, 0.021122976303100584, 0.021334304809570312, 0.02127039909362793, 0.021155839920043946, 0.020972543716430665, 0.02106060791015625, 0.021151744842529296, 0.021368831634521485, 0.021350400924682617, 0.021755712509155273, 0.021196191787719726, 0.0211822395324707, 0.021084896087646486, 0.021018911361694335, 0.021040767669677735, 0.020989568710327148, 0.021039648056030272, 0.02111631965637207, 0.021034912109375, 0.021082239151000978, 0.021086944580078124, 0.02099001693725586, 0.021042335510253907, 0.021154144287109374, 0.021143455505371094, 0.021142112731933595, 0.021155839920043946, 0.02126848030090332, 0.021217279434204102, 0.021143104553222655, 0.021322175979614257, 0.02098329544067383, 0.021658111572265625, 0.021204832077026368, 0.02096143913269043, 0.021548671722412108, 0.020979616165161134, 0.0210416316986084, 0.020912128448486327, 0.020926464080810548, 0.020992000579833983, 0.021034015655517577, 0.02091209602355957, 0.020855615615844727, 0.020863168716430663, 0.020946943283081054, 0.020926496505737305, 0.02113942337036133, 0.021172096252441406, 0.021250175476074218, 0.02089574432373047, 0.020936704635620116, 0.02089369583129883, 0.02096892738342285, 0.021162015914916992, 0.020886016845703126, 0.021323776245117186, 0.021045248031616212, 0.02108006477355957, 0.02130339241027832, 0.020977088928222656, 0.0209781436920166, 0.02090188789367676, 0.02110652732849121, 0.020981376647949218, 0.02099830436706543, 0.021023103713989258, 0.02096678352355957, 0.021254783630371095, 0.02098099136352539, 0.02143846321105957, 0.021618688583374023, 0.02132508850097656, 0.02137980842590332, 0.021354496002197267, 0.021250207901000975, 0.021538656234741212, 0.021157888412475585, 0.02104115104675293, 0.02127462387084961, 0.021511199951171876, 0.021150976181030273, 0.021108064651489258, 0.021082496643066405, 0.02106547164916992, 0.021182207107543944, 0.021368640899658203, 0.02099065589904785, 0.020965152740478516, 0.021358816146850586, 0.02105529594421387, 0.020983999252319335, 0.0210831356048584, 0.024739904403686522, 0.0228787841796875, 0.021362432479858397, 0.021254783630371095, 0.02116534423828125, 0.02111756706237793, 0.02106172752380371, 0.021104032516479493, 0.021132991790771483, 0.021003168106079103, 0.021217279434204102, 0.021114751815795897, 0.02104275131225586, 0.021023296356201173, 0.021061632156372072, 0.021060800552368163, 0.020963167190551756, 0.021052543640136718, 0.02116796875, 0.02113849639892578, 0.021437376022338868, 0.021114879608154297, 0.02102796745300293, 0.02115827178955078, 0.021101055145263673, 0.02247475242614746, 0.021249055862426758, 0.021365440368652344, 0.02134668731689453, 0.0214136962890625, 
0.021428319931030275, 0.02347996711730957, 0.02214512062072754, 0.021247327804565428, 0.021750688552856445, 0.02141107177734375, 0.02150003242492676, 0.021273216247558593, 0.021304512023925783, 0.021391775131225584, 0.02135878372192383, 0.02228873634338379, 0.021049087524414062, 0.020902143478393555, 0.021244960784912108, 0.021127519607543947, 0.021000640869140625, 0.02100022315979004, 0.021049503326416016, 0.021017919540405272, 0.021086912155151367, 0.02127257537841797, 0.02121548843383789, 0.021052255630493164, 0.021250112533569336, 0.021453664779663085, 0.021243904113769533, 0.020992191314697265, 0.021112640380859374, 0.02104729652404785, 0.020961503982543945, 0.020909759521484376, 0.02107811164855957, 0.021440511703491212, 0.0230231990814209, 0.021638816833496093, 0.021410560607910155, 0.021470783233642578, 0.02130169677734375, 0.02104319953918457, 0.02122537612915039, 0.02126652717590332, 0.02101043128967285, 0.021135360717773437, 0.0223372802734375, 0.02143984031677246, 0.02131648063659668, 0.021026687622070314, 0.02106153678894043, 0.02107744026184082, 0.021086847305297852, 0.020926752090454102, 0.02100419235229492, 0.02123776054382324, 0.021271680831909178, 0.021131999969482423, 0.021161727905273438, 0.02112348747253418, 0.02117580795288086, 0.02114918327331543, 0.02121939277648926, 0.021301631927490235, 0.02090451240539551, 0.020944896697998046, 0.021379072189331053, 0.021102399826049806, 0.021158079147338867, 0.02101980781555176, 0.02097990417480469, 0.02089846420288086, 0.02103209686279297, 0.021019487380981444, 0.02102681541442871, 0.02105276870727539, 0.021016544342041015, 0.020980384826660155, 0.02101865577697754, 0.020869056701660157, 0.020944576263427734, 0.021173856735229493, 0.021473440170288086, 0.021213855743408203, 0.021769599914550783, 0.02137264060974121, 0.021226303100585937, 0.02122140884399414, 0.021154848098754883, 0.021187360763549806, 0.02121299171447754, 0.021281152725219726, 0.02146441650390625, 0.02180073547363281, 0.02128783988952637, 0.021124256134033202, 0.02154787254333496, 0.024763551712036133, 0.021267295837402344, 0.021190784454345704, 0.021176191329956056, 0.021106687545776368, 0.021333343505859376, 0.021391103744506836, 0.021160863876342775, 0.02120694351196289, 0.02111087989807129, 0.020961280822753905, 0.02130240058898926, 0.021023616790771485, 0.021182464599609374, 0.021147647857666017, 0.021124191284179687, 0.021119840621948244, 0.021026687622070314, 0.02119603157043457, 0.02099500846862793, 0.02149580764770508, 0.02148761558532715, 0.021542207717895508, 0.02124777603149414, 0.021016576766967773, 0.021097375869750978, 0.0211125431060791, 0.02106172752380371, 0.02121308708190918, 0.021152000427246093, 0.021128351211547852, 0.02091916847229004, 0.021182464599609374, 0.021315584182739256, 0.021339359283447264, 0.021307552337646484, 0.021279359817504884, 0.02125823974609375, 0.02104729652404785, 0.021249664306640624, 0.021423871994018556, 0.022934303283691407, 0.021489376068115233, 0.021776575088500977, 0.021768287658691408, 0.021327072143554688, 0.02118079948425293, 0.021159456253051757, 0.021162879943847655, 0.021185855865478515, 0.02129596710205078, 0.021337600708007814, 0.021194175720214845, 0.021160863876342775, 0.02125823974609375, 0.021544960021972655, 0.02130534362792969, 0.02107187271118164, 0.02114112091064453, 0.021062015533447265, 0.021058624267578124, 0.021046207427978515, 0.02107529640197754, 0.02122819137573242, 0.021118976593017577, 0.020908031463623047, 0.02104297637939453, 0.021042783737182616, 0.020967039108276367, 
0.02286899185180664, 0.021313535690307618, 0.021323104858398438, 0.022659744262695312, 0.021001855850219728, 0.021038623809814454, 0.021162847518920898, 0.021088031768798827, 0.02098745536804199, 0.02092233657836914, 0.021424831390380858, 0.0232587833404541, 0.021631328582763672, 0.021325824737548828, 0.021252031326293944, 0.02111267280578613, 0.021259872436523438, 0.021455488204956054, 0.02211123275756836, 0.021692703247070313, 0.021601247787475585, 0.02295577621459961, 0.021519584655761717, 0.02138806343078613, 0.02131350326538086, 0.021057184219360352, 0.021156448364257813, 0.021131040573120118, 0.02107596778869629, 0.021188575744628905, 0.021182016372680666, 0.021457376480102538, 0.021169855117797853, 0.022363487243652343, 0.02112406349182129, 0.021180416107177736, 0.021076192855834962, 0.021159168243408202, 0.021070016860961913, 0.021108896255493163, 0.020961471557617187, 0.021303295135498047, 0.02109619140625, 0.02101696014404297, 0.020950912475585937, 0.020920320510864256, 0.020918272018432618, 0.02097385597229004, 0.021022560119628907, 0.02090729522705078, 0.02100806427001953, 0.020973567962646485, 0.02086390495300293, 0.02086502456665039, 0.02084454345703125, 0.020941823959350587, 0.02101759910583496, 0.021275871276855467, 0.020972320556640625, 0.021050432205200195, 0.021074880599975587, 0.020961280822753905, 0.02128825569152832, 0.021190656661987304, 0.020992704391479492, 0.02087936019897461, 0.021492767333984374, 0.021211328506469725, 0.020982559204101563, 0.021014528274536134, 0.021002464294433594, 0.020977439880371093, 0.020907360076904295, 0.021039775848388672, 0.020987903594970703, 0.021130815505981445, 0.021070207595825195, 0.021030975341796876, 0.02103910446166992, 0.021087871551513673, 0.020859264373779298, 0.020942623138427735, 0.021002464294433594, 0.020921567916870117, 0.020768768310546876, 0.020846399307250976, 0.020997087478637697, 0.020989887237548827, 0.02089369583129883, 0.020887615203857422, 0.02103910446166992, 0.020931840896606446, 0.020869375228881836, 0.020886016845703126, 0.020953088760375976, 0.02106777572631836, 0.02101817512512207, 0.021039487838745118, 0.021174272537231444, 0.021139455795288087, 0.021503711700439455, 0.021831968307495116, 0.02105958366394043, 0.02127359962463379, 0.021148672103881837, 0.021161983489990235, 0.020993343353271486, 0.024013376235961913, 0.021709983825683593, 0.021321760177612305, 0.021033920288085938, 0.021014528274536134, 0.02130512046813965, 0.021569759368896484, 0.021450431823730468, 0.02114182472229004, 0.021538816452026367, 0.02103193664550781, 0.021072895050048827, 0.021013887405395507, 0.02100204849243164, 0.02097760009765625, 0.021101312637329103, 0.021108863830566406, 0.02168560028076172, 0.02128505516052246, 0.02124847984313965, 0.02133206367492676, 0.021178112030029297, 0.021126495361328126, 0.02102764892578125, 0.02102272033691406, 0.02122444725036621, 0.02117849540710449, 0.02115814399719238, 0.02109913635253906, 0.021237247467041014, 0.02123529624938965, 0.021201791763305663, 0.021133344650268556, 0.021398847579956054, 0.02114134407043457, 0.021250911712646484, 0.021362272262573243, 0.021268735885620116, 0.021187936782836914, 0.021420864105224608, 0.0210402889251709, 0.021137535095214845, 0.021106559753417967, 0.0209785270690918, 0.021284448623657228, 0.021492128372192384, 0.021958656311035156, 0.021465087890625, 0.02128633689880371, 0.02125404739379883, 0.021213600158691406, 0.02106598472595215, 0.02094697570800781, 0.020950016021728517, 0.021710847854614256, 0.021114879608154297, 0.02103059196472168, 
0.020961759567260742, 0.020993408203125, 0.020799072265625, 0.020853504180908203, 0.02097983932495117, 0.02103628730773926, 0.02087603187561035, 0.020876863479614257, 0.02090438461303711, 0.020914176940917968, 0.02086502456665039, 0.02084454345703125, 0.020964927673339843, 0.020910112380981446, 0.02095555114746094, 0.020936704635620116, 0.020954368591308593, 0.020929279327392577, 0.020887199401855468, 0.020951391220092774, 0.02093609619140625, 0.020855327606201172, 0.020875551223754882, 0.02101219177246094, 0.021071935653686525, 0.02116636848449707, 0.02098873519897461, 0.021003168106079103, 0.02105958366394043, 0.020940351486206054, 0.020904064178466797, 0.021063999176025392, 0.02088960075378418, 0.020873472213745116, 0.020927648544311523, 0.02104140853881836, 0.02104560089111328, 0.021445920944213867, 0.020975391387939454, 0.021016672134399415, 0.02095734405517578, 0.023843584060668947, 0.021544223785400392, 0.02129337692260742, 0.021107040405273437, 0.021060800552368163, 0.020976448059082033, 0.020960575103759767, 0.021392063140869142, 0.02094607925415039, 0.020853376388549803, 0.020895967483520506, 0.021219327926635743, 0.02093062400817871, 0.021053375244140624, 0.020991167068481444, 0.021060415267944336, 0.020916223526000977, 0.020924543380737303, 0.020932479858398436, 0.021146303176879884, 0.021149728775024416, 0.021008384704589843, 0.021166080474853514, 0.020971616744995116, 0.021002143859863282, 0.02106777572631836, 0.021004287719726563, 0.021097503662109374, 0.021005504608154296, 0.021102304458618163, 0.021071935653686525, 0.02119875144958496, 0.021602399826049806, 0.02132352066040039, 0.02114995193481445, 0.021141664505004883, 0.02107129669189453, 0.021019039154052736, 0.021129215240478515, 0.021169696807861328, 0.02121161651611328, 0.021331552505493165, 0.021259744644165038, 0.02111199951171875, 0.021544704437255858, 0.02109235191345215, 0.021151744842529296, 0.02113155174255371, 0.02097737693786621, 0.021137184143066406, 0.021589567184448242, 0.021234336853027343, 0.021570688247680665, 0.020976512908935548, 0.021024063110351564, 0.02099884796142578, 0.022760671615600588, 0.02390838432312012, 0.021390079498291015, 0.021200895309448242, 0.02103091239929199, 0.021743616104125976, 0.021024768829345702, 0.020891679763793945, 0.020966432571411134, 0.021060543060302736, 0.02098899269104004, 0.02093769645690918, 0.020985952377319338, 0.021132192611694335, 0.02106857681274414, 0.021004480361938478, 0.02098092842102051, 0.02099078369140625, 0.02097267150878906, 0.020839296340942382, 0.021020671844482423, 0.021018848419189454, 0.02107779121398926, 0.021022111892700195, 0.021662303924560547, 0.021121023178100586]",tokens/s,47.14981165714417,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,810.528768,554.631168,0.0,159.383552,143.673856,s,1,7.36071435546875,7.36071435546875,0.0,7.36071435546875,7.36071435546875,7.36071435546875,7.36071435546875,[7.36071435546875],,kWh,1.0813214037504318e-05,1.185590018481142e-06,3.592502873996095e-06,1.5591306929981556e-05,,MB,1303.117824,609.15712,0.0,199.22944,186.684928,s,32,0.20167136001586916,0.006302230000495911,0.000168425978490321,0.006266864061355591,0.006366675138473511,0.006626598286628723,0.006932350602149964,"[0.006303711891174317, 0.006245567798614502, 0.006314752101898194, 0.006281055927276611, 0.006163584232330322, 0.006309823989868164, 0.006356768131256104, 0.006266240119934082, 0.006225791931152344, 0.006520671844482422, 0.0062631678581237795, 0.006187839984893799, 0.006303487777709961, 0.006266240119934082, 0.006181600093841552, 0.006254816055297851, 0.006316256046295166, 0.007011551856994629, 0.006272352218627929, 0.006756063938140869, 0.006186975955963135, 0.006275455951690673, 0.006190144062042237, 0.006277184009552002, 0.0062674880027771, 0.00620630407333374, 0.006271008014678955, 0.006223264217376709, 0.006367775917053222, 0.006219423770904541, 0.00614844799041748, 0.006236544132232666]",tokens/s,40620.54224930792,kWh,1.8653826384802866e-07,2.0571929875286933e-08,7.918076098593644e-08,2.8629095470925205e-07,tokens/kWh,894195208.7168993,MB,1316.548608,613.351424,0.0,203.423744,186.687488,s,32,10.012835144042967,0.31290109825134277,0.001958896783369078,0.31242938232421874,0.3154934631347656,0.31649754333496094,0.31895257751464845,"[0.3195748291015625, 0.3123941955566406, 0.31391668701171876, 0.3119555969238281, 0.31176416015625, 0.3124645690917969, 0.3128881225585938, 0.3126595153808594, 0.31342831420898437, 0.31559823608398435, 0.31210430908203124, 0.3114669494628906, 0.3137422180175781, 0.3132228088378906, 0.31349627685546877, 0.31101092529296875, 0.3145505065917969, 0.3116888427734375, 0.3113709716796875, 0.31756756591796875, 0.31312869262695314, 0.3112661437988281, 0.3110016784667969, 0.31158636474609375, 0.3106305847167969, 0.3118937072753906, 0.3117104187011719, 0.31084735107421874, 0.3113258972167969, 0.3141195983886719, 0.3128370361328125, 0.3156220703125]",tokens/s,201.3415751880623,kWh,8.93105547428338e-06,9.849508270135154e-07,3.2166215389534714e-06,1.3132627840250369e-05,tokens/kWh,4797212.01014396,,s,2016,9.99823603630067,0.0049594424783237405,0.00011499753868454484,0.004934175968170166,0.0050223200321197516,0.005089400053024292,0.00545977780818939,"[0.005140672206878662, 0.005021312236785889, 0.005017792224884033, 0.005008959770202636, 0.00498137617111206, 0.00510975980758667, 0.005124095916748047, 0.005224448204040527, 0.005203648090362549, 0.005284160137176514, 0.005240928173065185, 0.005596799850463867, 0.005198304176330566, 0.005713727951049805, 0.005277696132659912, 0.0051113600730896, 0.005108160018920898, 0.0050687680244445804, 0.0050826559066772465, 0.005220320224761963, 0.005112287998199463, 0.005017951965332031, 0.005125855922698975, 0.005063680171966553, 0.004981503963470459, 0.00505401611328125, 0.005067455768585205, 0.0050785279273986815, 0.004995584011077881, 0.0049909758567810054, 0.004932960033416748, 0.004951807975769043, 0.004944799900054931, 0.004920864105224609, 0.004951935768127441, 0.0050183038711547855, 0.0049558720588684085, 0.004910880088806152, 0.004959904193878174, 0.005035967826843262, 0.0049795198440551755, 0.004982495784759521, 0.005095583915710449, 0.005507232189178467, 0.0049749441146850586, 
0.004953728199005127, 0.0050094079971313476, 0.00496454381942749, 0.0049723520278930665, 0.005053440093994141, 0.005011744022369384, 0.005027711868286133, 0.004985695838928223, 0.004980127811431884, 0.005007455825805664, 0.004971007823944092, 0.004956319808959961, 0.005011295795440674, 0.004978240013122559, 0.005058559894561767, 0.0049333758354187015, 0.004917695999145508, 0.004942240238189698, 0.004884479999542236, 0.0049879360198974605, 0.004936768054962158, 0.00494377613067627, 0.0049064640998840335, 0.004995872020721435, 0.00494975996017456, 0.004927487850189209, 0.004927487850189209, 0.004974592208862305, 0.004984831809997559, 0.004917407989501953, 0.004955999851226807, 0.00502569580078125, 0.00498803186416626, 0.005108191967010498, 0.004957024097442627, 0.004972415924072266, 0.004992800235748291, 0.0049500160217285155, 0.004933792114257813, 0.00493174409866333, 0.00517468786239624, 0.0049647679328918455, 0.004933504104614258, 0.004915200233459473, 0.005033120155334473, 0.004936543941497803, 0.00491315221786499, 0.004910528182983398, 0.004990592002868653, 0.004946879863739014, 0.004924575805664063, 0.004916063785552979, 0.004972479820251465, 0.004970111846923828, 0.004909408092498779, 0.004986048221588135, 0.005101471900939941, 0.004936704158782959, 0.004919519901275635, 0.004898591995239258, 0.004941823959350586, 0.004966400146484375, 0.004911327838897705, 0.004898335933685303, 0.004905055999755859, 0.005129727840423584, 0.004915872097015381, 0.004892096042633056, 0.004885056018829346, 0.00494979190826416, 0.004937119960784912, 0.004887680053710938, 0.004894303798675537, 0.0049890241622924806, 0.004951488018035888, 0.00487286376953125, 0.004898496150970459, 0.004896992206573486, 0.004978496074676514, 0.004929728031158447, 0.004919328212738037, 0.004826272010803222, 0.004882847785949707, 0.004951935768127441, 0.004966976165771485, 0.004937248229980469, 0.004923871994018555, 0.004931615829467773, 0.005019040107727051, 0.004958720207214356, 0.004925536155700684, 0.004916607856750488, 0.0050366721153259275, 0.00497376012802124, 0.004928319931030273, 0.0049235520362854, 0.005005023956298828, 0.004947743892669678, 0.005089759826660156, 0.0050198397636413575, 0.005158592224121094, 0.004959392070770264, 0.004915743827819824, 0.004919616222381592, 0.004988319873809814, 0.0049359679222106935, 0.004966879844665527, 0.004998688220977783, 0.004989247798919677, 0.00494979190826416, 0.004913375854492187, 0.004925439834594727, 0.005012767791748047, 0.004956831932067871, 0.004925504207611084, 0.004917535781860351, 0.004931104183197021, 0.004954304218292236, 0.004941343784332275, 0.00505244779586792, 0.005046720027923584, 0.004976064205169678, 0.005040703773498535, 0.005074944019317627, 0.005128064155578613, 0.005016736030578613, 0.0049714879989624025, 0.004989120006561279, 0.0050133118629455565, 0.004978432178497315, 0.005203936100006103, 0.004962592124938965, 0.0050709757804870605, 0.004982367992401123, 0.004919583797454834, 0.004923391819000244, 0.00499507188796997, 0.005074944019317627, 0.004941023826599121, 0.0049344320297241215, 0.004974592208862305, 0.004945343971252442, 0.004926015853881836, 0.004914976119995117, 0.004784383773803711, 0.004888319969177246, 0.00498089599609375, 0.004916927814483643, 0.004890783786773682, 0.004888576030731201, 0.0049006400108337405, 0.004962528228759765, 0.004902912139892578, 0.004898111820220947, 0.0048913278579711915, 0.004971903800964355, 0.00493395185470581, 0.004888895988464355, 0.004897791862487793, 0.004968800067901611, 0.004915647983551026, 
0.005030111789703369, 0.0049192957878112795, 0.004999135971069336, 0.005074304103851318, 0.0049424958229064946, 0.004948224067687988, 0.00496614408493042, 0.004974592208862305, 0.004943871974945068, 0.0049269118309021, 0.004936255931854248, 0.0050152640342712405, 0.004945631980895996, 0.004926015853881836, 0.004929535865783692, 0.005045472145080566, 0.004966911792755127, 0.00493126392364502, 0.004933792114257813, 0.004971968173980713, 0.004939807891845703, 0.004943967819213867, 0.004926047801971436, 0.004999296188354492, 0.004952191829681396, 0.004931359767913818, 0.004951488018035888, 0.004997951984405518, 0.004953311920166016, 0.00495411205291748, 0.004926239967346192, 0.005006656169891357, 0.004956863880157471, 0.004921343803405762, 0.005025792121887207, 0.004904960155487061, 0.0049725441932678225, 0.00491315221786499, 0.004917247772216797, 0.004912320137023926, 0.004989759922027588, 0.004940991878509522, 0.004911231994628906, 0.004924320220947266, 0.00499894380569458, 0.004941823959350586, 0.004853055953979492, 0.004907264232635498, 0.00490070390701294, 0.004915808200836182, 0.004995168209075928, 0.004951871871948242, 0.004908383846282959, 0.004913919925689697, 0.004958208084106445, 0.0049090561866760255, 0.004929376125335693, 0.0049153599739074705, 0.00491315221786499, 0.004989952087402344, 0.004951039791107178, 0.004921343803405762, 0.004939199924468994, 0.004987743854522705, 0.004939487934112549, 0.004921343803405762, 0.0049210238456726075, 0.0049708161354064945, 0.004941311836242676, 0.004898560047149658, 0.004932415962219238, 0.0049376640319824215, 0.004955520153045654, 0.0049424638748168945, 0.004907360076904297, 0.004898752212524414, 0.004970208168029785, 0.005074912071228027, 0.004902944087982177, 0.004917247772216797, 0.0049879360198974605, 0.004957151889801025, 0.004929535865783692, 0.00491315221786499, 0.004974751949310303, 0.004928544044494629, 0.005005152225494385, 0.004913536071777344, 0.004954720020294189, 0.004941215991973877, 0.004946527957916259, 0.004902912139892578, 0.004898816108703613, 0.004978367805480957, 0.004931104183197021, 0.0049119038581848145, 0.0049023680686950686, 0.0049894719123840334, 0.004935679912567138, 0.004925407886505127, 0.004931615829467773, 0.005005536079406739, 0.005074719905853272, 0.005015552043914795, 0.004921535968780518, 0.004998176097869873, 0.004944255828857422, 0.004917407989501953, 0.004897280216217041, 0.004851712226867676, 0.004919551849365235, 0.005010848045349121, 0.004945600032806396, 0.0049097280502319336, 0.0049021439552307125, 0.004976607799530029, 0.004934783935546875, 0.004906655788421631, 0.004908383846282959, 0.004967328071594238, 0.0049600000381469726, 0.004903168201446533, 0.004921088218688965, 0.004898816108703613, 0.004984831809997559, 0.004941760063171386, 0.004912864208221435, 0.004908736228942871, 0.004991615772247314, 0.004947840213775635, 0.0049032001495361325, 0.0049023680686950686, 0.0049872961044311525, 0.004925439834594727, 0.004902912139892578, 0.004902976036071777, 0.004929599761962891, 0.004935647964477539, 0.0049622077941894535, 0.004904960155487061, 0.004902912139892578, 0.004952064037322998, 0.004908480167388916, 0.004892928123474121, 0.004902944087982177, 0.004961599826812744, 0.004930528163909912, 0.0049170880317687985, 0.0049025602340698245, 0.0049541440010070804, 0.004968544006347656, 0.004913119792938232, 0.004897183895111084, 0.004917247772216797, 0.004976640224456787, 0.0049192638397216795, 0.005021535873413086, 0.004917439937591553, 0.004976640224456787, 0.004929535865783692, 
0.00491487979888916, 0.004968768119812012, 0.0053116798400878905, 0.004977471828460693, 0.005061952114105225, 0.004948671817779541, 0.005167103767395019, 0.005076288223266601, 0.004963007926940918, 0.0049575681686401364, 0.00514899206161499, 0.004997439861297607, 0.004909503936767578, 0.0049459199905395506, 0.0049090561866760255, 0.004963967800140381, 0.004938432216644287, 0.004910783767700195, 0.0049294400215148925, 0.004976736068725586, 0.0050440959930419926, 0.004923520088195801, 0.004907008171081543, 0.004951231956481934, 0.004968992233276367, 0.004923679828643799, 0.004918816089630127, 0.004905087947845459, 0.00501091194152832, 0.00504307222366333, 0.004947872161865235, 0.004923103809356689, 0.0050191679000854495, 0.004968768119812012, 0.004929215908050537, 0.004917727947235107, 0.005021215915679932, 0.004946688175201416, 0.004924960136413574, 0.004909952163696289, 0.004976319789886475, 0.004957312107086181, 0.005378015995025635, 0.00493455982208252, 0.005002816200256347, 0.004958816051483154, 0.004912992000579834, 0.004914976119995117, 0.004983007907867432, 0.005089280128479004, 0.004925439834594727, 0.004947968006134033, 0.004994944095611572, 0.004999135971069336, 0.004921696186065674, 0.0049227199554443355, 0.0049730238914489746, 0.00497599983215332, 0.0049056000709533695, 0.004904096126556397, 0.004889567852020263, 0.0049844799041748045, 0.0050845761299133305, 0.004889408111572266, 0.004913504123687744, 0.004961952209472657, 0.0049552001953125, 0.004926591873168945, 0.004994688034057618, 0.004989120006561279, 0.004966080188751221, 0.004919392108917236, 0.004902400016784668, 0.004969183921813965, 0.004935679912567138, 0.004882431983947754, 0.004911104202270508, 0.004890048027038575, 0.004887328147888183, 0.00495411205291748, 0.004896543979644776, 0.004890175819396972, 0.004903359889984131, 0.0049827837944030765, 0.004960256099700928, 0.004921343803405762, 0.00491264009475708, 0.004927999973297119, 0.005006432056427002, 0.004934336185455322, 0.004925343990325928, 0.00494214391708374, 0.004995296001434326, 0.0050001277923583986, 0.004904863834381104, 0.0049285759925842285, 0.004963712215423584, 0.004935423851013184, 0.004906847953796387, 0.0049222722053527835, 0.005035871982574463, 0.004974751949310303, 0.0049459199905395506, 0.0049502081871032715, 0.00498252820968628, 0.004949535846710205, 0.0049342079162597655, 0.004937376022338867, 0.004990496158599853, 0.0049613118171691895, 0.004962048053741455, 0.004935679912567138, 0.004923423767089844, 0.005021664142608642, 0.004941823959350586, 0.005175295829772949, 0.0049348797798156735, 0.005085504055023194, 0.004910880088806152, 0.0050183038711547855, 0.004976640224456787, 0.004999167919158935, 0.004933440208435059, 0.004933631896972656, 0.004928736209869384, 0.005006303787231445, 0.005115903854370117, 0.004928639888763428, 0.004961152076721192, 0.004999167919158935, 0.004968448162078858, 0.004929120063781738, 0.004929952144622803, 0.004993023872375488, 0.004947008132934571, 0.004916351795196533, 0.00490831995010376, 0.004977439880371094, 0.00488486385345459, 0.00496288013458252, 0.004927616119384765, 0.0049212160110473635, 0.004904960155487061, 0.004974143981933594, 0.004938240051269531, 0.0049006400108337405, 0.004909120082855225, 0.005009664058685303, 0.004935776233673096, 0.004910655975341797, 0.004909247875213623, 0.004906367778778076, 0.004972799777984619, 0.004949440002441406, 0.004916160106658935, 0.005070655822753906, 0.005066944122314453, 0.004943871974945068, 0.004925439834594727, 0.004929376125335693, 0.0050157117843627926, 
0.0049500160217285155, 0.004941184043884278, 0.004917600154876709, 0.00504585599899292, 0.004953855991363525, 0.004918208122253418, 0.0050503678321838375, 0.005011231899261475, 0.00512175989151001, 0.004951583862304688, 0.004952479839324951, 0.005007936000823975, 0.0049618239402771, 0.004955935955047608, 0.005056672096252442, 0.005001503944396973, 0.004983168125152588, 0.0049558720588684085, 0.004929696083068848, 0.004994368076324463, 0.004965055942535401, 0.004923391819000244, 0.004925439834594727, 0.00504249620437622, 0.005052000045776367, 0.004917376041412354, 0.005030111789703369, 0.004973887920379639, 0.004962751865386963, 0.00491315221786499, 0.004927487850189209, 0.00499129581451416, 0.005121376037597656, 0.005063007831573487, 0.004966368198394775, 0.004960256099700928, 0.004940095901489258, 0.004929056167602539, 0.004953663825988769, 0.004919936180114746, 0.004837535858154297, 0.005166399955749512, 0.005185632228851318, 0.004962944030761719, 0.004972511768341064, 0.005197824001312256, 0.0049517440795898435, 0.004940095901489258, 0.005025951862335205, 0.005374144077301026, 0.004986144065856933, 0.005155136108398437, 0.005506175994873047, 0.005598144054412842, 0.004953375816345215, 0.005445824146270752, 0.0049352960586547855, 0.004920224189758301, 0.004968448162078858, 0.005054304122924805, 0.004935808181762695, 0.004911136150360107, 0.004914656162261963, 0.004999551773071289, 0.0049378881454467775, 0.0050462718009948735, 0.0049090561866760255, 0.004986688137054444, 0.004930047988891601, 0.004910816192626953, 0.004910399913787842, 0.004975264072418213, 0.004916800022125244, 0.0049996161460876464, 0.005217440128326416, 0.004985695838928223, 0.004925439834594727, 0.004909215927124023, 0.004906847953796387, 0.004976640224456787, 0.00496230411529541, 0.004941823959350586, 0.004915200233459473, 0.004966176033020019, 0.0049686717987060544, 0.0050421757698059086, 0.004941855907440186, 0.0049072961807250974, 0.005013408184051514, 0.004947743892669678, 0.004909023761749267, 0.004920896053314209, 0.004978752136230469, 0.004962719917297363, 0.0049090561866760255, 0.004900735855102539, 0.004991104125976562, 0.004924831867218017, 0.004978591918945312, 0.004899136066436768, 0.004968832015991211, 0.004937727928161621, 0.004931039810180664, 0.0048653120994567875, 0.005022496223449707, 0.004890143871307373, 0.004959936141967773, 0.004913887977600098, 0.004911104202270508, 0.004911104202270508, 0.0049725441932678225, 0.005000736236572266, 0.004926271915435791, 0.004904607772827149, 0.004933184146881104, 0.004931104183197021, 0.0049284157752990726, 0.004909023761749267, 0.004884064197540283, 0.004958655834197998, 0.004920479774475098, 0.004932447910308838, 0.004902912139892578, 0.004953440189361572, 0.005042431831359863, 0.004925856113433838, 0.0049621758460998535, 0.005000991821289063, 0.005000927925109863, 0.004928127765655517, 0.004937727928161621, 0.004986656188964844, 0.004978591918945312, 0.004933695793151855, 0.004900767803192139, 0.004911168098449707, 0.004986591815948486, 0.004927455902099609, 0.004942431926727295, 0.004925248146057129, 0.004997312068939209, 0.004957888126373291, 0.004935679912567138, 0.004933152198791504, 0.005063456058502197, 0.005000671863555908, 0.004907072067260742, 0.004891007900238037, 0.004984960079193115, 0.004939487934112549, 0.004910687923431397, 0.005075615882873535, 0.00497049617767334, 0.004955264091491699, 0.004903808116912842, 0.004880383968353271, 0.004888576030731201, 0.004976640224456787, 0.004904416084289551, 0.004928224086761475, 0.00490067195892334, 
0.005152607917785644, 0.004933536052703857, 0.004901120185852051, 0.004918655872344971, 0.005017792224884033, 0.004832672119140625, 0.004946752071380615, 0.004916192054748535, 0.0049160318374633786, 0.004917247772216797, 0.0049725441932678225, 0.004931583881378174, 0.004918816089630127, 0.004915840148925781, 0.005025023937225342, 0.005007775783538818, 0.004924736022949219, 0.00489353609085083, 0.004974656105041504, 0.005006400108337403, 0.0049081602096557615, 0.004899680137634277, 0.004942431926727295, 0.004933919906616211, 0.004905087947845459, 0.004896607875823975, 0.004905055999755859, 0.004967584133148194, 0.00491974401473999, 0.004903264045715332, 0.004900479793548584, 0.004979104042053223, 0.004949215888977051, 0.00491542387008667, 0.004913504123687744, 0.00508134412765503, 0.004952064037322998, 0.004927487850189209, 0.005005311965942383, 0.004960256099700928, 0.004959455966949463, 0.004913951873779297, 0.0049164161682128905, 0.004938560009002686, 0.005009088039398193, 0.00492575979232788, 0.004939519882202149, 0.004912928104400634, 0.004972671985626221, 0.004933568000793457, 0.004897024154663086, 0.004911263942718506, 0.004945216178894043, 0.004928192138671875, 0.0049210238456726075, 0.004892223834991455, 0.004897535800933838, 0.0049909758567810054, 0.004937727928161621, 0.0049285759925842285, 0.004905920028686523, 0.0049827837944030765, 0.004923391819000244, 0.004908927917480469, 0.004898367881774902, 0.005033728122711182, 0.004947999954223633, 0.004894527912139892, 0.004827231884002686, 0.004894847869873047, 0.004892096042633056, 0.004960671901702881, 0.004922783851623535, 0.004914944171905518, 0.004908127784729004, 0.004954048156738281, 0.004942048072814942, 0.0049006080627441405, 0.004907008171081543, 0.004935488224029541, 0.004982016086578369, 0.004944831848144531, 0.004978687763214112, 0.004944096088409424, 0.004994592189788818, 0.0049136638641357425, 0.004929120063781738, 0.004923232078552246, 0.005078368186950684, 0.005070047855377197, 0.005023136138916016, 0.005102047920227051, 0.0051274561882019044, 0.005151391983032227, 0.005082111835479736, 0.005128928184509277, 0.005075327873229981, 0.00504531192779541, 0.005036831855773926, 0.005058464050292969, 0.004991072177886963, 0.004964352130889893, 0.00495411205291748, 0.005027103900909424, 0.004967040061950684, 0.004925536155700684, 0.00491315221786499, 0.005023744106292725, 0.004964416027069092, 0.00496940803527832, 0.0049316477775573735, 0.004944767951965332, 0.00494598388671875, 0.004967455863952637, 0.005005695819854736, 0.00524073600769043, 0.004950975894927979, 0.004904704093933105, 0.004911327838897705, 0.0049287681579589845, 0.004968992233276367, 0.0049231362342834475, 0.004903456211090088, 0.004939487934112549, 0.005029888153076172, 0.004932928085327149, 0.004918975830078125, 0.004908415794372558, 0.004960959911346436, 0.0049304962158203125, 0.004906144142150879, 0.004806911945343018, 0.004887839794158935, 0.004902656078338623, 0.00496943998336792, 0.004928736209869384, 0.0048997759819030765, 0.004906847953796387, 0.005009024143218994, 0.004940159797668457, 0.0049229440689086915, 0.005048031806945801, 0.005133376121520996, 0.005221888065338135, 0.005219615936279297, 0.005217152118682861, 0.005197247982025146, 0.0051205759048461915, 0.005043424129486084, 0.005030687808990479, 0.004972415924072266, 0.004927584171295166, 0.004987167835235596, 0.00498252820968628, 0.004974592208862305, 0.004923488140106201, 0.004908991813659668, 0.004935520172119141, 0.004969791889190674, 0.004905663967132569, 0.004921088218688965, 
0.004890975952148438, 0.004962495803833008, 0.004923232078552246, 0.00490012788772583, 0.0048904638290405275, 0.005042848110198975, 0.004940000057220459, 0.004924799919128418, 0.004895359992980957, 0.004910272121429443, 0.004926271915435791, 0.004955967903137207, 0.004902239799499512, 0.004901535987854004, 0.004985023975372314, 0.00495411205291748, 0.004924960136413574, 0.004925920009613037, 0.005087232112884522, 0.004943679809570312, 0.0049276800155639644, 0.00491315221786499, 0.0049862079620361325, 0.004922016143798828, 0.004915200233459473, 0.004923456192016602, 0.004978240013122559, 0.004944255828857422, 0.004921343803405762, 0.004927487850189209, 0.004892096042633056, 0.004989823818206787, 0.0049202880859375, 0.004890848159790039, 0.004896895885467529, 0.004912543773651123, 0.004947328090667725, 0.0049344320297241215, 0.004889984130859375, 0.004879072189331055, 0.004900864124298096, 0.004975743770599365, 0.004915296077728271, 0.004877088069915772, 0.004889664173126221, 0.004992127895355225, 0.0049269118309021, 0.004934304237365723, 0.004921055793762207, 0.004968448162078858, 0.004997312068939209, 0.004929344177246094, 0.004907008171081543, 0.005462240219116211, 0.005614751815795898, 0.006055776119232178, 0.005397280216217041, 0.00496454381942749, 0.004939104080200195, 0.004997600078582764, 0.004936863899230957, 0.0049090561866760255, 0.004922207832336426, 0.004945504188537598, 0.004958752155303955, 0.004921120166778564, 0.0048919677734375, 0.004887328147888183, 0.004968448162078858, 0.005000671863555908, 0.0048932161331176754, 0.004882016181945801, 0.004941472053527832, 0.004913919925689697, 0.004895999908447266, 0.004877056121826172, 0.004914175987243652, 0.0049398717880249025, 0.004885536193847656, 0.004894527912139892, 0.004881631851196289, 0.004950655937194824, 0.004907392024993897, 0.004878176212310791, 0.004881984233856202, 0.0049647998809814456, 0.004911104202270508, 0.004909279823303222, 0.004888351917266846, 0.004896768093109131, 0.0050032639503479, 0.0049168958663940426, 0.004946271896362305, 0.004902463912963867, 0.0049853758811950685, 0.004935840129852295, 0.004874239921569825, 0.004917247772216797, 0.004902944087982177, 0.004892608165740967, 0.004976672172546387, 0.004905151844024658, 0.004893631935119629, 0.004963551998138428, 0.004996767997741699, 0.004941023826599121, 0.004917791843414307, 0.00491545581817627, 0.00497049617767334, 0.004925439834594727, 0.0049209918975830075, 0.004925695896148682, 0.004943967819213867, 0.00495411205291748, 0.004917247772216797, 0.004897024154663086, 0.004938784122467041, 0.0049731841087341305, 0.004943583965301514, 0.004929887771606445, 0.004886655807495117, 0.0050627517700195315, 0.004928959846496582, 0.004893152236938476, 0.004908959865570068, 0.004966400146484375, 0.004947135925292969, 0.004905792236328125, 0.004896224021911621, 0.0049129600524902345, 0.00497327995300293, 0.004920608043670655, 0.0048913278579711915, 0.004896800041198731, 0.004993311882019043, 0.0049286079406738285, 0.005003903865814209, 0.004895967960357666, 0.004973343849182129, 0.004930592060089111, 0.004881631851196289, 0.004919007778167724, 0.004923423767089844, 0.004924928188323975, 0.004892704010009766, 0.004876959800720215, 0.004873631954193115, 0.0050917439460754395, 0.0049909758567810054, 0.004898303985595703, 0.004936192035675049, 0.00498092794418335, 0.004923200130462647, 0.004889855861663819, 0.004868864059448242, 0.004904895782470703, 0.004919360160827637, 0.004933536052703857, 0.004888351917266846, 0.004906400203704834, 0.004890431880950927, 
0.004958144187927246, 0.004911968231201172, 0.004875743865966797, 0.004886208057403564, 0.004895584106445312, 0.004961696147918701, 0.004958816051483154, 0.0048798398971557615, 0.004907872200012207, 0.004973824024200439, 0.004932127952575684, 0.004922560214996338, 0.004944608211517334, 0.00499507188796997, 0.004957312107086181, 0.004907167911529541, 0.004930560111999512, 0.004936768054962158, 0.004952544212341308, 0.005071040153503418, 0.004918528079986572, 0.004938144207000733, 0.0050059518814086915, 0.004946879863739014, 0.004903711795806885, 0.004908063888549805, 0.004983551979064941, 0.004945312023162842, 0.004909471988677978, 0.004933856010437012, 0.005012735843658447, 0.004910016059875488, 0.00493123197555542, 0.0048990721702575684, 0.0050711679458618165, 0.004966176033020019, 0.005049824237823486, 0.004936031818389893, 0.005001408100128174, 0.005069888114929199, 0.004913856029510498, 0.004943168163299561, 0.004979584217071533, 0.004954016208648682, 0.0049214081764221195, 0.00490831995010376, 0.00495084810256958, 0.005081088066101074, 0.00493945598602295, 0.0049155521392822265, 0.004921088218688965, 0.0049686717987060544, 0.004923391819000244, 0.004925439834594727, 0.004906432151794433, 0.004976704120635987, 0.005050655841827393, 0.005214272022247314, 0.00598031997680664, 0.005379871845245361, 0.0054271998405456545, 0.00486240005493164, 0.004984640121459961, 0.005197792053222656, 0.004910399913787842, 0.005126495838165283, 0.004956416130065918, 0.004916607856750488, 0.004905983924865722, 0.0049600000381469726, 0.004955615997314453, 0.004921472072601319, 0.004901567935943604, 0.004904575824737549, 0.004948480129241943, 0.004963744163513184, 0.004903359889984131, 0.0050670399665832516, 0.004990687847137451, 0.00493171215057373, 0.004893887996673584, 0.004917024135589599, 0.0049836158752441405, 0.004945727825164795, 0.0048867521286010745, 0.004877471923828125, 0.004957312107086181, 0.004962016105651856, 0.004925439834594727, 0.004931583881378174, 0.004911104202270508, 0.005005504131317139, 0.004943679809570312, 0.004941504001617431, 0.004919616222381592, 0.004993279933929443, 0.004988287925720215, 0.0049313921928405766, 0.004923967838287353, 0.0049799041748046875, 0.005020480155944824, 0.0049296321868896485, 0.004906976222991943, 0.004975872039794922, 0.004915967941284179, 0.004898975849151612, 0.004902688026428223, 0.004883488178253174, 0.004956768035888672, 0.00492902421951294, 0.004879039764404297, 0.0048807039260864256, 0.0049409279823303225, 0.004928063869476318, 0.004903103828430176, 0.004889920234680176, 0.004934336185455322, 0.00501145601272583, 0.004930880069732666, 0.00490937614440918, 0.004872576236724853, 0.004959968090057373, 0.0048925762176513675, 0.004874207973480225, 0.004814655780792237, 0.00488640022277832, 0.004907519817352295, 0.0049721598625183105, 0.0049276800155639644, 0.004904607772827149, 0.00491487979888916, 0.00497926378250122, 0.004941279888153076, 0.0049136319160461425, 0.004882048130035401, 0.00497654390335083, 0.005003615856170655, 0.00490115213394165, 0.004896768093109131, 0.004882431983947754, 0.0050299839973449705, 0.0049294400215148925, 0.004900352001190185, 0.004905471801757813, 0.00495849609375, 0.004919072151184082, 0.004881824016571045, 0.0048891201019287105, 0.004952064037322998, 0.004922560214996338, 0.004895552158355713, 0.004903935909271241, 0.004955135822296143, 0.005002528190612793, 0.00491158390045166, 0.004905216217041016, 0.004933919906616211, 0.004977952003479004, 0.0049320321083068846, 0.004898816108703613, 0.004892831802368164, 
0.004970335960388183, 0.004939775943756103, 0.004907392024993897, 0.004901599884033203, 0.0049407038688659664, 0.004962016105651856, 0.0049169921875, 0.004931583881378174, 0.0048893442153930666, 0.004987711906433105, 0.004901855945587158, 0.004886271953582763, 0.004904352188110352, 0.005019584178924561, 0.004946080207824707, 0.0049237761497497555, 0.00492083215713501, 0.0049777917861938475, 0.004946815967559815, 0.00494271993637085, 0.004933023929595947, 0.004973152160644531, 0.0049903359413146975, 0.004931968212127685, 0.004941055774688721, 0.005190656185150146, 0.005370783805847168, 0.005498591899871826, 0.005083424091339111, 0.0050032639503479, 0.005531712055206299, 0.007346208095550537, 0.005023839950561523, 0.005019743919372558, 0.004979423999786377, 0.00510595178604126, 0.0049387521743774416, 0.004962016105651856, 0.005017600059509277, 0.004980480194091797, 0.004947679996490479, 0.0049507198333740236, 0.00500105619430542, 0.006049280166625977, 0.005109632015228271, 0.005034239768981934, 0.004962592124938965, 0.004960256099700928, 0.004912896156311035, 0.00495417594909668, 0.0049359679222106935, 0.0049213762283325194, 0.004894688129425049, 0.004888895988464355, 0.004947648048400879, 0.004910463809967041, 0.0048847999572753905, 0.0048807039260864256, 0.00499894380569458, 0.005361504077911377, 0.004901247978210449, 0.004900320053100586, 0.00495465612411499, 0.004941823959350586, 0.004888351917266846, 0.004899040222167968, 0.004959263801574707, 0.004926047801971436, 0.004929567813873291, 0.004911776065826416, 0.004881184101104736, 0.0049919037818908696, 0.004912896156311035, 0.004873983860015869, 0.004878911972045898, 0.004966335773468018, 0.004917247772216797, 0.004917247772216797, 0.004891808032989502, 0.004919871807098388, 0.005060895919799805, 0.004900288105010986, 0.004883008003234863, 0.004906816005706787, 0.0049823040962219236, 0.004917920112609863, 0.004888063907623291, 0.0048849921226501464, 0.004964352130889893, 0.004827328205108642, 0.004947775840759277, 0.0049192957878112795, 0.0049376640319824215, 0.004934783935546875, 0.005016831874847412, 0.0049714879989624025, 0.004960383892059326, 0.004952672004699707, 0.004994688034057618, 0.005103839874267578, 0.004943168163299561, 0.00492796802520752, 0.005042272090911865, 0.004956448078155517, 0.004939775943756103, 0.004912608146667481, 0.004963871955871582, 0.004944896221160889, 0.0049090561866760255, 0.004919360160827637, 0.004949952125549316, 0.004933631896972656, 0.004900191783905029, 0.004895391941070556, 0.004894720077514648, 0.0049437122344970705, 0.004933792114257813, 0.00493126392364502, 0.00493555212020874, 0.005330783843994141, 0.004946368217468262, 0.004925504207611084, 0.005036128044128418, 0.005056511878967285, 0.004952064037322998, 0.004937727928161621, 0.004927072048187256, 0.005015967845916748, 0.004974688053131103, 0.004929152011871338, 0.004934144020080566, 0.005146399974822998, 0.005107423782348633, 0.004954304218292236, 0.004916768074035644, 0.004983359813690185, 0.0049519681930541995, 0.004911200046539306, 0.004898816108703613, 0.005006879806518555, 0.004945727825164795, 0.004906752109527588, 0.0049222722053527835, 0.004996992111206054, 0.004970111846923828, 0.00497270393371582, 0.004949920177459717, 0.004972991943359375, 0.004978687763214112, 0.004941504001617431, 0.004939807891845703, 0.004943967819213867, 0.0048009281158447265, 0.004974815845489502, 0.004929279804229737, 0.004903264045715332, 0.004910496234893799, 0.004899199962615967, 0.004963520050048828, 0.004920000076293945, 0.004906847953796387, 
0.004890912055969239, 0.004966271877288818, 0.004928639888763428, 0.004889408111572266, 0.004907072067260742, 0.005000192165374756, 0.004924416065216064, 0.004894720077514648, 0.004911104202270508, 0.004931583881378174, 0.004936863899230957, 0.004931968212127685, 0.004887135982513427, 0.004924960136413574, 0.004995488166809082, 0.004986720085144043, 0.004929855823516845, 0.004908927917480469, 0.005014976024627685, 0.004952544212341308, 0.004911295890808106, 0.004915232181549072, 0.004966176033020019, 0.00501145601272583, 0.00491545581817627, 0.004896512031555176, 0.004941472053527832, 0.004947487831115723, 0.004911839962005615, 0.004898623943328857, 0.0048887357711791994, 0.005002848148345947, 0.004914847850799561, 0.004925792217254639, 0.004913504123687744, 0.004980671882629395, 0.004948224067687988, 0.004925439834594727, 0.004936863899230957, 0.0049836478233337405, 0.004995007991790771, 0.004892735958099365, 0.004918784141540527, 0.0049333758354187015, 0.0049301118850708005, 0.004929887771606445, 0.004890687942504883, 0.004890399932861328, 0.005023744106292725, 0.00493779182434082, 0.004925216197967529, 0.0049378881454467775, 0.005000895977020263, 0.004963647842407226, 0.004881184101104736, 0.0049147200584411625, 0.004911359786987305, 0.004883008003234863, 0.005113664150238037, 0.004913087844848633, 0.004898240089416504, 0.004897632122039795, 0.004973440170288086, 0.004946368217468262, 0.00490339183807373, 0.0049147200584411625, 0.004962783813476562, 0.004969632148742676, 0.00492575979232788, 0.0049025602340698245, 0.004909952163696289, 0.0049725441932678225, 0.004911104202270508, 0.004894368171691895, 0.00494646406173706, 0.0049764480590820314, 0.004923711776733399, 0.004886208057403564, 0.004902847766876221, 0.00495849609375, 0.0049201598167419435, 0.004888576030731201, 0.00491206407546997, 0.004897024154663086, 0.004980480194091797, 0.004921311855316162, 0.004915008068084717, 0.0048949441909790035, 0.004957759857177734, 0.004911456108093262, 0.004925024032592773, 0.004893184185028076, 0.004964352130889893, 0.004918528079986572, 0.0048893442153930666, 0.004900544166564941, 0.004886847972869873, 0.004978687763214112, 0.004914783954620362, 0.004896448135375976, 0.004905824184417725, 0.004999040126800537, 0.004924863815307617, 0.004925280094146728, 0.0048891201019287105, 0.004961631774902344, 0.004948832035064697, 0.004908512115478516, 0.004958752155303955, 0.004976640224456787, 0.004952064037322998, 0.004933631896972656, 0.0048865280151367185, 0.004896768093109131, 0.005003168106079102, 0.004943967819213867, 0.004988639831542969, 0.00487772798538208, 0.00489740800857544, 0.005010816097259521, 0.004950655937194824, 0.004952064037322998, 0.004907008171081543, 0.00491542387008667, 0.004997983932495117, 0.00492844820022583, 0.004914400100708008, 0.004905695915222168, 0.0049889922142028805, 0.004915264129638672, 0.004949471950531006, 0.00492796802520752, 0.005021183967590332, 0.004966464042663574, 0.004927103996276856, 0.004935488224029541, 0.004982016086578369, 0.005150400161743164, 0.004918687820434571, 0.005040544033050537, 0.005003520011901855, 0.0049498558044433595, 0.004898975849151612, 0.00490009593963623, 0.004926239967346192, 0.004941760063171386, 0.004900896072387696, 0.004890624046325683, 0.004906623840332031, 0.0049565439224243165, 0.004913375854492187, 0.004875328063964844, 0.004895296096801758, 0.004957888126373291, 0.004929056167602539, 0.004901343822479248, 0.004897471904754639, 0.004885312080383301, 0.004950463771820068, 0.004911776065826416, 0.004936704158782959, 
0.004889664173126221, 0.004953536033630371, 0.004991360187530517, 0.004935840129852295, 0.005012608051300048, 0.004979072093963623, 0.004921696186065674, 0.004892000198364258, 0.0048831038475036625, 0.004923391819000244, 0.004954271793365479, 0.004923232078552246, 0.004897984027862548, 0.004897600173950196, 0.004990848064422607, 0.004926688194274902, 0.004916128158569336, 0.0049292478561401365, 0.005003551959991455, 0.004872608184814453, 0.004930912017822265, 0.004925087928771972, 0.004891136169433594, 0.004880864143371582, 0.0049807682037353515, 0.004919007778167724, 0.004892928123474121, 0.004902912139892578, 0.005011104106903076, 0.0049153599739074705, 0.004899007797241211, 0.004888319969177246, 0.0049192638397216795, 0.004932191848754883, 0.0050028800964355465, 0.004948031902313232, 0.0048863358497619625, 0.004997312068939209, 0.004904511928558349, 0.0048800320625305175, 0.004915999889373779, 0.004984320163726807, 0.004929952144622803, 0.004892767906188965, 0.004908991813659668, 0.004933695793151855, 0.0049268159866333, 0.004911424160003662, 0.004882783889770508, 0.004910208225250244, 0.004977663993835449, 0.004933504104614258, 0.004888576030731201, 0.00493174409866333, 0.005061728000640869, 0.004958975791931153, 0.004904607772827149, 0.004907519817352295, 0.004954239845275879, 0.0049407358169555665, 0.004879136085510254, 0.0048925762176513675, 0.0049398717880249025, 0.004960256099700928, 0.004902976036071777, 0.004890048027038575, 0.004897280216217041, 0.004946239948272705, 0.004902592182159424, 0.004918655872344971, 0.004896639823913574, 0.004956319808959961, 0.0049030079841613766, 0.004881951808929443, 0.004899807929992676, 0.004917247772216797, 0.00497270393371582, 0.004908895969390869, 0.004901919841766357, 0.004888671875, 0.004963200092315673, 0.004937727928161621, 0.00486195182800293, 0.004899839878082276, 0.004894015789031983, 0.0048921918869018555, 0.004970304012298584, 0.004903264045715332, 0.0049582719802856445, 0.004905151844024658, 0.00500710391998291, 0.004958208084106445, 0.004898272037506104, 0.004886816024780274, 0.00493609619140625, 0.004929376125335693, 0.004943071842193603, 0.0049119038581848145, 0.004929535865783692, 0.00516096019744873, 0.004966176033020019, 0.004954432010650635, 0.004945151805877686, 0.00502236795425415, 0.004952064037322998, 0.004941472053527832, 0.005025311946868897, 0.004993855953216553, 0.004970848083496094, 0.004904704093933105, 0.004984000205993652, 0.0049663038253784176, 0.0049236159324646, 0.004973055839538575, 0.004911200046539306, 0.004952064037322998, 0.004929535865783692, 0.004892000198364258, 0.004908031940460205, 0.00492303991317749, 0.004964352130889893, 0.004924575805664063, 0.004926303863525391, 0.004918687820434571, 0.005077600002288818, 0.00496230411529541, 0.004937727928161621, 0.00491107177734375, 0.004945951938629151, 0.004904960155487061, 0.004909152030944824, 0.004894847869873047, 0.0049634242057800294, 0.004946623802185059, 0.0049311680793762205, 0.00494598388671875, 0.004962656021118164, 0.004990464210510254, 0.004944511890411377, 0.004986752033233643, 0.0049090561866760255, 0.004974431991577148, 0.004920959949493408, 0.004875103950500489, 0.004912735939025879, 0.004785632133483887, 0.00491100788116455, 0.004971424102783203, 0.004890624046325683, 0.004896768093109131, 0.004876543998718261, 0.004963391780853271, 0.004921792030334472, 0.004875936031341553, 0.004899424076080322, 0.004952095985412598, 0.004920864105224609, 0.004879968166351318, 0.004911200046539306, 0.00489731216430664, 0.004950399875640869, 
0.004910528182983398, 0.004877952098846436, 0.004881184101104736, 0.004966815948486328, 0.004919072151184082, 0.004879968166351318, 0.004870368003845215, 0.004931136131286621, 0.0049480957984924315, 0.0049073281288146975, 0.004904287815093994, 0.004897439956665039, 0.0049779839515686035, 0.00493228816986084, 0.005016895771026611, 0.004901567935943604, 0.005364992141723633, 0.005319424152374267, 0.004945824146270752, 0.005002943992614746, 0.0049647679328918455, 0.00491315221786499, 0.004917215824127197, 0.004924992084503174, 0.004950496196746826, 0.004903071880340576, 0.004902592182159424, 0.004907167911529541, 0.004980127811431884, 0.004927743911743164, 0.004904511928558349, 0.004948256015777588, 0.00497001600265503, 0.005057024002075195, 0.004931424140930176, 0.00489734411239624, 0.004976319789886475, 0.004924895763397216, 0.004903840065002441, 0.004894720077514648, 0.0049060797691345215, 0.0049705920219421384, 0.00499180793762207, 0.00495411205291748, 0.004918879985809326, 0.005007167816162109, 0.004960864067077637, 0.004906559944152832, 0.004958015918731689, 0.004916927814483643, 0.005090271949768067, 0.004929535865783692, 0.004924928188323975, 0.004905471801757813, 0.0049090561866760255, 0.004968448162078858, 0.004923391819000244, 0.004898816108703613, 0.004900352001190185, 0.004985119819641113, 0.004928895950317383, 0.004903647899627685, 0.004904255867004394, 0.004978911876678467, 0.004913887977600098, 0.004889472007751465, 0.0049342079162597655, 0.004864799976348877, 0.004964000225067139, 0.0048990721702575684, 0.004893695831298828, 0.004864863872528076, 0.004957183837890625, 0.004921599864959717, 0.004907680034637451, 0.004905983924865722, 0.004918272018432617, 0.004929696083068848, 0.004896607875823975, 0.004880383968353271, 0.0048724479675292965, 0.004943615913391113, 0.0049060478210449215, 0.004861983776092529, 0.004889503955841064, 0.004943295955657959, 0.005022272109985352, 0.00490831995010376, 0.00490399980545044, 0.004947616100311279, 0.005014592170715332, 0.004883296012878418, 0.004898496150970459, 0.004878528118133545, 0.004942048072814942, 0.004902912139892578, 0.0049090561866760255, 0.004876287937164306, 0.00495849609375, 0.004915264129638672, 0.004885983943939209, 0.004931776046752929, 0.004917024135589599, 0.005044447898864746, 0.004982207775115967, 0.0049567360877990725, 0.0049593281745910645, 0.004987808227539062, 0.004933631896972656, 0.004955840110778808, 0.004860288143157959, 0.00493833589553833, 0.00499507188796997, 0.004943871974945068, 0.004907008171081543, 0.0049192957878112795, 0.004923232078552246, 0.004946080207824707, 0.004911104202270508, 0.004933472156524658, 0.004894879817962647, 0.004970272064208984, 0.004912384033203125, 0.004911776065826416, 0.004913472175598145, 0.004998816013336182, 0.004940127849578858, 0.005001215934753418, 0.004908256053924561, 0.0049684162139892575, 0.004973120212554931, 0.004880640029907226, 0.004896768093109131, 0.005087232112884522, 0.004974592208862305, 0.004961535930633545, 0.00491590404510498, 0.004947999954223633, 0.0051528000831604, 0.004965504169464111, 0.0049509119987487795, 0.004904607772827149, 0.00498908805847168, 0.004929376125335693, 0.004915328025817871, 0.004908512115478516, 0.004956639766693115, 0.004947487831115723, 0.004883200168609619, 0.004894720077514648, 0.004964352130889893, 0.004933631896972656, 0.004902272224426269, 0.004909696102142334, 0.004884223937988281, 0.004958240032196045, 0.004904416084289551, 0.004882559776306152, 0.004877984046936035, 0.004946208000183106, 0.004973120212554931, 
0.004905087947845459, 0.0048925762176513675, 0.004968607902526855, 0.004937376022338867, 0.004880671977996826, 0.004874239921569825, 0.004900864124298096, 0.004972095966339111, 0.004904672145843506, 0.004893407821655273, 0.004902912139892578, 0.004976640224456787, 0.005228544235229492, 0.00609062385559082, 0.00565670394897461, 0.005205152034759522, 0.004921440124511719, 0.004914944171905518, 0.004919424057006836, 0.004959104061126709, 0.004925439834594727, 0.004906847953796387, 0.004931104183197021, 0.005060575962066651, 0.004936351776123047, 0.004929183959960937, 0.005044576168060303, 0.004999135971069336, 0.004964384078979492, 0.004935679912567138, 0.005027679920196533, 0.004984992027282715, 0.0049398717880249025, 0.004918687820434571, 0.004907519817352295, 0.004997119903564453, 0.004925439834594727, 0.004911104202270508, 0.004892672061920166, 0.004933119773864746, 0.004958655834197998, 0.004925504207611084, 0.004904960155487061, 0.004896063804626465, 0.0049600000381469726, 0.00490502405166626, 0.0048951997756958, 0.004909471988677978, 0.004986048221588135, 0.004993855953216553, 0.00494704008102417, 0.005143199920654297, 0.005004576206207275, 0.004935776233673096, 0.004920191764831543, 0.00491315221786499, 0.004968448162078858, 0.004929344177246094, 0.004894752025604248, 0.004894176006317139, 0.004902847766876221, 0.0049630718231201175, 0.004894720077514648, 0.0048837437629699705, 0.00488428783416748, 0.004967328071594238, 0.004925439834594727, 0.004894720077514648, 0.004902912139892578, 0.004959807872772217, 0.004917407989501953, 0.00490115213394165, 0.004873600006103516, 0.004906879901885986, 0.004997568130493164, 0.0048558077812194825, 0.0049192957878112795, 0.0049090561866760255, 0.00488592004776001, 0.00489737606048584, 0.004956160068511963, 0.00496566390991211, 0.005740992069244385, 0.0050005121231079105, 0.004942240238189698, 0.004901440143585205, 0.004910719871520996, 0.0049565439224243165, 0.004910848140716553, 0.004897024154663086, 0.004902912139892578, 0.004923391819000244, 0.004956160068511963, 0.004916287899017334, 0.004897696018218994, 0.004888607978820801, 0.0049576001167297365, 0.004932191848754883, 0.004910560131072998, 0.004913695812225342, 0.00496230411529541, 0.004943871974945068, 0.004898816108703613, 0.004933599948883057, 0.004974624156951904, 0.004978367805480957, 0.004947648048400879, 0.004909599781036377, 0.004900959968566895, 0.0049909758567810054, 0.00495411205291748, 0.0049192957878112795, 0.004904960155487061, 0.00497049617767334, 0.004920959949493408, 0.004894432067871094, 0.004902719974517822, 0.00494268798828125, 0.004907008171081543, 0.005056511878967285, 0.004921343803405762, 0.004894432067871094, 0.004963808059692383, 0.004958687782287597, 0.0049088640213012695, 0.004897056102752685, 0.004962560176849365, 0.004998303890228271, 0.005817183971405029, 0.005048319816589355, 0.004933279991149902, 0.004890975952148438, 0.004914463996887207, 0.004958943843841553, 0.00491315221786499, 0.004898272037506104, 0.004891168117523193, 0.004902016162872314, 0.004799071788787842, 0.005559391975402832, 0.004971424102783203, 0.005586944103240967, 0.005717984199523926, 0.00562179183959961, 0.004939775943756103, 0.005017055988311768, 0.004950560092926025, 0.0049192957878112795, 0.004912896156311035, 0.004995039939880371, 0.004945888042449951, 0.0049073281288146975, 0.0049231362342834475, 0.004993279933929443, 0.004964352130889893, 0.004918943881988525, 0.004904863834381104, 0.0049585280418396, 0.0049706239700317385, 0.004930848121643066, 0.004903103828430176, 
0.0050078401565551755, 0.004986944198608398, 0.004925439834594727, 0.004913407802581787, 0.004900735855102539, 0.004986752033233643, 0.004960256099700928, 0.005027040004730225, 0.00521830415725708, 0.005036255836486816, 0.00516870403289795, 0.004904287815093994, 0.005216991901397705, 0.004996032238006592, 0.005007359981536865, 0.004926623821258545, 0.004942207813262939, 0.004975071907043457, 0.004925439834594727, 0.0048865280151367185, 0.004933631896972656, 0.0049909758567810054, 0.004923391819000244, 0.004960256099700928, 0.0049862079620361325, 0.005001791954040527, 0.005130112171173096, 0.004925663948059082, 0.004921343803405762, 0.00499507188796997, 0.004918975830078125, 0.004922688007354736, 0.0048846077919006345, 0.004946720123291015, 0.004902624130249024, 0.004964735984802246, 0.004883584022521973, 0.004884384155273438, 0.004981279850006104, 0.0049054079055786135]",tokens/s,201.63556778220635,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return 
model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3846, in from_pretrained hf_quantizer.preprocess_model( File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 182, in preprocess_model return self._process_model_before_weight_loading(model, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_gptq.py"", line 76, in _process_model_before_weight_loading model = self.optimum_quantizer.convert_model(model) File ""/usr/local/lib/python3.10/dist-packages/optimum/gptq/quantizer.py"", line 218, in convert_model self.block_name_to_quantize = get_block_name_with_pattern(model) File ""/usr/local/lib/python3.10/dist-packages/optimum/gptq/utils.py"", line 77, in get_block_name_with_pattern raise ValueError(""Block pattern could not be match. Pass `block_name_to_quantize` argument in `quantize_model`"") ValueError: Block pattern could not be match. Pass `block_name_to_quantize` argument in `quantize_model` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( 
File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,7435.067392,8041.463808,0.0,7646.216192,7627.584,s,1,12.991546875,12.991546875,0.0,12.991546875,12.991546875,12.991546875,12.991546875,[12.991546875],,kWh,0.00017130919977500125,1.8889409411852197e-05,5.410198772600079e-05,0.00024430059691285424,,MB,1762.79552,8725.13536,0.0,8315.20768,8191.863296,s,10,3.3267133789062506,0.33267133789062503,0.00033820709782437364,0.3326336975097656,0.3330281829833984,0.3331999130249023,0.3333372970581055,"[0.3326554870605469, 0.3323913879394531, 0.3329900207519531, 0.3326119079589844, 0.33216793823242186, 0.33296771240234374, 0.3325362243652344, 0.3326815185546875, 0.33337164306640626, 0.3323395385742188]",tokens/s,769.5282726285459,kWh,9.738502531527805e-06,1.0739790810460098e-06,6.445282934000018e-06,1.7257764546573834e-05,tokens/kWh,14833902.694009311,MB,1768.218624,9039.70816,0.0,8629.78048,8480.067584,s,10,26.721745361328125,2.6721745361328124,0.005471856679455445,2.672157958984375,2.678659765625,2.6799368164062503,2.68095845703125,"[2.662966552734375, 2.66588330078125, 2.66883642578125, 2.668659912109375, 2.671700927734375, 2.672614990234375, 2.674096923828125, 2.677396484375, 2.6783759765625, 2.6812138671875]",tokens/s,23.576304297538137,kWh,7.822668066305538e-05,8.628155067764689e-06,5.198445825419998e-05,0.0001388392939850201,tokens/kWh,453762.030846954,,s,630,26.71208351898191,0.042400132569812586,0.00038199873817920996,0.042404895782470704,0.042909495162963864,0.0430070894241333,0.04318935146331787,"[0.041968673706054685, 0.041872352600097654, 0.04167382431030273, 0.041505695343017575, 0.04162355041503906, 0.04158464050292969, 0.04158464050292969, 0.041723358154296876, 0.04179532623291016, 0.041729854583740233, 0.04183324813842773, 0.0419898567199707, 0.042024608612060546, 0.04181238555908203, 0.04197587203979492, 0.042237632751464846, 0.04200515365600586, 0.04196768188476562, 0.041816001892089845, 0.0421409912109375, 0.04211977767944336, 0.041957374572753905, 0.04215135955810547, 0.04218537521362305, 0.04213324737548828, 0.042106880187988284, 0.04203494262695313, 0.041931262969970705, 0.04193830490112305, 0.04223040008544922, 0.04237324905395508, 0.04228099060058594, 0.042094432830810546, 0.04206099319458008, 0.04231865692138672, 0.042377120971679685, 0.04252880096435547, 0.042377281188964847, 0.042254337310791014, 0.04240137481689453, 0.04259267044067383, 0.042563552856445315, 0.04243833541870117, 0.04298976135253906, 0.04271635055541992, 0.042678817749023434, 0.04269308853149414, 0.04247343826293945, 0.04242956924438476, 0.042584705352783206, 0.04251855850219727, 0.04254537582397461, 0.042635265350341796, 0.042759552001953124, 0.04259916687011719, 0.04263248062133789, 0.042711360931396485, 0.042708255767822265, 0.04276348876953125, 0.042938144683837894, 0.04294246292114258, 0.042850303649902347, 0.04279296112060547, 
0.04178739166259766, 0.041696990966796875, 0.04185756683349609, 0.041946430206298825, 0.04193734359741211, 0.04174233627319336, 0.04173619079589844, 0.041646080017089845, 0.041586593627929686, 0.041707775115966794, 0.04190419387817383, 0.042065345764160156, 0.04214204788208008, 0.04193075180053711, 0.042446849822998046, 0.04219811248779297, 0.042166591644287106, 0.0420931510925293, 0.042008575439453126, 0.04215398406982422, 0.04222886276245117, 0.042087295532226565, 0.04201846313476563, 0.0420621452331543, 0.0422973747253418, 0.042231807708740236, 0.042162174224853514, 0.042356929779052734, 0.042167713165283206, 0.0421393928527832, 0.04213766479492188, 0.04205369567871094, 0.042154529571533206, 0.042468894958496095, 0.04252463912963867, 0.042418113708496095, 0.04243513488769531, 0.04259417724609375, 0.04256371307373047, 0.04252684783935547, 0.04258598327636719, 0.0425206413269043, 0.042794944763183594, 0.04275830459594727, 0.04268592071533203, 0.042567550659179686, 0.04252262496948242, 0.042496192932128904, 0.04250246429443359, 0.042562782287597654, 0.04244969558715821, 0.04287030410766601, 0.04279539108276367, 0.04247283172607422, 0.04252131271362305, 0.04260611343383789, 0.042525150299072265, 0.0425082893371582, 0.042469024658203125, 0.042562911987304684, 0.04277657699584961, 0.04295977783203125, 0.04297942352294922, 0.04216144180297852, 0.042003360748291016, 0.041912033081054685, 0.04245455932617188, 0.04202473449707031, 0.04196246337890625, 0.0417786865234375, 0.04169705581665039, 0.041800064086914064, 0.041976158142089846, 0.041816062927246093, 0.04155801773071289, 0.041645919799804684, 0.04207222366333008, 0.04208025741577148, 0.04196352005004883, 0.04217804718017578, 0.04217292785644531, 0.04215135955810547, 0.04224208068847656, 0.0422630729675293, 0.04205977630615235, 0.04193667221069336, 0.04215343856811524, 0.0420032958984375, 0.042280223846435545, 0.04224985504150391, 0.04216320037841797, 0.04211097717285156, 0.042395648956298826, 0.04213564682006836, 0.042366081237792966, 0.04232886505126953, 0.042190113067626954, 0.042672863006591795, 0.04247500610351562, 0.04249856185913086, 0.04287088012695312, 0.04260444641113281, 0.04260416030883789, 0.04253084945678711, 0.042546878814697264, 0.04235862350463867, 0.04241900634765625, 0.0425984001159668, 0.04271820831298828, 0.0426690559387207, 0.04253619384765625, 0.042619552612304684, 0.04289750289916992, 0.04292822265625, 0.042563518524169924, 0.04251651382446289, 0.04278879928588867, 0.0428359375, 0.042775806427001954, 0.04266387176513672, 0.04277936172485351, 0.0427369270324707, 0.04270095825195312, 0.042728096008300784, 0.043040702819824216, 0.04320060729980469, 0.04206374359130859, 0.041754463195800784, 0.041728832244873046, 0.04168201446533203, 0.04189276885986328, 0.04190329742431641, 0.04199488067626953, 0.04189734268188477, 0.04202969741821289, 0.041940193176269534, 0.04178409576416016, 0.041918464660644535, 0.04188774490356445, 0.04182220840454102, 0.04175811386108398, 0.04192111968994141, 0.042238079071044925, 0.042210559844970706, 0.04216419219970703, 0.042207294464111325, 0.04207062530517578, 0.04236876678466797, 0.042157791137695313, 0.04222201538085937, 0.042057281494140626, 0.042080863952636716, 0.04211727905273437, 0.04219062423706055, 0.04226399993896485, 0.042204830169677736, 0.04218921661376953, 0.04230604934692383, 0.04241628646850586, 0.042347423553466795, 0.04243552017211914, 0.04227635192871094, 0.04225075149536133, 0.04237865447998047, 0.04232252883911133, 0.04254294586181641, 0.042547359466552734, 
0.04249615859985351, 0.04274467086791992, 0.04273660659790039, 0.042704704284667966, 0.042676448822021484, 0.04251443099975586, 0.043003902435302735, 0.04288918304443359, 0.042876510620117186, 0.04285472106933594, 0.04268239974975586, 0.042579551696777344, 0.04261312103271484, 0.042665313720703125, 0.04301107025146484, 0.042907424926757816, 0.042790912628173826, 0.04281532669067383, 0.04297439956665039, 0.04281238555908203, 0.042742977142333986, 0.042990623474121095, 0.04211113739013672, 0.041942432403564454, 0.04183100891113281, 0.04189593505859375, 0.041957374572753905, 0.04180377578735352, 0.04193484878540039, 0.042261825561523435, 0.04219359970092774, 0.04192870330810547, 0.04189583969116211, 0.041992286682128906, 0.041998207092285154, 0.04206713485717774, 0.04198441696166992, 0.04212310409545898, 0.042183361053466796, 0.0420203857421875, 0.042240478515625, 0.04227260971069336, 0.04217647933959961, 0.04210502243041992, 0.0420208625793457, 0.04226371383666992, 0.042039329528808594, 0.04203193664550781, 0.04230348968505859, 0.042419967651367185, 0.04236838531494141, 0.04233715057373047, 0.042518081665039065, 0.042326465606689456, 0.04232134246826172, 0.042404415130615235, 0.04239116668701172, 0.04248175811767578, 0.04251871871948242, 0.04240188980102539, 0.04272742462158203, 0.042557342529296875, 0.04254316711425781, 0.04253084945678711, 0.042518527984619144, 0.042687839508056644, 0.04272528076171875, 0.042603233337402346, 0.042462974548339846, 0.04291401672363281, 0.042772544860839846, 0.04274176025390625, 0.042580032348632814, 0.04242995071411133, 0.04265619277954102, 0.04300339126586914, 0.042871295928955076, 0.042661727905273436, 0.042868896484375, 0.04310835266113281, 0.04299161529541016, 0.04272537612915039, 0.04292403030395508, 0.042967041015625, 0.042989791870117186, 0.04278681564331055, 0.042059135437011716, 0.04200716781616211, 0.042003456115722655, 0.0419420166015625, 0.04177644729614258, 0.04206809616088867, 0.041968193054199215, 0.041893184661865236, 0.041816318511962894, 0.04177123260498047, 0.04177657699584961, 0.042097152709960936, 0.04223004913330078, 0.042218494415283206, 0.0421629753112793, 0.042109153747558595, 0.04205507278442383, 0.042148448944091796, 0.04204748916625976, 0.0421580810546875, 0.042041343688964845, 0.04194655990600586, 0.04236111831665039, 0.042374881744384765, 0.042266334533691406, 0.0422632942199707, 0.04224726486206055, 0.04248166275024414, 0.04233267211914062, 0.04234086227416992, 0.042229759216308595, 0.04212940979003906, 0.042240001678466796, 0.04250419235229492, 0.04252057647705078, 0.0423807373046875, 0.04235638427734375, 0.042557823181152345, 0.042496543884277344, 0.04254105758666992, 0.0423768310546875, 0.0425432014465332, 0.042670398712158206, 0.04269862365722656, 0.042504287719726565, 0.0430263671875, 0.042820926666259765, 0.04275276947021484, 0.04271513748168945, 0.04269055938720703, 0.04293804931640625, 0.04295916748046875, 0.04318207931518555, 0.042866687774658206, 0.04305100631713867, 0.042928031921386715, 0.04272137451171875, 0.042625022888183595, 0.04278208160400391, 0.04301465606689453, 0.04302979278564453, 0.04289827346801758, 0.042319072723388675, 0.04217081451416015, 0.04205097579956055, 0.04193273544311524, 0.041882625579833986, 0.04186111831665039, 0.04184473419189453, 0.04212531280517578, 0.042039295196533204, 0.041885696411132815, 0.04183049774169922, 0.041825439453125, 0.04204787063598633, 0.04245663833618164, 0.042195327758789064, 0.04216262435913086, 0.042102783203125, 0.042123264312744144, 0.04194918441772461, 
0.04195734405517578, 0.04187526321411133, 0.041973983764648434, 0.04219084930419922, 0.04227686309814453, 0.04215091323852539, 0.04234902572631836, 0.042359329223632815, 0.04244188690185547, 0.0422657585144043, 0.042407615661621094, 0.0425382080078125, 0.042611488342285155, 0.042543102264404296, 0.042568737030029294, 0.04241302490234375, 0.042788063049316406, 0.04273846435546875, 0.043053054809570314, 0.042291263580322265, 0.04244473648071289, 0.04272889709472656, 0.042748287200927736, 0.04265798568725586, 0.04260646438598633, 0.04271321487426758, 0.04249401473999023, 0.04266364669799805, 0.042923904418945315, 0.0427454719543457, 0.04275225448608398, 0.04270947265625, 0.043030529022216796, 0.04290899276733398, 0.042687007904052734, 0.042614944458007814, 0.04260236740112305, 0.042692737579345705, 0.04306867218017578, 0.043394912719726564, 0.0432116813659668, 0.042971134185791016, 0.04279305648803711, 0.04274959945678711, 0.04220713424682617, 0.041984001159667966, 0.04183017730712891, 0.04178351974487305, 0.041869152069091795, 0.041716960906982424, 0.041842689514160154, 0.04210374450683594, 0.04216153717041016, 0.042116832733154294, 0.0425335693359375, 0.042216960906982424, 0.04222563171386719, 0.04232268905639648, 0.04226867294311523, 0.04198604965209961, 0.041818111419677735, 0.04197817611694336, 0.04240351867675781, 0.042405376434326174, 0.04229785537719727, 0.04246092987060547, 0.04250812911987305, 0.04243497467041016, 0.04244844818115234, 0.04232032012939453, 0.042180126190185546, 0.042181087493896485, 0.04245094299316406, 0.04286806488037109, 0.04247747039794922, 0.04236569595336914, 0.04277657699584961, 0.04272537612915039, 0.04258127975463867, 0.04260054397583008, 0.042528385162353514, 0.0424637451171875, 0.042676097869873045, 0.042722049713134765, 0.04257369613647461, 0.042559486389160156, 0.042848255157470705, 0.04271279907226563, 0.04250019073486328, 0.04270870590209961, 0.04278076934814453, 0.0429029426574707, 0.04282467269897461, 0.04285440063476562, 0.042729503631591795, 0.042635231018066405, 0.04258390426635742, 0.042812896728515626, 0.04300051116943359, 0.04295043182373047, 0.042782943725585935, 0.04308361434936524, 0.04296828842163086, 0.042898368835449216, 0.04282905578613281, 0.04293241500854492, 0.04297580718994141, 0.041940990447998046, 0.04180307388305664, 0.04196217727661133, 0.04189401626586914, 0.04168454360961914, 0.04203948974609375, 0.04205110549926758, 0.041941600799560545, 0.04181196975708008, 0.04196761703491211, 0.041971710205078124, 0.042017951965332034, 0.041785568237304685, 0.041964160919189454, 0.042171806335449216, 0.04224060821533203, 0.042302913665771484, 0.04227743911743164, 0.04230963134765625, 0.04228851318359375, 0.04256335830688476, 0.04237395095825195, 0.042355838775634765, 0.0423223991394043, 0.04221177673339844, 0.042358814239501955, 0.04244617462158203, 0.042471710205078124, 0.042430686950683597, 0.042698879241943356, 0.042602718353271486, 0.04238016128540039, 0.04223017501831055, 0.042608543395996096, 0.042584545135498045, 0.04250361633300781, 0.042754302978515624, 0.042736064910888674, 0.04266300964355469, 0.042619583129882815, 0.04256175994873047, 0.04259337615966797, 0.04254608154296875, 0.042780670166015625, 0.04289708709716797, 0.04281171035766602, 0.04300969696044922, 0.042858463287353515, 0.04274214553833008, 0.04264348983764649, 0.043014110565185545, 0.042968929290771486, 0.04304873657226563, 0.0430145263671875, 0.04290351867675781, 0.04289932632446289, 0.04286495971679687, 0.04288905715942383, 0.043126911163330076, 0.043159423828125, 
0.043026462554931644, 0.04336022567749023, 0.04314316940307617, 0.04210038375854492, 0.04226108932495117, 0.04212892913818359, 0.0418554573059082, 0.041750560760498046, 0.041955135345458985, 0.04216233444213867, 0.04228227233886719, 0.0419376335144043, 0.042176513671875, 0.042228832244873046, 0.042259361267089846, 0.04250032043457031, 0.042434337615966794, 0.04232499313354492, 0.04209561538696289, 0.04191027069091797, 0.04191372680664063, 0.04228505706787109, 0.0422795524597168, 0.042215614318847655, 0.04240364837646484, 0.04239155197143555, 0.042246143341064454, 0.0421550407409668, 0.04211606216430664, 0.04208639907836914, 0.04215580749511719, 0.04240982437133789, 0.042546817779541016, 0.042614688873291014, 0.04257059097290039, 0.042516288757324217, 0.042743297576904295, 0.04266668701171875, 0.04273561477661133, 0.042790912628173826, 0.042823585510253906, 0.042937664031982424, 0.04291584014892578, 0.04274460983276367, 0.042796222686767575, 0.04262380981445312, 0.042635265350341796, 0.04252057647705078, 0.04279216003417969, 0.04289007949829102, 0.04276380920410156, 0.04321116638183594, 0.04293769454956055, 0.04288070297241211, 0.04278992080688476, 0.04271507263183594, 0.04303257751464844, 0.042974624633789066, 0.04287548828125, 0.04326604843139648, 0.04319232177734375, 0.04315692901611328, 0.04296345520019531, 0.04312649536132813, 0.043180000305175784, 0.043028865814208984]",tokens/s,23.584831918944637,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 
547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1057.681408,904.855552,0.0,509.607936,491.434496,s,1,7.827791015625,7.827791015625,0.0,7.827791015625,7.827791015625,7.827791015625,7.827791015625,[7.827791015625],,kWh,2.41184667208131e-05,2.652280298336202e-06,8.146395406000151e-06,3.491714242514945e-05,,MB,1365.553152,1018.10176,0.0,608.17408,592.24832,s,10,0.1971610870361328,0.019716108703613282,0.0005599603234482525,0.019589552879333498,0.02018427448272705,0.020702905559539794,0.02111781042098999,"[0.02006902313232422, 0.019475648880004883, 0.01972447967529297, 0.01926380729675293, 0.01932441520690918, 0.019350496292114258, 0.02122153663635254, 0.01970345687866211, 0.019276031494140623, 0.0197521915435791]",tokens/s,12984.306581403867,kWh,5.748851891567857e-07,6.339963632436003e-08,3.531416867126034e-07,9.914265121937492e-07,tokens/kWh,258213792.80401096,MB,1379.516416,1032.781824,0.0,622.854144,605.085696,s,10,10.358093688964845,1.0358093688964842,0.009877266112672058,1.0333173828125002,1.049001318359375,1.050311328125,1.0513593359375,"[1.03319384765625, 1.0235807495117188, 1.0278206787109374, 1.03344091796875, 1.0273258056640624, 1.024480712890625, 1.051621337890625, 1.04366015625, 1.04425927734375, 1.048710205078125]",tokens/s,60.82200247630318,kWh,3.0259939236261438e-05,3.3371566101980955e-06,1.1780895798287674e-05,4.537799164474721e-05,tokens/kWh,1388338.2167551846,,s,630,10.352869088172914,0.0164331255367824,0.0004075164655791157,0.016304304122924804,0.016814908027648925,0.016989174556732177,0.017438395004272463,"[0.01586176013946533, 0.016242368698120117, 0.016171327590942385, 0.01624678421020508, 0.016216064453125, 0.016260448455810546, 0.016392511367797853, 0.016185920715332033, 0.01618451118469238, 0.01612041664123535, 0.016279296875, 0.016339071273803712, 0.01632473564147949, 0.016285696029663087, 0.01627894401550293, 0.016599647521972655, 0.018066495895385743, 0.01744771194458008, 0.016621728897094727, 0.01654374313354492, 0.01660927963256836, 0.01647760009765625, 0.016271968841552735, 0.016304128646850585, 0.016245855331420898, 0.016272287368774414, 0.016975200653076172, 0.016218528747558594, 0.016504575729370117, 0.016314783096313477, 0.01625904083251953, 0.01626675224304199, 0.016287391662597656, 0.01626620864868164, 0.016310272216796876, 0.016314367294311523, 0.016313728332519532, 0.01653619194030762, 0.016403488159179688, 0.016202816009521483, 0.01615862464904785, 0.01617647933959961, 0.016181919097900392, 0.01620969581604004, 0.016224479675292967, 0.016205087661743164, 0.016226079940795897, 0.01639878463745117, 0.01629350471496582, 0.01616796875, 0.016236095428466796, 0.01652364730834961, 0.01670047950744629, 0.01660809516906738, 
0.016584831237792967, 0.01648627281188965, 0.016368736267089845, 0.016306751251220702, 0.01634547233581543, 0.016610496520996092, 0.01667718315124512, 0.016578752517700194, 0.016421152114868165, 0.015839903831481934, 0.016209823608398437, 0.01616217613220215, 0.016243423461914062, 0.016265216827392577, 0.01643654441833496, 0.01640108871459961, 0.016358432769775392, 0.016206432342529296, 0.01616099166870117, 0.016283647537231445, 0.01632271957397461, 0.016228351593017578, 0.016348608016967775, 0.016316352844238283, 0.0161246395111084, 0.016379711151123046, 0.016160863876342774, 0.01613209533691406, 0.016134016036987303, 0.016164255142211915, 0.01622025680541992, 0.016361343383789063, 0.01635215950012207, 0.01628758430480957, 0.016215103149414063, 0.016188352584838868, 0.01618534469604492, 0.016187103271484374, 0.016135967254638672, 0.01626755142211914, 0.016318336486816406, 0.016257408142089844, 0.01647817611694336, 0.016280799865722655, 0.016315168380737304, 0.016320512771606444, 0.0161876163482666, 0.01625654411315918, 0.0163143367767334, 0.01620012855529785, 0.016309984207153322, 0.016285823822021483, 0.016207872390747072, 0.016204832077026367, 0.01641468811035156, 0.01618227195739746, 0.016170944213867187, 0.01630419158935547, 0.01618931198120117, 0.01616099166870117, 0.0163286075592041, 0.01626755142211914, 0.01620534324645996, 0.016220447540283203, 0.016265119552612305, 0.016273408889770507, 0.01617078399658203, 0.01623891258239746, 0.016109472274780275, 0.016080896377563478, 0.016113664627075194, 0.016202016830444334, 0.016091136932373046, 0.016179296493530275, 0.016117696762084962, 0.016109439849853517, 0.016152511596679686, 0.016150688171386717, 0.01615603256225586, 0.01609337615966797, 0.016148927688598633, 0.01615667152404785, 0.01651737594604492, 0.016281343460083007, 0.01611961555480957, 0.016113855361938476, 0.01608415985107422, 0.01609996795654297, 0.016212160110473633, 0.016187263488769532, 0.016178655624389648, 0.016217824935913085, 0.016173311233520508, 0.016199359893798827, 0.016219135284423827, 0.016196800231933595, 0.016157503128051757, 0.016117696762084962, 0.016179264068603514, 0.016157760620117187, 0.016141088485717773, 0.016236703872680665, 0.016216352462768556, 0.016183008193969728, 0.0162523193359375, 0.01630064010620117, 0.02134739112854004, 0.01658780860900879, 0.016338848114013673, 0.016250879287719726, 0.016158336639404296, 0.01614886474609375, 0.01613209533691406, 0.016275455474853515, 0.01631158447265625, 0.01625529670715332, 0.016565984725952148, 0.016212671279907227, 0.016196640014648437, 0.0162825927734375, 0.01622425651550293, 0.0162795524597168, 0.01618889617919922, 0.01624038314819336, 0.016226303100585936, 0.016423295974731446, 0.01633113670349121, 0.016242591857910157, 0.016271488189697265, 0.016158720016479493, 0.016317792892456055, 0.0162718391418457, 0.016617727279663087, 0.016230335235595705, 0.016150527954101563, 0.015823519706726075, 0.016231903076171873, 0.01623721694946289, 0.016244735717773438, 0.01618659210205078, 0.016149280548095703, 0.016464000701904298, 0.016401536941528322, 0.016699487686157227, 0.01620470428466797, 0.016287488937377928, 0.016150527954101563, 0.016259071350097656, 0.016144384384155275, 0.0161527042388916, 0.01611782455444336, 0.016248640060424806, 0.017026496887207032, 0.018632736206054688, 0.016906784057617186, 0.016684864044189455, 0.01652694320678711, 0.0162063045501709, 0.01612607955932617, 0.016180864334106447, 0.016167295455932616, 0.01623040008544922, 0.01639833641052246, 0.01677948760986328, 
0.01671046447753906, 0.016597984313964843, 0.016350879669189453, 0.016255008697509767, 0.016285120010375978, 0.01704051208496094, 0.016162559509277342, 0.016076799392700195, 0.01625699234008789, 0.016188928604125977, 0.01627190399169922, 0.01617715263366699, 0.016269311904907227, 0.016156448364257812, 0.016205535888671876, 0.01623276710510254, 0.016146623611450195, 0.016281600952148437, 0.01617100715637207, 0.01619340705871582, 0.016273536682128907, 0.0162708797454834, 0.016237024307250977, 0.016354688644409178, 0.016347776412963866, 0.016648191452026367, 0.01624239921569824, 0.016353567123413085, 0.017019935607910156, 0.017358816146850586, 0.01706723213195801, 0.0163438720703125, 0.016294879913330076, 0.01627686309814453, 0.015849727630615235, 0.01619580841064453, 0.016463647842407225, 0.016912384033203123, 0.017225439071655274, 0.016990528106689454, 0.016549983978271485, 0.01648627281188965, 0.016344768524169922, 0.01625734329223633, 0.016170591354370118, 0.016096960067749022, 0.01607244873046875, 0.016242784500122072, 0.016313215255737305, 0.016219808578491212, 0.016183616638183594, 0.01628960037231445, 0.016130271911621093, 0.016221920013427735, 0.016204063415527343, 0.016088672637939453, 0.016189855575561525, 0.01617862319946289, 0.016195232391357423, 0.016304031372070312, 0.016264192581176756, 0.016342016220092775, 0.016516096115112306, 0.01645529556274414, 0.016374143600463867, 0.016488447189331054, 0.016504959106445314, 0.01642214393615723, 0.01647235107421875, 0.01622483253479004, 0.016191488265991212, 0.01621993637084961, 0.016269311904907227, 0.01630793571472168, 0.01626576042175293, 0.01622198486328125, 0.016205791473388673, 0.01611955261230469, 0.01632076835632324, 0.016293888092041017, 0.016107519149780272, 0.01621811294555664, 0.016239936828613282, 0.01627177619934082, 0.01626550483703613, 0.016291200637817384, 0.01624025535583496, 0.016147615432739258, 0.016387935638427734, 0.016261024475097655, 0.016291999816894533, 0.01631987190246582, 0.01621664047241211, 0.01618364715576172, 0.01615763282775879, 0.016154752731323243, 0.016208480834960938, 0.01608185577392578, 0.016400159835815428, 0.016557823181152342, 0.016228607177734375, 0.01613209533691406, 0.016297727584838866, 0.01616716766357422, 0.01618889617919922, 0.016180992126464844, 0.01622505569458008, 0.016105279922485352, 0.0162674560546875, 0.01613209533691406, 0.01609324836730957, 0.016131872177124022, 0.01616265678405762, 0.01624095916748047, 0.016373760223388673, 0.016268896102905273, 0.01615648078918457, 0.016194143295288087, 0.016151615142822266, 0.016229312896728517, 0.016184383392333985, 0.01622643280029297, 0.016681791305541992, 0.016826400756835936, 0.016551904678344727, 0.016285951614379884, 0.01619852828979492, 0.016143232345581054, 0.016107519149780272, 0.016096384048461913, 0.016144832611083983, 0.01615011215209961, 0.01610428810119629, 0.016129215240478514, 0.016131935119628907, 0.016174047470092774, 0.016154592514038085, 0.016162879943847658, 0.016187360763549805, 0.01640608024597168, 0.016343488693237304, 0.01622844886779785, 0.01615247917175293, 0.016174144744873047, 0.016189599990844728, 0.01616873550415039, 0.01618841552734375, 0.017328128814697266, 0.01658448028564453, 0.016340736389160157, 0.016328224182128905, 0.016288127899169922, 0.016164480209350587, 0.016262079238891603, 0.01620377540588379, 0.016109664916992186, 0.016183168411254882, 0.01609657669067383, 0.016079584121704103, 0.016484352111816408, 0.016813631057739257, 0.01618124771118164, 0.01621887969970703, 0.016124992370605468, 
0.01633951950073242, 0.016161184310913086, 0.016135168075561524, 0.016140352249145506, 0.01604412841796875, 0.016239423751831055, 0.016943103790283204, 0.016729120254516602, 0.01667888069152832, 0.01643631935119629, 0.016123807907104493, 0.016281055450439452, 0.01641904067993164, 0.016693376541137697, 0.016704832077026367, 0.01646214485168457, 0.016578784942626955, 0.016687456130981444, 0.016908607482910155, 0.017069759368896483, 0.01706188774108887, 0.01683404731750488, 0.016796159744262695, 0.016705535888671876, 0.01659859275817871, 0.016832351684570312, 0.017008575439453125, 0.016630207061767578, 0.016713951110839845, 0.016629791259765624, 0.016803007125854492, 0.016943904876708986, 0.01704140853881836, 0.016951007843017576, 0.016803104400634764, 0.016761184692382813, 0.016558752059936524, 0.01658470344543457, 0.016640256881713868, 0.016779136657714844, 0.01679350471496582, 0.016740095138549804, 0.01667679977416992, 0.016769311904907228, 0.016987520217895506, 0.01690687942504883, 0.01679155158996582, 0.016874496459960937, 0.017265663146972657, 0.017022975921630858, 0.016834144592285157, 0.01677916717529297, 0.016924192428588867, 0.01693199920654297, 0.01674220848083496, 0.016936960220336913, 0.016687103271484375, 0.016778911590576172, 0.01678371238708496, 0.016444992065429688, 0.016578304290771485, 0.01649135971069336, 0.016471424102783204, 0.016510656356811523, 0.01669001579284668, 0.01669071960449219, 0.01650361633300781, 0.016446527481079103, 0.016373632431030273, 0.016427839279174804, 0.016725887298583986, 0.01686140823364258, 0.01659679985046387, 0.016554079055786132, 0.016639328002929686, 0.016943103790283204, 0.017109119415283203, 0.017005279541015626, 0.01662169647216797, 0.016690879821777343, 0.016891263961791993, 0.016929376602172853, 0.016746528625488283, 0.0166297607421875, 0.016748640060424806, 0.016612735748291016, 0.01660982322692871, 0.016742399215698242, 0.016682655334472656, 0.01641097640991211, 0.016478208541870116, 0.016489791870117187, 0.016495296478271484, 0.016420703887939453, 0.016373216629028322, 0.016512895584106447, 0.016386592864990234, 0.01652560043334961, 0.01640447998046875, 0.01638400077819824, 0.01660518455505371, 0.016687263488769533, 0.0166276798248291, 0.016869247436523436, 0.016617055892944335, 0.016630176544189454, 0.016621023178100585, 0.016611871719360353, 0.016678911209106445, 0.016472000122070313, 0.016488319396972657, 0.016332447052001955, 0.016306720733642577, 0.016510976791381835, 0.01650217628479004, 0.01628835105895996, 0.016314655303955077, 0.016298816680908202, 0.016160831451416016, 0.016195999145507813, 0.016257728576660156, 0.01622115135192871, 0.01577129554748535, 0.016197952270507812, 0.01621798324584961, 0.016296031951904297, 0.016171072006225588, 0.016275295257568358, 0.016244512557983398, 0.016573856353759766, 0.0166409912109375, 0.01660313606262207, 0.016549440383911134, 0.016605152130126952, 0.016586271286010743, 0.016579519271850585, 0.016785408020019533, 0.01697996711730957, 0.016885759353637696, 0.01675468826293945, 0.01669273567199707, 0.016538368225097657, 0.01650265693664551, 0.016553855895996093, 0.01658608055114746, 0.0169202880859375, 0.01661238479614258, 0.016563199996948243, 0.01660809516906738, 0.016572479248046876, 0.016473663330078124, 0.01662406349182129, 0.016490495681762696, 0.016774335861206056, 0.016680864334106444, 0.016700159072875975, 0.016605344772338868, 0.0167524471282959, 0.016638368606567384, 0.016529184341430664, 0.016362560272216796, 0.01630022430419922, 0.016313087463378905, 0.016287776947021486, 
0.016219871520996094, 0.016262912750244142, 0.016204511642456055, 0.01627510452270508, 0.016245920181274413, 0.016356191635131835, 0.016615552902221678, 0.01731167984008789, 0.016808000564575196, 0.01667465591430664, 0.017039680480957033, 0.016524543762207033, 0.016472671508789064, 0.01654547119140625, 0.016714143753051757, 0.01666649627685547, 0.016674623489379883, 0.01693427276611328, 0.017171295166015624, 0.016803552627563476, 0.016686431884765623, 0.01665420722961426, 0.0168623046875, 0.01694927978515625, 0.01666342353820801, 0.016617536544799805, 0.016531391143798826, 0.017415584564208983, 0.019873983383178712, 0.017082784652709963, 0.017135616302490234, 0.016903360366821288, 0.017021440505981447, 0.016859136581420898, 0.01665999984741211, 0.016519968032836913, 0.016595008850097657, 0.01693075180053711, 0.019927040100097656, 0.01665843200683594, 0.016347135543823242, 0.016347007751464845, 0.01625823974609375, 0.017125631332397463, 0.01623062324523926, 0.01626588821411133, 0.016236352920532226, 0.016174272537231447, 0.016210752487182616, 0.016216127395629883, 0.01626451110839844, 0.016283424377441406, 0.01645654487609863, 0.0164003849029541, 0.016426559448242187, 0.016502368927001954, 0.01658572769165039, 0.01650876808166504, 0.01626038360595703, 0.01630441665649414, 0.016228351593017578, 0.016303775787353515, 0.016359296798706055, 0.016489376068115236, 0.01636761665344238, 0.016293376922607423, 0.01630259132385254, 0.017792448043823243, 0.016761568069458006, 0.017176704406738283, 0.016275232315063476, 0.01632863998413086, 0.01635251235961914, 0.016363807678222656, 0.016351711273193358, 0.01650217628479004, 0.016206432342529296, 0.016273216247558595, 0.016263359069824217, 0.01626470375061035, 0.016347808837890623, 0.016565088272094727, 0.016519359588623047, 0.016238431930541992]",tokens/s,60.85269644911382,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,3591.528448,4609.409024,0.0,4206.886912,4070.564864,s,1,10.3852275390625,10.3852275390625,0.0,10.3852275390625,10.3852275390625,10.3852275390625,10.3852275390625,[10.3852275390625],,kWh,9.134361489166925e-05,1.0068475187233027e-05,2.7655855458004475e-05,0.00012906794553690676,,MB,3603.951616,4783.47264,0.0,4366.270464,4197.065728,s,10,1.7514608612060547,0.17514608612060545,0.0004245356513764438,0.17517752075195314,0.17563513641357423,0.1756477912902832,0.1756579151916504,"[0.1753883514404297, 0.17451078796386718, 0.1745284729003906, 0.17524362182617187, 0.1750978546142578, 0.17563232421875, 0.1751114196777344, 0.17467234802246093, 0.175615234375, 
0.1756604461669922]",tokens/s,1461.63700069049,kWh,5.1781405067976e-06,5.707598114686513e-07,3.4232434793332697e-06,9.17214379759952e-06,tokens/kWh,27910596.00123134,MB,3611.480064,4793.9584,0.0,4376.756224,4197.068288,s,10,20.225977294921872,2.0225977294921873,0.020314569555725914,2.0217301025390624,2.0496035888671873,2.0540643676757813,2.0576329907226563,"[2.0048492431640623, 2.0381734619140626, 1.995053466796875, 2.0486123046875, 2.058525146484375, 2.0264163818359373, 1.9951151123046875, 2.01577197265625, 2.0181939697265623, 2.0252662353515625]",tokens/s,31.148062257450167,kWh,5.8116734184036995e-05,6.410392450480075e-06,3.789584513146676e-05,0.00010242297176598385,tokens/kWh,615096.388180793,,s,630,20.223344085693366,0.03210054616776724,0.0006397085772861648,0.032003679275512696,0.0328206413269043,0.033063246726989744,0.03425157215118409,"[0.032599742889404294, 0.03198422431945801, 0.03157155227661133, 0.03144976043701172, 0.03169254493713379, 0.031460832595825196, 0.03158070373535156, 0.031494144439697266, 0.03137740707397461, 0.03158627128601074, 0.03318172836303711, 0.03208931350708008, 0.03181843185424805, 0.0317031364440918, 0.03148185539245606, 0.031641408920288085, 0.0317196159362793, 0.03203891372680664, 0.03177903938293457, 0.031497119903564456, 0.03158652877807617, 0.03227471923828125, 0.03235878372192383, 0.03220182418823242, 0.03201526260375977, 0.032089569091796874, 0.03192681694030762, 0.032075775146484374, 0.03198108863830566, 0.03204143905639648, 0.03189145660400391, 0.03195289611816406, 0.031938304901123045, 0.03227027130126953, 0.03233824157714844, 0.03213475036621094, 0.03254902267456055, 0.03214281463623047, 0.032266719818115235, 0.03271648025512695, 0.0319105281829834, 0.03168636894226074, 0.032024864196777345, 0.03171516799926758, 0.03172502326965332, 0.03455648040771484, 0.03164172744750977, 0.03150592041015625, 0.03144755172729492, 0.03141433525085449, 0.031606271743774415, 0.03143926429748535, 0.031333536148071287, 0.03124518394470215, 0.03125043106079101, 0.031285247802734374, 0.031172607421875, 0.031160320281982422, 0.031235424041748047, 0.03117942428588867, 0.031098079681396485, 0.031275808334350584, 0.03120672035217285, 0.032249057769775394, 0.03173046493530273, 0.033478656768798826, 0.03306713485717774, 0.032021793365478515, 0.03211324691772461, 0.032075294494628905, 0.032233951568603515, 0.031991615295410156, 0.03211280059814453, 0.0327916145324707, 0.03319087982177735, 0.03286566543579102, 0.032860801696777346, 0.03279888153076172, 0.033193824768066406, 0.03305849456787109, 0.03306937789916992, 0.03293097686767578, 0.032922462463378904, 0.032685630798339846, 0.03284832000732422, 0.032839359283447264, 0.03271427154541016, 0.03277699279785156, 0.03256934356689453, 0.03225955200195312, 0.032354846954345706, 0.03238092803955078, 0.032350208282470705, 0.03273849487304688, 0.03259270477294922, 0.03227238464355469, 0.03223961639404297, 0.032331424713134764, 0.03213516616821289, 0.031895904541015624, 0.03196723175048828, 0.031940607070922854, 0.03225395202636719, 0.03188515281677246, 0.03193052864074707, 0.03215564727783203, 0.03189766311645508, 0.03169177627563476, 0.03172828865051269, 0.03223116683959961, 0.03207785415649414, 0.03226265716552734, 0.032546817779541014, 0.0324587516784668, 0.03240988922119141, 0.03236428833007812, 0.03204079818725586, 0.03202060699462891, 0.032, 0.03182547187805176, 0.031676864624023436, 0.03179315185546875, 0.03174720001220703, 0.03215244674682617, 0.03196518325805664, 0.03213011169433594, 0.03258572769165039, 
0.03180339241027832, 0.032150848388671875, 0.031820640563964844, 0.03170697593688965, 0.03252975845336914, 0.0314989128112793, 0.03135238456726074, 0.03133612823486328, 0.031875295639038084, 0.031430879592895505, 0.03141257667541504, 0.03130364799499512, 0.031209056854248046, 0.03132246398925781, 0.03139705657958984, 0.0313250560760498, 0.031285247802734374, 0.031196352005004882, 0.03146630477905273, 0.031312000274658205, 0.03133609580993652, 0.03129990386962891, 0.031424415588378905, 0.0318047046661377, 0.031990495681762696, 0.03131936073303223, 0.031414592742919925, 0.03157564735412598, 0.03177961540222168, 0.031733760833740236, 0.032882591247558594, 0.031461471557617186, 0.03162704086303711, 0.03191993522644043, 0.031730112075805665, 0.03154649543762207, 0.03183497619628906, 0.03168649673461914, 0.03153868865966797, 0.03139347267150879, 0.03155814361572266, 0.03133657646179199, 0.03137161636352539, 0.031356927871704104, 0.03146678352355957, 0.0312959041595459, 0.03139001655578613, 0.031334592819213865, 0.03134444808959961, 0.03129958343505859, 0.03153919982910156, 0.03155670356750488, 0.03141519927978516, 0.03171257591247559, 0.031864799499511716, 0.03230972671508789, 0.03199923133850097, 0.03179827117919922, 0.03187881660461426, 0.03248998260498047, 0.03298643112182617, 0.03284134292602539, 0.03349094390869141, 0.0336385612487793, 0.0348383674621582, 0.032734878540039064, 0.03324143981933594, 0.032849918365478514, 0.03273932647705078, 0.03253238296508789, 0.03248137664794922, 0.03238620758056641, 0.03208179092407227, 0.03201696014404297, 0.0320035514831543, 0.03199596786499023, 0.03202883148193359, 0.03202089691162109, 0.0318856315612793, 0.03199375915527344, 0.03237472152709961, 0.03214966583251953, 0.03224515151977539, 0.032242271423339845, 0.032034847259521486, 0.03204211044311524, 0.032228160858154296, 0.03215977478027344, 0.032188480377197265, 0.03219001770019531, 0.032409568786621094, 0.03225846481323242, 0.032307422637939454, 0.032133983612060546, 0.03236140823364258, 0.032457855224609374, 0.03299212646484375, 0.033605728149414066, 0.03305052947998047, 0.03294617462158203, 0.033116161346435545, 0.03308707046508789, 0.033071262359619144, 0.03301545715332031, 0.03318960189819336, 0.032856960296630856, 0.032702465057373044, 0.03246627044677734, 0.03251788711547852, 0.03257993698120117, 0.03279679870605469, 0.03230559921264648, 0.0324128303527832, 0.0323072624206543, 0.03245558547973633, 0.03210432052612305, 0.032299007415771484, 0.032030433654785154, 0.032319774627685545, 0.03209532928466797, 0.03245318222045898, 0.03220896148681641, 0.03214713668823242, 0.03242812728881836, 0.032027137756347655, 0.032925376892089846, 0.032501953125, 0.03245888137817383, 0.032893150329589844, 0.03286608123779297, 0.03322489547729492, 0.03302169418334961, 0.032970207214355465, 0.03289968109130859, 0.032845054626464844, 0.032965377807617186, 0.03313049697875976, 0.03306905746459961, 0.032804256439208986, 0.03278473663330078, 0.03274892807006836, 0.03571392059326172, 0.03245657730102539, 0.03248735809326172, 0.032491710662841795, 0.03291484832763672, 0.03230073547363281, 0.03228121566772461, 0.03209040069580078, 0.03218431854248047, 0.03234323120117188, 0.03232169723510742, 0.03193718338012695, 0.03223273468017578, 0.032559776306152345, 0.03212249755859375, 0.03208867263793945, 0.032069023132324216, 0.03221871948242187, 0.032240161895751955, 0.03211500930786133, 0.03219660949707031, 0.03230515289306641, 0.03242803192138672, 0.03240262222290039, 0.03261667251586914, 0.032761505126953125, 
0.03301625442504883, 0.03279705429077148, 0.03323065567016602, 0.033119617462158205, 0.03343561553955078, 0.03371721649169922, 0.03376278305053711, 0.03279216003417969, 0.03293695831298828, 0.032898303985595706, 0.03270931243896484, 0.03262828826904297, 0.03269385528564453, 0.0323199691772461, 0.03234207916259765, 0.032532798767089845, 0.03255849456787109, 0.03235692977905273, 0.03214697647094727, 0.032119102478027343, 0.03214355087280273, 0.03270041656494141, 0.032575199127197266, 0.03244867324829102, 0.032517822265625, 0.03259027099609375, 0.03246451187133789, 0.03274585723876953, 0.032110591888427735, 0.032763904571533206, 0.032974143981933594, 0.032647743225097656, 0.032655712127685546, 0.03359519958496094, 0.03296672058105469, 0.03716700744628906, 0.03295235061645508, 0.03251804733276367, 0.03250937652587891, 0.03237337493896485, 0.03228633499145508, 0.03240703964233398, 0.032674686431884765, 0.03213452911376953, 0.032401729583740234, 0.03196960067749023, 0.03195849609375, 0.03204150390625, 0.032001121520996094, 0.03196998405456543, 0.03200227355957031, 0.032183521270751955, 0.03186979293823242, 0.03171116828918457, 0.03166316795349121, 0.031976383209228514, 0.03178428840637207, 0.031527584075927734, 0.03157516860961914, 0.03647100830078125, 0.0319965763092041, 0.032159744262695314, 0.031649791717529296, 0.03160883140563965, 0.03134886360168457, 0.031228832244873047, 0.03120249557495117, 0.03132985687255859, 0.03171145629882813, 0.03141632080078125, 0.03136316871643066, 0.03149305534362793, 0.03154633522033692, 0.03191926383972168, 0.03140201568603516, 0.031417152404785154, 0.03141158485412598, 0.03179583930969238, 0.03138764762878418, 0.031389696121215824, 0.03131155204772949, 0.031291711807250974, 0.03133030319213867, 0.03157113647460937, 0.032783454895019534, 0.032078750610351564, 0.031966527938842776, 0.03178313636779785, 0.031666656494140626, 0.03161235237121582, 0.03173411178588867, 0.031516895294189454, 0.031719423294067385, 0.03164377593994141, 0.03146329689025879, 0.031508480072021484, 0.03145657539367676, 0.031289920806884766, 0.03132428741455078, 0.03143680000305176, 0.031662080764770506, 0.031903392791748045, 0.031617151260375974, 0.031531232833862305, 0.03141632080078125, 0.031933696746826175, 0.031476287841796874, 0.031547679901123046, 0.031538816452026365, 0.031510143280029296, 0.031615648269653324, 0.03154944038391113, 0.03165388870239258, 0.03181785583496094, 0.031846271514892575, 0.0318791675567627, 0.03209785461425781, 0.032245857238769535, 0.03204950332641601, 0.03220275115966797, 0.031788063049316403, 0.031984607696533204, 0.031969087600708004, 0.031827648162841796, 0.03178937530517578, 0.03144927978515625, 0.03225619125366211, 0.03174579238891601, 0.03163347244262695, 0.03169619178771973, 0.03154604721069336, 0.032474494934082034, 0.03142720031738281, 0.03142476844787598, 0.03140787124633789, 0.031473663330078124, 0.031428352355957034, 0.03133875274658203, 0.031406080245971676, 0.03136463928222656, 0.031568159103393556, 0.03131616020202637, 0.0313447036743164, 0.031365055084228516, 0.031275007247924806, 0.03134464073181152, 0.031154144287109376, 0.03265241622924805, 0.03211465454101563, 0.0321278076171875, 0.03207798385620117, 0.032034366607666016, 0.03198921585083008, 0.03205564880371094, 0.03186342430114746, 0.03194393539428711, 0.03197004890441894, 0.03221913528442383, 0.03199494361877441, 0.03202944183349609, 0.031983200073242186, 0.03208867263793945, 0.031987712860107424, 0.03192444801330566, 0.03201520156860352, 0.031893535614013674, 0.03193065643310547, 
0.0321030387878418, 0.032062721252441403, 0.03197222328186035, 0.032235393524169924, 0.03191398429870605, 0.032169120788574215, 0.03199084854125977, 0.03193984031677246, 0.03202412796020508, 0.03212550354003906, 0.03209667205810547, 0.03202867126464844, 0.03210380935668945, 0.0321952018737793, 0.03211836624145508, 0.032221599578857424, 0.03227443313598633, 0.031936511993408204, 0.03201433563232422, 0.03192831993103027, 0.03187507247924805, 0.03208556747436524, 0.03194444847106934, 0.03178566360473633, 0.03165526390075683, 0.031721311569213866, 0.031779647827148434, 0.031909887313842776, 0.03179481506347656, 0.03175168037414551, 0.031894399642944334, 0.031929759979248046, 0.03182243156433105, 0.03173356819152832, 0.03189369583129883, 0.031968704223632814, 0.03189961624145508, 0.03200889587402344, 0.03197532844543457, 0.03177574348449707, 0.03189836883544922, 0.032135425567626955, 0.03190169525146484, 0.03281856155395508, 0.032223838806152344, 0.031975360870361326, 0.031893535614013674, 0.032278591156005856, 0.03200972747802734, 0.03206345748901367, 0.031827999114990235, 0.03176236724853516, 0.03187564849853516, 0.03190732765197754, 0.031965152740478515, 0.03187334442138672, 0.031719648361206054, 0.03197500801086426, 0.03184444808959961, 0.03180550384521484, 0.031922367095947264, 0.032032833099365235, 0.03197542381286621, 0.03185641670227051, 0.0319531192779541, 0.03188435173034668, 0.032206783294677736, 0.03193955230712891, 0.031787040710449216, 0.031878751754760744, 0.031964927673339846, 0.03217299270629883, 0.0318317756652832, 0.03194684791564941, 0.031713184356689454, 0.03160268783569336, 0.031606496810913084, 0.03195139122009277, 0.031979263305664064, 0.032061695098876956, 0.032007614135742185, 0.032128734588623045, 0.03231769561767578, 0.03235219192504883, 0.03231391906738281, 0.032302944183349606, 0.032150848388671875, 0.032111297607421874, 0.0321003532409668, 0.03191398429870605, 0.03202463912963867, 0.032042945861816406, 0.032378078460693356, 0.03198646354675293, 0.03201436614990234, 0.03201839828491211, 0.0318948802947998, 0.03320899200439453, 0.03221299362182617, 0.03240697479248047, 0.03191184043884277, 0.031912607192993166, 0.03239491271972656, 0.031879520416259764, 0.03187507247924805, 0.03196928024291992, 0.03238889694213867, 0.031763168334960935, 0.031669567108154294, 0.031793855667114256, 0.03191398429870605, 0.03187302398681641, 0.031897567749023435, 0.03218191909790039, 0.03203071975708008, 0.03205363082885742, 0.032004096984863284, 0.03221702575683594, 0.031881120681762694, 0.031805280685424805, 0.031923967361450194, 0.03161727905273438, 0.031955039978027344, 0.03420931243896484, 0.03225468826293945, 0.03199731254577637, 0.03189145660400391, 0.031887552261352536, 0.03246080017089844, 0.03199158477783203, 0.03257609558105469, 0.03224579238891601, 0.032258174896240235, 0.032270175933837894, 0.032083553314208986, 0.03247468948364258, 0.03225462341308594, 0.03225155258178711, 0.03220233535766601, 0.03229731369018555, 0.03217839813232422, 0.03213715362548828, 0.03175628852844238, 0.0318243522644043, 0.03196742439270019, 0.03195884895324707, 0.03184230422973633, 0.03177471923828125, 0.03178700828552246, 0.0321223030090332, 0.03160940742492676, 0.03168198394775391, 0.03200380706787109, 0.03184931182861328, 0.0317255687713623, 0.03184025573730469, 0.03195084762573242, 0.03174188804626465, 0.03177068710327149, 0.032008094787597655, 0.037103710174560545, 0.032352256774902347, 0.03181977653503418, 0.03426883316040039, 0.032053600311279296, 0.031819583892822266, 0.03213740921020508, 
0.031757535934448244, 0.0316014404296875]",tokens/s,31.15211793511846,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,3590.53312,4609.409024,0.0,4206.886912,4070.564864,s,1,10.2306328125,10.2306328125,0.0,10.2306328125,10.2306328125,10.2306328125,10.2306328125,[10.2306328125],,kWh,8.822761569999784e-05,9.724524280805748e-06,2.802502241999738e-05,0.00012597716240080097,,MB,3616.272384,4783.47264,0.0,4366.270464,4197.065728,s,10,1.6875668029785156,0.16875668029785157,0.00043956967470854794,0.16892181396484374,0.1691445556640625,0.16925013427734376,0.16933459716796875,"[0.16816026306152343, 0.1681755828857422, 0.16910429382324219, 0.16912109375, 0.16808642578125, 0.16882330322265626, 0.1690622100830078, 0.16902032470703124, 0.1686575927734375, 0.169355712890625]",tokens/s,1516.9769845446474,kWh,4.967155582626527e-06,5.477859752861424e-07,3.297309605830414e-06,8.812251163743085e-06,tokens/kWh,29050465.680469967,MB,3620.2496,4791.861248,0.0,4374.659072,4197.068288,s,10,17.10925341796875,1.7109253417968748,0.016390697756024867,1.7063275146484376,1.7390421508789062,1.742282733154297,1.7448751989746094,"[1.738322021484375, 1.7455233154296874, 1.7058992919921876, 1.7082659912109375, 1.6966365966796875, 1.7024412841796874, 1.6914332275390624, 1.70807666015625, 1.7065859375, 1.706069091796875]",tokens/s,36.82217947267947,kWh,4.937923422487194e-05,5.446276323082536e-06,3.286185821036891e-05,8.76873687583234e-05,tokens/kWh,718461.5172298684,,s,630,17.106712766647345,0.027153512328011647,0.0005691094259290827,0.026989168167114257,0.02773168888092041,0.027953979396820067,0.029107978229522713,"[0.028067743301391602, 0.027437152862548827, 0.027453088760375978, 0.02751113510131836, 0.027459840774536132, 0.027303680419921875, 0.02734489631652832, 0.027213823318481444, 0.02856959915161133, 0.027201824188232422, 0.02725449562072754, 0.027222015380859374, 0.02715238380432129, 0.027322368621826174, 0.027234304428100587, 0.027114784240722656, 0.02784124755859375, 0.02710937690734863, 0.027137184143066408, 0.02706489562988281, 0.027189535140991213, 0.027211551666259767, 0.027304384231567384, 0.02718492889404297, 0.027319904327392577, 0.02735526466369629, 0.027400480270385743, 0.027321887969970704, 0.027521503448486327, 0.027632640838623046, 0.027792383193969726, 0.02784992027282715, 0.02773062324523926, 0.02787504005432129, 0.02807811164855957, 0.02788803291320801, 0.027947328567504884, 0.027579904556274414, 0.02784614372253418, 0.027924671173095703, 0.027702751159667968, 0.027721887588500978, 0.02802364730834961, 0.027637760162353517, 0.027514591217041015, 0.027486495971679688, 0.02735270309448242, 0.027646080017089843, 0.027214080810546874, 0.030486528396606444, 0.027430400848388672, 0.028727392196655273, 0.02785055923461914, 0.02797123146057129, 0.02817715263366699, 0.027371776580810546, 0.02740790367126465, 0.02745779228210449, 0.027258975982666016, 0.027350784301757813, 
0.02738617515563965, 0.02737766456604004, 0.02752457618713379, 0.02795827293395996, 0.027564064025878906, 0.027679136276245117, 0.02755846405029297, 0.027456863403320313, 0.028539039611816405, 0.02787993621826172, 0.027663648605346678, 0.027557600021362306, 0.028256832122802736, 0.027865535736083986, 0.027702304840087892, 0.02864022445678711, 0.02774537658691406, 0.02794998359680176, 0.027666271209716795, 0.02786524772644043, 0.02772377586364746, 0.027842464447021483, 0.027702848434448243, 0.028360319137573243, 0.02769603157043457, 0.027527168273925783, 0.028475040435791014, 0.027506719589233397, 0.027308351516723634, 0.027213823318481444, 0.02738505554199219, 0.027219743728637696, 0.02760188865661621, 0.027565919876098632, 0.027332799911499023, 0.027291103363037108, 0.027300384521484374, 0.027469823837280274, 0.027457536697387694, 0.027313247680664062, 0.02729257583618164, 0.027666431427001953, 0.027817983627319336, 0.027250688552856447, 0.027862239837646484, 0.02780659294128418, 0.02745881652832031, 0.027524864196777344, 0.027782047271728515, 0.027625375747680665, 0.02764329528808594, 0.027679040908813478, 0.027677055358886718, 0.027771135330200196, 0.02774127960205078, 0.028199583053588866, 0.027938816070556642, 0.027803071975708006, 0.027658559799194335, 0.028616352081298826, 0.027603551864624022, 0.027584447860717773, 0.02759891128540039, 0.027544736862182617, 0.027754432678222658, 0.027490591049194334, 0.02793606376647949, 0.02717964744567871, 0.02692243194580078, 0.02671392059326172, 0.02702207946777344, 0.027011072158813477, 0.02731007957458496, 0.026614816665649414, 0.026639328002929688, 0.027146240234375, 0.02674015998840332, 0.027859519958496094, 0.0298221435546875, 0.027078752517700196, 0.027199647903442384, 0.02696659278869629, 0.02678169631958008, 0.026984447479248046, 0.026918912887573244, 0.02680326461791992, 0.027015392303466796, 0.026780288696289064, 0.026583040237426758, 0.026714176177978517, 0.026882080078125, 0.027215871810913086, 0.0267509765625, 0.02689023971557617, 0.026693567276000977, 0.02693049621582031, 0.027185920715332032, 0.027303136825561524, 0.02673539161682129, 0.026849056243896486, 0.026966239929199217, 0.02711292839050293, 0.026937599182128905, 0.026994592666625978, 0.026979743957519533, 0.026691999435424805, 0.02723030471801758, 0.02690300750732422, 0.026836992263793946, 0.027165983200073244, 0.026716575622558594, 0.026959327697753905, 0.027171680450439453, 0.026657983779907225, 0.027310911178588866, 0.028728864669799806, 0.027435359954833986, 0.02727334403991699, 0.0269899845123291, 0.026927711486816407, 0.02716796875, 0.02715679931640625, 0.02699238395690918, 0.027589344024658204, 0.02692905616760254, 0.026970144271850585, 0.027127328872680663, 0.026687103271484373, 0.026857568740844728, 0.027261791229248048, 0.02680428886413574, 0.02693667221069336, 0.02672287940979004, 0.026730207443237303, 0.026860063552856445, 0.026808160781860352, 0.026648672103881835, 0.026800031661987304, 0.026881376266479493, 0.026833568572998047, 0.026723360061645506, 0.026722368240356446, 0.027109888076782225, 0.027023008346557617, 0.02678860855102539, 0.026785055160522462, 0.02686025619506836, 0.026677248001098632, 0.02656870460510254, 0.02777907180786133, 0.02690870475769043, 0.026939359664916993, 0.02693017578125, 0.02672492790222168, 0.026672672271728516, 0.02671504020690918, 0.02692300796508789, 0.02679376029968262, 0.02691913604736328, 0.0270696964263916, 0.026918752670288086, 0.02691561508178711, 0.0268985595703125, 0.02690457534790039, 0.026918912887573244, 
0.026806272506713868, 0.027022911071777345, 0.02716694450378418, 0.027148191452026366, 0.02693766403198242, 0.02706345558166504, 0.027124576568603516, 0.02704159927368164, 0.02691904067993164, 0.0268472957611084, 0.026714111328125, 0.02692815971374512, 0.026627040863037108, 0.02653593635559082, 0.02680531120300293, 0.02810540771484375, 0.028221376419067384, 0.02694486427307129, 0.026929855346679688, 0.026876192092895507, 0.026674335479736327, 0.02708310317993164, 0.030218751907348632, 0.034176223754882815, 0.02728611183166504, 0.027084543228149415, 0.027269567489624023, 0.02795724868774414, 0.02698838424682617, 0.027140159606933594, 0.02676950454711914, 0.02691219139099121, 0.02692563247680664, 0.027387903213500975, 0.026976160049438477, 0.026969823837280273, 0.02707084846496582, 0.02703366470336914, 0.027074432373046874, 0.02720096015930176, 0.026991359710693358, 0.02694540786743164, 0.027043840408325196, 0.026846847534179687, 0.02687014389038086, 0.027064319610595702, 0.026798080444335938, 0.026861568450927735, 0.02698633575439453, 0.02686582374572754, 0.026953727722167968, 0.026840288162231444, 0.026810367584228514, 0.026804544448852538, 0.026963743209838867, 0.02687811279296875, 0.026702239990234376, 0.026662784576416014, 0.026693599700927734, 0.026756832122802734, 0.026841184616088868, 0.026884576797485352, 0.026849248886108398, 0.026779680252075194, 0.026646368026733397, 0.02669081687927246, 0.02668191909790039, 0.02698624038696289, 0.02712022399902344, 0.027119455337524415, 0.027074720382690428, 0.026656768798828126, 0.026776575088500978, 0.026971168518066406, 0.02666694450378418, 0.026708000183105467, 0.026979551315307618, 0.0272927360534668, 0.02695487976074219, 0.02725334358215332, 0.026806272506713868, 0.026641855239868163, 0.02685366439819336, 0.02704412841796875, 0.026648576736450196, 0.02688572883605957, 0.027021728515625, 0.02677894401550293, 0.02664134407043457, 0.027383199691772463, 0.02766102409362793, 0.02688198471069336, 0.02706211280822754, 0.02697420883178711, 0.026888256072998048, 0.026765247344970704, 0.02715398406982422, 0.026843839645385743, 0.026891456604003907, 0.02714886474609375, 0.026948671340942382, 0.02681884765625, 0.026856096267700195, 0.027112991333007812, 0.02693168067932129, 0.026648000717163087, 0.026619775772094727, 0.026747583389282226, 0.026798080444335938, 0.027074560165405274, 0.027024831771850586, 0.02727174377441406, 0.026752416610717773, 0.02692156791687012, 0.026578943252563478, 0.02673823928833008, 0.02693552017211914, 0.026968063354492186, 0.026869407653808595, 0.0276213436126709, 0.026976703643798828, 0.026925024032592774, 0.026855615615844725, 0.027006975173950197, 0.027072032928466796, 0.02697612762451172, 0.027208063125610353, 0.02730415916442871, 0.027303936004638672, 0.02737766456604004, 0.027207679748535156, 0.027098112106323242, 0.02726515197753906, 0.0273437442779541, 0.02706572723388672, 0.027035839080810548, 0.02695952033996582, 0.02677577590942383, 0.027050048828125, 0.026887840270996093, 0.026941951751708985, 0.02800383949279785, 0.027152959823608398, 0.026851615905761718, 0.02694963264465332, 0.026904159545898438, 0.026898847579956055, 0.027060224533081056, 0.027104927062988282, 0.02696598434448242, 0.027336864471435546, 0.026964256286621094, 0.026855072021484374, 0.02764851188659668, 0.026867776870727538, 0.027482112884521483, 0.026857088088989258, 0.026811904907226562, 0.026918848037719725, 0.027104511260986328, 0.02712054443359375, 0.027238752365112303, 0.02730361557006836, 0.027098880767822266, 0.02692403221130371, 
0.02702275276184082, 0.027152992248535155, 0.026931455612182617, 0.02690835189819336, 0.027059616088867186, 0.026956159591674803, 0.02681270408630371, 0.026926143646240235, 0.02672230339050293, 0.026612672805786133, 0.026625503540039064, 0.02671027183532715, 0.026718496322631836, 0.02680953598022461, 0.026763999938964844, 0.027060575485229492, 0.027139839172363282, 0.02675302314758301, 0.02666649627685547, 0.026684127807617187, 0.026881792068481444, 0.02676652717590332, 0.026721120834350586, 0.02671615982055664, 0.026683616638183593, 0.026677024841308593, 0.02674278450012207, 0.026660863876342773, 0.02658515167236328, 0.02664147186279297, 0.026737279891967773, 0.0266693115234375, 0.02679952049255371, 0.026755392074584963, 0.026789215087890624, 0.026840095520019532, 0.026826656341552735, 0.026953727722167968, 0.026838144302368163, 0.02650611114501953, 0.02673811149597168, 0.026675775527954103, 0.026561824798583985, 0.026740480422973632, 0.026669343948364257, 0.026680000305175783, 0.026570112228393554, 0.026748640060424805, 0.026783775329589844, 0.026866559982299806, 0.026930240631103514, 0.02778726387023926, 0.027254783630371093, 0.02704159927368164, 0.027000192642211915, 0.02725766372680664, 0.027213823318481444, 0.02740777587890625, 0.02733695983886719, 0.02683535957336426, 0.026781152725219727, 0.02692095947265625, 0.02690505599975586, 0.02690015983581543, 0.02692969512939453, 0.0272936954498291, 0.0269453125, 0.026836992263793946, 0.027054079055786134, 0.026984512329101564, 0.026941375732421877, 0.026902528762817384, 0.026879999160766603, 0.02683839988708496, 0.02697279930114746, 0.026754751205444335, 0.027107648849487305, 0.026843135833740234, 0.026938528060913087, 0.02689833641052246, 0.02672662353515625, 0.027084735870361327, 0.026993440628051757, 0.026871807098388673, 0.027340799331665038, 0.026947200775146483, 0.02781808090209961, 0.02982326316833496, 0.027383199691772463, 0.02742947196960449, 0.02705116844177246, 0.02686227226257324, 0.02709724807739258, 0.027064287185668945, 0.026822687149047852, 0.02689638328552246, 0.027193536758422853, 0.02749625587463379, 0.02713599967956543, 0.026910240173339844, 0.0269289608001709, 0.02714691162109375, 0.026968063354492186, 0.02688204765319824, 0.027185152053833008, 0.02693734359741211, 0.02712166404724121, 0.02693129539489746, 0.027022911071777345, 0.02731862449645996, 0.0271646728515625, 0.02712166404724121, 0.027313375473022462, 0.027065120697021484, 0.02759916877746582, 0.027123712539672853, 0.027009023666381835, 0.028821504592895508, 0.028559648513793945, 0.0272523193359375, 0.0271648006439209, 0.02684716796875, 0.026763328552246092, 0.02681785583496094, 0.02666156768798828, 0.026728448867797853, 0.027281408309936524, 0.026744447708129882, 0.026824064254760742, 0.02683113670349121, 0.0266964168548584, 0.026676992416381835, 0.026733919143676756, 0.02708723258972168, 0.026925119400024414, 0.02674940872192383, 0.02660335922241211, 0.0266343994140625, 0.026726655960083008, 0.027102975845336913, 0.026850591659545897, 0.026962656021118164, 0.026841087341308592, 0.026740095138549805, 0.0268538875579834, 0.027131776809692382, 0.027091455459594727, 0.02724336051940918, 0.027321247100830077, 0.027553184509277344, 0.027031999588012695, 0.026989952087402343, 0.027692895889282226, 0.027353792190551757, 0.02723865509033203, 0.027392255783081056, 0.02738969612121582, 0.0269201602935791, 0.02711836814880371, 0.027088895797729492, 0.027045888900756834, 0.02918400001525879, 0.027441440582275392, 0.026824415206909178, 0.026918272018432617, 
0.026907264709472658, 0.027225215911865233, 0.027054912567138673, 0.02683113670349121, 0.026791616439819334, 0.026879167556762694, 0.026766239166259767, 0.02697439956665039, 0.026763071060180665, 0.027077823638916015, 0.02687001609802246, 0.027007551193237306, 0.027812223434448242, 0.026888191223144533, 0.027033599853515625, 0.026682527542114257, 0.027091808319091797, 0.02774835205078125, 0.02892185592651367, 0.027096992492675782, 0.027137184143066408, 0.02693996810913086, 0.026958208084106445, 0.027181087493896486, 0.026943519592285158, 0.028493663787841798, 0.03038559913635254, 0.027097759246826173, 0.027307680130004883, 0.027064672470092775, 0.026843135833740234, 0.026570207595825197, 0.026761760711669923, 0.026894336700439454, 0.026853376388549805, 0.026779584884643555, 0.026923168182373048, 0.026643840789794922, 0.02678665542602539, 0.026817663192749024, 0.02673436737060547, 0.02674355125427246, 0.026755104064941405, 0.026797183990478514, 0.02696076774597168, 0.026892288208007813, 0.027006303787231446, 0.02693391990661621, 0.026806272506713868, 0.02677555274963379, 0.026963903427124025, 0.027149568557739256, 0.02669388771057129, 0.026616224288940428, 0.02696406364440918, 0.02743916893005371, 0.026938592910766602, 0.02693814468383789, 0.026887424468994142, 0.02684320068359375, 0.026872512817382812, 0.026834207534790037, 0.027476032257080077, 0.026659135818481446, 0.026915264129638673, 0.026816415786743163, 0.026755071640014647, 0.026630144119262695, 0.026712064743041993, 0.026795135498046876, 0.026868608474731444, 0.026949567794799803, 0.028175615310668947, 0.027181472778320313, 0.027694623947143556]",tokens/s,36.82764822171447,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in 
load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 16.12 MiB is free. Process 194347 has 14.72 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 187.67 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 
more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 16.12 MiB is free. Process 196036 has 14.72 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 187.67 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 16.12 MiB is free. Process 194926 has 14.72 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 187.67 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model 
self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 16.12 MiB is free. Process 195487 has 14.72 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 187.67 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) 
[Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 16.12 MiB is free. Process 196649 has 14.72 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 187.67 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1001, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 734, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 556, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,3587.117056,4609.409024,0.0,4206.886912,4070.564864,s,1,9.8327783203125,9.8327783203125,0.0,9.8327783203125,9.8327783203125,9.8327783203125,9.8327783203125,[9.8327783203125],,kWh,8.254613427084223e-05,9.095390638543101e-06,2.6063354184001353e-05,0.00011770487909338668,,MB,3522.904064,4783.47264,0.0,4366.270464,4197.065728,s,10,1.7560886688232424,0.17560886688232422,0.00048806288939002483,0.17572736358642577,0.1760205093383789,0.17602655868530273,0.1760313981628418,"[0.1758105926513672, 0.17567686462402343, 0.17546514892578124, 0.17554713439941405, 0.1760191650390625, 0.17424447631835938, 0.17577786254882813, 0.175880126953125, 0.1756346893310547, 0.17603260803222656]",tokens/s,1457.785159399417,kWh,5.1982663050438805e-06,5.730763723770448e-07,3.4529218656491555e-06,9.22426454307008e-06,tokens/kWh,27752890.087299727,MB,3527.102464,4793.9584,0.0,4376.756224,4197.068288,s,10,19.72680834960937,1.972680834960937,0.008595188301442907,1.9741355590820313,1.9812188842773437,1.983751837158203,1.9857781994628907,"[1.9763466796875, 1.96691796875, 1.96374267578125, 1.963957275390625, 1.9719244384765624, 1.9585361328125, 1.9800162353515625, 1.9784261474609375, 1.9862847900390626, 1.980656005859375]",tokens/s,31.93623564617209,kWh,5.7253962266207866e-05,6.3151852060677025e-06,3.747663865575024e-05,0.00010104578612802583,tokens/kWh,623479.7354159676,,s,630,19.72447334098815,0.03130868784283835,0.000569287045220454,0.0311867036819458,0.03171148090362549,0.03212369194030762,0.03275064445495606,"[0.03221913528442383, 0.03190982437133789, 0.031221824645996092, 0.031102560043334962, 0.031517087936401365, 0.031090463638305664, 0.03190521621704102, 0.03119388771057129, 0.03132617568969726, 0.03138972854614258, 0.033040382385253905, 0.03342950439453125, 0.031074304580688477, 0.03136288070678711, 0.031156415939331054, 0.03094105529785156, 0.03098428726196289, 0.030803295135498048, 0.030909120559692384, 0.030940736770629883, 0.031072223663330078, 0.031082975387573243, 0.030948352813720704, 0.03125760078430176, 0.03119923210144043, 0.03180953598022461, 0.03268991851806641, 0.03162044715881348, 0.0315230712890625, 0.031326879501342775, 0.03134873580932617, 0.031356927871704104, 0.031649791717529296, 0.031647296905517576, 0.03130822372436524, 0.031913183212280274, 0.031232799530029297, 0.03133171272277832, 0.0313242244720459, 0.03128892707824707, 0.03127190399169922, 0.031471616744995115, 0.031375455856323245, 0.031317184448242184, 0.031148767471313475, 0.031278783798217774, 0.03110864067077637, 0.03111510467529297, 0.031025920867919922, 0.031215808868408204, 0.031105024337768555, 0.03106800079345703, 0.031013023376464843, 0.03191193580627441, 0.03128639984130859, 0.031123935699462892, 0.031162784576416015, 0.031152128219604492, 0.031164415359497072, 
0.031116416931152344, 0.031185792922973632, 0.031022111892700197, 0.031028192520141603, 0.031957664489746095, 0.0313239688873291, 0.030945215225219726, 0.031022783279418945, 0.03136140823364258, 0.031145984649658204, 0.031150272369384766, 0.031055679321289064, 0.031296831130981445, 0.031095392227172853, 0.030945119857788087, 0.031015167236328123, 0.031120672225952148, 0.03114863967895508, 0.031140159606933594, 0.031059776306152344, 0.03100262451171875, 0.031323583602905276, 0.031154239654541015, 0.031279615402221676, 0.03167027282714844, 0.031792255401611326, 0.031347583770751954, 0.031119359970092773, 0.03127401542663574, 0.03143097686767578, 0.031109695434570313, 0.03125872039794922, 0.03152019119262695, 0.03141184043884277, 0.031615936279296875, 0.03138086318969727, 0.031076255798339843, 0.031211967468261718, 0.03118320083618164, 0.031287040710449215, 0.03102124786376953, 0.031147167205810546, 0.031091392517089842, 0.03127312088012695, 0.030983488082885743, 0.03118355178833008, 0.031098623275756836, 0.030957536697387697, 0.030904544830322265, 0.03107436752319336, 0.031062015533447264, 0.03119923210144043, 0.030968063354492186, 0.030993663787841796, 0.031382015228271484, 0.031039424896240234, 0.03103545570373535, 0.031006719589233397, 0.03119923210144043, 0.031049728393554688, 0.031086463928222657, 0.03248550415039062, 0.031249568939208983, 0.031184864044189454, 0.03140470314025879, 0.03128044891357422, 0.031075231552124022, 0.03130716705322266, 0.03130185508728028, 0.031174144744873046, 0.031013248443603515, 0.03133225631713867, 0.03112201690673828, 0.031160320281982422, 0.030966976165771484, 0.031128095626831054, 0.031053760528564452, 0.031153663635253907, 0.031161184310913085, 0.031131647109985353, 0.031268512725830075, 0.031232128143310545, 0.031127775192260742, 0.03136460876464844, 0.03127552032470703, 0.03115007972717285, 0.031929567337036134, 0.03127724838256836, 0.03150704002380371, 0.03135452842712402, 0.0311845760345459, 0.03120172882080078, 0.0311441593170166, 0.031670015335083006, 0.03109280014038086, 0.03127631950378418, 0.031361631393432614, 0.03112739181518555, 0.03101033592224121, 0.031067071914672853, 0.03118489646911621, 0.031229471206665037, 0.031181280136108398, 0.030980096817016602, 0.031148031234741212, 0.031212671279907227, 0.03138348770141602, 0.031185855865478517, 0.031054943084716798, 0.03105580711364746, 0.03095635223388672, 0.03094495964050293, 0.031149696350097657, 0.03088044738769531, 0.031053375244140625, 0.03135548782348633, 0.03096348762512207, 0.030924863815307617, 0.031237567901611328, 0.031100896835327147, 0.031132415771484376, 0.03094121551513672, 0.031162336349487306, 0.031045791625976562, 0.030969696044921877, 0.03095961570739746, 0.03097395133972168, 0.03125785636901855, 0.030944095611572266, 0.031293567657470704, 0.031916032791137694, 0.031312000274658205, 0.03079360008239746, 0.030828447341918946, 0.030867359161376954, 0.0308287353515625, 0.031092735290527345, 0.030896127700805662, 0.031160320281982422, 0.03078758430480957, 0.030965503692626954, 0.03086892890930176, 0.03084707260131836, 0.03095542335510254, 0.03104031944274902, 0.03098419189453125, 0.03094528007507324, 0.03096780776977539, 0.03077939224243164, 0.031059455871582032, 0.03086128044128418, 0.031709728240966795, 0.031057024002075197, 0.03104857635498047, 0.03100876808166504, 0.030893280029296876, 0.030952543258666993, 0.031043264389038087, 0.031065151214599608, 0.031119295120239258, 0.031334463119506835, 0.03150534439086914, 0.031850112915039065, 0.031825855255126954, 
0.031713727951049805, 0.03140812873840332, 0.03107200050354004, 0.031108896255493165, 0.03117103958129883, 0.0313624324798584, 0.03125516891479492, 0.03116851234436035, 0.03135468864440918, 0.031106752395629884, 0.030991903305053713, 0.030982847213745116, 0.032417247772216794, 0.031427391052246095, 0.031198976516723632, 0.031181055068969725, 0.03144892883300781, 0.031157983779907226, 0.031693248748779296, 0.031037439346313478, 0.031369216918945314, 0.031180448532104492, 0.031305471420288083, 0.030984256744384767, 0.03102729606628418, 0.0311157112121582, 0.031068159103393556, 0.03110246467590332, 0.031136255264282226, 0.03224991989135742, 0.031482431411743166, 0.031061920166015625, 0.03121331214904785, 0.03167062377929687, 0.031098047256469728, 0.031201215744018556, 0.030984991073608397, 0.03107174491882324, 0.032148128509521486, 0.03271001434326172, 0.031168928146362306, 0.031035327911376955, 0.03093731117248535, 0.031038816452026368, 0.03100271987915039, 0.032326465606689454, 0.03124835205078125, 0.03076483154296875, 0.03100262451171875, 0.03083673667907715, 0.030943231582641603, 0.03091391944885254, 0.030855615615844725, 0.031012128829956055, 0.030923679351806642, 0.030920703887939452, 0.03112540817260742, 0.03121776008605957, 0.031340543746948245, 0.03115167999267578, 0.03102083206176758, 0.030982048034667968, 0.03121011161804199, 0.03119651222229004, 0.03123468780517578, 0.031113599777221678, 0.031084320068359376, 0.031070207595825194, 0.03127705574035645, 0.031088960647583007, 0.030934719085693358, 0.03122585678100586, 0.031057920455932617, 0.03113315200805664, 0.03099292755126953, 0.03123747253417969, 0.03118921661376953, 0.03143071937561035, 0.031117055892944338, 0.03273110580444336, 0.03258796691894531, 0.031235679626464844, 0.030910400390625, 0.030978368759155273, 0.031019487380981446, 0.031061248779296877, 0.03455683135986328, 0.03147776031494141, 0.031645023345947265, 0.031217920303344728, 0.030995904922485353, 0.031032096862792968, 0.03113372802734375, 0.031070272445678712, 0.0308752326965332, 0.032726814270019534, 0.031300224304199216, 0.0310762882232666, 0.031088703155517577, 0.030930944442749023, 0.03125817680358887, 0.03114134407043457, 0.03120755195617676, 0.032494430541992185, 0.031334304809570314, 0.031051136016845702, 0.030988288879394532, 0.0309333438873291, 0.03092108726501465, 0.031057632446289063, 0.03080624008178711, 0.030904191970825196, 0.030917984008789062, 0.031171424865722656, 0.031106239318847657, 0.031208255767822265, 0.030826431274414062, 0.03088809585571289, 0.030877504348754883, 0.030846431732177736, 0.030933151245117186, 0.03104102325439453, 0.03102409553527832, 0.03099852752685547, 0.031113088607788084, 0.030975231170654295, 0.031060863494873046, 0.031033344268798828, 0.030760959625244142, 0.03092889595031738, 0.031121376037597657, 0.031114784240722657, 0.03105843162536621, 0.030900447845458985, 0.031280351638793946, 0.03100320053100586, 0.030842432022094725, 0.030845056533813475, 0.030861600875854493, 0.030992416381835936, 0.030873632431030272, 0.030805984497070314, 0.030840831756591795, 0.030913888931274416, 0.031709440231323244, 0.030822048187255858, 0.031146303176879882, 0.03117657661437988, 0.03121004867553711, 0.031131647109985353, 0.031264448165893556, 0.03103366470336914, 0.03102720069885254, 0.03118227195739746, 0.031142463684082033, 0.032273727416992186, 0.032078529357910154, 0.03172108840942383, 0.031648128509521485, 0.031797056198120115, 0.03162851142883301, 0.03149699211120605, 0.03131411170959473, 0.03182387161254883, 0.03139129638671875, 
0.03211705780029297, 0.031294687271118164, 0.031204256057739257, 0.03139705657958984, 0.03147145652770996, 0.03134982490539551, 0.031194751739501952, 0.031702880859375, 0.03155398368835449, 0.03158732795715332, 0.031611904144287106, 0.03126067161560059, 0.031068159103393556, 0.03104275131225586, 0.031099359512329103, 0.031053279876708983, 0.03133523178100586, 0.03163059234619141, 0.03155027198791504, 0.03158124732971192, 0.031218624114990233, 0.03143251228332519, 0.031199424743652344, 0.03121766471862793, 0.03244607925415039, 0.03134502410888672, 0.03143680000305176, 0.03179110336303711, 0.03153248023986816, 0.031449024200439456, 0.03137395286560059, 0.030985855102539064, 0.031226240158081054, 0.0312073917388916, 0.03191811180114746, 0.031090591430664064, 0.030893632888793945, 0.03124278450012207, 0.030873600006103515, 0.03117670440673828, 0.03163340759277344, 0.03150787162780762, 0.03128934478759766, 0.031153856277465822, 0.030981023788452147, 0.03153875160217285, 0.031328224182128904, 0.031134208679199218, 0.031030624389648438, 0.031219327926635742, 0.031431072235107424, 0.03145379257202149, 0.03174195289611816, 0.03233516693115234, 0.03162563133239746, 0.03177824020385742, 0.031918336868286136, 0.03143449592590332, 0.031203744888305664, 0.031197343826293945, 0.031076351165771485, 0.03118489646911621, 0.031064064025878906, 0.03159449577331543, 0.03152892875671387, 0.031495935440063474, 0.03149427223205566, 0.03175030326843262, 0.031087776184082032, 0.03124412727355957, 0.03133935928344726, 0.031200864791870116, 0.031308351516723634, 0.03132352066040039, 0.03144563293457031, 0.03153715133666992, 0.03139548873901367, 0.0311627197265625, 0.031434080123901365, 0.03174796867370606, 0.03147174453735352, 0.03239798355102539, 0.0314654712677002, 0.03156991958618164, 0.03137926483154297, 0.03137964820861817, 0.03136240005493164, 0.03209897613525391, 0.03148534393310547, 0.031104864120483397, 0.031260448455810545, 0.03135382461547852, 0.031524864196777344, 0.03133030319213867, 0.03115827178955078, 0.031078527450561524, 0.031065120697021484, 0.030946144104003905, 0.031139839172363282, 0.03156582450866699, 0.031801343917846676, 0.031510431289672854, 0.03137955284118652, 0.03146137619018555, 0.031362560272216795, 0.031049823760986327, 0.030992416381835936, 0.0313014087677002, 0.031187551498413086, 0.031202335357666016, 0.031232992172241212, 0.03120240020751953, 0.031044511795043944, 0.031313888549804686, 0.0311562557220459, 0.03194470405578613, 0.03223551940917969, 0.0314238395690918, 0.03123062324523926, 0.03131596755981445, 0.031352832794189454, 0.031109151840209962, 0.031162336349487306, 0.03139993667602539, 0.03171123123168945, 0.03190169525146484, 0.0317255687713623, 0.031483488082885744, 0.03252470397949219, 0.0311943359375, 0.03157891273498535, 0.03133769607543945, 0.03137612724304199, 0.031659231185913086, 0.03167750358581543, 0.03160780715942383, 0.03123481559753418, 0.031152128219604492, 0.03156959915161133, 0.03245027160644531, 0.031174688339233397, 0.03102681541442871, 0.030951520919799805, 0.03247600173950195, 0.03329203033447266, 0.03144524765014649, 0.03233942413330078, 0.033737022399902346, 0.03132563209533691, 0.03147964859008789, 0.031289920806884766, 0.031152639389038086, 0.03125644874572754, 0.03122502326965332, 0.031189823150634767, 0.03142563247680664, 0.031236703872680665, 0.03154566383361816, 0.031102783203125, 0.031025344848632813, 0.03152435111999512, 0.031448768615722655, 0.03155958366394043, 0.032129119873046875, 0.031502944946289066, 0.031440256118774414, 
0.03109974479675293, 0.03178700828552246, 0.031129600524902344, 0.03094528007507324, 0.031435935974121094, 0.03275862503051758, 0.031350400924682616, 0.03144947242736816, 0.03093708801269531, 0.03100467109680176, 0.03100876808166504, 0.03124553680419922, 0.031193952560424804, 0.031254528045654296, 0.031076351165771485, 0.030891904830932616, 0.031021184921264648, 0.031410175323486327, 0.031123455047607423, 0.031254528045654296, 0.03107583999633789, 0.0311014404296875, 0.03114188766479492, 0.031120864868164063, 0.03129151916503906, 0.031121088027954102, 0.031179071426391602, 0.03130774307250977, 0.031344512939453124, 0.031658687591552735, 0.03130764770507812, 0.031090303421020506, 0.03132659149169922, 0.031117311477661135, 0.03103539276123047, 0.03100057601928711, 0.031073535919189453, 0.031019168853759764, 0.03127152061462402, 0.031086591720581053, 0.03109174346923828, 0.03109161567687988, 0.031176767349243163, 0.031184608459472657, 0.031139999389648437, 0.03133760070800781, 0.031124576568603516, 0.041355358123779294, 0.03157507133483887, 0.03179395294189453, 0.03133440017700195, 0.031352832794189454, 0.03139574432373047, 0.03107148742675781, 0.031015071868896484, 0.03114633560180664, 0.031084896087646485, 0.03126006317138672, 0.031017183303833008, 0.031065792083740235, 0.03259462356567383, 0.03226828765869141, 0.03150777626037598, 0.03166636848449707, 0.03144345664978027, 0.0314979190826416, 0.03104185676574707, 0.0312684154510498, 0.032307361602783205, 0.03151260757446289, 0.03128096008300781, 0.031158720016479492, 0.031102975845336913, 0.03107142448425293, 0.03126534461975097, 0.031107328414916993]",tokens/s,31.940016298982112,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1001, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 734, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 556, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", 
line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 16.12 MiB is free. Process 197218 has 14.72 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 187.67 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,3605.753856,4609.409024,0.0,4206.886912,4070.564864,s,1,10.78188671875,10.78188671875,0.0,10.78188671875,10.78188671875,10.78188671875,10.78188671875,[10.78188671875],,kWh,8.879367857496163e-05,9.787047555104585e-06,2.91508566540033e-05,0.0001277315827840695,,MB,3638.276096,4783.47264,0.0,4366.270464,4197.065728,s,10,1.6871604614257814,0.16871604614257812,0.00021903228489232804,0.16869321441650392,0.16901886749267578,0.16902762222290038,0.16903462600708008,"[0.16888467407226562, 0.16869769287109376, 0.1685198974609375, 0.16864649963378905, 0.1690169219970703, 0.16835987854003906, 0.16868873596191405, 0.169036376953125, 0.16845558166503907, 0.16885420227050782]",tokens/s,1517.3423385210212,kWh,4.970537296963622e-06,5.481584790924589e-07,3.306306788169493e-06,8.825002564225573e-06,tokens/kWh,29008490.154752146,MB,3644.35456,4791.861248,0.0,4374.659072,4197.068288,s,10,16.72774255371094,1.672774255371094,0.0037802323741648568,1.6717249145507813,1.6786344970703126,1.679328173828125,1.679883115234375,"[1.6719725341796876, 1.6751610107421875, 1.6800218505859374, 1.6784803466796876, 1.670573974609375, 1.671477294921875, 1.6685081787109375, 1.6732012939453125, 1.6686107177734375, 1.6697353515625]",tokens/s,37.66198564911789,kWh,6.492695028720242e-05,7.161333269113966e-06,4.051961716143078e-05,0.00011260790071774717,tokens/kWh,559463.4088589408,,s,630,16.72539849090574,0.026548251572866288,0.00038797980981304176,0.026458352088928223,0.02682104301452637,0.027065038108825684,0.02777667995452881,"[0.02725049591064453, 0.027042112350463866, 0.02682374382019043, 0.02647750473022461, 0.026679296493530274, 0.026455135345458985, 0.026573728561401368, 0.02652774429321289, 0.026639551162719727, 0.0266760311126709, 0.026551456451416017, 0.02662486457824707, 0.026517248153686522, 0.02655174446105957, 0.026444416046142578, 0.026302656173706054, 0.02639446449279785, 0.026384288787841798, 0.027058015823364256, 0.02634079933166504, 0.02640380859375, 0.02630860710144043, 0.02652351951599121, 0.026400896072387697, 0.02668339157104492, 0.026394079208374024, 
0.026376672744750976, 0.02645408058166504, 0.026433536529541016, 0.02636595153808594, 0.02633113670349121, 0.026363168716430664, 0.026404863357543946, 0.026726591110229493, 0.02674652862548828, 0.02662416076660156, 0.02677014350891113, 0.027076608657836915, 0.02685055923461914, 0.026710208892822267, 0.026975807189941407, 0.026608640670776368, 0.02667519950866699, 0.026605247497558594, 0.026536256790161132, 0.0268372802734375, 0.026407743453979494, 0.02633763122558594, 0.026766944885253906, 0.0264116153717041, 0.026277503967285155, 0.026442495346069336, 0.026248287200927735, 0.02628700828552246, 0.026228736877441407, 0.026286079406738282, 0.026279647827148436, 0.026363616943359376, 0.026237152099609376, 0.026367359161376953, 0.02639779281616211, 0.026521215438842772, 0.026384639739990234, 0.026916383743286133, 0.02659584045410156, 0.027054079055786134, 0.02655561637878418, 0.026616031646728516, 0.026468191146850586, 0.026753952026367187, 0.026793888092041016, 0.026531455993652343, 0.027152671813964843, 0.026822656631469727, 0.028738784790039062, 0.02673539161682129, 0.02669113540649414, 0.026683008193969727, 0.026587743759155274, 0.026392799377441406, 0.02663360023498535, 0.026303104400634766, 0.026465951919555666, 0.02630019187927246, 0.026398784637451173, 0.02762713623046875, 0.02657574462890625, 0.026549375534057618, 0.026499967575073242, 0.026390527725219725, 0.026441087722778322, 0.026472192764282226, 0.026628320693969726, 0.026399391174316406, 0.02643667221069336, 0.026534719467163084, 0.02648896026611328, 0.026588640213012694, 0.026675039291381836, 0.026325408935546874, 0.026321184158325194, 0.02629840087890625, 0.026273696899414063, 0.026524768829345704, 0.026389791488647462, 0.026615488052368165, 0.026733919143676756, 0.02653046417236328, 0.026285600662231446, 0.027675104141235352, 0.0265314884185791, 0.026432895660400392, 0.026413503646850585, 0.02655695915222168, 0.026746112823486327, 0.026474496841430665, 0.026303232192993162, 0.026635744094848632, 0.026341920852661134, 0.026218496322631835, 0.026308223724365233, 0.02620863914489746, 0.026304000854492186, 0.026313119888305665, 0.026326400756835937, 0.026347391128540038, 0.02676940727233887, 0.026372095108032227, 0.026253376007080078, 0.026351808547973633, 0.026543039321899414, 0.026336063385009767, 0.02649212837219238, 0.02672105598449707, 0.026663999557495117, 0.02641196823120117, 0.026234367370605468, 0.026634239196777345, 0.026792448043823244, 0.026834943771362304, 0.026863616943359377, 0.027460672378540038, 0.027263872146606444, 0.02753094482421875, 0.02710960006713867, 0.026992544174194336, 0.026803936004638672, 0.026581535339355467, 0.026578943252563478, 0.02649497604370117, 0.026644479751586913, 0.026709728240966797, 0.02644406318664551, 0.026550432205200196, 0.026706111907958983, 0.026600479125976562, 0.02656934356689453, 0.0265482234954834, 0.0265482234954834, 0.026671104431152344, 0.026790111541748048, 0.026725696563720702, 0.026796512603759766, 0.027096607208251952, 0.026968544006347656, 0.02707865524291992, 0.027288896560668945, 0.027152736663818358, 0.02689468765258789, 0.026763071060180665, 0.026469919204711916, 0.02653990364074707, 0.02653878402709961, 0.026351423263549806, 0.026457632064819336, 0.026422975540161132, 0.026634624481201172, 0.02666556739807129, 0.0265948486328125, 0.026392704010009767, 0.02640118408203125, 0.026335168838500976, 0.026326528549194338, 0.02661427116394043, 0.026489952087402343, 0.026420127868652343, 0.02670159912109375, 0.026444000244140627, 0.02634752082824707, 0.027070783615112306, 
0.026576448440551757, 0.02691116714477539, 0.026793407440185546, 0.02673516845703125, 0.026883583068847656, 0.02668185615539551, 0.026436800003051757, 0.026425920486450195, 0.02642076873779297, 0.026687488555908204, 0.026468927383422852, 0.026509471893310547, 0.026564607620239256, 0.02653183937072754, 0.026437631607055666, 0.02637004852294922, 0.02639664077758789, 0.026369823455810546, 0.027088415145874022, 0.02828156852722168, 0.026714111328125, 0.026583040237426758, 0.026486175537109375, 0.026475103378295898, 0.026382335662841795, 0.026608800888061522, 0.026501632690429686, 0.026547967910766603, 0.02647897529602051, 0.026443328857421875, 0.026466304779052735, 0.02650588798522949, 0.026574848175048828, 0.026426496505737303, 0.02645487976074219, 0.026398752212524416, 0.026390527725219725, 0.026457855224609375, 0.026548032760620118, 0.026452415466308592, 0.0264006404876709, 0.02653401565551758, 0.02637740707397461, 0.026387264251708984, 0.02673807907104492, 0.026834623336791992, 0.026745759963989257, 0.026537248611450195, 0.026499008178710936, 0.026622751235961913, 0.026730495452880858, 0.026631263732910155, 0.026642528533935547, 0.026490943908691406, 0.02659328079223633, 0.026452735900878908, 0.02778544044494629, 0.027629344940185548, 0.02675632095336914, 0.026744895935058594, 0.027070976257324218, 0.026505439758300782, 0.026906879425048828, 0.026449920654296875, 0.02660966491699219, 0.026343008041381837, 0.026365856170654296, 0.02646665573120117, 0.026481887817382813, 0.0264202880859375, 0.026547744750976564, 0.026423648834228517, 0.026396671295166017, 0.02662723159790039, 0.026493791580200196, 0.026406911849975585, 0.026390527725219725, 0.02636294364929199, 0.026556415557861326, 0.026826847076416017, 0.026408031463623048, 0.026547967910766603, 0.026372287750244142, 0.026563392639160157, 0.026336416244506836, 0.02658246421813965, 0.026366527557373048, 0.02643132781982422, 0.026374111175537108, 0.02634940719604492, 0.026323135375976563, 0.026381792068481444, 0.02628432083129883, 0.026271007537841798, 0.026327392578125, 0.026640512466430663, 0.026575040817260743, 0.026714431762695313, 0.02643667221069336, 0.026258495330810545, 0.0264006404876709, 0.026416191101074217, 0.026237632751464842, 0.026222848892211915, 0.026372095108032227, 0.02638643264770508, 0.026510751724243165, 0.02626959991455078, 0.026794048309326173, 0.026233247756958008, 0.02621401596069336, 0.02636390495300293, 0.026348127365112304, 0.026309823989868163, 0.026374975204467774, 0.026302463531494142, 0.02630854415893555, 0.02625472068786621, 0.029479040145874023, 0.026794559478759767, 0.0266561279296875, 0.026489471435546873, 0.02628812789916992, 0.028020383834838868, 0.026644832611083986, 0.02675654411315918, 0.026425920486450195, 0.027276384353637696, 0.026538911819458007, 0.026543519973754884, 0.026810335159301757, 0.026472415924072266, 0.02646428871154785, 0.026358400344848633, 0.02638982391357422, 0.026374847412109374, 0.026326431274414062, 0.026413663864135743, 0.026404863357543946, 0.02653539276123047, 0.026339872360229492, 0.02636329650878906, 0.026382944107055665, 0.026386528015136718, 0.0262259521484375, 0.02633996772766113, 0.026300064086914064, 0.026767007827758788, 0.026390975952148437, 0.026535968780517578, 0.026450143814086915, 0.02666867256164551, 0.026745216369628906, 0.026583263397216797, 0.026799455642700195, 0.026618303298950194, 0.026719648361206053, 0.026595935821533204, 0.026679168701171874, 0.026664703369140626, 0.02679007911682129, 0.026519744873046876, 0.026558464050292968, 0.026687488555908204, 
0.026572799682617186, 0.026572351455688478, 0.02648080062866211, 0.02753913688659668, 0.02665260887145996, 0.026639007568359376, 0.02639651107788086, 0.026478015899658203, 0.026301023483276367, 0.026437759399414062, 0.026404863357543946, 0.026464256286621093, 0.02672435188293457, 0.02650111961364746, 0.026324960708618166, 0.02643903923034668, 0.026295040130615236, 0.026388383865356444, 0.026416479110717775, 0.02631497573852539, 0.026400320053100584, 0.02652249526977539, 0.026400768280029296, 0.02636566352844238, 0.027755231857299806, 0.0269005126953125, 0.026843135833740234, 0.026628000259399414, 0.02650441551208496, 0.027046783447265625, 0.026451967239379884, 0.026692863464355468, 0.02673126411437988, 0.026476543426513673, 0.026416927337646483, 0.026803871154785157, 0.026452543258666993, 0.02672377586364746, 0.026390752792358398, 0.02645145606994629, 0.026483552932739258, 0.02635366439819336, 0.026408319473266603, 0.026204479217529296, 0.026381759643554686, 0.026357791900634767, 0.027041824340820312, 0.02650809669494629, 0.026235904693603516, 0.026315711975097657, 0.026558528900146483, 0.02636390495300293, 0.026877952575683595, 0.026422752380371093, 0.026262048721313477, 0.02649087905883789, 0.026692928314208983, 0.02689299201965332, 0.02654003143310547, 0.026517248153686522, 0.02633344078063965, 0.026281951904296875, 0.026333152770996095, 0.02638649559020996, 0.026281984329223632, 0.02716828727722168, 0.02633366394042969, 0.026348831176757813, 0.026243520736694337, 0.02636828804016113, 0.026458368301391602, 0.026412704467773437, 0.026262720108032225, 0.02627235221862793, 0.026355520248413086, 0.026241535186767577, 0.026332704544067383, 0.02629680061340332, 0.026320863723754882, 0.026115968704223634, 0.026179744720458985, 0.026179584503173828, 0.02627993583679199, 0.02622572708129883, 0.026330047607421875, 0.026382335662841795, 0.026353151321411132, 0.02678374481201172, 0.026606687545776365, 0.026377119064331055, 0.026678752899169923, 0.026485279083251954, 0.026367776870727538, 0.026474016189575195, 0.026802719116210936, 0.026554527282714843, 0.026486431121826172, 0.026386783599853515, 0.026435583114624024, 0.026617855072021485, 0.026396352767944335, 0.02655824089050293, 0.026820863723754883, 0.03153497505187988, 0.02726697540283203, 0.02666547203063965, 0.02671615982055664, 0.026482240676879883, 0.026593536376953126, 0.026656639099121093, 0.026605472564697266, 0.02652150344848633, 0.026380191802978514, 0.02659564781188965, 0.026470687866210936, 0.02633910369873047, 0.026324512481689454, 0.026376895904541016, 0.026437471389770508, 0.026417312622070314, 0.02644112014770508, 0.026733152389526366, 0.02645984077453613, 0.02627190399169922, 0.02621251106262207, 0.02653593635559082, 0.02636390495300293, 0.026498048782348634, 0.02636288070678711, 0.026387903213500978, 0.02631923294067383, 0.026378015518188476, 0.026360223770141602, 0.026299840927124025, 0.02631532859802246, 0.026671104431152344, 0.026437631607055666, 0.026517248153686522, 0.02629964828491211, 0.026290496826171874, 0.026286783218383788, 0.026445823669433592, 0.026480640411376953, 0.02627903938293457, 0.02631091117858887, 0.02623347282409668, 0.026311744689941408, 0.02645075225830078, 0.026508480072021483, 0.026295232772827148, 0.026393407821655272, 0.026234912872314452, 0.02631884765625, 0.02647859191894531, 0.026257408142089843, 0.026306560516357422, 0.026531423568725586, 0.026244735717773436, 0.02637295913696289, 0.026281408309936524, 0.026384319305419922, 0.02617401695251465, 0.026244159698486327, 0.026389440536499022, 
0.02624412727355957, 0.026276832580566407, 0.026133567810058593, 0.02628620719909668, 0.026422079086303712, 0.026298368453979492, 0.026715967178344728, 0.026971744537353515, 0.026328863143920897, 0.026307167053222655, 0.026435775756835936, 0.026381696701049805, 0.026221216201782225, 0.02623676872253418, 0.026461824417114258, 0.026284543991088868, 0.026419231414794922, 0.026517440795898437, 0.026719615936279296, 0.026936256408691406, 0.026942623138427733, 0.026853792190551756, 0.02664672088623047, 0.02655843162536621, 0.026556447982788087, 0.026471519470214845, 0.02663311958312988, 0.026394624710083008, 0.026453792572021486, 0.026491104125976564, 0.026458335876464845, 0.026674911499023436, 0.026492992401123048, 0.026341312408447264, 0.02643974494934082, 0.027052032470703126, 0.027674623489379883, 0.02668854331970215, 0.026545120239257813, 0.02647235107421875, 0.026445920944213868, 0.02639660835266113, 0.026460512161254883, 0.026523359298706056, 0.026441984176635742, 0.026474111557006835, 0.02648281669616699, 0.026681407928466797, 0.026410943984985353, 0.026591648101806642, 0.02637740707397461, 0.02658905601501465, 0.026295263290405272, 0.026333311080932616, 0.026444831848144532, 0.026778720855712892, 0.026638080596923828, 0.026640384674072266, 0.026797088623046875, 0.02640176010131836, 0.02673587226867676, 0.026712064743041993, 0.026555135726928712, 0.026443552017211915, 0.026398944854736327, 0.02630860710144043, 0.026289472579956053, 0.026968767166137695, 0.029946943283081055, 0.026415615081787108, 0.026307008743286134, 0.026263551712036134, 0.02631475257873535, 0.026363744735717773, 0.026251424789428712, 0.027470848083496095, 0.027150527954101562, 0.026354496002197265, 0.026259456634521484, 0.026310335159301756, 0.026337440490722657, 0.026339487075805666, 0.026384384155273437, 0.02633497619628906, 0.026767168045043945, 0.026360511779785156, 0.02615065574645996, 0.026449695587158203, 0.026433759689331055, 0.026351615905761717, 0.02675209617614746, 0.026350624084472658, 0.026226015090942384, 0.026229280471801758, 0.026251264572143555, 0.026451967239379884, 0.026482431411743165, 0.026208511352539064, 0.026200063705444337, 0.026392160415649416, 0.026218591690063478, 0.026282304763793944, 0.02633113670349121, 0.026290271759033205, 0.02610371208190918, 0.02631884765625, 0.026447296142578125, 0.02629075241088867, 0.026619647979736327, 0.026663135528564454, 0.02644691276550293, 0.02634220886230469]",tokens/s,37.66726397236844,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, 
in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track 
self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,2163.884032,2194.604032,0.0,1816.133632,1727.29344,s,1,9.0174072265625,9.0174072265625,0.0,9.0174072265625,9.0174072265625,9.0174072265625,9.0174072265625,[9.0174072265625],,kWh,5.976673873331038e-05,6.585365099496029e-06,1.910834862001165e-05,8.546045245281806e-05,,MB,2234.83904,2406.416384,0.0,1998.585856,1980.448768,s,10,3.2212420043945316,0.32212420043945317,0.00027432602111490955,0.3221717987060547,0.3224371398925781,0.32244046325683595,0.3224431219482422,"[0.32209222412109373, 0.32156170654296873, 0.32244378662109374, 0.3224364013671875, 0.3221385803222656, 0.32232028198242185, 0.3222109985351562, 0.32169442749023436, 0.32217874145507813, 0.32216485595703126]",tokens/s,794.7245182161284,kWh,9.410542486587303e-06,1.037575628221308e-06,6.269536265624508e-06,1.6717654380433117e-05,tokens/kWh,15313153.040155603,MB,2248.503296,2597.257216,0.0,2189.426688,2078.022144,s,10,177.882412109375,17.7882412109375,0.020592925819032642,17.790093749999997,17.8097537109375,17.81297841796875,17.81555818359375,"[17.74033203125, 17.77685546875, 17.772400390625, 17.789318359375, 17.816203125, 17.78546484375, 17.790869140625, 17.7986328125, 17.803298828125, 17.809037109375]",tokens/s,3.541665488618573,kWh,0.0005192048609513302,5.727199115571418e-05,0.0003450639392453764,0.0009215407913524209,tokens/kWh,68363.76706400963,,s,630,177.87812567138693,0.28234623122442337,0.0005002065340032206,0.2823446197509766,0.28293623046875,0.2831180969238281,0.2836704473876953,"[0.2817083740234375, 0.281108642578125, 0.28102655029296875, 0.28117919921875, 0.28116668701171876, 0.2813500671386719, 0.28130703735351564, 0.2817966003417969, 0.28123318481445314, 0.28118856811523435, 0.28105682373046875, 0.2815637817382812, 0.2812744445800781, 0.28089865112304685, 0.2817504272460937, 0.28136038208007813, 0.28095693969726565, 0.2816244812011719, 0.2815550537109375, 0.2813931579589844, 0.280936767578125, 0.2819615783691406, 0.2811295166015625, 0.2813604431152344, 0.2813358154296875, 0.28161227416992185, 0.2817576904296875, 0.2811202392578125, 0.2818810729980469, 0.28143618774414064, 0.2814886474609375, 0.2817134094238281, 0.2818887023925781, 0.2820078735351563, 0.28157867431640626, 0.2822633972167969, 0.28165399169921873, 0.28145050048828124, 0.28195550537109376, 0.2812294921875, 0.2821034851074219, 0.2815816345214844, 0.2820033264160156, 0.2816337890625, 0.28149554443359376, 0.28178369140625, 0.28162832641601565, 0.28182830810546877, 0.28160409545898435, 0.2818009033203125, 0.282184814453125, 0.2814942321777344, 
0.28167578125, 0.2819154052734375, 0.28225762939453125, 0.2818082580566406, 0.2819129638671875, 0.2820143127441406, 0.2816607360839844, 0.28195724487304685, 0.28182733154296874, 0.28184173583984373, 0.2815572814941406, 0.2825162048339844, 0.2816000061035156, 0.28129278564453125, 0.28238134765625, 0.28212322998046874, 0.28228607177734377, 0.28375567626953124, 0.28194082641601564, 0.2814460754394531, 0.2823106994628906, 0.28207748413085937, 0.28189495849609375, 0.2820966491699219, 0.28307351684570314, 0.28216644287109377, 0.2815824279785156, 0.2825994262695313, 0.2821038208007812, 0.2816468200683594, 0.2814876403808594, 0.28208740234375, 0.2819433898925781, 0.28198684692382814, 0.28177877807617185, 0.2824645690917969, 0.2820888061523438, 0.28176986694335937, 0.2818728332519531, 0.282263916015625, 0.2813644714355469, 0.2818531494140625, 0.282159912109375, 0.28162213134765623, 0.2818133850097656, 0.281831298828125, 0.2822569580078125, 0.2817108154296875, 0.28234378051757814, 0.28229971313476565, 0.28194680786132814, 0.2821629638671875, 0.282089599609375, 0.28266885375976564, 0.2821143493652344, 0.28176177978515626, 0.28232809448242185, 0.28198574829101564, 0.282744140625, 0.2830360107421875, 0.2825934448242188, 0.2832596130371094, 0.28283056640625, 0.28232498168945314, 0.2819215393066406, 0.28268655395507813, 0.2822210693359375, 0.2829930419921875, 0.28259872436523437, 0.28216085815429687, 0.2821068420410156, 0.28219186401367186, 0.28174951171875, 0.28200244140625, 0.2818103637695312, 0.2818050537109375, 0.28194964599609373, 0.28164801025390623, 0.2816470947265625, 0.28212225341796876, 0.28157131958007814, 0.28144024658203126, 0.28197479248046875, 0.2823550415039062, 0.28141839599609375, 0.28154266357421875, 0.28250521850585936, 0.2820546569824219, 0.2807807922363281, 0.2821868591308594, 0.28201666259765623, 0.2817181396484375, 0.2816468200683594, 0.2822828063964844, 0.2825413818359375, 0.2814042053222656, 0.28236123657226564, 0.28229693603515627, 0.2812333984375, 0.28171878051757815, 0.2826264038085938, 0.2821457824707031, 0.281993896484375, 0.2820177307128906, 0.28194412231445315, 0.28198910522460935, 0.2825441284179688, 0.2827960205078125, 0.2821397399902344, 0.28224371337890625, 0.2820672302246094, 0.28260906982421874, 0.2822496643066406, 0.2821588745117187, 0.2823231811523437, 0.2821611328125, 0.28179608154296876, 0.2822761535644531, 0.2825519104003906, 0.28220291137695314, 0.28239599609375, 0.28195034790039064, 0.28220880126953124, 0.2816912536621094, 0.28253070068359376, 0.2816629943847656, 0.28212066650390627, 0.28200961303710936, 0.28256051635742185, 0.2823777770996094, 0.2829818420410156, 0.282862548828125, 0.28231884765625, 0.28236764526367186, 0.2824459228515625, 0.28264678955078126, 0.28200732421875, 0.2823489990234375, 0.28172946166992185, 0.2823695373535156, 0.28246728515625, 0.28236184692382815, 0.28178411865234376, 0.2822180480957031, 0.2826302185058594, 0.28216717529296875, 0.28230303955078123, 0.28263436889648436, 0.28219390869140626, 0.2823474426269531, 0.28219732666015623, 0.2821455383300781, 0.2820280456542969, 0.2820444030761719, 0.28189697265625, 0.28263177490234376, 0.282442138671875, 0.28246426391601565, 0.28231436157226564, 0.28231512451171875, 0.28233053588867185, 0.28185836791992186, 0.28206314086914064, 0.28238027954101563, 0.2822102966308594, 0.2821610412597656, 0.282501220703125, 0.2822912902832031, 0.2823437194824219, 0.28220660400390624, 0.28217788696289064, 0.28252175903320315, 0.2824208068847656, 0.28231231689453123, 0.28185836791992186, 0.28272256469726564, 
0.28227789306640627, 0.2822451171875, 0.28217138671875, 0.28220416259765624, 0.28259686279296875, 0.28197296142578127, 0.28252093505859377, 0.2827130126953125, 0.28260107421875, 0.2826029663085938, 0.2827558898925781, 0.28268142700195314, 0.2825068359375, 0.28281671142578124, 0.2828082580566406, 0.2827901611328125, 0.2826642761230469, 0.28224517822265627, 0.28231536865234375, 0.28244378662109376, 0.2823638916015625, 0.28244992065429686, 0.2825603332519531, 0.283182373046875, 0.2824149169921875, 0.28214898681640627, 0.28238482666015624, 0.2825198974609375, 0.28207839965820314, 0.28243026733398435, 0.2831707458496094, 0.28252578735351563, 0.2823947448730469, 0.28252685546875, 0.2841689147949219, 0.28261346435546875, 0.2823191223144531, 0.28272845458984375, 0.28269329833984375, 0.28300521850585936, 0.28303466796875, 0.28245709228515625, 0.2824744873046875, 0.281973876953125, 0.28299066162109376, 0.28248556518554685, 0.2822279357910156, 0.2835259094238281, 0.2836643371582031, 0.2839423828125, 0.2836729431152344, 0.28421142578125, 0.2832015380859375, 0.2830908203125, 0.2829354248046875, 0.28259231567382814, 0.2824910278320312, 0.2826756591796875, 0.2829981689453125, 0.28238104248046875, 0.2826528625488281, 0.28226971435546877, 0.2829794006347656, 0.282104736328125, 0.28296807861328127, 0.2830824279785156, 0.28277996826171875, 0.28335104370117187, 0.28256869506835935, 0.2826506042480469, 0.2839163208007813, 0.2831787414550781, 0.2821790161132812, 0.282032958984375, 0.28291915893554687, 0.28312551879882814, 0.2829066162109375, 0.28294964599609373, 0.2832795104980469, 0.28237298583984377, 0.28226153564453127, 0.28324884033203124, 0.28287667846679687, 0.2823016052246094, 0.2827273254394531, 0.282625, 0.2823463134765625, 0.2826581726074219, 0.28176394653320314, 0.2821018981933594, 0.2824953918457031, 0.28187026977539065, 0.28230593872070314, 0.2822806091308594, 0.2824806518554687, 0.281499755859375, 0.2825419921875, 0.28195135498046875, 0.28178289794921874, 0.2821245422363281, 0.28252569580078124, 0.28158108520507813, 0.28236846923828124, 0.2817228698730469, 0.28231884765625, 0.28214019775390625, 0.2818748474121094, 0.2822943115234375, 0.28198092651367185, 0.2826136779785156, 0.28222476196289065, 0.2823065490722656, 0.2823987121582031, 0.2818655700683594, 0.28213723754882813, 0.2818925476074219, 0.2822043762207031, 0.2822302551269531, 0.28203631591796874, 0.28266143798828125, 0.2824294128417969, 0.2824390869140625, 0.28216995239257814, 0.2819297180175781, 0.28267111206054685, 0.2821983337402344, 0.2819841003417969, 0.28223959350585937, 0.28231475830078123, 0.28229632568359375, 0.28255587768554685, 0.28281704711914063, 0.28269158935546873, 0.2822287292480469, 0.2822830505371094, 0.2823976745605469, 0.28256997680664064, 0.2825523071289063, 0.2826650390625, 0.282540771484375, 0.282830810546875, 0.2821959533691406, 0.28281964111328123, 0.2822872314453125, 0.2828547668457031, 0.28284771728515623, 0.282723876953125, 0.28235415649414064, 0.28240869140625, 0.2825396728515625, 0.28260894775390627, 0.28239041137695314, 0.28224798583984373, 0.2827673645019531, 0.28229428100585935, 0.2820782775878906, 0.2822442626953125, 0.2819520263671875, 0.2820889892578125, 0.2825404968261719, 0.282499267578125, 0.28215277099609376, 0.28228512573242187, 0.28275830078125, 0.2826625671386719, 0.28218994140625, 0.28231884765625, 0.28244610595703123, 0.2819981079101562, 0.2818420715332031, 0.282692138671875, 0.282208251953125, 0.2823740844726563, 0.28222061157226563, 0.28262374877929686, 0.28240887451171875, 0.28204629516601565, 
0.28255691528320315, 0.28304940795898437, 0.28197909545898436, 0.28234378051757814, 0.28224920654296876, 0.2820809326171875, 0.2819485168457031, 0.2823261413574219, 0.2826187438964844, 0.2821982116699219, 0.2824906005859375, 0.282846923828125, 0.2824277954101562, 0.2825990295410156, 0.2828082580566406, 0.28278585815429685, 0.2824396667480469, 0.28202023315429686, 0.2823386840820313, 0.28228607177734377, 0.282679931640625, 0.2820782775878906, 0.28205517578125, 0.2826039733886719, 0.2818924865722656, 0.28278207397460936, 0.2825441284179688, 0.2825146179199219, 0.2823456115722656, 0.28275888061523435, 0.28253692626953125, 0.2823302612304687, 0.2827496643066406, 0.2826689147949219, 0.2818419189453125, 0.2827179565429688, 0.2824683532714844, 0.28237203979492187, 0.2814505615234375, 0.28211163330078126, 0.2820980224609375, 0.2821591186523438, 0.28242718505859377, 0.28200775146484375, 0.2821791687011719, 0.28169052124023436, 0.2829434814453125, 0.282093017578125, 0.2827833862304687, 0.28231283569335935, 0.28199798583984376, 0.28288214111328125, 0.282334228515625, 0.2829158020019531, 0.28237109375, 0.28195120239257815, 0.28232235717773435, 0.28237677001953126, 0.28265472412109377, 0.2826524658203125, 0.282267333984375, 0.2826734619140625, 0.2824541015625, 0.283025390625, 0.28260562133789063, 0.28247674560546876, 0.2826320495605469, 0.28254443359375, 0.28234735107421877, 0.282380126953125, 0.28298239135742187, 0.282345458984375, 0.282208251953125, 0.2828554382324219, 0.2826026306152344, 0.28202203369140627, 0.2827025451660156, 0.28374346923828125, 0.28333273315429686, 0.28216339111328126, 0.28254568481445314, 0.28293011474609375, 0.2825252380371094, 0.2828234252929688, 0.282904296875, 0.2824818420410156, 0.2824889831542969, 0.28289688110351563, 0.2820088195800781, 0.28275912475585935, 0.28214938354492186, 0.283443603515625, 0.28240280151367186, 0.28236163330078123, 0.28312188720703124, 0.282489990234375, 0.282265625, 0.28280435180664065, 0.2829030151367187, 0.28249981689453124, 0.2827157897949219, 0.2818607788085937, 0.28287387084960935, 0.2825871276855469, 0.28198211669921874, 0.28234225463867185, 0.2826886901855469, 0.28228436279296876, 0.2823562316894531, 0.28245303344726563, 0.2820813293457031, 0.2823396911621094, 0.2829031066894531, 0.28270797729492186, 0.2821275024414063, 0.2832491455078125, 0.28295849609375, 0.28206051635742185, 0.2828975219726563, 0.282876708984375, 0.28224725341796875, 0.2821158447265625, 0.28251162719726564, 0.2826855163574219, 0.28227783203125, 0.28352947998046873, 0.28261312866210936, 0.28258547973632814, 0.28250323486328127, 0.28231478881835936, 0.283111328125, 0.28280416870117187, 0.282333251953125, 0.28235589599609373, 0.28247406005859377, 0.28249728393554685, 0.28256256103515626, 0.28277120971679687, 0.28252108764648437, 0.2828787231445313, 0.28241510009765625, 0.2828995666503906, 0.282104736328125, 0.2827202453613281, 0.283009033203125, 0.2821937561035156, 0.28226309204101563, 0.2824374694824219, 0.28251800537109373, 0.2822692260742187, 0.2821905517578125, 0.28261135864257814, 0.2824639892578125, 0.28300875854492186, 0.2826269226074219, 0.2827056579589844, 0.28254635620117186, 0.28302133178710936, 0.2829619445800781, 0.2832652587890625, 0.2830226135253906, 0.2830750732421875, 0.2827343139648438, 0.2820447082519531, 0.2826931762695313, 0.2829349365234375, 0.2825732421875, 0.2821565246582031, 0.28265933227539064, 0.28219830322265627, 0.2824922180175781, 0.2822982177734375, 0.28269049072265623, 0.2825780944824219, 0.28191806030273436, 0.2827262268066406, 
0.28285516357421875, 0.282481201171875, 0.282574951171875, 0.28316058349609374, 0.2822366027832031, 0.28232736206054687, 0.2828328857421875, 0.28230859375, 0.28245196533203126, 0.28311346435546875, 0.2831810607910156, 0.2820157470703125, 0.28297354125976565, 0.2824956359863281, 0.28224920654296876, 0.28210519409179685, 0.28260140991210936, 0.28277188110351564, 0.2822618103027344, 0.2835672302246094, 0.28298330688476564, 0.2829537353515625, 0.28221600341796876, 0.2828394775390625, 0.2828790588378906, 0.2829869689941406, 0.28328958129882814, 0.2826731262207031, 0.28282318115234373, 0.28256979370117186, 0.28257949829101564, 0.28313018798828127, 0.28251962280273435, 0.28293533325195314, 0.283202880859375, 0.2822878723144531, 0.2829209594726563, 0.28301806640625, 0.2826587829589844, 0.282595458984375, 0.2827507629394531, 0.28313739013671874, 0.28288912963867185, 0.282435546875, 0.2828511657714844, 0.2827154235839844, 0.2824917907714844, 0.2834964599609375, 0.28251898193359376]",tokens/s,3.541750834298009,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): 
File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,3159.3472,4423.876608,0.0,4028.628992,3944.723968,s,1,10.06894921875,10.06894921875,0.0,10.06894921875,10.06894921875,10.06894921875,10.06894921875,[10.06894921875],,kWh,9.33011871750106e-05,1.0284588581744736e-05,2.9152801099982528e-05,0.00013273857685673786,,MB,3048.185856,4763.615232,0.0,4353.687552,4305.05728,s,10,1.147124671936035,0.1147124671936035,0.00020432700330958774,0.11468398666381835,0.11498229293823242,0.1150433048248291,0.11509211433410645,"[0.11510431671142578, 0.11450886535644532, 0.11447369384765625, 0.11482249450683593, 0.11451148986816406, 0.11480099487304687, 0.11456610870361328, 0.11459334564208984, 0.11477462768554687, 0.11496873474121094]",tokens/s,2231.66676005618,kWh,3.418145743168676e-06,3.769615614673295e-07,2.2705283022092642e-06,6.06563560684527e-06,tokens/kWh,42204975.140790775,MB,2932.334592,4763.615232,0.0,4353.687552,4305.05984,s,10,25.44476782226563,2.544476782226563,0.019668975351995483,2.54449072265625,2.565529443359375,2.5716923339843754,2.5766226464843753,"[2.520806640625, 2.520442626953125, 2.52698388671875, 2.542894775390625, 2.56067822265625, 2.546086669921875, 2.524440185546875, 2.560419677734375, 2.564159912109375, 2.577855224609375]",tokens/s,24.759510654631082,kWh,7.292893752266355e-05,8.043900098729084e-06,3.751758686159039e-05,0.00011849042448298304,tokens/kWh,531688.5332708698,,s,630,25.442107032775883,0.04038429687742203,0.0007000421652442235,0.04031345558166504,0.04099657821655273,0.041246515274047844,0.043280706901550295,"[0.040855422973632816, 0.04014092636108398, 0.039540096282958986, 0.03972774505615234, 0.04019327926635742, 0.04052659225463867, 0.04020563125610352, 0.04050188827514648, 0.0403251838684082, 0.04016332626342774, 0.04009164810180664, 0.039913471221923826, 
0.03970431900024414, 0.04036223983764648, 0.03982438278198242, 0.039672832489013675, 0.03988188934326172, 0.039881568908691406, 0.04000153732299805, 0.03993804931640625, 0.04004476928710937, 0.04014672088623047, 0.039874561309814455, 0.03991961669921875, 0.03984694290161133, 0.040043487548828124, 0.03980233764648437, 0.0402435188293457, 0.040106208801269534, 0.040567935943603514, 0.04097040176391602, 0.040549087524414065, 0.04090380859375, 0.0401596794128418, 0.04042387390136719, 0.0395425910949707, 0.039675201416015625, 0.0396984977722168, 0.039705120086669925, 0.03958403015136719, 0.039642337799072266, 0.039824062347412106, 0.03970579147338867, 0.03936486434936524, 0.03955779266357422, 0.03965542221069336, 0.0400445442199707, 0.039470592498779294, 0.03975363159179687, 0.03964358520507812, 0.03998060989379883, 0.03985776138305664, 0.0398812141418457, 0.03946697616577149, 0.03954332733154297, 0.03974137496948242, 0.03986841583251953, 0.039944255828857425, 0.039865375518798825, 0.03970943832397461, 0.03996489715576172, 0.04280934524536133, 0.040013824462890625, 0.04001177597045898, 0.03975372695922851, 0.03952009582519531, 0.040059040069580075, 0.04025139236450195, 0.04046790313720703, 0.040299072265625, 0.04029574584960938, 0.03996883010864258, 0.04125964736938476, 0.03980704116821289, 0.039559070587158206, 0.04222742462158203, 0.0403070068359375, 0.04029439926147461, 0.04017270278930664, 0.04177724838256836, 0.040790782928466794, 0.04057702255249023, 0.03998726272583008, 0.03977004623413086, 0.03967302322387695, 0.039609375, 0.03983510589599609, 0.04045993423461914, 0.03991139221191406, 0.03999609756469726, 0.039702529907226565, 0.03967935943603516, 0.0395043830871582, 0.03967007827758789, 0.03952751922607422, 0.0398507194519043, 0.039882015228271485, 0.039797470092773436, 0.039766014099121096, 0.0397496337890625, 0.039775520324707034, 0.039588382720947266, 0.03974934387207031, 0.039741920471191405, 0.03975075149536133, 0.03972393417358398, 0.03985612869262695, 0.04014182281494141, 0.040544929504394533, 0.040443775177001956, 0.040186336517333984, 0.04000153732299805, 0.03968761444091797, 0.039668193817138674, 0.03957974243164063, 0.03966345596313477, 0.03960457611083985, 0.0398743667602539, 0.04005855941772461, 0.03987843322753906, 0.04021916961669922, 0.040209503173828126, 0.03966864013671875, 0.03960601425170898, 0.03967948913574219, 0.0395305290222168, 0.041644031524658204, 0.0405766716003418, 0.04041353607177734, 0.039948287963867186, 0.039558368682861327, 0.03957132720947266, 0.03950070571899414, 0.03954073715209961, 0.03952217483520508, 0.03959321594238281, 0.03981401443481445, 0.03975196838378906, 0.039932830810546875, 0.03979756927490234, 0.03985408020019531, 0.03977011108398437, 0.04019331359863281, 0.03965568161010742, 0.0395720329284668, 0.039608222961425785, 0.03991961669921875, 0.040169086456298825, 0.040175743103027343, 0.04042982482910156, 0.0404398078918457, 0.04059660720825195, 0.040604545593261716, 0.040065025329589846, 0.03997081756591797, 0.039687553405761716, 0.03984832000732422, 0.03978601455688477, 0.04030332946777344, 0.040232288360595704, 0.04030752182006836, 0.040218017578125, 0.04012486267089844, 0.039796062469482425, 0.03991535949707031, 0.03978294372558594, 0.040304702758789064, 0.040519134521484375, 0.040325023651123046, 0.04028911972045898, 0.04012646484375, 0.04012441635131836, 0.04015468978881836, 0.0405549430847168, 0.04065871810913086, 0.04026595306396484, 0.04088156890869141, 0.040743328094482424, 0.042321311950683595, 0.04021894454956055, 
0.04016502380371094, 0.03987907028198242, 0.039807456970214844, 0.03968780899047852, 0.03972854232788086, 0.039646080017089844, 0.03975785446166992, 0.03995033645629883, 0.04043161773681641, 0.040263553619384766, 0.039686145782470705, 0.039626750946044925, 0.03971635055541992, 0.03964915084838867, 0.040409568786621095, 0.04027606582641602, 0.04050742340087891, 0.04042140960693359, 0.04047568130493164, 0.0398612174987793, 0.03957145690917969, 0.0396404800415039, 0.03975433731079102, 0.04015436935424805, 0.0399633903503418, 0.04033126449584961, 0.04089974212646484, 0.043192832946777344, 0.04057535934448242, 0.04053807830810547, 0.040613887786865234, 0.040509441375732425, 0.040275966644287106, 0.04048691177368164, 0.04069200134277344, 0.040369407653808594, 0.04021526336669922, 0.040375137329101564, 0.03991567993164063, 0.039771968841552735, 0.039752574920654295, 0.039814559936523435, 0.039700702667236326, 0.04330499267578125, 0.04085391998291016, 0.041115646362304685, 0.040499198913574216, 0.040210430145263674, 0.04019507217407227, 0.0398653450012207, 0.03994214248657227, 0.039879871368408204, 0.03957843017578125, 0.03973734283447266, 0.040390655517578124, 0.04097228622436523, 0.040357440948486326, 0.04052217483520508, 0.040618175506591796, 0.04070995330810547, 0.04019404983520508, 0.040513534545898434, 0.0400937614440918, 0.04042156982421875, 0.040435455322265626, 0.040208286285400394, 0.04050543975830078, 0.04200188827514648, 0.040721023559570316, 0.040296031951904294, 0.040468799591064454, 0.040022014617919925, 0.041304256439208986, 0.040981983184814455, 0.04051801681518555, 0.040532577514648435, 0.041093120574951174, 0.04027391815185547, 0.040171134948730466, 0.040341632843017575, 0.04096201705932617, 0.04045596694946289, 0.04057753753662109, 0.041202816009521484, 0.040522624969482425, 0.04096387100219727, 0.040760929107666016, 0.040620670318603516, 0.04080230331420898, 0.040888320922851565, 0.041322494506835936, 0.04043718338012695, 0.04061430358886719, 0.0406918716430664, 0.04081369781494141, 0.04054412841796875, 0.040289279937744144, 0.04009983825683594, 0.0400766716003418, 0.03977593612670898, 0.04031584167480469, 0.03991049575805664, 0.040552513122558594, 0.04063692855834961, 0.04036438369750977, 0.040850433349609375, 0.04025187301635742, 0.04033526229858399, 0.0400574722290039, 0.040013248443603516, 0.04004121780395508, 0.04056864166259765, 0.040890369415283206, 0.04046992111206055, 0.04085321426391601, 0.04101824188232422, 0.040525825500488284, 0.04064828872680664, 0.044755359649658204, 0.04294655990600586, 0.04009344100952148, 0.040220062255859376, 0.04104399871826172, 0.04048569488525391, 0.040705696105957034, 0.0404637451171875, 0.04044063949584961, 0.04027817535400391, 0.04001587295532227, 0.03996985626220703, 0.04011513519287109, 0.039916702270507816, 0.041370368957519534, 0.040498401641845705, 0.04111859130859375, 0.04197750473022461, 0.041314655303955075, 0.040546302795410154, 0.04031875228881836, 0.04090700912475586, 0.04037580871582031, 0.04092361450195312, 0.040506561279296874, 0.040583358764648435, 0.040513534545898434, 0.04035238265991211, 0.040479873657226564, 0.040215423583984375, 0.04011142349243164, 0.040235710144042966, 0.04030822372436523, 0.041027137756347656, 0.04100396728515625, 0.04066918563842774, 0.040304607391357425, 0.040169086456298825, 0.04002207946777344, 0.04003670501708984, 0.039927616119384765, 0.040044097900390624, 0.04087007904052734, 0.04110947036743164, 0.04058566284179688, 0.040384544372558596, 0.04035343933105469, 0.03998492813110351, 
0.040030784606933593, 0.039833824157714845, 0.03977807998657226, 0.04061798477172852, 0.04094771194458008, 0.04108083343505859, 0.04040499114990234, 0.040357887268066404, 0.04005068969726563, 0.03990902328491211, 0.040163681030273436, 0.03988655853271485, 0.03994972610473633, 0.0398996467590332, 0.03988313674926758, 0.04031203079223633, 0.040355743408203124, 0.04090969467163086, 0.040869537353515624, 0.04091305541992187, 0.04036156845092773, 0.040280479431152344, 0.03984975814819336, 0.03980265426635742, 0.039591678619384764, 0.04053443145751953, 0.040448478698730465, 0.04071014404296875, 0.04061183929443359, 0.04061798477172852, 0.04076339340209961, 0.03993600082397461, 0.040908702850341795, 0.040598369598388674, 0.04014080047607422, 0.040134654998779294, 0.03970035171508789, 0.039785694122314454, 0.04341843032836914, 0.040132190704345705, 0.03983827209472656, 0.03961561584472656, 0.039715648651123044, 0.04018294525146485, 0.040761249542236325, 0.04009494400024414, 0.039933662414550784, 0.03954687881469727, 0.039610111236572265, 0.039456832885742185, 0.039446720123291014, 0.03932796859741211, 0.039524383544921875, 0.04053948974609375, 0.040094112396240236, 0.04018380737304687, 0.040021183013916016, 0.03991839981079102, 0.039583744049072264, 0.03947520065307617, 0.039485439300537106, 0.039423103332519534, 0.03957030487060547, 0.04035583877563476, 0.040288257598876956, 0.04101715087890625, 0.040588703155517575, 0.039892833709716795, 0.03977104187011719, 0.03982275390625, 0.039523998260498044, 0.039435230255126956, 0.03961206436157227, 0.03984572982788086, 0.04018227386474609, 0.039772159576416014, 0.0401715202331543, 0.039798175811767575, 0.039735744476318356, 0.03971913528442383, 0.03964255905151367, 0.03972147369384765, 0.039809120178222655, 0.03963894271850586, 0.04028416061401367, 0.04029433441162109, 0.0406569595336914, 0.04037836837768555, 0.040401950836181644, 0.04032748794555664, 0.040618656158447265, 0.040705055236816404, 0.04058620834350586, 0.04067027282714844, 0.04071721649169922, 0.04123046493530273, 0.04053420639038086, 0.04466700744628906, 0.040308734893798825, 0.0401080322265625, 0.04000284957885742, 0.040552383422851564, 0.04073756790161133, 0.0406036491394043, 0.04048281478881836, 0.04002345657348633, 0.039750240325927735, 0.039684097290039064, 0.03976192092895508, 0.039925537109375, 0.0396473274230957, 0.03992793655395508, 0.04100096130371094, 0.043756542205810545, 0.041468929290771485, 0.04025753784179688, 0.03991363143920899, 0.04014883041381836, 0.0397496337890625, 0.03961644744873047, 0.039537792205810544, 0.03970553588867187, 0.0404398078918457, 0.04033536148071289, 0.040318977355957034, 0.04040502548217773, 0.04042121505737305, 0.04045836639404297, 0.04031488037109375, 0.04059519958496094, 0.040398750305175785, 0.040790336608886715, 0.040860767364501956, 0.04076809692382812, 0.04076784133911133, 0.040855552673339846, 0.04119884872436524, 0.04116761779785156, 0.040910846710205076, 0.04095382308959961, 0.040844383239746096, 0.04085855865478515, 0.04084326553344726, 0.040877761840820315, 0.04119174575805664, 0.040820735931396485, 0.04111974334716797, 0.0409345588684082, 0.04067155075073242, 0.04073321533203125, 0.04063436889648438, 0.04055244827270508, 0.04090060806274414, 0.040921089172363284, 0.04082876968383789, 0.04078211212158203, 0.04087590408325195, 0.04069375991821289, 0.04166783905029297, 0.041286399841308594, 0.04072006225585938, 0.040822654724121096, 0.040823230743408205, 0.04078742218017578, 0.04070556640625, 0.04061695861816406, 0.040546302795410154, 
0.0404664306640625, 0.040677089691162106, 0.04086198425292969, 0.0408616943359375, 0.040613887786865234, 0.04039820861816406, 0.040101566314697266, 0.039989471435546875, 0.04001046371459961, 0.03966505432128906, 0.03960892868041992, 0.03983769607543945, 0.04006911849975586, 0.04015718460083008, 0.040046592712402344, 0.04073020935058594, 0.0407342414855957, 0.04256243133544922, 0.040914337158203126, 0.04026780700683594, 0.04578140640258789, 0.04020649719238281, 0.043675102233886716, 0.04073513412475586, 0.040523902893066406, 0.04036316680908203, 0.040037216186523436, 0.03997491073608399, 0.0399005126953125, 0.039908256530761715, 0.039731201171875, 0.03980438232421875, 0.04040937423706055, 0.04040703964233398, 0.040498241424560544, 0.041925567626953125, 0.04010089492797852, 0.040589920043945314, 0.04322124862670899, 0.041512161254882815, 0.04086876678466797, 0.04177081680297851, 0.04099631881713867, 0.04102012634277344, 0.040720382690429685, 0.040762783050537106, 0.040403553009033207, 0.0403642578125, 0.04015884780883789, 0.04045334243774414, 0.03982227325439453, 0.039913471221923826, 0.04019814300537109, 0.040567008972167966, 0.04065846252441406, 0.04108006286621094, 0.040855297088623045, 0.04092911911010742, 0.040882335662841794, 0.04099264144897461, 0.040994945526123046, 0.041082878112792966, 0.040943614959716795, 0.040941566467285154, 0.04115216064453125, 0.04091734313964844, 0.04089408111572266, 0.04093743896484375, 0.04099523162841797, 0.040862751007080075, 0.04101583862304688, 0.040790462493896486, 0.04078976058959961, 0.04087628936767578, 0.04074291229248047, 0.04073379135131836, 0.04079526519775391, 0.04054982376098633, 0.04075321578979492, 0.04090832138061523, 0.040780479431152344, 0.04066326522827148, 0.040815872192382814, 0.041089630126953124, 0.04089651107788086, 0.04069484710693359, 0.040690238952636716, 0.04064057540893555, 0.041551551818847655, 0.040702590942382814, 0.04087807846069336, 0.04095590209960937, 0.041010368347167966, 0.04089939117431641, 0.0409989128112793, 0.040830944061279295, 0.04084870529174805, 0.041075424194335936, 0.04095590209960937, 0.04080844879150391, 0.04099020767211914, 0.0407938232421875, 0.04083932876586914, 0.041089664459228514, 0.04077939224243164, 0.04084099197387695, 0.04096828842163086, 0.04103833770751953, 0.041193473815917966, 0.04082390213012695, 0.04106537628173828, 0.04107369613647461, 0.041262046813964844, 0.04091843032836914, 0.041388641357421874, 0.04087398529052735, 0.04076339340209961]",tokens/s,24.76210005674453,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) 
File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,4376.895488,4566.482944,0.0,4188.012544,4187.049984,s,1,10.3305439453125,10.3305439453125,0.0,10.3305439453125,10.3305439453125,10.3305439453125,10.3305439453125,[10.3305439453125],,kWh,9.650066008333719e-05,1.0637245674047769e-05,3.166196977399949e-05,0.00013879987553138445,,MB,4312.846336,4962.844672,0.0,4555.014144,4514.269184,s,10,7.85486279296875,0.785486279296875,0.0024385657671590765,0.7846548767089844,0.7884500244140625,0.7891213439941406,0.7896583996582032,"[0.78331787109375, 0.7868609619140625, 0.7877884521484375, 0.7831114501953125, 0.7833239135742187, 0.783201416015625, 0.78598583984375, 0.7883008422851563, 0.7831793823242188, 0.7897926635742187]",tokens/s,325.9127584369232,kWh,2.2828943265063682e-05,2.516557984143609e-06,1.5149542034153764e-05,4.049504328336105e-05,tokens/kWh,6321761.362462538,MB,4320.833536,4979.621888,0.0,4571.79136,4514.271744,s,10,466.9446015625,46.69446015625,0.009211138575253765,46.69581640625,46.701953515625,46.704838085937496,46.7071457421875,"[46.67080078125, 46.69037109375, 46.7013125, 46.69963671875, 46.69367578125, 46.69544140625, 46.69619140625, 46.70772265625, 46.69769140625, 46.6917578125]",tokens/s,1.3491964526238884,kWh,0.0013617363878057702,0.0001502109238077487,0.0009058930003548463,0.0024178403119683653,tokens/kWh,26056.31136520826,,s,630,466.9331598510745,0.7411637457953559,0.0004398744652296437,0.7411553955078125,0.7415886169433593,0.741754150390625,0.7420652239990234,"[0.7400548706054687, 0.7412684326171874, 0.7400137939453125, 0.7404874267578125, 0.7405681762695312, 0.7407911376953125, 
0.740642333984375, 0.7402091674804687, 0.740773193359375, 0.7406885986328124, 0.74061376953125, 0.7400984497070312, 0.7404476928710938, 0.7406184692382812, 0.7407650146484375, 0.7409305419921876, 0.7405218505859374, 0.7407485961914062, 0.7407849731445313, 0.740691650390625, 0.7404711303710938, 0.7407369995117188, 0.7407920532226563, 0.740763916015625, 0.740427734375, 0.7405476684570312, 0.7409570922851563, 0.7412613525390624, 0.74046875, 0.7403253784179687, 0.7409314575195313, 0.7408845825195313, 0.7408425903320313, 0.7404266967773437, 0.7407796020507813, 0.7410220947265626, 0.7409541015625, 0.7407222900390625, 0.7410399169921875, 0.7417101440429688, 0.740493408203125, 0.7410515747070312, 0.7407597045898437, 0.741287841796875, 0.740619140625, 0.7408491821289063, 0.7407881469726563, 0.7413148803710937, 0.7415169067382813, 0.7405165405273437, 0.7405834350585937, 0.7412981567382813, 0.74104150390625, 0.7410548095703124, 0.740962646484375, 0.741011474609375, 0.7411691284179688, 0.7408289794921875, 0.7414130859375, 0.7407860107421875, 0.741963134765625, 0.7403405151367187, 0.74083740234375, 0.7407962646484375, 0.7412327880859375, 0.7407361450195312, 0.7408401489257812, 0.7409166870117188, 0.7415664672851563, 0.7402646484375, 0.7407185668945313, 0.7409295654296875, 0.7411096801757813, 0.7403663940429688, 0.7409575805664063, 0.7412445678710937, 0.7411536865234375, 0.7405241088867187, 0.7409991455078125, 0.7411691284179688, 0.741085205078125, 0.7407554321289063, 0.740642822265625, 0.7411220703125, 0.7422764892578125, 0.7411022338867187, 0.74110498046875, 0.7409793701171875, 0.7408450317382812, 0.7406897583007812, 0.7414053955078125, 0.7411097412109375, 0.7411240844726562, 0.7409183349609375, 0.7414363403320312, 0.7412568359375, 0.7411265258789063, 0.7414898071289062, 0.7413748168945312, 0.7415459594726562, 0.7411568603515625, 0.7411302490234375, 0.7410421752929688, 0.74123876953125, 0.7412080078125, 0.7415253295898437, 0.7408212280273437, 0.741011474609375, 0.741369873046875, 0.741525390625, 0.7403992309570312, 0.741412841796875, 0.7414989013671875, 0.7410319213867187, 0.741306396484375, 0.7412183227539062, 0.741035888671875, 0.7411995239257813, 0.7411327514648437, 0.7411178588867188, 0.74104638671875, 0.7415275268554687, 0.7411786499023437, 0.7417346801757813, 0.7408972778320313, 0.7411565551757813, 0.74149169921875, 0.7405444946289063, 0.7411159057617187, 0.7413350219726562, 0.7413760375976562, 0.7407493286132812, 0.7412242431640625, 0.7418446044921875, 0.7410775756835938, 0.7404031982421875, 0.7409903564453125, 0.7414483032226562, 0.7410680541992187, 0.74062451171875, 0.7414396362304687, 0.741398193359375, 0.7410178833007812, 0.7408229370117188, 0.7414319458007812, 0.7412367553710938, 0.7406735229492187, 0.7419535522460937, 0.741868896484375, 0.740987548828125, 0.7405936889648438, 0.7413206787109375, 0.7413677978515625, 0.741185546875, 0.7414883422851563, 0.7411674194335938, 0.74111181640625, 0.7407308959960938, 0.741491943359375, 0.7411206665039063, 0.7415253295898437, 0.7414500732421875, 0.7409674682617188, 0.7418802490234375, 0.7409812622070312, 0.7410646362304687, 0.7412675170898437, 0.7410360107421875, 0.741416015625, 0.7407230224609375, 0.74164013671875, 0.7409830322265625, 0.7409074096679688, 0.7415291748046875, 0.7415167846679688, 0.7413599853515624, 0.7414026489257812, 0.741384765625, 0.7411292724609375, 0.741028564453125, 0.7413885498046875, 0.7414886474609375, 0.7408046264648438, 0.7417279663085937, 0.7412145385742187, 0.7414307250976563, 0.7414728393554687, 0.7443988647460937, 
0.7417261962890624, 0.74151123046875, 0.74071875, 0.7410575561523437, 0.7411285400390625, 0.7408866577148437, 0.7407396240234375, 0.7418423461914062, 0.74116357421875, 0.740701904296875, 0.7411981201171876, 0.7411087646484374, 0.7412780151367188, 0.7409827880859375, 0.7410835571289063, 0.7407659301757813, 0.741185546875, 0.7409679565429688, 0.74139697265625, 0.7413186645507812, 0.7406237182617188, 0.741683837890625, 0.7403906860351562, 0.7412840576171875, 0.7410258178710938, 0.7409801635742187, 0.74120361328125, 0.7415632934570312, 0.7408532104492187, 0.741116455078125, 0.7411136474609376, 0.7415145874023438, 0.7412662963867187, 0.7409889526367187, 0.7410872192382812, 0.7411751708984375, 0.7410032348632812, 0.7412933349609375, 0.7412293701171875, 0.741222412109375, 0.7412467651367187, 0.740702392578125, 0.7414190063476562, 0.7414312744140625, 0.741305908203125, 0.7415014038085938, 0.7416770629882813, 0.7416893310546875, 0.7411199951171875, 0.7407368774414063, 0.7420499267578125, 0.7413145751953125, 0.7412017211914063, 0.7411181640625, 0.741326171875, 0.7411607666015625, 0.7411056518554687, 0.7413658447265625, 0.741214111328125, 0.74111474609375, 0.7406795043945312, 0.7464931640625, 0.7411426391601562, 0.7410634765625, 0.7404827880859375, 0.7409974365234375, 0.7406197509765625, 0.7408665771484375, 0.7403984375, 0.7413561401367188, 0.7419658203125, 0.7407144165039062, 0.7410791015625, 0.7407882080078125, 0.7407738647460937, 0.7412030029296875, 0.741058837890625, 0.7410695190429688, 0.7415043334960938, 0.7412430419921875, 0.7411819458007812, 0.740724609375, 0.7415598754882813, 0.7413863525390625, 0.74105615234375, 0.740856689453125, 0.7412633666992188, 0.7410808715820313, 0.7409461059570313, 0.7410293579101562, 0.7410462646484375, 0.7412589111328125, 0.7407010498046875, 0.7408455810546875, 0.7406441650390625, 0.7417471313476562, 0.7409865112304688, 0.7412426147460938, 0.74111181640625, 0.7409611206054687, 0.7407144165039062, 0.7407513427734375, 0.7415823364257812, 0.7408370361328125, 0.7415029907226562, 0.740903564453125, 0.7415789794921875, 0.74149072265625, 0.7410846557617188, 0.741435791015625, 0.7410320434570312, 0.7414353637695312, 0.741074951171875, 0.7409930419921875, 0.7410699462890625, 0.7412860107421875, 0.7413436279296876, 0.7414398803710938, 0.7414678955078124, 0.7416117553710937, 0.74132421875, 0.7413458862304687, 0.7408488159179687, 0.7418765258789063, 0.7413718872070313, 0.7415316772460937, 0.7410808715820313, 0.741275390625, 0.7411348876953125, 0.7408580322265625, 0.7411875610351563, 0.7405813598632812, 0.7406832275390625, 0.7408024291992188, 0.7411677856445312, 0.74102783203125, 0.74060595703125, 0.741514404296875, 0.7412142944335938, 0.740965087890625, 0.7410953979492187, 0.7418016357421875, 0.7406708374023437, 0.7412825317382813, 0.741028076171875, 0.7408599243164062, 0.741296142578125, 0.7411630249023438, 0.7415357666015625, 0.7415214233398437, 0.74069921875, 0.7410963745117187, 0.7411138305664062, 0.741212158203125, 0.7413514404296875, 0.7405642700195313, 0.741257080078125, 0.7412816162109375, 0.7410144653320313, 0.7413077392578125, 0.7413639526367187, 0.7413846435546875, 0.74137939453125, 0.7410100708007813, 0.7410584106445313, 0.7407945556640625, 0.7417665405273437, 0.740880859375, 0.7411976928710937, 0.7413947143554688, 0.7411568603515625, 0.741291748046875, 0.7409461669921875, 0.7417200927734375, 0.741274658203125, 0.7410894775390625, 0.7413556518554687, 0.7417534790039062, 0.74169140625, 0.7408414916992188, 0.7413616943359375, 0.74169873046875, 0.7417782592773438, 
0.7409581909179688, 0.7410831298828126, 0.7413800659179688, 0.741080322265625, 0.741001953125, 0.7416782836914062, 0.7413665771484375, 0.74156640625, 0.7408585815429688, 0.7410933837890625, 0.7409397583007813, 0.740996826171875, 0.7409339599609375, 0.7410524291992188, 0.7409766235351563, 0.7410626831054687, 0.7407861938476562, 0.741518310546875, 0.7409890747070312, 0.7406717529296875, 0.741853759765625, 0.7413754272460937, 0.7404221801757812, 0.7417546997070312, 0.7414581298828125, 0.7407673950195313, 0.7408295288085938, 0.7406038818359375, 0.7417908935546875, 0.7411633911132812, 0.7414458618164063, 0.7409154052734375, 0.7411790771484374, 0.7408560791015625, 0.741474365234375, 0.740706298828125, 0.741369873046875, 0.7410830688476563, 0.74087841796875, 0.7412449340820313, 0.7413771362304687, 0.7411264038085937, 0.7408414916992188, 0.7414598999023437, 0.741509765625, 0.7406919555664062, 0.7413411865234375, 0.7411691284179688, 0.7416173706054687, 0.7413078002929687, 0.741866455078125, 0.7411542358398437, 0.7416036987304687, 0.7412963256835937, 0.740874267578125, 0.741128173828125, 0.7413411865234375, 0.74148046875, 0.7407715454101562, 0.7412323608398438, 0.741462646484375, 0.7415228881835938, 0.7410775756835938, 0.7413575439453125, 0.7420906372070313, 0.7412470703125, 0.7411732177734375, 0.7411302490234375, 0.7413616943359375, 0.7414722290039063, 0.7408867797851563, 0.7420254516601562, 0.7411176147460937, 0.7410732421875, 0.7404628295898438, 0.7416627197265625, 0.741654541015625, 0.741158935546875, 0.7411773681640625, 0.7414108276367187, 0.7412153930664063, 0.7408607788085938, 0.7411317749023437, 0.741876220703125, 0.7417835693359375, 0.7407921142578126, 0.7413551025390624, 0.74128857421875, 0.741025390625, 0.74086962890625, 0.7408670043945312, 0.7433052368164063, 0.741336181640625, 0.741034912109375, 0.74132275390625, 0.7412633666992188, 0.74140673828125, 0.7411273803710937, 0.7411863403320312, 0.7417216796875, 0.741171630859375, 0.74113818359375, 0.74107275390625, 0.7415269775390625, 0.7411803588867187, 0.7416565551757812, 0.7418121948242188, 0.7415275268554687, 0.7414517822265625, 0.7410155639648438, 0.7417405395507812, 0.7411896362304687, 0.7407821044921875, 0.7414673461914062, 0.7416921997070313, 0.7413944091796875, 0.7407239379882813, 0.7417884521484375, 0.7412769165039063, 0.7408805541992187, 0.7412127685546875, 0.7414353637695312, 0.7416436767578125, 0.7408827514648437, 0.741300537109375, 0.7416688842773438, 0.74085302734375, 0.7411145629882813, 0.741289306640625, 0.7416286010742188, 0.7413043212890625, 0.7413575439453125, 0.7412879028320313, 0.740908203125, 0.7408115844726563, 0.7414262084960938, 0.7415858764648438, 0.740864013671875, 0.7411909790039063, 0.7413412475585938, 0.7407028198242187, 0.7415029907226562, 0.7410174560546875, 0.7412430419921875, 0.7408756713867187, 0.7415670776367187, 0.7412177124023438, 0.7410222778320312, 0.7408429565429687, 0.7411594848632812, 0.7414948120117187, 0.740800537109375, 0.7413162231445313, 0.7411687622070312, 0.7415235595703125, 0.7405779418945313, 0.7415884399414062, 0.7405798950195313, 0.7411806030273438, 0.7407767944335938, 0.7414476928710938, 0.7411036376953125, 0.7408927001953125, 0.7414456176757812, 0.741211181640625, 0.740918212890625, 0.7409500122070313, 0.7412920532226562, 0.741231689453125, 0.7421244506835938, 0.7411846313476562, 0.7412574462890625, 0.7411528930664063, 0.7410980224609375, 0.740861328125, 0.7416243896484375, 0.7414328002929688, 0.7416283569335937, 0.741484619140625, 0.74087158203125, 0.7415220336914062, 
0.7411978149414062, 0.7420714721679688, 0.7412723999023437, 0.7412981567382813, 0.7415902099609375, 0.7411629638671875, 0.7409154663085937, 0.7412301635742188, 0.7416303100585937, 0.7414974975585937, 0.7410515747070312, 0.7414230346679688, 0.7410341796875, 0.741006103515625, 0.741768310546875, 0.7406672973632813, 0.7406749877929687, 0.7412117919921875, 0.7410226440429688, 0.7402691650390625, 0.7408823852539063, 0.74189306640625, 0.7408681030273437, 0.74082470703125, 0.7407844848632813, 0.741823974609375, 0.7409332885742187, 0.7408006591796875, 0.7410634155273438, 0.7409213256835937, 0.7413677978515625, 0.7405787963867188, 0.7414502563476563, 0.741238525390625, 0.7408397216796875, 0.7412633666992188, 0.7413800659179688, 0.741254150390625, 0.74080908203125, 0.74107275390625, 0.7413040771484375, 0.7409694213867187, 0.7408125610351562, 0.74138037109375, 0.7410497436523438, 0.741210693359375, 0.7412342529296875, 0.741245361328125, 0.741232666015625, 0.7413637084960938, 0.7414763793945313, 0.741001220703125, 0.7413013916015625, 0.7409385986328125, 0.7413507690429687, 0.74101123046875, 0.7411248779296875, 0.7411139526367188, 0.7416995849609375, 0.7412953491210937, 0.7412572631835938, 0.74071728515625, 0.7409677734375, 0.7418569946289062, 0.7414486694335938, 0.7406571655273437, 0.7415848999023438, 0.7412244262695312, 0.741185546875, 0.7410769653320313, 0.7414312744140625, 0.7413309326171875, 0.7411056518554687, 0.7408967895507812, 0.7413881225585938, 0.7412020874023437, 0.7409971313476562, 0.740896240234375]",tokens/s,1.349229513279663,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File 
""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,2050.760704,2155.741184,0.0,1753.219072,1633.407488,s,1,8.9447646484375,8.9447646484375,0.0,8.9447646484375,8.9447646484375,8.9447646484375,8.9447646484375,[8.9447646484375],,kWh,5.217082972501051e-05,5.747039846359296e-06,1.5699457003998107e-05,7.361732657536791e-05,,MB,1642.692608,2241.724416,0.0,1824.52224,1762.836992,s,10,0.820909408569336,0.0820909408569336,0.0003810316196820429,0.08209516906738282,0.08251952972412109,0.08253438873291015,0.0825462759399414,"[0.08139862060546875, 0.08184774780273438, 0.082453857421875, 0.08224979400634766, 0.08173474884033204, 0.08251622772216796, 0.08194054412841797, 0.08254924774169922, 0.08243746948242188, 0.08178115081787109]",tokens/s,3118.4927024548488,kWh,2.431554430034579e-06,2.681564053762577e-07,1.6127975865333025e-06,4.312508421944139e-06,tokens/kWh,59362202.911267966,MB,1653.854208,2430.468096,0.0,2013.26592,1853.210112,s,10,12.950849243164063,1.2950849243164064,0.009816788785648081,1.2957139892578124,1.3078525024414063,1.3093563659667968,1.3105594567871093,"[1.301109375, 1.295614990234375, 1.29581298828125, 1.297327880859375, 1.3108602294921874, 1.307518310546875, 1.292753662109375, 1.290372802734375, 1.2766165771484375, 1.2828624267578126]",tokens/s,48.645458546476185,kWh,3.8523322572047385e-05,4.248749020660395e-06,2.11083872570667e-05,6.388045884977449e-05,tokens/kWh,986217.0863261168,,s,630,12.947786214828492,0.020552041610838873,0.00031355362301329113,0.02051806354522705,0.02090762538909912,0.021052077293395997,0.021661486034393322,"[0.02103696060180664, 0.020948352813720702, 0.020642335891723634, 0.020533599853515626, 0.02046976089477539, 0.02062233543395996, 0.020814847946166993, 0.02072979164123535, 0.02053436851501465, 0.020465791702270506, 0.02118947219848633, 0.020557695388793946, 0.0203633918762207, 0.02027622413635254, 0.020359743118286134, 0.02042025566101074, 0.02031660842895508, 0.020236000061035157, 0.020390527725219727, 0.0204400634765625, 0.02059516716003418, 0.02109494400024414, 0.0204083194732666, 0.020440351486206054, 0.02040070343017578, 0.020439199447631836, 0.020832256317138673, 0.020600032806396485, 0.0205893440246582, 0.020483936309814453, 0.020471839904785155, 0.0204453125, 0.020314111709594726, 0.020407871246337892, 0.020797664642333985, 0.020383968353271484, 0.020346847534179688, 0.020907840728759765, 0.02085500717163086, 0.020536832809448242, 0.020666879653930666, 0.020486143112182616, 0.020516063690185545, 0.020800479888916014, 0.022023551940917967, 0.021747360229492186, 0.020801887512207032, 0.020674400329589844, 0.020625568389892577, 0.02062758445739746, 0.020846912384033203, 0.020520959854125977, 0.020479999542236327, 0.020287488937377928, 0.02047920036315918, 0.020568864822387695, 0.020580352783203124, 0.02064793586730957, 0.02064384078979492, 0.020805631637573242, 0.021045248031616212, 0.021174272537231444, 0.021020671844482423, 0.02102217674255371, 0.020866016387939453, 0.020649663925170897, 0.020746559143066407, 0.02056387138366699, 0.0204815673828125, 0.020542015075683595, 0.020645151138305663, 0.020678592681884767, 0.020711679458618164, 0.02053945541381836, 0.020306400299072266, 0.020330495834350586, 0.02033452796936035, 0.020326784133911133, 0.020378944396972656, 0.020471744537353516, 0.02050924873352051, 0.020428735733032225, 0.02045120048522949, 0.020430368423461916, 0.020420831680297853, 0.02056438446044922, 0.020372447967529298, 0.020450239181518556, 0.020496383666992187, 0.02063974380493164, 0.02065180778503418, 
0.02045180892944336, 0.020286304473876953, 0.020171072006225584, 0.020208223342895508, 0.02043289566040039, 0.020641376495361328, 0.020662687301635743, 0.020505664825439453, 0.021004831314086914, 0.020582656860351562, 0.0205579833984375, 0.0207096004486084, 0.020571935653686525, 0.02061516761779785, 0.020584447860717774, 0.02042265510559082, 0.02049843215942383, 0.020624799728393553, 0.020771135330200197, 0.02057040023803711, 0.02064588737487793, 0.020699136734008788, 0.020494400024414064, 0.020443424224853516, 0.020505695343017577, 0.02049648094177246, 0.020494815826416015, 0.020584447860717774, 0.020686847686767578, 0.02065113639831543, 0.020618112564086914, 0.020649984359741212, 0.020727039337158203, 0.020976383209228514, 0.020757728576660157, 0.021174720764160156, 0.021245344161987305, 0.02102128028869629, 0.021217279434204102, 0.02099404716491699, 0.021436256408691408, 0.02094095993041992, 0.020658464431762696, 0.020688608169555665, 0.020998144149780275, 0.02076233673095703, 0.020577951431274413, 0.020606815338134767, 0.020578399658203125, 0.020661056518554686, 0.020576000213623047, 0.020693119049072266, 0.02081177520751953, 0.020600223541259767, 0.020826719284057618, 0.02045257568359375, 0.02051750373840332, 0.02051862335205078, 0.02052681541442871, 0.02052403259277344, 0.020508384704589842, 0.020373504638671876, 0.020428768157958986, 0.020430240631103515, 0.02034668731689453, 0.020490848541259765, 0.02047407913208008, 0.02030499267578125, 0.02048092842102051, 0.02047113609313965, 0.021745887756347657, 0.020410816192626954, 0.02047385597229004, 0.020417856216430663, 0.02039910316467285, 0.020382528305053712, 0.02026995277404785, 0.02024448013305664, 0.02033772850036621, 0.020260128021240234, 0.020290239334106445, 0.020507200241088867, 0.020244735717773438, 0.020322463989257813, 0.020907615661621092, 0.02028303909301758, 0.020282432556152342, 0.020313312530517578, 0.020471584320068358, 0.020486848831176758, 0.020281663894653322, 0.020104896545410155, 0.020215007781982423, 0.020359968185424803, 0.020227104187011718, 0.02052377510070801, 0.02044745635986328, 0.020397279739379885, 0.020836095809936523, 0.020742464065551757, 0.020508672714233397, 0.020746240615844725, 0.020298816680908202, 0.02020652770996094, 0.02024380874633789, 0.020136192321777345, 0.020191648483276366, 0.02039094352722168, 0.020277919769287108, 0.02020310401916504, 0.020261247634887694, 0.020247936248779297, 0.020355648040771484, 0.020306047439575196, 0.020400415420532225, 0.020297632217407227, 0.02030112075805664, 0.020750656127929687, 0.02059516716003418, 0.02140943908691406, 0.020523359298706054, 0.02048115158081055, 0.020384639739990235, 0.020330528259277343, 0.020244287490844726, 0.020352287292480467, 0.02057219123840332, 0.02035923194885254, 0.020699935913085936, 0.02044927978515625, 0.020587776184082033, 0.02044595146179199, 0.020369407653808593, 0.0204017276763916, 0.020507072448730467, 0.020493375778198243, 0.020351167678833007, 0.020529024124145506, 0.02043766403198242, 0.020508800506591797, 0.020582496643066408, 0.020625408172607423, 0.020514911651611328, 0.020477855682373047, 0.020609024047851563, 0.020768768310546876, 0.02087116813659668, 0.021124095916748048, 0.021268512725830076, 0.02117068862915039, 0.02108860778808594, 0.021057664871215822, 0.020953088760375976, 0.021000192642211913, 0.02085430335998535, 0.02091801643371582, 0.020925151824951173, 0.020885631561279296, 0.020858272552490235, 0.020906463623046875, 0.020802751541137695, 0.02071583938598633, 0.020875104904174803, 0.020832351684570313, 
0.020883520126342772, 0.02072166442871094, 0.020760704040527343, 0.020874399185180664, 0.02068931198120117, 0.02071174430847168, 0.02103811264038086, 0.020685792922973633, 0.02076019287109375, 0.020858591079711913, 0.02191222381591797, 0.021987327575683592, 0.02093164825439453, 0.020803903579711912, 0.020694976806640626, 0.020684959411621094, 0.02078335952758789, 0.02058678436279297, 0.020731903076171874, 0.02063155174255371, 0.020926464080810548, 0.020823808670043947, 0.020670719146728515, 0.02060697555541992, 0.02067580795288086, 0.02066115188598633, 0.02074201583862305, 0.020590368270874022, 0.020690847396850585, 0.02087763214111328, 0.020832191467285155, 0.020838464736938477, 0.020639232635498047, 0.020416255950927734, 0.020678592681884767, 0.020702016830444335, 0.020681888580322265, 0.020808000564575196, 0.020787200927734374, 0.02082815933227539, 0.020855424880981445, 0.020809247970581056, 0.02080169677734375, 0.020914464950561523, 0.020860864639282228, 0.0207869758605957, 0.020889823913574218, 0.02078447914123535, 0.020771488189697266, 0.020760576248168947, 0.020772863388061523, 0.020723552703857423, 0.02062351989746094, 0.0206759033203125, 0.021006240844726562, 0.020742176055908203, 0.020595455169677736, 0.020673856735229493, 0.02106643295288086, 0.02078838348388672, 0.02095734405517578, 0.021004480361938478, 0.020762624740600585, 0.020787200927734374, 0.02072166442871094, 0.020757951736450196, 0.02080415916442871, 0.0207127685546875, 0.020556480407714843, 0.020782304763793946, 0.0206396484375, 0.020701152801513672, 0.02090438461303711, 0.02089369583129883, 0.020913984298706053, 0.020934431076049805, 0.020927583694458008, 0.02086479949951172, 0.020733951568603515, 0.02062745666503906, 0.020692800521850584, 0.020662752151489258, 0.020674591064453126, 0.020618240356445314, 0.020660192489624023, 0.02074684715270996, 0.020675743103027344, 0.02072643280029297, 0.020756383895874024, 0.020775327682495116, 0.020671712875366212, 0.02060163116455078, 0.020568063735961914, 0.020674560546875, 0.02059391975402832, 0.020525760650634765, 0.020611135482788086, 0.020887168884277343, 0.020908416748046874, 0.021141504287719725, 0.02076246452331543, 0.02070047950744629, 0.020812639236450194, 0.020840448379516603, 0.02145484733581543, 0.020786304473876954, 0.02106243133544922, 0.020781152725219725, 0.021020095825195314, 0.020907712936401368, 0.020628639221191406, 0.020498207092285156, 0.020443071365356447, 0.020504640579223632, 0.020522943496704103, 0.02058860778808594, 0.020605920791625976, 0.020460256576538084, 0.020559263229370118, 0.02079420852661133, 0.02072972869873047, 0.02081760025024414, 0.020756927490234375, 0.021157632827758788, 0.020894367218017577, 0.020692991256713866, 0.020768768310546876, 0.020774911880493165, 0.02087321662902832, 0.020759807586669923, 0.02053196716308594, 0.020602880477905275, 0.020754592895507813, 0.02057161521911621, 0.020451711654663085, 0.020502527236938475, 0.020393983840942383, 0.020302112579345704, 0.02041142463684082, 0.020396928787231445, 0.020442943572998046, 0.020330495834350586, 0.020291296005249024, 0.020465951919555664, 0.02029132843017578, 0.020537599563598633, 0.020158464431762696, 0.020559871673583984, 0.020547391891479493, 0.020400320053100586, 0.020862079620361327, 0.021105056762695314, 0.020568544387817383, 0.020279296875, 0.020198816299438475, 0.02015292739868164, 0.020235488891601563, 0.020111616134643556, 0.020197919845581055, 0.020364927291870116, 0.020340768814086915, 0.02018339157104492, 0.02030182456970215, 0.02012774467468262, 
0.020192928314208984, 0.020273120880126953, 0.020296064376831055, 0.020462848663330077, 0.02049510383605957, 0.020393503189086913, 0.020557535171508788, 0.020365760803222655, 0.02049295997619629, 0.020489311218261717, 0.020397727966308593, 0.02038198471069336, 0.022459007263183593, 0.021350400924682617, 0.020568000793457032, 0.020502592086791994, 0.020582399368286132, 0.02046175956726074, 0.02039520072937012, 0.02035366439819336, 0.020776735305786134, 0.020326623916625975, 0.020825279235839843, 0.020764575958251954, 0.0211190071105957, 0.02085340881347656, 0.020918655395507812, 0.021067615509033202, 0.020846336364746094, 0.02070742416381836, 0.020406431198120117, 0.020578304290771485, 0.020729600906372072, 0.02058902359008789, 0.020731231689453126, 0.020548032760620116, 0.020389535903930663, 0.020527360916137695, 0.020406400680541992, 0.020463584899902344, 0.020493696212768555, 0.020388639450073243, 0.02044198417663574, 0.02068134307861328, 0.02043734359741211, 0.02064793586730957, 0.020591840744018555, 0.020681503295898438, 0.02052662467956543, 0.020474271774291994, 0.020321855545043944, 0.020326911926269533, 0.02029529571533203, 0.020360864639282227, 0.020347616195678712, 0.020514144897460937, 0.02029225540161133, 0.02019273567199707, 0.02051535987854004, 0.020256767272949217, 0.020133888244628906, 0.020242143630981445, 0.020094655990600587, 0.02012015914916992, 0.020207616806030275, 0.020084192276000976, 0.020060096740722656, 0.020101728439331053, 0.020153375625610353, 0.020128671646118163, 0.02015558433532715, 0.020209760665893556, 0.02004047966003418, 0.020270624160766602, 0.02021833610534668, 0.021292768478393554, 0.020646175384521483, 0.02047536087036133, 0.020318496704101564, 0.020189279556274413, 0.020050432205200194, 0.020317855834960936, 0.02138492774963379, 0.021387487411499023, 0.02052716827392578, 0.020625024795532226, 0.020492895126342774, 0.020262527465820312, 0.020384159088134766, 0.020330495834350586, 0.02039151954650879, 0.02032271957397461, 0.020254720687866212, 0.020170751571655272, 0.020414432525634765, 0.02024825668334961, 0.020236095428466796, 0.02031216049194336, 0.02030745506286621, 0.020087743759155275, 0.0201474552154541, 0.020140607833862303, 0.02023852729797363, 0.02015804862976074, 0.02001251220703125, 0.0199768009185791, 0.02006051254272461, 0.02004582405090332, 0.02000486373901367, 0.02001263999938965, 0.020183456420898437, 0.020072704315185548, 0.02010905647277832, 0.020084735870361328, 0.02002908706665039, 0.01999260711669922, 0.02006662368774414, 0.020080608367919923, 0.02017616081237793, 0.020085311889648436, 0.020123199462890626, 0.020181631088256834, 0.020336063385009765, 0.020152448654174804, 0.02024822425842285, 0.02021046447753906, 0.020377824783325196, 0.020509920120239257, 0.02025734329223633, 0.020129791259765627, 0.02021785545349121, 0.020137983322143553, 0.020191167831420897, 0.020156448364257812, 0.020160287857055665, 0.021030399322509767, 0.020345600128173828, 0.020401472091674804, 0.02055423927307129, 0.020398271560668944, 0.020434335708618166, 0.020381471633911134, 0.020415136337280274, 0.020535455703735352, 0.020502527236938475, 0.020486143112182616, 0.02057756805419922, 0.02037583923339844, 0.020788608551025392, 0.02080134391784668, 0.02042665672302246, 0.02034569549560547, 0.02015001678466797, 0.02010758399963379, 0.020158464431762696, 0.020191232681274415, 0.02006768035888672, 0.02024435234069824, 0.020149023056030273, 0.020084735870361328, 0.020121152877807618, 0.020219423294067382, 0.02016659164428711, 0.020292352676391602, 
0.020127967834472658, 0.02026825523376465, 0.02040649604797363, 0.0205828800201416, 0.020317279815673828, 0.020576480865478516, 0.020337024688720704, 0.020308000564575195, 0.020321887969970705, 0.020431583404541015, 0.02038755226135254, 0.02030828857421875, 0.020273183822631834, 0.020271104812622072, 0.02021504020690918, 0.020214847564697266, 0.0201060791015625, 0.020124319076538087, 0.020074016571044923, 0.02011734390258789, 0.02012406349182129, 0.020184831619262697, 0.020085216522216797, 0.020280736923217774, 0.020137056350708008, 0.020216608047485353, 0.02020240020751953, 0.020289215087890625, 0.020254783630371094, 0.020336896896362304, 0.020633567810058595, 0.02055740737915039, 0.0205251522064209, 0.020502880096435548, 0.02056959915161133, 0.020681215286254884, 0.020753696441650392, 0.02185420799255371, 0.020554464340209962, 0.02047590446472168, 0.020377599716186523, 0.020600831985473633, 0.02077686309814453, 0.020398208618164063, 0.020363231658935547, 0.020445184707641603, 0.020274911880493164]",tokens/s,48.65696649196221,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,3809.091584,4375.642112,0.0,3997.171712,3878.257152,s,1,10.1610673828125,10.1610673828125,0.0,10.1610673828125,10.1610673828125,10.1610673828125,10.1610673828125,[10.1610673828125],,kWh,9.453367005832356e-05,1.0420346157089993e-05,3.048863550200731e-05,0.00013544265171742086,,MB,2068.639744,4862.181376,0.0,4454.350848,4371.844096,s,10,6.675686889648437,0.6675686889648438,0.001182260049307542,0.6671465148925781,0.6692501159667968,0.6694332855224608,0.6695798211669921,"[0.6663990478515625, 
0.665687744140625, 0.668204345703125, 0.6683736572265625, 0.669616455078125, 0.6692094116210937, 0.66723583984375, 0.666953125, 0.6670571899414063, 0.6669500732421875]",tokens/s,383.48113719497974,kWh,1.942514973437639e-05,2.142278729064239e-06,1.2911815885000184e-05,3.4479244348440813e-05,tokens/kWh,7424756.6858748915,MB,2082.807808,5063.507968,0.0,4655.67744,4530.328576,s,10,392.17560546875006,39.217560546875,0.04674525619346836,39.191431640625,39.287291015625,39.2940673828125,39.2994884765625,"[39.161171875, 39.1854765625, 39.26026171875, 39.28578515625, 39.30084375, 39.2348046875, 39.185828125, 39.19703515625, 39.18309375, 39.1813046875]",tokens/s,1.6064232226963457,kWh,0.0011423082741539557,0.00012600511666804526,0.0007596047187943987,0.0020279181096164,tokens/kWh,31066.343212407653,,s,630,392.1716875610349,0.6224947421603733,0.000999651843105283,0.6221973571777344,0.6240720092773437,0.6245301025390625,0.6251899475097656,"[0.6208662719726562, 0.6207017211914062, 0.6207979125976563, 0.620652587890625, 0.6211030883789063, 0.6210949096679688, 0.6213795776367188, 0.6215086059570313, 0.6209710693359375, 0.620970947265625, 0.6212689208984375, 0.6219121704101562, 0.621631103515625, 0.6217035522460937, 0.6210741577148438, 0.6216338500976563, 0.621465576171875, 0.6212767944335937, 0.6213320922851563, 0.6221156005859375, 0.621531005859375, 0.6213934326171875, 0.6221089477539062, 0.6209555053710938, 0.6218368530273437, 0.6214320068359375, 0.6214171142578125, 0.621451416015625, 0.6215410766601562, 0.6220205688476562, 0.6226742553710938, 0.6227169189453124, 0.621549560546875, 0.6219837646484375, 0.6218383178710938, 0.6212990112304687, 0.621251220703125, 0.6218731689453125, 0.6219625244140625, 0.6210096435546875, 0.6220963745117187, 0.6212274169921875, 0.6215418090820313, 0.6216337890625, 0.6213214111328125, 0.6221744384765625, 0.6212382202148438, 0.6219413452148438, 0.6216642456054687, 0.6220770263671875, 0.6223819580078125, 0.621633544921875, 0.6220308227539062, 0.6218731079101563, 0.621556884765625, 0.6221239624023438, 0.6228521118164062, 0.6217460327148437, 0.6216721801757813, 0.6218858642578124, 0.6213114013671875, 0.6218358764648437, 0.6216796264648438, 0.6216312255859375, 0.6215479125976563, 0.62104296875, 0.6209993896484375, 0.6224806518554687, 0.6214332275390625, 0.6217445068359375, 0.6213316040039063, 0.6214786376953125, 0.6217033081054687, 0.6214041748046875, 0.621117431640625, 0.62139599609375, 0.6212894897460938, 0.6211517944335937, 0.6212304077148437, 0.6213383178710937, 0.6229478149414063, 0.62127783203125, 0.6217813110351562, 0.6214799194335937, 0.621645751953125, 0.6212383422851563, 0.6216724243164062, 0.622635009765625, 0.621991943359375, 0.62181298828125, 0.6215087890625, 0.621041748046875, 0.6216663818359375, 0.622186279296875, 0.6211631469726563, 0.6220943603515625, 0.621770751953125, 0.6214201049804687, 0.6217911376953125, 0.6219002685546875, 0.6219304809570313, 0.6224404296875, 0.6215733642578125, 0.622050048828125, 0.622199951171875, 0.6227362670898438, 0.6235402221679688, 0.621139404296875, 0.6225966796875, 0.6220348510742187, 0.6215162963867188, 0.62206005859375, 0.6221367797851562, 0.6229017944335937, 0.6219788818359375, 0.6231944580078125, 0.622035400390625, 0.6224629516601563, 0.6234024047851563, 0.62279736328125, 0.62261865234375, 0.6231654663085937, 0.6234412841796875, 0.6231921997070312, 0.6231817626953124, 0.6234131469726563, 0.6228252563476563, 0.623083740234375, 0.62229296875, 0.6221814575195312, 0.6223729858398438, 0.6235595092773437, 0.6238056030273438, 
0.6229471435546875, 0.6226575317382812, 0.6221455078125, 0.6221781005859375, 0.621883056640625, 0.6221412353515625, 0.6218841552734375, 0.6221474609375, 0.6222430419921875, 0.6224573974609375, 0.6244620361328125, 0.6231836547851562, 0.6232293090820312, 0.6236917724609375, 0.623435546875, 0.6235765991210938, 0.621744873046875, 0.6233309326171875, 0.6228955078125, 0.6224219360351563, 0.6232003173828125, 0.6229783935546875, 0.6228689575195312, 0.6229219970703125, 0.62275927734375, 0.62302685546875, 0.6236508178710938, 0.6232855834960938, 0.62368017578125, 0.6235176391601562, 0.6234476928710937, 0.62329248046875, 0.6232743530273438, 0.6232473754882812, 0.6228970336914063, 0.6231710815429687, 0.6233646850585938, 0.6227291870117188, 0.623120361328125, 0.6228684692382812, 0.6239575805664063, 0.6243536987304688, 0.6232473754882812, 0.624051513671875, 0.6230247192382813, 0.6234602661132812, 0.6252095947265625, 0.6237081298828125, 0.6246498413085938, 0.6240689697265625, 0.6241747436523437, 0.6232557983398438, 0.6239718627929688, 0.6235609741210938, 0.623882568359375, 0.6232124633789062, 0.6231897583007813, 0.6230674438476562, 0.6230609741210937, 0.624166748046875, 0.6237717895507813, 0.6243942260742188, 0.6245888061523438, 0.625301513671875, 0.6247157592773438, 0.6230240478515625, 0.625141845703125, 0.6228869018554688, 0.6232366943359375, 0.62269873046875, 0.6231851806640625, 0.6220228271484375, 0.622017333984375, 0.6219386596679688, 0.6229503784179687, 0.6238351440429688, 0.62455810546875, 0.62429736328125, 0.6225447387695312, 0.623547119140625, 0.6232158203125, 0.6232850341796875, 0.6230805053710937, 0.6226461791992187, 0.62251171875, 0.62471533203125, 0.6247340698242188, 0.6254151000976562, 0.623763427734375, 0.6232801513671875, 0.62427685546875, 0.6225496826171875, 0.6223196411132812, 0.6223701171875, 0.6233524780273437, 0.6226534423828125, 0.6228392944335938, 0.6217301635742187, 0.62282568359375, 0.6233374633789063, 0.6248344116210938, 0.624234619140625, 0.6246707153320312, 0.624437255859375, 0.6261248168945313, 0.624300048828125, 0.6247915649414062, 0.6247709350585937, 0.623346923828125, 0.62327490234375, 0.6231838989257813, 0.6232733764648437, 0.624319091796875, 0.623203369140625, 0.6235957641601563, 0.6232378540039063, 0.6230384521484374, 0.6235934448242187, 0.62410546875, 0.624552734375, 0.622970703125, 0.6232434692382812, 0.6235320434570313, 0.6216826171875, 0.6229218139648437, 0.6228759765625, 0.622852783203125, 0.6244249267578125, 0.6225879516601562, 0.6232387084960938, 0.6226347045898437, 0.622448974609375, 0.6230695190429687, 0.6228805541992187, 0.62324755859375, 0.6231512451171874, 0.6230609741210937, 0.623164794921875, 0.6216956176757813, 0.6223012084960937, 0.623686767578125, 0.6242169189453125, 0.624099365234375, 0.623435791015625, 0.6225490112304688, 0.6222274780273438, 0.6235869140625, 0.623337890625, 0.6240316772460938, 0.6234563598632813, 0.62336962890625, 0.6241322021484375, 0.6243677978515625, 0.6238907470703124, 0.6246154174804688, 0.62384130859375, 0.6250864868164062, 0.6254507446289063, 0.6248143920898438, 0.6251089477539062, 0.6250797119140625, 0.6246277465820312, 0.6248864135742187, 0.6241300659179687, 0.6247028198242187, 0.62443994140625, 0.62450244140625, 0.6245584106445312, 0.6252871704101562, 0.6252933349609375, 0.6248130493164062, 0.6243065185546876, 0.6248900756835938, 0.6244520263671876, 0.624332275390625, 0.624459716796875, 0.6245643920898437, 0.6243065185546876, 0.6236464233398438, 0.6242077026367188, 0.624173583984375, 0.622940185546875, 0.6232531127929688, 
0.623549072265625, 0.6243225708007812, 0.6241682739257812, 0.623501708984375, 0.6244286499023437, 0.6234568481445313, 0.6228500366210937, 0.62369189453125, 0.622315185546875, 0.6235504760742188, 0.6238941650390625, 0.622547607421875, 0.6224956665039062, 0.6225897216796875, 0.6231893920898437, 0.6223553466796875, 0.6224359130859375, 0.6219324951171875, 0.6216434326171875, 0.6220637817382813, 0.622115478515625, 0.621849609375, 0.6217793579101563, 0.6230022583007813, 0.622055419921875, 0.6230033569335938, 0.6232311401367188, 0.6235029296875, 0.6232695922851562, 0.6238994750976562, 0.6235864868164063, 0.6242169189453125, 0.62344384765625, 0.62461962890625, 0.6235402221679688, 0.6238945922851562, 0.6230337524414062, 0.6217464599609375, 0.6219135131835938, 0.6227423706054688, 0.6214451293945312, 0.6217722778320313, 0.6224737548828125, 0.6223883666992187, 0.6219784545898438, 0.6218731689453125, 0.6218629150390625, 0.6218421020507813, 0.6229135131835938, 0.6222031860351562, 0.6221757202148438, 0.6219984130859375, 0.6235321044921875, 0.6232987670898438, 0.62225, 0.6229113159179688, 0.6220924682617187, 0.6214716186523438, 0.6222839965820313, 0.6220870971679687, 0.622716552734375, 0.6221619262695313, 0.62134326171875, 0.6216268920898438, 0.62130224609375, 0.6210475463867188, 0.621955322265625, 0.6211614990234375, 0.6220830688476563, 0.6220595092773438, 0.62343359375, 0.62227880859375, 0.6221425170898438, 0.6217224731445312, 0.6216438598632813, 0.6211522827148438, 0.62152294921875, 0.6214039916992188, 0.62175146484375, 0.6217625732421875, 0.6218528442382812, 0.6212329711914063, 0.6213324584960938, 0.6222576904296875, 0.6224789428710937, 0.6215234985351562, 0.6221717529296875, 0.6216543579101562, 0.6217588500976563, 0.62298876953125, 0.621654541015625, 0.6225194091796875, 0.621765380859375, 0.6218440551757812, 0.6219963989257813, 0.621807861328125, 0.6218424072265625, 0.621813232421875, 0.6216232299804687, 0.6219638671875, 0.6222356567382813, 0.6216294555664063, 0.622065673828125, 0.6220779418945312, 0.6213734130859375, 0.621981689453125, 0.6218458862304688, 0.6219042358398438, 0.6228216552734375, 0.62259814453125, 0.622703857421875, 0.6227461547851563, 0.6227069702148438, 0.6219407348632813, 0.6222471313476563, 0.6224263305664063, 0.6222280883789062, 0.6221859130859375, 0.6219306640625, 0.62283349609375, 0.621927001953125, 0.6228889770507813, 0.6222005615234375, 0.6224120483398438, 0.6220709228515625, 0.62156201171875, 0.6224703979492188, 0.6221766357421875, 0.622147216796875, 0.6220294799804688, 0.6219849243164063, 0.6213472900390625, 0.6218675537109375, 0.6220467529296875, 0.6216909790039062, 0.6217915649414063, 0.6212394409179688, 0.62157080078125, 0.6216124877929687, 0.6215706176757813, 0.62171923828125, 0.6212115478515625, 0.6219024047851562, 0.6219141235351563, 0.6221842041015625, 0.6226741333007813, 0.6221475219726562, 0.62283984375, 0.6219939575195312, 0.621854736328125, 0.6222540893554688, 0.621770263671875, 0.6217395629882813, 0.6221505737304688, 0.6216661987304688, 0.6218499145507812, 0.6217726440429687, 0.6218966674804688, 0.622606201171875, 0.6219019775390625, 0.6216640625, 0.6225096435546875, 0.622348876953125, 0.6223441772460937, 0.62247119140625, 0.622202880859375, 0.6227476196289062, 0.6222908935546875, 0.6219326171875, 0.6220206298828125, 0.6229647216796875, 0.6225469360351562, 0.62230322265625, 0.6233429565429688, 0.6229756469726563, 0.62355859375, 0.6229109497070312, 0.6216934814453124, 0.62272265625, 0.622436767578125, 0.6221782836914063, 0.621923828125, 0.6228751831054687, 
0.6234577026367187, 0.6220641479492187, 0.6225791015625, 0.6223378295898437, 0.6220719604492188, 0.6216929321289062, 0.6215291137695312, 0.6213193359375, 0.62173681640625, 0.62162255859375, 0.621828857421875, 0.6211678466796875, 0.621646240234375, 0.6214249877929687, 0.6218397827148437, 0.621078857421875, 0.621701171875, 0.6215374755859375, 0.6221947631835938, 0.6217788696289063, 0.6222756958007812, 0.6222693481445313, 0.6219939575195312, 0.6229188232421875, 0.6223552856445312, 0.6213916625976562, 0.6219757690429687, 0.6217498168945312, 0.6219063720703125, 0.6214553833007812, 0.6221741943359375, 0.6214533081054687, 0.6217052001953125, 0.621414306640625, 0.621573486328125, 0.6216115112304688, 0.6217853393554688, 0.623085205078125, 0.6220038452148438, 0.6223834228515625, 0.622569091796875, 0.6228565673828125, 0.6226044311523438, 0.6217865600585938, 0.622486328125, 0.6222801513671875, 0.6225289306640625, 0.6220162963867187, 0.6221029052734375, 0.6229155883789063, 0.62178271484375, 0.6214864501953125, 0.6217922973632812, 0.6217655639648437, 0.6219755249023438, 0.6215924072265625, 0.6230631103515625, 0.6217892456054688, 0.6223544311523438, 0.62274755859375, 0.6217227783203125, 0.621640625, 0.6221250610351563, 0.6218157958984375, 0.621573486328125, 0.6218594360351563, 0.6215670166015625, 0.6223014526367188, 0.6217172241210938, 0.6220453491210938, 0.6212354736328125, 0.621681396484375, 0.621613037109375, 0.6219480590820312, 0.6224996948242187, 0.6217942504882813, 0.6217050170898437, 0.6220220336914063, 0.6217483520507813, 0.6217632446289062, 0.6217789306640625, 0.6213734130859375, 0.6209495239257813, 0.6216151123046875, 0.6211727294921875, 0.6215557250976562, 0.6225057373046875, 0.6211812133789063, 0.6214102783203125, 0.6219301147460937, 0.6214495849609375, 0.6215593872070313, 0.6216724243164062, 0.6226577758789062, 0.6225548706054688, 0.6222279663085938, 0.6219957885742188, 0.6224058837890625, 0.6217394409179687, 0.6216561889648438, 0.6219207153320313, 0.6218521118164062, 0.6216215209960938, 0.621865234375, 0.621643798828125, 0.6220712890625, 0.6220040283203125, 0.6222424926757812, 0.6220428466796875, 0.6224735107421875, 0.6216294555664063, 0.622002197265625, 0.6223499145507813, 0.6226825561523438, 0.6219522705078125, 0.622004150390625, 0.6220866088867187, 0.6224613647460937, 0.621686767578125, 0.6215733642578125, 0.6223180541992187, 0.6216582641601562, 0.6230263061523438, 0.6226472778320312, 0.6219386596679688, 0.6220728759765625, 0.622066650390625, 0.62205517578125, 0.6220638427734375, 0.6219235229492187, 0.62180224609375]",tokens/s,1.6064392713253954,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File 
""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,8214.26176,11251.089408,0.0,10848.567296,10616.027648,s,1,14.6757431640625,14.6757431640625,0.0,14.6757431640625,14.6757431640625,14.6757431640625,14.6757431640625,[14.6757431640625],,kWh,0.0002211354205666661,2.438576929504324e-05,6.723922045799927e-05,0.0003127604103197086,,MB,3991.093248,11683.10272,0.0,11265.900544,11070.470656,s,10,3.806494598388672,0.38064945983886717,0.0010613886177521343,0.38066357421874997,0.3817038726806641,0.381945915222168,0.3821395492553711,"[0.378303466796875, 0.3804141540527344, 0.37995913696289063, 0.38218795776367187, 0.37997836303710936, 0.3803094482421875, 0.3811336669921875, 0.38165008544921875, 0.3816453247070313, 0.3809129943847656]",tokens/s,672.5347780826155,kWh,1.1155931145679019e-05,1.2303034757801454e-06,7.3734421127407835e-06,1.975967673419995e-05,tokens/kWh,12955677.536814986,MB,3995.324416,11685.199872,0.0,11267.997696,11070.473216,s,10,29.121740234375,2.9121740234375,0.006231435576901955,2.911367431640625,2.918307275390625,2.9211138427734378,2.923359096679688,"[2.91715771484375, 2.92392041015625, 2.916811767578125, 2.904305908203125, 2.907447509765625, 2.909410400390625, 2.90612353515625, 2.905554931640625, 2.913324462890625, 2.91768359375]",tokens/s,21.633322560042426,kWh,8.509224432765374e-05,9.385862275273755e-06,5.658166460685896e-05,0.0001510597712097864,tokens/kWh,417053.45834602026,,s,630,29.118463600158663,0.04621978349231538,0.000731681437829562,0.04609744071960449,0.0466427547454834,0.047004865455627445,0.04981674510955813,"[0.048306495666503906, 0.04608147048950195, 0.0490912971496582, 0.045331806182861326, 0.045789825439453126, 0.045467681884765625, 0.04558428955078125, 0.045733985900878904, 0.04556595230102539, 0.045686016082763674, 0.04591494369506836, 0.046354366302490235, 0.046356159210205077, 0.04577926254272461, 0.04579103851318359, 0.04587558364868164, 0.04597724914550781, 0.0456091194152832, 0.04601036834716797, 0.04616527938842773, 0.04610326385498047, 0.046677566528320315, 0.04650166320800781, 0.04614825439453125, 0.045612640380859375, 0.04608041763305664, 0.045862911224365234, 0.04588544082641602, 0.045817089080810544, 0.046007038116455075, 0.04601388931274414, 0.04580524826049805, 0.04653366470336914, 0.046349342346191404, 0.0459989128112793, 0.04895334243774414, 0.04558415985107422, 0.04602041625976563, 0.045728160858154294, 0.04588297653198242, 0.04630684661865234, 0.046302238464355466, 0.04599123382568359, 0.04659868621826172, 0.04621468734741211, 0.046137825012207034, 0.04668323135375976, 0.046492095947265624, 0.045862945556640625, 0.04608659362792969, 0.04638719940185547, 0.04628275299072265, 0.046241790771484374, 0.046682113647460936, 0.04680851364135742, 0.04630176162719726, 0.04629913711547851, 0.04667145538330078, 0.0465882568359375, 0.046204990386962894, 0.046450687408447267, 0.046669822692871094, 0.05049139022827148, 0.048349342346191405, 0.04609820938110352, 0.045762367248535156, 0.04571142578125, 0.04586140823364258, 0.045623294830322264, 0.04595507049560547, 0.04582175827026367, 0.04560915374755859, 0.045623294830322264, 0.045766494750976563, 0.04618387222290039, 0.04646371078491211, 0.050405376434326174, 0.04571305465698242, 0.045650272369384765, 0.04584640121459961, 0.04629884719848633, 0.0464469108581543, 0.045866401672363284, 0.04615033721923828, 0.04657561492919922, 0.04627865600585938, 0.045674495697021485, 0.04614892959594727, 0.04599059295654297, 0.04607385635375977, 0.04598374557495117, 0.04605708694458008, 0.04607849502563476, 
0.04591756820678711, 0.046163585662841795, 0.046801761627197264, 0.04599193572998047, 0.045932544708251956, 0.046342144012451174, 0.04874409484863281, 0.045867454528808596, 0.04657756805419922, 0.046343360900878906, 0.04600444793701172, 0.04631539154052734, 0.04673199844360352, 0.04895948791503906, 0.0462088623046875, 0.046471328735351564, 0.04598556900024414, 0.04625430297851563, 0.04649311828613281, 0.046426689147949216, 0.04612505722045898, 0.04619247817993164, 0.0463361587524414, 0.046104576110839846, 0.04633625411987305, 0.046626209259033206, 0.04642985534667969, 0.04635923385620117, 0.05069311904907227, 0.0459048957824707, 0.04639744186401367, 0.04669440078735351, 0.04674361419677735, 0.0478521614074707, 0.046235649108886716, 0.045776416778564456, 0.04552758407592773, 0.04540169525146484, 0.04568105697631836, 0.04584454345703125, 0.04567801666259766, 0.045660350799560545, 0.05007795333862305, 0.04538364791870117, 0.04607798385620117, 0.04576665496826172, 0.045714847564697264, 0.04570377731323242, 0.045897247314453125, 0.04602518463134766, 0.04589567947387695, 0.045852352142333984, 0.04621343994140625, 0.04632985687255859, 0.046020606994628906, 0.04637465667724609, 0.04627072143554688, 0.045864479064941406, 0.045858432769775394, 0.04615248107910156, 0.04585603332519531, 0.04593743896484375, 0.04620675277709961, 0.04600048065185547, 0.04800614547729492, 0.04620377731323242, 0.04617216110229492, 0.045765697479248045, 0.0459826545715332, 0.046150848388671874, 0.04615865707397461, 0.046059326171875, 0.046123199462890625, 0.04664115142822266, 0.046344287872314455, 0.04620483016967773, 0.04659404754638672, 0.046292991638183595, 0.04602812957763672, 0.04613391876220703, 0.04877721786499024, 0.04580352020263672, 0.04605952072143555, 0.046376705169677734, 0.04634368133544922, 0.04580188751220703, 0.04620640182495117, 0.050380897521972653, 0.045757247924804685, 0.04640873718261719, 0.04659471893310547, 0.04600428771972656, 0.04631167984008789, 0.0466770248413086, 0.046502208709716795, 0.046418590545654295, 0.048699966430664064, 0.046046497344970704, 0.045543296813964844, 0.04567859268188477, 0.04542345428466797, 0.04560076904296875, 0.045520896911621096, 0.04588748931884765, 0.04561651229858398, 0.045705856323242186, 0.045795326232910154, 0.0456888313293457, 0.04560076904296875, 0.04570873641967774, 0.046252609252929684, 0.046047233581542966, 0.046071807861328126, 0.04572979354858398, 0.04574323272705078, 0.04581600189208984, 0.04604179382324219, 0.04627775955200195, 0.04622608184814453, 0.04593686294555664, 0.045762367248535156, 0.045762847900390625, 0.045899486541748045, 0.04567782211303711, 0.04581228637695312, 0.04610483169555664, 0.04573516845703125, 0.0458474235534668, 0.045991134643554685, 0.045945343017578126, 0.04609667205810547, 0.04644406509399414, 0.046631393432617185, 0.0463092155456543, 0.046207134246826174, 0.0462371826171875, 0.04627711868286133, 0.04621052932739258, 0.04654092788696289, 0.04641628646850586, 0.04618035125732422, 0.04613516616821289, 0.046569534301757816, 0.04629305648803711, 0.045848575592041016, 0.046020606994628906, 0.04635193634033203, 0.04602627182006836, 0.04595804977416992, 0.04611072158813476, 0.046309375762939455, 0.04612300872802735, 0.04656742477416992, 0.04684799957275391, 0.046467071533203126, 0.046255359649658205, 0.04670745468139648, 0.0464989128112793, 0.04613961410522461, 0.04817919921875, 0.046137344360351565, 0.045578369140625, 0.045260608673095705, 0.045588321685791015, 0.045887649536132814, 0.045889854431152344, 0.04582380676269531, 
0.04554336166381836, 0.04584447860717773, 0.04566016006469727, 0.04575360107421875, 0.04586572647094726, 0.04596905517578125, 0.046174560546875, 0.04583385467529297, 0.046171680450439456, 0.045814624786376955, 0.045706558227539065, 0.04598444747924805, 0.04621871948242187, 0.046085758209228514, 0.045835041046142576, 0.045879425048828124, 0.046186496734619144, 0.045871105194091794, 0.04599398422241211, 0.046015777587890626, 0.046117599487304685, 0.04593388748168945, 0.04607984161376953, 0.04601737594604492, 0.045895454406738284, 0.04607305526733398, 0.04627276611328125, 0.04605414581298828, 0.04592230224609375, 0.046415870666503906, 0.04617830276489258, 0.04596556854248047, 0.04608383941650391, 0.04655292892456055, 0.04793561553955078, 0.04597948837280273, 0.04624303817749023, 0.04700675201416016, 0.045950912475585935, 0.04628070449829102, 0.04629094314575195, 0.045959232330322265, 0.04628009414672852, 0.04644713592529297, 0.04609024047851563, 0.04597747039794922, 0.046319198608398435, 0.046407806396484376, 0.04608451080322266, 0.04647932815551758, 0.04685609436035156, 0.04662694549560547, 0.04635446548461914, 0.04687459182739258, 0.04638105773925781, 0.04757833480834961, 0.04757379150390625, 0.04547174453735352, 0.045352222442626954, 0.0463326416015625, 0.04560076904296875, 0.04566835021972656, 0.045649921417236325, 0.04601974487304687, 0.04592438507080078, 0.045935009002685545, 0.045706817626953125, 0.04557295989990234, 0.04582928085327148, 0.04592521667480469, 0.04552908706665039, 0.045739551544189454, 0.04603094482421875, 0.04598604965209961, 0.04573388671875, 0.04621932983398438, 0.04622687911987305, 0.04586969757080078, 0.04583161544799805, 0.04600070571899414, 0.045780990600585936, 0.04570236968994141, 0.04637916946411133, 0.046283393859863284, 0.046241790771484374, 0.046458881378173826, 0.046180320739746095, 0.04591360092163086, 0.04580611038208008, 0.046226753234863284, 0.046262977600097656, 0.04597555160522461, 0.04613129425048828, 0.04659526443481445, 0.04610086441040039, 0.04581814575195312, 0.04651193618774414, 0.046316993713378905, 0.04599388885498047, 0.046202014923095704, 0.04651391983032226, 0.04630527877807617, 0.04593824005126953, 0.04670064163208008, 0.04640937423706055, 0.04625888061523437, 0.04811494445800781, 0.04641971206665039, 0.046158241271972655, 0.04612156677246094, 0.046526561737060546, 0.046282657623291014, 0.046069759368896485, 0.04678860855102539, 0.0468388786315918, 0.04646572875976562, 0.046434528350830076, 0.04655513763427734, 0.047927745819091795, 0.04596121597290039, 0.04549836730957031, 0.0454389762878418, 0.04530995178222656, 0.04559360122680664, 0.04581071853637695, 0.0455761604309082, 0.045460990905761715, 0.04580812835693359, 0.04600831985473633, 0.04595264053344727, 0.045566337585449215, 0.04564352035522461, 0.04604524612426758, 0.04614524841308594, 0.04611529541015625, 0.046045185089111325, 0.04570707321166992, 0.04562963104248047, 0.0460307502746582, 0.046507614135742184, 0.046004737854003906, 0.045770206451416016, 0.04588934326171875, 0.046176990509033206, 0.04583203125, 0.04598799896240234, 0.04597760009765625, 0.04592435073852539, 0.04611072158813476, 0.045963264465332034, 0.04609395217895508, 0.04587353515625, 0.046015968322753904, 0.04641836929321289, 0.04613452911376953, 0.04597356796264648, 0.04648220825195312, 0.04640563201904297, 0.04670627212524414, 0.04624835205078125, 0.04641715240478515, 0.04622835159301758, 0.04614467239379883, 0.04656579208374023, 0.04630963134765625, 0.04595308685302734, 0.045956478118896485, 0.04656771087646484, 
0.046284961700439456, 0.04628412628173828, 0.046483585357666016, 0.04639398574829102, 0.04603862380981445, 0.04630176162719726, 0.046354366302490235, 0.046147167205810545, 0.04628499221801758, 0.04663113784790039, 0.04645916748046875, 0.04668182373046875, 0.047568225860595705, 0.04812019348144531, 0.04596038436889648, 0.04576339340209961, 0.04548812866210938, 0.045385726928710936, 0.04557164764404297, 0.045916576385498044, 0.045501983642578125, 0.04526131057739258, 0.04566220855712891, 0.046427295684814456, 0.04619760131835938, 0.045600990295410156, 0.045639678955078124, 0.04592617416381836, 0.04574617767333984, 0.045897632598876956, 0.046418014526367186, 0.04600940704345703, 0.046029247283935544, 0.04609913635253906, 0.04643616104125976, 0.046157855987548825, 0.04570265579223633, 0.04593420791625977, 0.04606447982788086, 0.045795326232910154, 0.0458158073425293, 0.04628591918945312, 0.046121406555175784, 0.04571583938598633, 0.0461640625, 0.04648905563354492, 0.046125598907470707, 0.04601036834716797, 0.04641996765136719, 0.04597555160522461, 0.04584777450561523, 0.04634860610961914, 0.04613488006591797, 0.046008865356445314, 0.0461308479309082, 0.046550846099853514, 0.04641827011108399, 0.04611126327514648, 0.04618979263305664, 0.046107425689697265, 0.04597555160522461, 0.046202880859375, 0.04656899261474609, 0.04627264022827148, 0.04612745666503906, 0.046548545837402346, 0.046354110717773435, 0.04608287811279297, 0.046010303497314456, 0.04660745620727539, 0.04643299102783203, 0.04639148712158203, 0.046630912780761716, 0.04645632171630859, 0.04626278305053711, 0.04665718460083008, 0.04813619232177734, 0.045727680206298825, 0.04564096069335938, 0.045577022552490236, 0.04644659042358398, 0.04549647903442383, 0.04537737655639648, 0.0458054084777832, 0.04569718551635742, 0.04581171035766601, 0.04595097732543945, 0.04590387344360351, 0.045604480743408206, 0.0458202896118164, 0.04585827255249023, 0.04558492660522461, 0.04580684661865234, 0.04607257461547851, 0.04593414306640625, 0.045945121765136716, 0.04648771286010742, 0.046534366607666015, 0.04615737533569336, 0.04561993789672852, 0.045848224639892576, 0.04600457763671875, 0.0458076171875, 0.04576265716552735, 0.04592819213867187, 0.046100639343261716, 0.049219585418701174, 0.04568880081176758, 0.045994014739990235, 0.04589158248901367, 0.04576870346069336, 0.045991744995117184, 0.04622560119628906, 0.046170112609863284, 0.04630876922607422, 0.046295486450195315, 0.046104736328125, 0.046205951690673826, 0.046564319610595706, 0.046384449005126956, 0.04591705703735351, 0.046145374298095704, 0.04618415832519531, 0.045953311920166016, 0.0459931526184082, 0.04650476837158203, 0.04641513442993164, 0.04607664108276367, 0.04696092987060547, 0.047002559661865236, 0.0529846076965332, 0.04500848007202148, 0.04603126525878906, 0.046343841552734376, 0.04637958526611328, 0.046392608642578125, 0.046207489013671874, 0.04663296127319336, 0.04665958404541016, 0.04810163116455078, 0.04579030227661133, 0.045601696014404294, 0.0477957763671875, 0.04543024063110351, 0.04544960021972656, 0.04565033721923828, 0.045792991638183594, 0.04554927825927734, 0.045589248657226564, 0.045846431732177735, 0.045944862365722657, 0.04559881591796875, 0.0455577278137207, 0.04561920166015625, 0.045725696563720705, 0.046188480377197264, 0.04613347244262695, 0.04641062545776367, 0.045962207794189455, 0.04636671829223633, 0.04627260971069336, 0.04633795166015625, 0.04600201416015625, 0.04582166290283203, 0.04593094253540039, 0.049423553466796874, 0.04548691177368164, 
0.046096446990966794, 0.045931903839111325, 0.045711936950683596, 0.04606284713745117, 0.046381824493408205, 0.045868385314941404, 0.04576675033569336, 0.045948638916015624, 0.046107486724853514, 0.045914112091064455, 0.04639920043945313, 0.046506145477294925, 0.04638937759399414, 0.046004318237304685, 0.046497695922851565, 0.04753606414794922, 0.04642601776123047, 0.0466596794128418, 0.04673926544189453, 0.0461212158203125, 0.049977344512939455, 0.04592758560180664, 0.04597436904907227, 0.04649075317382813, 0.046700736999511716, 0.046432960510253904, 0.04630313491821289, 0.0464851188659668, 0.046746078491210936, 0.046413951873779294, 0.046607776641845705, 0.0468070068359375, 0.04645119857788086, 0.04671500778198242, 0.04684598541259766]",tokens/s,21.63575690842997,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 2.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 2.12 MiB is free. Process 57735 has 14.74 GiB memory in use. Of the allocated memory 14.26 GiB is allocated by PyTorch, and 386.46 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,853.331968,556.72832,0.0,178.25792,176.52224,s,1,7.511060546875,7.511060546875,0.0,7.511060546875,7.511060546875,7.511060546875,7.511060546875,[7.511060546875],,kWh,2.013667340417366e-05,2.214166881086207e-06,6.45611627600462e-06,2.8806956561264485e-05,,MB,1176.674304,669.974528,0.0,262.144,221.118976,s,10,0.2373123188018799,0.02373123188018799,7.72173514862464e-05,0.023694639205932615,0.023846604537963865,0.023847382164001465,0.023848004264831545,"[0.023846431732177733, 0.023769792556762696, 0.023693119049072266, 0.02362953567504883, 0.023674528121948243, 0.023655296325683594, 0.023848159790039063, 0.02381488037109375, 0.023684415817260742, 0.023696159362792967]",tokens/s,10787.472023891078,kWh,6.970796484486802e-07,7.687053776317147e-08,4.0972453886394427e-07,1.183674725075796e-06,tokens/kWh,216275632.63514575,MB,1211.006976,684.654592,0.0,276.824064,221.271552,s,10,13.6217958984375,1.3621795898437499,0.005329277517955597,1.3617802124023437,1.3692102783203126,1.3709354614257812,1.3723156079101562,"[1.3575599365234374, 1.3600166015625, 1.3583939208984375, 1.368826904296875, 1.3543009033203126, 1.37266064453125, 1.3644005126953125, 1.3580308837890624, 1.364061767578125, 
1.3635438232421875]",tokens/s,46.24940827899681,kWh,3.945427498654954e-05,4.3513940986203914e-06,2.053856999753807e-05,6.4344239082708e-05,tokens/kWh,979108.6334709761,,s,630,13.616925313949594,0.02161416716499934,0.00044554585507595816,0.0215066556930542,0.021869215393066407,0.0222325496673584,0.023323972034454347,"[0.021236480712890624, 0.021513376235961914, 0.021559551239013673, 0.021807424545288084, 0.021418655395507812, 0.021406335830688475, 0.021258880615234375, 0.021897600173950194, 0.021334016799926758, 0.02127462387084961, 0.021506048202514647, 0.021445632934570313, 0.021560319900512694, 0.021444896697998046, 0.021406431198120118, 0.021496511459350585, 0.021620447158813477, 0.02152262306213379, 0.021377023696899415, 0.021294591903686523, 0.02125686454772949, 0.021400192260742187, 0.021376031875610352, 0.021254976272583007, 0.021309215545654295, 0.02146099281311035, 0.02182143974304199, 0.021465087890625, 0.02154105567932129, 0.02138297653198242, 0.0215285758972168, 0.021423744201660155, 0.021451135635375977, 0.021352767944335938, 0.021373823165893556, 0.0214653434753418, 0.021315488815307617, 0.021363168716430663, 0.021469600677490236, 0.024030271530151366, 0.0217542724609375, 0.021818784713745116, 0.021697439193725587, 0.021587167739868164, 0.02165635108947754, 0.021979455947875978, 0.021725215911865235, 0.02145987129211426, 0.02147439956665039, 0.021526176452636717, 0.022349855422973634, 0.021488832473754882, 0.021376895904541015, 0.02139638328552246, 0.02154911994934082, 0.02137696075439453, 0.021604352951049805, 0.0214769287109375, 0.021598175048828126, 0.02154745674133301, 0.021344287872314453, 0.021366783142089844, 0.021515615463256837, 0.02103910446166992, 0.021428096771240236, 0.021523616790771485, 0.02137392044067383, 0.021559551239013673, 0.02133580780029297, 0.021301248550415038, 0.021358591079711914, 0.021294240951538087, 0.021968000411987303, 0.02149087905883789, 0.02148828887939453, 0.021571424484252928, 0.021542783737182616, 0.021405759811401366, 0.021499839782714844, 0.02141404724121094, 0.021464672088623047, 0.022016416549682616, 0.021377023696899415, 0.021331968307495116, 0.02183782386779785, 0.021456159591674805, 0.021559616088867188, 0.021505983352661132, 0.021764575958251955, 0.021630975723266603, 0.02229574394226074, 0.02196361541748047, 0.02179836845397949, 0.021625343322753905, 0.021481472015380858, 0.022567264556884764, 0.021362079620361327, 0.021455488204956054, 0.021685888290405273, 0.02166783905029297, 0.0214334716796875, 0.02133046340942383, 0.021445247650146486, 0.021576704025268553, 0.021385951995849608, 0.02145075225830078, 0.02139468765258789, 0.021360992431640625, 0.02156716728210449, 0.021318368911743164, 0.021366304397583007, 0.02145280075073242, 0.021376703262329103, 0.021602815628051757, 0.021485855102539062, 0.0213090877532959, 0.0218767032623291, 0.02401113510131836, 0.021763200759887694, 0.02183660888671875, 0.02154435157775879, 0.02144937515258789, 0.021519519805908202, 0.021488447189331055, 0.021364063262939454, 0.02138591957092285, 0.021095584869384766, 0.02143539237976074, 0.021462879180908202, 0.02152681541442871, 0.021454111099243164, 0.021505823135375978, 0.021439456939697267, 0.021528255462646483, 0.0215284481048584, 0.021602399826049806, 0.02222457695007324, 0.021737695693969727, 0.02164339256286621, 0.021497856140136717, 0.02161782455444336, 0.022481760025024413, 0.021597471237182617, 0.021271263122558594, 0.02153267288208008, 0.02141798400878906, 0.021300928115844726, 0.021309440612792968, 0.02148080062866211, 
0.02167087936401367, 0.02307891273498535, 0.023346271514892578, 0.021427104949951172, 0.021526527404785157, 0.02138966369628906, 0.021448352813720702, 0.021378175735473633, 0.021322175979614257, 0.021395904541015625, 0.021366336822509765, 0.021342655181884766, 0.021215232849121093, 0.021369888305664064, 0.02134934425354004, 0.021639232635498048, 0.021739200592041017, 0.02140166473388672, 0.021434560775756836, 0.021565216064453125, 0.02146892738342285, 0.02161712074279785, 0.02147020721435547, 0.021308095932006835, 0.02142585563659668, 0.021338367462158205, 0.021242176055908203, 0.021329631805419923, 0.02126268768310547, 0.02137049674987793, 0.021432479858398436, 0.02135001564025879, 0.02145955276489258, 0.021856607437133788, 0.02143824005126953, 0.02148543930053711, 0.021528863906860353, 0.021448768615722657, 0.02254198455810547, 0.021444608688354492, 0.021233823776245116, 0.02158470344543457, 0.02138092803955078, 0.021612543106079102, 0.021438432693481446, 0.021710847854614256, 0.02139548873901367, 0.02126233673095703, 0.021428224563598632, 0.021377023696899415, 0.021393312454223632, 0.02146633529663086, 0.021441408157348633, 0.021370880126953123, 0.022338655471801756, 0.02152332878112793, 0.021620927810668947, 0.021360479354858398, 0.021301248550415038, 0.021393407821655275, 0.021346303939819337, 0.021821760177612306, 0.021613279342651368, 0.021425119400024416, 0.02141798400878906, 0.021368831634521485, 0.021389312744140625, 0.021393184661865235, 0.021822975158691405, 0.028106592178344728, 0.023689855575561525, 0.02150614356994629, 0.021493919372558595, 0.02158415985107422, 0.021788543701171875, 0.021432159423828125, 0.02142972755432129, 0.021537151336669922, 0.02193833541870117, 0.021755903244018555, 0.021513856887817383, 0.02152662467956543, 0.02160691261291504, 0.022642656326293944, 0.02157548713684082, 0.02149996757507324, 0.021469120025634766, 0.021436416625976562, 0.0213602237701416, 0.021483936309814454, 0.02154003143310547, 0.021692480087280273, 0.022926080703735353, 0.021868383407592774, 0.021767936706542968, 0.021552799224853515, 0.02137785530090332, 0.021379072189331053, 0.021987552642822265, 0.02208745574951172, 0.021632160186767578, 0.02144540786743164, 0.021427871704101563, 0.02114784049987793, 0.021381919860839843, 0.021385215759277345, 0.021882688522338867, 0.021602495193481445, 0.021585376739501953, 0.02145510482788086, 0.02143052864074707, 0.021573663711547852, 0.02146892738342285, 0.021491552352905275, 0.021446815490722658, 0.021419296264648436, 0.021679296493530273, 0.021521663665771483, 0.021440927505493163, 0.02150003242492676, 0.02126790428161621, 0.02143289566040039, 0.021368928909301758, 0.02165670394897461, 0.021641151428222656, 0.02160111999511719, 0.02149190330505371, 0.021603488922119142, 0.021748384475708007, 0.022570112228393554, 0.021510208129882812, 0.02151299285888672, 0.021689952850341795, 0.02138912010192871, 0.021396095275878907, 0.021436256408691408, 0.02161680030822754, 0.021418176651000976, 0.02135379219055176, 0.021361152648925782, 0.02136787223815918, 0.021292255401611327, 0.02136649513244629, 0.021348512649536133, 0.021396928787231446, 0.021399423599243163, 0.02142892837524414, 0.021361759185791016, 0.02150716781616211, 0.02141276741027832, 0.021352575302124022, 0.021504447937011718, 0.021362016677856446, 0.021740352630615235, 0.02172480010986328, 0.021442592620849608, 0.021463424682617187, 0.02154412841796875, 0.021480255126953125, 0.021506048202514647, 0.02141798400878906, 0.021415935516357423, 0.021483295440673827, 0.021291231155395506, 
0.021380447387695314, 0.021363391876220703, 0.021203104019165038, 0.021699424743652343, 0.021726112365722656, 0.022511199951171876, 0.021791231155395507, 0.021799999237060545, 0.021648319244384765, 0.021608448028564452, 0.022517759323120116, 0.02173734474182129, 0.02153913688659668, 0.021912895202636718, 0.02168502426147461, 0.021475040435791015, 0.021719039916992186, 0.02185215950012207, 0.021931615829467774, 0.021713312149047852, 0.02166374397277832, 0.021796863555908205, 0.021571584701538086, 0.021753856658935547, 0.021683584213256835, 0.021701248168945312, 0.021776384353637695, 0.02178665542602539, 0.021830751419067384, 0.02181193542480469, 0.021757120132446288, 0.0218239688873291, 0.021856767654418945, 0.0216760311126709, 0.021751808166503905, 0.022245376586914063, 0.021845760345458983, 0.021792831420898436, 0.021808544158935548, 0.021952384948730468, 0.021726112365722656, 0.021707839965820312, 0.021689504623413087, 0.02170217514038086, 0.02162099266052246, 0.021502208709716798, 0.021669408798217774, 0.021523040771484377, 0.021489280700683594, 0.021467424392700194, 0.02176585578918457, 0.021926048278808594, 0.02217763137817383, 0.02186412811279297, 0.021649696350097655, 0.021668224334716796, 0.022607519149780275, 0.021911104202270507, 0.021637920379638673, 0.021941919326782227, 0.021501920700073243, 0.02174569511413574, 0.021964319229125978, 0.022018207550048827, 0.02172934341430664, 0.021215103149414063, 0.021654848098754884, 0.021450944900512695, 0.021463680267333984, 0.021476831436157227, 0.021384832382202148, 0.021523584365844728, 0.021538591384887694, 0.021530176162719728, 0.021807552337646485, 0.02147942352294922, 0.021550592422485353, 0.021504608154296875, 0.021501663208007813, 0.021645824432373048, 0.021761375427246092, 0.022051359176635744, 0.021648256301879883, 0.021705663681030274, 0.021592063903808592, 0.02143779182434082, 0.021566112518310546, 0.021395776748657228, 0.023150527954101562, 0.021544704437255858, 0.021659872055053712, 0.02168832015991211, 0.02164486312866211, 0.021528255462646483, 0.021572128295898437, 0.021745920181274414, 0.021972736358642577, 0.021752864837646484, 0.02178761672973633, 0.021981472015380858, 0.021750848770141603, 0.022692512512207032, 0.021782527923583983, 0.021788288116455078, 0.02184185600280762, 0.02171129608154297, 0.02167807960510254, 0.02164735984802246, 0.021727455139160155, 0.021724832534790038, 0.02165977668762207, 0.02154300880432129, 0.02133907127380371, 0.0214619197845459, 0.021438207626342774, 0.02154521560668945, 0.02156675148010254, 0.021545759201049806, 0.022007871627807617, 0.02141779136657715, 0.0214836483001709, 0.021448703765869142, 0.021372991561889647, 0.021419904708862306, 0.021506111145019532, 0.021420095443725588, 0.02153392028808594, 0.02195884895324707, 0.021135200500488283, 0.02152448081970215, 0.021420032501220702, 0.021526432037353514, 0.021352319717407225, 0.021341888427734376, 0.02133660888671875, 0.021372095108032226, 0.02150275230407715, 0.021373151779174804, 0.02140550422668457, 0.021366783142089844, 0.021950464248657226, 0.021575679779052736, 0.02169241523742676, 0.02153171157836914, 0.02156844711303711, 0.02156915283203125, 0.02149603271484375, 0.022657344818115235, 0.021507904052734374, 0.021608320236206055, 0.021773632049560548, 0.021376895904541015, 0.02155404853820801, 0.021467231750488282, 0.02162892723083496, 0.021494943618774413, 0.02142207908630371, 0.02140598487854004, 0.021397600173950194, 0.021281343460083008, 0.021360864639282228, 0.02146886444091797, 0.021516223907470704, 0.02133852767944336, 
0.021405471801757812, 0.021343488693237305, 0.02146588706970215, 0.021483360290527345, 0.02125388717651367, 0.021575935363769533, 0.021437503814697265, 0.021586143493652343, 0.021332704544067382, 0.02195043182373047, 0.02157904052734375, 0.021408096313476562, 0.02149043273925781, 0.021531679153442382, 0.021385568618774414, 0.021508544921875, 0.021430112838745116, 0.02132371139526367, 0.023760351181030273, 0.022306880950927734, 0.02149190330505371, 0.02148387145996094, 0.02141321563720703, 0.02173766326904297, 0.021580255508422852, 0.021537952423095703, 0.021437280654907225, 0.02126233673095703, 0.022544384002685547, 0.02185625648498535, 0.021630975723266603, 0.021977088928222657, 0.021675519943237305, 0.021795232772827147, 0.021745792388916017, 0.0215817928314209, 0.021440511703491212, 0.021544223785400392, 0.0216375675201416, 0.02151775932312012, 0.0219451847076416, 0.021585119247436522, 0.021475263595581055, 0.021744543075561524, 0.02140889549255371, 0.021465919494628907, 0.02141916847229004, 0.02146828842163086, 0.02176358413696289, 0.021487775802612304, 0.02141539192199707, 0.021592863082885744, 0.02155939292907715, 0.0215629768371582, 0.021763872146606446, 0.021606752395629883, 0.021607744216918946, 0.0216746883392334, 0.021718175888061523, 0.02150079917907715, 0.021529823303222655, 0.021543872833251952, 0.0215163516998291, 0.021417728424072267, 0.021360767364501952, 0.021480960845947264, 0.02174835205078125, 0.021801887512207033, 0.02326937675476074, 0.02180624008178711, 0.021788511276245117, 0.021608287811279298, 0.021486719131469725, 0.0214619197845459, 0.02257302474975586, 0.02166169548034668, 0.021725343704223632, 0.021858144760131835, 0.021579519271850586, 0.021688255310058593, 0.02163030433654785, 0.021555583953857423, 0.02142064094543457, 0.021446016311645506, 0.021441375732421875, 0.02157779121398926, 0.02131318473815918, 0.021421344757080078, 0.021473440170288086, 0.021369216918945312, 0.02115167999267578, 0.021372480392456053, 0.0214021110534668, 0.024878175735473632, 0.022911903381347656, 0.022460735321044922, 0.02148748779296875, 0.021491519927978514, 0.02141766357421875, 0.02157756805419922, 0.022239072799682617, 0.021631616592407227, 0.021651456832885742, 0.02146895980834961, 0.021473056793212892, 0.02152284812927246, 0.021473087310791016, 0.0215568962097168, 0.021619264602661132, 0.02139571189880371, 0.021346303939819337, 0.021520063400268553, 0.021483232498168945, 0.02184409523010254, 0.021637088775634767, 0.021454912185668945, 0.02143657684326172, 0.0214998722076416, 0.02136604881286621, 0.02233622360229492, 0.021698688507080077, 0.02163443183898926, 0.022107967376708983, 0.02165017509460449, 0.021835775375366212, 0.021809152603149414, 0.02177129554748535, 0.021502944946289064, 0.0214400634765625, 0.02154854393005371, 0.021502431869506837, 0.021457376480102538, 0.021403167724609377, 0.021461471557617188, 0.021397504806518555, 0.021450239181518553, 0.021422880172729492, 0.021454559326171876, 0.021368831634521485, 0.021394464492797853, 0.021459936141967773, 0.02132809638977051, 0.021341344833374024, 0.02138175964355469, 0.021444896697998046, 0.02145587158203125, 0.021709440231323242, 0.021944063186645508, 0.02167030334472656, 0.021497695922851563, 0.021485183715820314, 0.02149737548828125, 0.021431520462036134]",tokens/s,46.2659510480394,, 
4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1472.057344,1326.383104,0.0,947.912704,945.250304,s,1,8.178099609375,8.178099609375,0.0,8.178099609375,8.178099609375,8.178099609375,8.178099609375,[8.178099609375],,kWh,4.011619602085451e-05,4.417830343113296e-06,1.32619550540225e-05,5.7795981417990304e-05,,MB,1331.990528,1519.321088,0.0,1111.49056,1098.82368,s,10,1.6119050292968748,0.16119050292968748,0.0005511915126697848,0.16121893310546875,0.16174463500976563,0.16185521240234377,0.16194367431640624,"[0.1598438720703125, 0.16109616088867187, 0.16131968688964843, 0.16081129455566406, 0.1617200622558594, 0.161114013671875, 0.16135888671875, 0.16111817932128905, 0.16196578979492188, 0.16155708312988282]",tokens/s,1588.1828975474389,kWh,4.707219859811589e-06,5.191210867039741e-07,3.1204326038710095e-06,8.346773550386572e-06,tokens/kWh,30670533.764288314,MB,1345.765376,1653.538816,0.0,1245.708288,1164.242432,s,10,89.03407812500001,8.903407812500001,0.010136610228676235,8.90321484375,8.9131953125,8.9178447265625,8.9215642578125,"[8.887240234375, 8.8903544921875, 8.8948603515625, 8.9013642578125, 8.90372265625, 8.90270703125, 8.9090263671875, 8.910146484375, 8.912162109375, 8.922494140625]",tokens/s,7.075942305097012,kWh,0.000260164826486438,2.8697575803232348e-05,0.0001730542361135283,0.00046191663840319856,tokens/kWh,136388.24576180012,,s,630,89.03100794982909,0.14131906023782398,0.0002831820943250541,0.14131715393066407,0.14165845794677734,0.1417887367248535,0.14212909545898436,"[0.14145948791503907, 0.14063404846191407, 0.14118077087402345, 0.14077389526367187, 
0.14100070190429687, 0.14090165710449218, 0.1408253173828125, 0.1409248046875, 0.1407427215576172, 0.1410826873779297, 0.14081024169921874, 0.14104165649414063, 0.14084259033203125, 0.14099270629882812, 0.14092218017578126, 0.14089222717285158, 0.1408658905029297, 0.1405506591796875, 0.14191206359863281, 0.14092825317382812, 0.14114483642578124, 0.1407093505859375, 0.14122364807128907, 0.1407639617919922, 0.1410618896484375, 0.14123388671875, 0.14120109558105468, 0.14125914001464843, 0.14094154357910157, 0.14102940368652345, 0.14082275390625, 0.14112265014648437, 0.1410016326904297, 0.14109405517578125, 0.14125535583496093, 0.14095747375488282, 0.14095747375488282, 0.14070387268066406, 0.14129808044433595, 0.141316162109375, 0.14115635681152344, 0.14103961181640626, 0.14105599975585936, 0.14100070190429687, 0.1411903076171875, 0.14090675354003906, 0.14128182983398438, 0.1409761962890625, 0.1414606475830078, 0.14112582397460938, 0.14143350219726564, 0.14087484741210937, 0.1413939208984375, 0.14115933227539063, 0.14100405883789063, 0.14126153564453126, 0.14099430847167968, 0.14152114868164062, 0.14120072937011718, 0.14130181884765625, 0.1408948211669922, 0.14096989440917967, 0.14135836791992187, 0.14088572692871093, 0.1407283172607422, 0.140797607421875, 0.14076358032226563, 0.14064128112792967, 0.14102015686035158, 0.14123622131347657, 0.14100889587402343, 0.14116864013671876, 0.14103347778320313, 0.14112733459472657, 0.140813720703125, 0.14087420654296876, 0.14126124572753906, 0.1410738525390625, 0.1411774444580078, 0.14085894775390626, 0.14095199584960938, 0.14083482360839844, 0.14101298522949218, 0.1409863739013672, 0.1407665557861328, 0.1413351287841797, 0.14102125549316405, 0.14103961181640626, 0.1418887939453125, 0.14091746520996093, 0.14107565307617187, 0.14160263061523437, 0.1410345001220703, 0.14137957763671874, 0.14093927001953124, 0.14087577819824218, 0.14114163208007813, 0.14073663330078126, 0.14112159729003906, 0.14095936584472657, 0.141291259765625, 0.1413738250732422, 0.1412646026611328, 0.14135165405273437, 0.14123622131347657, 0.1409944610595703, 0.14084719848632812, 0.14152703857421875, 0.14121778869628906, 0.14128742980957032, 0.14146697998046875, 0.14108303833007813, 0.14127714538574218, 0.14101123046875, 0.14129766845703126, 0.14123826599121095, 0.1412954559326172, 0.1415836181640625, 0.14099539184570312, 0.14113095092773437, 0.14120643615722656, 0.1411658935546875, 0.14150930786132812, 0.14132838439941406, 0.14111279296875, 0.14087632751464843, 0.14100889587402343, 0.14088534545898437, 0.140742431640625, 0.14105279541015625, 0.1409410858154297, 0.14057455444335937, 0.140972412109375, 0.1408675842285156, 0.14127529907226563, 0.140922119140625, 0.14108230590820312, 0.14098728942871094, 0.14095881652832032, 0.14126991271972655, 0.14094070434570313, 0.1412122497558594, 0.14120140075683593, 0.14098335266113282, 0.14122285461425782, 0.14086537170410157, 0.14103363037109376, 0.14096588134765625, 0.14114405822753906, 0.1411051483154297, 0.14105804443359374, 0.14094950866699218, 0.14117478942871095, 0.14093516540527343, 0.14135910034179688, 0.14086697387695313, 0.14140617370605468, 0.1413658905029297, 0.14137753295898436, 0.14078073120117188, 0.14133740234375, 0.14125430297851563, 0.14134716796875, 0.1413137969970703, 0.14140786743164063, 0.1412918701171875, 0.14121603393554688, 0.14110086059570312, 0.14103570556640624, 0.14148403930664064, 0.14160896301269532, 0.14127923583984375, 0.1415755157470703, 0.14136131286621093, 0.14128134155273436, 0.14130630493164062, 
0.1414983673095703, 0.14163885498046874, 0.14148236083984375, 0.14129930114746095, 0.14142684936523436, 0.14081878662109376, 0.14149375915527343, 0.14150743103027344, 0.14132838439941406, 0.14153114318847657, 0.1415167999267578, 0.1410723876953125, 0.1412458953857422, 0.1413120574951172, 0.14130975341796875, 0.14153952026367186, 0.14124761962890625, 0.14087033081054687, 0.14102537536621093, 0.14111549377441407, 0.1412437744140625, 0.1412491455078125, 0.14141798400878905, 0.14094140625, 0.14138575744628906, 0.14074844360351563, 0.14118576049804688, 0.141338623046875, 0.14120889282226562, 0.14171820068359375, 0.14092652893066407, 0.14119923400878906, 0.14101356506347656, 0.14121165466308594, 0.14130995178222655, 0.1415244140625, 0.14148637390136717, 0.14099017333984376, 0.14118771362304688, 0.14125254821777344, 0.1413441925048828, 0.1413570556640625, 0.14167507934570311, 0.1414082489013672, 0.14083686828613282, 0.14121888732910157, 0.14102330017089842, 0.14108761596679686, 0.14108876037597656, 0.14177676391601562, 0.14139610290527344, 0.14163558959960937, 0.14149221801757814, 0.1413080596923828, 0.14131350708007812, 0.14142604064941405, 0.14151373291015626, 0.1414710693359375, 0.14130447387695313, 0.14119322204589843, 0.1412434539794922, 0.14107084655761717, 0.1411844787597656, 0.14153555297851564, 0.1413782043457031, 0.14137344360351561, 0.14123008728027345, 0.1413570556640625, 0.14129560852050782, 0.14134585571289063, 0.14143994140625, 0.14125260925292968, 0.14129766845703126, 0.14135267639160157, 0.14133071899414062, 0.14153523254394532, 0.14170620727539063, 0.14111888122558594, 0.14121014404296875, 0.1411152648925781, 0.1412181091308594, 0.14105996704101562, 0.1413324737548828, 0.14158029174804687, 0.14123213195800782, 0.14176255798339843, 0.14107034301757812, 0.14107571411132813, 0.1408577880859375, 0.14136761474609374, 0.14135501098632813, 0.14120652770996095, 0.1410795593261719, 0.14096588134765625, 0.1412196807861328, 0.14103388977050782, 0.1415078125, 0.14150096130371093, 0.14145330810546874, 0.1414593963623047, 0.14109458923339843, 0.14106866455078124, 0.141127685546875, 0.14121075439453126, 0.14146444702148436, 0.1415925750732422, 0.14147523498535156, 0.14118563842773438, 0.14137554931640625, 0.14125459289550782, 0.14156390380859374, 0.14137957763671874, 0.14138163757324218, 0.14120057678222656, 0.14118380737304687, 0.14109286499023438, 0.14136026000976562, 0.14123858642578124, 0.14148051452636717, 0.14127923583984375, 0.14153318786621094, 0.1418035125732422, 0.14136729431152345, 0.14140594482421875, 0.14169523620605468, 0.14147715759277343, 0.14160684204101562, 0.14127781677246093, 0.14146546936035156, 0.14147772216796875, 0.14130221557617187, 0.1414368896484375, 0.141285400390625, 0.1415166778564453, 0.1412359619140625, 0.14129600524902344, 0.1411788787841797, 0.14131365966796874, 0.14122227478027344, 0.14144685363769532, 0.14057533264160157, 0.14106214904785155, 0.14102313232421876, 0.1410397186279297, 0.14097389221191406, 0.14116064453125, 0.14121778869628906, 0.14116249084472657, 0.14119241333007812, 0.14129014587402344, 0.1412691192626953, 0.140943359375, 0.14146131896972655, 0.14128076171875, 0.14126150512695312, 0.1416697540283203, 0.14129817199707032, 0.14105625915527345, 0.1409814453125, 0.14132704162597656, 0.14131814575195312, 0.1410846710205078, 0.141053955078125, 0.1413017578125, 0.14129273986816407, 0.14124281311035156, 0.1413492431640625, 0.1412147216796875, 0.1410463104248047, 0.14133091735839845, 0.1413119354248047, 0.14121171569824217, 0.1411112976074219, 
0.1413668212890625, 0.14136679077148437, 0.1416386260986328, 0.14119635009765624, 0.14147471618652344, 0.14152006530761718, 0.1413456268310547, 0.14147357177734374, 0.1412395782470703, 0.14143174743652343, 0.14132342529296876, 0.14156787109375, 0.14134127807617186, 0.1413492431640625, 0.14145481872558593, 0.1411212158203125, 0.14166717529296874, 0.14157005310058593, 0.1414956817626953, 0.14128192138671875, 0.14131382751464844, 0.14174435424804688, 0.1415755157470703, 0.14143760681152343, 0.1418260498046875, 0.14139744567871093, 0.14164784240722655, 0.14133670043945312, 0.14132392883300782, 0.14165811157226563, 0.1410662384033203, 0.14105923461914063, 0.14149449157714844, 0.14112422180175782, 0.1410908203125, 0.1413035888671875, 0.1414105224609375, 0.1412912902832031, 0.14114633178710936, 0.14122979736328126, 0.14147132873535156, 0.1413987274169922, 0.14137548828125, 0.14150860595703124, 0.14153446960449217, 0.14137831115722657, 0.1412833251953125, 0.14143487548828124, 0.14144717407226562, 0.14104701232910155, 0.14170806884765624, 0.14126255798339843, 0.14171165466308594, 0.14156185913085936, 0.14114521789550782, 0.14112448120117188, 0.14133978271484374, 0.14135952758789064, 0.14126332092285157, 0.14150186157226563, 0.14135562133789062, 0.14144102478027343, 0.1411604461669922, 0.14132838439941406, 0.14138983154296875, 0.14135090637207032, 0.14146258544921875, 0.14141445922851562, 0.14108546447753906, 0.1413159942626953, 0.14144432067871093, 0.1418162841796875, 0.14188800048828126, 0.14148512268066407, 0.1415213165283203, 0.14119290161132814, 0.14145826721191407, 0.14137139892578124, 0.14140573120117186, 0.14166883850097656, 0.14143487548828124, 0.14152432250976563, 0.1412425994873047, 0.14132269287109375, 0.14122189331054688, 0.14141644287109376, 0.1420226593017578, 0.14211891174316407, 0.1415925750732422, 0.1414307861328125, 0.14179122924804688, 0.14129379272460937, 0.1421332550048828, 0.14138819885253906, 0.14116876220703126, 0.141025146484375, 0.14109849548339845, 0.1408599090576172, 0.14175628662109374, 0.1413796844482422, 0.14152093505859376, 0.14128034973144532, 0.14126908874511718, 0.14096263122558594, 0.14094744873046874, 0.14207589721679686, 0.14147366333007813, 0.14132850646972656, 0.14128073120117188, 0.14121533203125, 0.1412303009033203, 0.1411808624267578, 0.14171420288085937, 0.14133042907714843, 0.14161305236816407, 0.14135699462890625, 0.14132640075683595, 0.14123826599121095, 0.1412095947265625, 0.141486083984375, 0.14173954772949218, 0.14187362670898437, 0.1414819793701172, 0.141366943359375, 0.14103538513183594, 0.14115213012695313, 0.14150306701660156, 0.14143600463867187, 0.14144195556640626, 0.14122393798828126, 0.1413507537841797, 0.14107049560546875, 0.14114405822753906, 0.14135910034179688, 0.14179737854003907, 0.14163555908203124, 0.14138348388671876, 0.1414575653076172, 0.14106629943847657, 0.14138291931152344, 0.14153919982910157, 0.1421853485107422, 0.14193827819824217, 0.14152691650390625, 0.1418429718017578, 0.1414737548828125, 0.1414469451904297, 0.1414263000488281, 0.14154960632324218, 0.14178569030761717, 0.141623291015625, 0.14146322631835936, 0.1412360382080078, 0.14148031616210938, 0.14157632446289062, 0.1414430694580078, 0.14132147216796875, 0.14133938598632811, 0.14099046325683592, 0.14133453369140625, 0.14104371643066407, 0.14164787292480469, 0.1412768249511719, 0.14141270446777343, 0.14125033569335937, 0.141218017578125, 0.14125465393066405, 0.14102117919921875, 0.14165811157226563, 0.14155775451660157, 0.14152499389648437, 0.14134271240234375, 
0.1411399688720703, 0.1412689971923828, 0.14137139892578124, 0.14142892456054687, 0.141627197265625, 0.14138914489746093, 0.14128195190429688, 0.14127308654785156, 0.14122393798828126, 0.1413570556640625, 0.14147517395019532, 0.14134701538085936, 0.141489990234375, 0.14188188171386718, 0.14152716064453125, 0.14145535278320312, 0.1416165771484375, 0.141664794921875, 0.141548828125, 0.14149913024902344, 0.14151589965820313, 0.14165020751953125, 0.14187171936035156, 0.14137344360351561, 0.14165402221679688, 0.14144102478027343, 0.14138163757324218, 0.14151174926757812, 0.14158944702148438, 0.14166426086425782, 0.14127513122558594, 0.14161882019042968, 0.1414470672607422, 0.1414599304199219, 0.14133859252929687, 0.14159858703613282, 0.14153334045410157, 0.1415147247314453, 0.1416859588623047, 0.14156402587890626, 0.14190870666503907, 0.14160281372070313, 0.1415535430908203, 0.1415230712890625, 0.14143283081054686, 0.14166157531738283, 0.14140249633789062, 0.14101913452148437, 0.1411907501220703, 0.14109432983398437, 0.14126588439941407, 0.1412947540283203, 0.14145417785644532, 0.1417216033935547, 0.14114154052734376, 0.14110357666015624, 0.14177894592285156, 0.14144102478027343, 0.14146969604492188, 0.14183815002441405, 0.14160914611816405, 0.14141439819335938, 0.14153932189941407, 0.14139596557617187, 0.1414713592529297, 0.14152128601074218, 0.1418354949951172, 0.14162818908691407, 0.14148812866210939, 0.14158642578125, 0.14165577697753906, 0.1413962860107422, 0.14158026123046874, 0.14171884155273437, 0.1416956787109375, 0.14155775451660157, 0.14166835021972657, 0.14216192626953125, 0.14160415649414063, 0.14148268127441407, 0.14198080444335937, 0.14197616577148436, 0.14152668762207032, 0.14170994567871092, 0.14188934326171876, 0.14164601135253907, 0.14136524963378908, 0.1416458282470703, 0.1414998779296875, 0.14226486206054687, 0.14176856994628906, 0.1414739227294922, 0.14199349975585937, 0.1415172119140625, 0.14173802185058593, 0.14159181213378907, 0.14214810180664061, 0.14181800842285155, 0.141580322265625, 0.14169097900390626, 0.14160850524902344, 0.14166819763183594, 0.14131056213378906, 0.14222950744628907, 0.1419325408935547, 0.14162124633789064, 0.14177484130859375, 0.1416510467529297, 0.14232669067382814]",tokens/s,7.076186314267256,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 98.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 96.12 MiB is free. Process 151715 has 14.64 GiB memory in use. Of the allocated memory 14.24 GiB is allocated by PyTorch, and 312.03 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 
1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1284, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1079, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 816, in forward hidden_states = self.mlp(hidden_states) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 734, in forward final_hidden_states.index_add_(0, top_x, current_hidden_states.to(hidden_states.dtype)) RuntimeError: CUDA error: invalid configuration argument CUDA kernel errors might be asynchronously reported at some other API call, so the stacktrace below might be incorrect. For debugging consider passing CUDA_LAUNCH_BLOCKING=1 Compile with `TORCH_USE_CUDA_DSA` to enable device-side assertions. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1583.255552,1596.915712,0.0,1201.668096,1189.151232,s,1,8.466310546875,8.466310546875,0.0,8.466310546875,8.466310546875,8.466310546875,8.466310546875,[8.466310546875],,kWh,3.5574402879161464e-05,3.916985400328343e-06,1.096223099199961e-05,5.045361927148942e-05,,MB,1582.055424,1791.950848,0.0,1382.023168,1351.367168,s,10,0.47527958679199217,0.04752795867919922,0.0001405060291031286,0.0475042724609375,0.047698503112792966,0.04776477127075195,0.04781778579711914,"[0.04783103942871094, 0.047349502563476566, 0.0474101448059082, 0.04753100967407226, 0.047494945526123045, 0.04768377685546875, 0.04737731170654297, 0.04761840057373047, 0.047513599395751956, 0.04746985626220703]",tokens/s,5386.303285776069,kWh,1.4258651037194828e-06,1.5724679265124117e-07,9.432636272390402e-07,2.5263755236097645e-06,tokens/kWh,101330937.38741547,MB,1582.055424,1833.893888,0.0,1423.966208,1407.328256,s,10,13.680080322265626,1.3680080322265626,0.005092286023816497,1.3692230224609374,1.372664990234375,1.3746703247070313,1.3762745922851563,"[1.3663924560546874, 1.3722193603515624, 1.36963427734375, 1.3708880615234376, 1.3766756591796876, 1.3609461669921874, 1.3623831787109375, 1.368811767578125, 1.360443603515625, 1.371685791015625]",tokens/s,46.05236118201845,kWh,3.959619600836351e-05,4.367061092489081e-06,1.8069751583160582e-05,6.203300868401318e-05,tokens/kWh,1015588.3349284658,,s,630,13.677172939300538,0.02170979831635006,0.0003257925203109579,0.021650959968566894,0.02201076183319092,0.022220206356048582,0.023109855136871343,"[0.02183737564086914, 0.021971391677856444, 0.021774335861206053, 0.02169036865234375, 0.021478784561157226, 0.021575775146484375, 0.021658143997192382, 0.021591840744018556, 0.02197711944580078, 0.021448287963867187, 0.021748319625854492, 0.02164531135559082, 0.021510143280029297, 0.02131155204772949, 0.021342144012451172, 0.021348352432250976, 0.021510143280029297, 0.0214466552734375, 0.021825536727905274, 0.02328985595703125, 0.023052288055419923, 0.021985279083251954, 0.021737472534179687, 0.02159395217895508, 0.02135820770263672, 0.021760543823242186, 0.022798336029052735, 0.022199455261230468, 0.022010656356811525, 0.02200720024108887, 0.021852672576904295, 0.021866655349731444, 0.021757759094238282, 0.021776575088500977, 0.021420032501220702, 0.02141548728942871, 0.02150239944458008, 0.021568864822387696, 0.02147190475463867, 0.021398944854736326, 0.02151059150695801, 0.02261724853515625, 0.02175187110900879, 0.021832639694213868, 0.021522144317626953, 0.021604639053344726, 0.021640447616577147, 0.021680192947387697, 0.02156819152832031, 0.021274368286132814, 0.02143401527404785, 0.021611103057861326, 0.021487071990966798, 0.0213319034576416, 0.021312095642089843, 0.02142790412902832, 
0.02147929573059082, 0.021328128814697266, 0.02131577682495117, 0.021321727752685548, 0.02128451156616211, 0.02140176010131836, 0.021917888641357422, 0.021655744552612304, 0.021626495361328126, 0.021563840866088868, 0.021434560775756836, 0.021370880126953123, 0.021405696868896484, 0.02141983985900879, 0.02174790382385254, 0.0214355525970459, 0.021416799545288086, 0.021501951217651367, 0.021476959228515623, 0.021436832427978517, 0.02153267288208008, 0.021452159881591798, 0.021387168884277344, 0.021381824493408204, 0.02141391944885254, 0.02147123146057129, 0.021594112396240234, 0.021640224456787108, 0.02175484848022461, 0.022253440856933593, 0.021506175994873047, 0.021579776763916016, 0.021671648025512694, 0.021647552490234374, 0.021651264190673827, 0.021673919677734375, 0.0216661434173584, 0.02167398452758789, 0.021784576416015625, 0.022504543304443358, 0.021859296798706053, 0.021761247634887695, 0.021699296951293946, 0.02184419250488281, 0.021999391555786132, 0.022024032592773437, 0.021909151077270508, 0.021924352645874022, 0.021831520080566408, 0.021915775299072265, 0.022086719512939453, 0.021868928909301758, 0.02190300750732422, 0.022864831924438476, 0.022024032592773437, 0.02205302429199219, 0.022237184524536133, 0.023139839172363282, 0.022139392852783202, 0.021809152603149414, 0.022282047271728514, 0.021975231170654298, 0.021981184005737304, 0.021725183486938478, 0.02167171287536621, 0.021833951950073243, 0.021721088409423828, 0.02184601593017578, 0.021690208435058592, 0.021622112274169922, 0.02175987243652344, 0.02165235137939453, 0.0216944637298584, 0.02162483215332031, 0.02165065574645996, 0.021822240829467772, 0.021741567611694337, 0.021651456832885742, 0.023347200393676756, 0.021571584701538086, 0.021628511428833007, 0.021868959426879882, 0.021728511810302734, 0.021762815475463868, 0.02168422317504883, 0.021634176254272462, 0.021855104446411134, 0.022321151733398437, 0.021559295654296876, 0.02191257667541504, 0.021799936294555664, 0.02167398452758789, 0.02168217658996582, 0.021618112564086914, 0.02167046356201172, 0.021621856689453125, 0.021588895797729494, 0.021708799362182618, 0.021550304412841798, 0.021771039962768555, 0.021788095474243162, 0.021668415069580078, 0.02167807960510254, 0.021939487457275392, 0.02173411178588867, 0.021538816452026367, 0.021575935363769533, 0.021631776809692384, 0.021573663711547852, 0.021658208847045897, 0.021582176208496093, 0.021749759674072267, 0.02162483215332031, 0.021790304183959962, 0.021707168579101564, 0.021611743927001954, 0.02174236869812012, 0.021544960021972655, 0.021747711181640626, 0.022042303085327147, 0.021905279159545897, 0.02189561653137207, 0.021768192291259765, 0.021710399627685548, 0.021640928268432617, 0.02163580894470215, 0.021563392639160156, 0.02168966484069824, 0.02205504035949707, 0.021524927139282227, 0.02162905693054199, 0.021596160888671875, 0.02168329620361328, 0.0215947208404541, 0.02175708770751953, 0.022016864776611328, 0.02207257652282715, 0.02200150489807129, 0.021978015899658202, 0.02183782386779785, 0.02178438377380371, 0.0217007999420166, 0.021542367935180665, 0.021638879776000975, 0.02160054397583008, 0.02154550361633301, 0.021573631286621094, 0.02167807960510254, 0.0220214729309082, 0.021695135116577148, 0.023735424041748047, 0.022834047317504883, 0.02168217658996582, 0.02164531135559082, 0.021594207763671876, 0.02172435188293457, 0.02173411178588867, 0.021724735260009766, 0.021698144912719725, 0.02151100730895996, 0.02159811210632324, 0.02150409507751465, 0.02153593635559082, 0.021566080093383788, 
0.021452735900878907, 0.021502208709716798, 0.021549055099487305, 0.02160963249206543, 0.02166012763977051, 0.02234998321533203, 0.021653728485107424, 0.021590015411376954, 0.02162073516845703, 0.021624704360961915, 0.021708927154541015, 0.021655040740966795, 0.021715456008911133, 0.02171494483947754, 0.021648479461669923, 0.021617151260375975, 0.021598623275756835, 0.0216759033203125, 0.021869983673095703, 0.021912288665771485, 0.022030336380004883, 0.021542144775390626, 0.021825920104980467, 0.0216231689453125, 0.021612543106079102, 0.02159324836730957, 0.021603168487548827, 0.02166713523864746, 0.02168288040161133, 0.021784191131591798, 0.02188502311706543, 0.021725471496582032, 0.021899328231811524, 0.021958976745605468, 0.02193315124511719, 0.021725439071655275, 0.02178895950317383, 0.021696575164794923, 0.021952512741088868, 0.02390630340576172, 0.02294528007507324, 0.021631296157836915, 0.021466880798339843, 0.0219880313873291, 0.023131904602050782, 0.022017248153686525, 0.021768991470336913, 0.021746719360351562, 0.021492704391479493, 0.021491519927978514, 0.021562944412231444, 0.02178835105895996, 0.021662656784057616, 0.021710336685180662, 0.021647872924804686, 0.021536767959594725, 0.021513599395751953, 0.02174835205078125, 0.021776384353637695, 0.021596160888671875, 0.021501951217651367, 0.021493759155273438, 0.021559072494506837, 0.02167830467224121, 0.023006271362304688, 0.0216114559173584, 0.021579776763916016, 0.02164531135559082, 0.021702079772949218, 0.02160915184020996, 0.021518207550048827, 0.021615711212158203, 0.021676959991455077, 0.022882303237915038, 0.02233100891113281, 0.023055871963500976, 0.02203926467895508, 0.021883039474487304, 0.021764095306396485, 0.021585472106933595, 0.021685888290405273, 0.02164579200744629, 0.02167788887023926, 0.021867040634155274, 0.02187468719482422, 0.02191564750671387, 0.021695903778076172, 0.02154147148132324, 0.021574783325195312, 0.021640064239501954, 0.02166374397277832, 0.02166713523864746, 0.021545663833618164, 0.02189107131958008, 0.021618688583374023, 0.02166476821899414, 0.021589599609375, 0.021747968673706056, 0.021606559753417968, 0.021809152603149414, 0.02157948875427246, 0.021686368942260743, 0.021678272247314452, 0.02155628776550293, 0.021498815536499023, 0.021338111877441408, 0.02132905578613281, 0.021635936737060546, 0.02145894432067871, 0.021622783660888673, 0.021510143280029297, 0.021548383712768553, 0.021942943572998048, 0.021370880126953123, 0.021404991149902342, 0.021426111221313476, 0.021405887603759766, 0.02141241645812988, 0.021458303451538086, 0.021371519088745117, 0.02144576072692871, 0.021429119110107423, 0.02146713638305664, 0.02132294464111328, 0.021500736236572265, 0.021407743453979493, 0.021335136413574218, 0.021429119110107423, 0.0213668155670166, 0.021663455963134765, 0.021549215316772462, 0.021553279876708985, 0.021579776763916016, 0.021440256118774415, 0.021451007843017577, 0.021552255630493164, 0.021453695297241213, 0.021604352951049805, 0.02163711929321289, 0.021700607299804688, 0.021626880645751953, 0.021825536727905274, 0.022101503372192383, 0.021578239440917968, 0.021725183486938478, 0.02179465675354004, 0.02183945655822754, 0.02163974380493164, 0.02174950408935547, 0.021835647583007812, 0.02169385528564453, 0.021611488342285157, 0.021695903778076172, 0.021874847412109374, 0.022085567474365235, 0.021901023864746093, 0.02167478370666504, 0.021771488189697267, 0.02161664009094238, 0.021573631286621094, 0.02161430358886719, 0.02176790428161621, 0.021520959854125978, 0.02138105583190918, 
0.021440576553344727, 0.021454368591308594, 0.02150217628479004, 0.02161484718322754, 0.021370655059814454, 0.021493247985839844, 0.021654239654541017, 0.021753856658935547, 0.02176630401611328, 0.02172297668457031, 0.02175584030151367, 0.02172115135192871, 0.021647327423095702, 0.021540735244750978, 0.021481632232666015, 0.021372928619384765, 0.02138057518005371, 0.02145948791503906, 0.021432319641113282, 0.021549055099487305, 0.021442047119140627, 0.021565792083740234, 0.021588031768798827, 0.021586015701293947, 0.021493759155273438, 0.02145020866394043, 0.021469024658203124, 0.02144326400756836, 0.02151219177246094, 0.021563392639160156, 0.02175116729736328, 0.021555360794067384, 0.021531103134155273, 0.021475040435791015, 0.021522432327270507, 0.021545248031616213, 0.021413888931274414, 0.021366432189941408, 0.021383520126342773, 0.02181065559387207, 0.02151683235168457, 0.021609760284423827, 0.021719263076782225, 0.021817440032958983, 0.022796703338623048, 0.021710687637329102, 0.02185641670227051, 0.021753856658935547, 0.02185420799255371, 0.021934080123901366, 0.021602304458618164, 0.02185625648498535, 0.021464256286621092, 0.02264147186279297, 0.02157513618469238, 0.021856128692626955, 0.02152284812927246, 0.021712928771972655, 0.021458623886108398, 0.0218338565826416, 0.021415456771850586, 0.02130179214477539, 0.021204416275024413, 0.021631872177124024, 0.0216944637298584, 0.02166579246520996, 0.022117759704589842, 0.02160089683532715, 0.021530624389648437, 0.021489664077758788, 0.021395456314086913, 0.02144256019592285, 0.021325824737548828, 0.021542911529541017, 0.021366336822509765, 0.021359039306640626, 0.02146303939819336, 0.021362688064575194, 0.021569536209106444, 0.0216494083404541, 0.021800960540771484, 0.02346931266784668, 0.02169523239135742, 0.022111360549926757, 0.0217445125579834, 0.02170457649230957, 0.021819520950317382, 0.022346912384033205, 0.022270816802978516, 0.021993471145629884, 0.022122047424316407, 0.02201171112060547, 0.023003776550292968, 0.02169856071472168, 0.02210383987426758, 0.021919776916503906, 0.021866016387939453, 0.021930784225463868, 0.021731199264526366, 0.021714080810546876, 0.02170966339111328, 0.02170863914489746, 0.02165692710876465, 0.021672767639160158, 0.021542207717895508, 0.02233318328857422, 0.021593023300170898, 0.021681312561035157, 0.02161248016357422, 0.021869024276733397, 0.021745439529418945, 0.021553823471069336, 0.021511808395385742, 0.021597600936889647, 0.02156755256652832, 0.02146601676940918, 0.02146713638305664, 0.021341728210449218, 0.021370687484741212, 0.021381792068481446, 0.021871423721313475, 0.021936128616333008, 0.021618495941162108, 0.02164508819580078, 0.021371295928955078, 0.021399072647094727, 0.021905887603759767, 0.021495136260986328, 0.021379743576049805, 0.021354496002197267, 0.02143756866455078, 0.021713792800903322, 0.02154489517211914, 0.021526592254638672, 0.021362688064575194, 0.021311487197875977, 0.021359712600708007, 0.02137321662902832, 0.021631616592407227, 0.022008064270019532, 0.021827295303344728, 0.02156470489501953, 0.021451520919799804, 0.02136796760559082, 0.021461759567260742, 0.021653568267822266, 0.021664928436279297, 0.021415807723999022, 0.021279743194580078, 0.021477632522583008, 0.02129280090332031, 0.021335552215576172, 0.02133247947692871, 0.02155913543701172, 0.021481632232666015, 0.021598207473754884, 0.021447999954223633, 0.021626752853393556, 0.021709535598754885, 0.0215184326171875, 0.02141168022155762, 0.021379104614257814, 0.02139334487915039, 0.021368511199951173, 
0.02152057647705078, 0.02147056007385254, 0.021396448135375976, 0.021398624420166015, 0.021403968811035155, 0.021557151794433595, 0.02169107246398926, 0.021626688003540038, 0.021745376586914063, 0.021502431869506837, 0.02168009567260742, 0.022105663299560548, 0.022116832733154297, 0.022240959167480468, 0.02241708755493164, 0.022077375411987305, 0.021758176803588866, 0.021804800033569337, 0.0217890567779541, 0.02182102394104004, 0.021904191970825194, 0.022130495071411134, 0.022003007888793946, 0.021805376052856446, 0.021662080764770508, 0.021897216796875, 0.02156723213195801, 0.02151759910583496, 0.021486560821533204, 0.02182963180541992, 0.021821279525756836, 0.02198953628540039, 0.021612224578857423, 0.02170022392272949, 0.0216296329498291, 0.021696512222290038, 0.02159779167175293, 0.021603776931762696, 0.021598207473754884, 0.021826528549194337, 0.0220214729309082, 0.02184239959716797, 0.021799104690551758, 0.02206105613708496, 0.021798912048339843, 0.021661663055419923, 0.02184182357788086, 0.021885055541992188, 0.02166374397277832, 0.021987327575683592, 0.02187059211730957, 0.02214646339416504, 0.021583871841430666, 0.021760128021240235, 0.02177222442626953, 0.0216396484375, 0.021792255401611327, 0.021723712921142578, 0.022554239273071288, 0.022104352951049806, 0.021970016479492187, 0.021517087936401367, 0.021742944717407228, 0.02165657615661621, 0.02158729553222656, 0.021438880920410155, 0.0214836483001709, 0.021510112762451173, 0.021610143661499024, 0.021563776016235353, 0.02161664009094238, 0.02169968032836914, 0.02182032012939453, 0.021587968826293946, 0.021479040145874023, 0.021940223693847655, 0.021735807418823243, 0.021635072708129883, 0.022015615463256834, 0.022018367767333985, 0.021801023483276366, 0.021777599334716798]",tokens/s,46.06215062103461,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1582.67392,1596.915712,0.0,1201.668096,1189.151232,s,1,8.203900390625,8.203900390625,0.0,8.203900390625,8.203900390625,8.203900390625,8.203900390625,[8.203900390625],,kWh,3.496767919166738e-05,3.849963228949563e-06,1.0954453207999426e-05,4.9772095628616364e-05,,MB,1564.16,1791.950848,0.0,1382.023168,1351.367168,s,10,0.47518137359619145,0.04751813735961914,0.0002168038865286773,0.04744075202941894,0.047669537353515624,0.04788238372802734,0.04805266082763672,"[0.04809523010253906, 0.04736966323852539, 0.04743475341796875, 0.04732377624511719, 0.04743775939941406, 0.04756444931030274, 0.047313503265380856, 0.04762223815917969, 0.04744374465942383, 0.047576255798339843]",tokens/s,5387.416557652121,kWh,1.426084696565e-06,1.5722692729590755e-07,9.489682388487577e-07,2.5322798627096657e-06,tokens/kWh,101094671.15773974,MB,1564.16,1833.893888,0.0,1423.966208,1407.328256,s,10,13.738055297851563,1.3738055297851564,0.003616846912428085,1.3729713134765624,1.378206689453125,1.3787253540039062,1.3791402856445312,"[1.3766226806640625, 
1.3780914306640626, 1.3720076904296874, 1.369878662109375, 1.3776171875, 1.369964111328125, 1.371584228515625, 1.3739349365234375, 1.3792440185546875, 1.3691103515625]",tokens/s,45.85801893653194,kWh,3.954381329343452e-05,4.3613261565511e-06,1.8056352520951623e-05,6.196149197093724e-05,tokens/kWh,1016760.53942584,,s,630,13.735277492523181,0.02180202776590983,0.00033505996915220957,0.02172318458557129,0.022011130142211913,0.02230405149459839,0.023348841247558597,"[0.021792863845825194, 0.02171536064147949, 0.021740928649902343, 0.022104480743408202, 0.021720895767211913, 0.021602815628051757, 0.021725183486938478, 0.021708799362182618, 0.02162073516845703, 0.02167193603515625, 0.022177791595458983, 0.021825536727905274, 0.021644319534301758, 0.021639520645141602, 0.021813888549804688, 0.021712896347045898, 0.021765119552612306, 0.021570560455322265, 0.021763423919677734, 0.021709440231323242, 0.021766176223754884, 0.021712896347045898, 0.021567487716674806, 0.02166988754272461, 0.021839679718017577, 0.021624736785888672, 0.021889312744140625, 0.02546073532104492, 0.021745664596557617, 0.02179427146911621, 0.021656095504760744, 0.02175292778015137, 0.0224736328125, 0.02186182403564453, 0.02158799934387207, 0.021578271865844725, 0.021590015411376954, 0.022145023345947267, 0.023379743576049803, 0.022912351608276368, 0.02170524787902832, 0.021743967056274415, 0.02168217658996582, 0.021753631591796874, 0.021598432540893556, 0.02158736038208008, 0.02166204833984375, 0.021698816299438477, 0.021593376159667967, 0.021689056396484375, 0.021585376739501953, 0.021885120391845703, 0.02172323226928711, 0.021776639938354492, 0.021716991424560548, 0.021757280349731446, 0.021977247238159178, 0.02166124725341797, 0.021662656784057616, 0.021712608337402343, 0.021700895309448243, 0.02183782386779785, 0.021635072708129883, 0.02174835205078125, 0.021863935470581054, 0.021833728790283204, 0.021700895309448243, 0.02176041603088379, 0.021870399475097658, 0.02232121658325195, 0.021790847778320313, 0.021800960540771484, 0.021995519638061522, 0.022013952255249023, 0.021804927825927734, 0.021670015335083007, 0.02170639991760254, 0.021778783798217773, 0.021894943237304686, 0.021772512435913084, 0.021722623825073242, 0.021675552368164062, 0.021636064529418946, 0.021950304031372072, 0.02182467269897461, 0.02172313690185547, 0.021699583053588867, 0.02174937629699707, 0.021737855911254884, 0.02172835159301758, 0.021562271118164063, 0.02302297592163086, 0.022726463317871093, 0.021971744537353517, 0.021757984161376955, 0.021964448928833008, 0.021762399673461913, 0.021704704284667968, 0.021678016662597655, 0.02178873634338379, 0.02189107131958008, 0.02178278350830078, 0.021847007751464843, 0.02165020751953125, 0.02180860710144043, 0.02170275115966797, 0.02177043151855469, 0.021647296905517577, 0.02167977523803711, 0.021861024856567383, 0.021740959167480468, 0.021682559967041017, 0.021635295867919922, 0.021594112396240234, 0.02301692771911621, 0.021633567810058593, 0.021609888076782227, 0.021658208847045897, 0.02163043212890625, 0.0215614070892334, 0.02174745559692383, 0.023663328170776366, 0.02226585578918457, 0.02230067253112793, 0.02196463966369629, 0.021778079986572267, 0.02161177635192871, 0.021776351928710937, 0.02163167953491211, 0.021575935363769533, 0.021558687210083007, 0.02153660774230957, 0.021620960235595704, 0.021592096328735353, 0.021465599060058595, 0.021591615676879884, 0.021607999801635743, 0.023083295822143555, 0.022989408493041992, 0.02352332878112793, 0.02171494483947754, 0.021651168823242188, 
0.021597824096679687, 0.02163983917236328, 0.02170675277709961, 0.021774335861206053, 0.021847583770751952, 0.021686752319335936, 0.021675039291381835, 0.021607391357421873, 0.021624544143676757, 0.021647552490234374, 0.021594112396240234, 0.02155939292907715, 0.021712896347045898, 0.02162073516845703, 0.021739295959472656, 0.02167625617980957, 0.02175939178466797, 0.021699167251586913, 0.022181407928466797, 0.02164169692993164, 0.021683616638183592, 0.021682783126831053, 0.021985279083251954, 0.022149120330810547, 0.021783903121948243, 0.021810943603515626, 0.021600576400756837, 0.02165190315246582, 0.021644927978515624, 0.02157948875427246, 0.02183247947692871, 0.02165558433532715, 0.021796863555908205, 0.021833728790283204, 0.021719039916992186, 0.02164531135559082, 0.021686111450195313, 0.021727392196655274, 0.021741567611694337, 0.021634687423706056, 0.021668224334716796, 0.021896575927734373, 0.02173388862609863, 0.021714431762695312, 0.021962976455688475, 0.021739839553833008, 0.021694368362426757, 0.021821632385253906, 0.021968832015991212, 0.021942527770996093, 0.021925535202026367, 0.021745311737060548, 0.021611200332641602, 0.0217488956451416, 0.02175062370300293, 0.02183919906616211, 0.021625503540039063, 0.02229043197631836, 0.021660896301269533, 0.021826175689697264, 0.02182547187805176, 0.02184582328796387, 0.021658016204833985, 0.021659263610839842, 0.02179110336303711, 0.02169036865234375, 0.021958656311035156, 0.02200371170043945, 0.02187446403503418, 0.021815519332885742, 0.02172313690185547, 0.021821056365966797, 0.021703039169311523, 0.021751808166503905, 0.02170639991760254, 0.02177039909362793, 0.021827775955200194, 0.021848031997680664, 0.021776256561279298, 0.02165478324890137, 0.02163715171813965, 0.02164620780944824, 0.021628192901611328, 0.021668127059936523, 0.021608896255493164, 0.021617759704589845, 0.02165648078918457, 0.02166374397277832, 0.02167193603515625, 0.021753856658935547, 0.021729280471801758, 0.021736991882324218, 0.021582304000854494, 0.02168012809753418, 0.021654687881469726, 0.021617504119873048, 0.021821216583251955, 0.021717216491699217, 0.021565439224243164, 0.021592063903808592, 0.021595680236816406, 0.02164963150024414, 0.02159436798095703, 0.021602304458618164, 0.021669408798217774, 0.021567968368530272, 0.021766143798828123, 0.021823007583618163, 0.021803487777709962, 0.0217509765625, 0.021703359603881835, 0.02184796714782715, 0.021753599166870117, 0.021706880569458006, 0.021842144012451173, 0.02162892723083496, 0.02188694381713867, 0.021833759307861328, 0.021786624908447266, 0.021665567398071288, 0.02167625617980957, 0.02269152069091797, 0.021626304626464844, 0.021735872268676758, 0.021807552337646485, 0.021638944625854493, 0.021737695693969727, 0.02171494483947754, 0.021702655792236326, 0.02164246368408203, 0.021635776519775392, 0.021640960693359374, 0.021758304595947266, 0.021859392166137696, 0.021659936904907227, 0.021680351257324218, 0.021682624816894532, 0.021702655792236326, 0.0216246395111084, 0.021647552490234374, 0.021708351135253906, 0.021815423965454103, 0.021800607681274415, 0.021651199340820312, 0.021666048049926757, 0.021796607971191408, 0.021805631637573243, 0.021721439361572267, 0.021719039916992186, 0.023518239974975586, 0.022268896102905274, 0.022314687728881837, 0.02189548873901367, 0.02169209671020508, 0.021776704788208007, 0.021862464904785155, 0.02280195236206055, 0.022339712142944335, 0.02185158348083496, 0.02192470359802246, 0.023119871139526366, 0.021951583862304686, 0.021842464447021485, 0.02233526420593262, 
0.021801567077636717, 0.021747007369995117, 0.021731264114379884, 0.021822208404541014, 0.02186444854736328, 0.021693952560424806, 0.021674495697021484, 0.02170675277709961, 0.021650848388671876, 0.0215285758972168, 0.021638624191284178, 0.021666336059570312, 0.02191155242919922, 0.022071296691894532, 0.021729280471801758, 0.02182143974304199, 0.021962751388549806, 0.02219558334350586, 0.02194905662536621, 0.021573631286621094, 0.022183263778686523, 0.022041248321533202, 0.021810623168945314, 0.021717567443847657, 0.02151628875732422, 0.02166783905029297, 0.021630975723266603, 0.021551103591918946, 0.021606399536132814, 0.021725120544433593, 0.021624895095825197, 0.022220800399780274, 0.021761407852172853, 0.021645055770874024, 0.021658496856689455, 0.022054912567138672, 0.021857343673706054, 0.02160736083984375, 0.02161664009094238, 0.02165555191040039, 0.02163020706176758, 0.02169113540649414, 0.02168832015991211, 0.021745664596557617, 0.021581760406494142, 0.021643327713012694, 0.021544960021972655, 0.02166374397277832, 0.021656671524047853, 0.021617183685302733, 0.021739007949829102, 0.02171993637084961, 0.02175763130187988, 0.022328800201416015, 0.0216595516204834, 0.021586048126220704, 0.0216297607421875, 0.02161840057373047, 0.02173529624938965, 0.021710687637329102, 0.02169913673400879, 0.021722688674926757, 0.021744064331054688, 0.021610591888427736, 0.02164931106567383, 0.021600160598754883, 0.021573728561401367, 0.021700607299804688, 0.021753631591796874, 0.021758176803588866, 0.021740736007690428, 0.021823392868041993, 0.021796960830688477, 0.021600255966186522, 0.021719039916992186, 0.02209916877746582, 0.021606943130493165, 0.02166364860534668, 0.02247923278808594, 0.021751455307006836, 0.022157215118408204, 0.021613983154296874, 0.021647552490234374, 0.02169523239135742, 0.021692480087280273, 0.021607456207275392, 0.021865440368652345, 0.021879840850830078, 0.021788703918457032, 0.02175276756286621, 0.021710847854614256, 0.021811199188232423, 0.021740543365478517, 0.021724159240722657, 0.021958656311035156, 0.022087135314941407, 0.021795360565185547, 0.02202134323120117, 0.021977184295654296, 0.022110464096069336, 0.021756000518798828, 0.02187913513183594, 0.02189107131958008, 0.021702655792236326, 0.021755903244018555, 0.021766143798828123, 0.021985279083251954, 0.021724863052368162, 0.02176646423339844, 0.021811199188232423, 0.021853504180908204, 0.0216494083404541, 0.021686975479125976, 0.02164531135559082, 0.021593376159667967, 0.021752159118652345, 0.021612800598144532, 0.021571264266967774, 0.021659616470336915, 0.02160688018798828, 0.021768192291259765, 0.0220446720123291, 0.021542015075683593, 0.021588863372802733, 0.021660703659057617, 0.02201081657409668, 0.021806976318359376, 0.0216615047454834, 0.02158153533935547, 0.02165376091003418, 0.02155353546142578, 0.021618688583374023, 0.02164646339416504, 0.02154319953918457, 0.0216494083404541, 0.02179782485961914, 0.021716991424560548, 0.02162073516845703, 0.021647104263305662, 0.021686399459838867, 0.021678207397460937, 0.021831680297851562, 0.021622783660888673, 0.021610015869140624, 0.02162719917297363, 0.021780256271362305, 0.021837312698364256, 0.02161097526550293, 0.021682592391967775, 0.021817312240600586, 0.021935583114624024, 0.021770784378051758, 0.021773984909057617, 0.02175424003601074, 0.021764095306396485, 0.02171494483947754, 0.02165555191040039, 0.021774335861206053, 0.021754976272583007, 0.02180803108215332, 0.02189926338195801, 0.021746944427490235, 0.021836544036865236, 0.021635072708129883, 
0.021882272720336913, 0.022131296157836915, 0.021833728790283204, 0.021743616104125976, 0.021890111923217773, 0.022125503540039063, 0.022128639221191407, 0.02175935935974121, 0.021770656585693358, 0.021820831298828124, 0.021713727951049804, 0.02169759941101074, 0.021704927444458007, 0.021946239471435546, 0.021916511535644532, 0.021958656311035156, 0.02187264060974121, 0.021985279083251954, 0.02190505599975586, 0.021789024353027344, 0.02174284744262695, 0.02170102310180664, 0.021731679916381835, 0.02234163284301758, 0.021845695495605468, 0.022308511734008787, 0.021745920181274414, 0.02169487953186035, 0.021780288696289063, 0.02175200080871582, 0.021767391204833984, 0.021616928100585936, 0.021826047897338868, 0.021772287368774415, 0.022202655792236327, 0.021602432250976564, 0.021749824523925782, 0.02171638488769531, 0.02166192054748535, 0.021701055526733397, 0.021741472244262695, 0.021755935668945313, 0.021791135787963867, 0.02211020851135254, 0.022345727920532226, 0.021646495819091796, 0.021586784362792967, 0.02159152030944824, 0.021940671920776367, 0.021757984161376955, 0.02211027145385742, 0.021766143798828123, 0.021819391250610352, 0.021757280349731446, 0.021650079727172852, 0.021721088409423828, 0.021719039916992186, 0.021708799362182618, 0.021708799362182618, 0.02181715202331543, 0.02179212760925293, 0.021663616180419922, 0.021805023193359373, 0.021819711685180664, 0.02245903968811035, 0.023595008850097656, 0.021786624908447266, 0.021882463455200195, 0.02186854362487793, 0.021884639739990233, 0.02163692855834961, 0.021741632461547852, 0.022858560562133787, 0.02177539253234863, 0.0216396484375, 0.021641727447509765, 0.021751808166503905, 0.02168009567260742, 0.021717023849487305, 0.02165551948547363, 0.02182761573791504, 0.02180271911621094, 0.021690656661987304, 0.0216494083404541, 0.02167807960510254, 0.021794815063476563, 0.021669792175292968, 0.021661792755126953, 0.021707807540893555, 0.02168726348876953, 0.022155263900756835, 0.021597824096679687, 0.02163283157348633, 0.021537343978881837, 0.022401023864746093, 0.022345567703247072, 0.02483216094970703, 0.022258880615234376, 0.02166864013671875, 0.021501983642578125, 0.02168627166748047, 0.021604352951049805, 0.021530624389648437, 0.021620031356811523, 0.0216746883392334, 0.021554496765136717, 0.02156819152832031, 0.021587968826293946, 0.021622783660888673, 0.021716991424560548, 0.021579776763916016, 0.02230681610107422, 0.021538400650024415, 0.023273183822631837, 0.021777088165283204, 0.021712799072265625, 0.021631071090698242, 0.021604223251342772, 0.02164748764038086, 0.021630975723266603, 0.02205081558227539, 0.0215817928314209, 0.021788703918457032, 0.021826656341552734, 0.021828128814697267, 0.021688703536987305, 0.021618623733520508, 0.0215982723236084, 0.021581760406494142, 0.021663808822631837, 0.021598207473754884, 0.021562528610229493, 0.021647712707519532, 0.02169465637207031, 0.0218668155670166, 0.021577728271484374, 0.021566848754882812, 0.021719680786132813, 0.02164121627807617, 0.02150809669494629, 0.0218024959564209, 0.021950143814086914, 0.02159062385559082, 0.021915712356567384, 0.0218175048828125, 0.021724512100219726, 0.021647615432739256, 0.021558944702148437, 0.02155801582336426, 0.021628320693969725, 0.021719072341918947, 0.021784160614013674, 0.021880800247192383, 0.02186342430114746, 0.021780479431152345, 0.021811199188232423, 0.0216693115234375, 0.02178060722351074, 0.021729248046875, 0.021746143341064453]",tokens/s,45.86729320488362,, 
4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 16.12 MiB is free. Process 20716 has 14.72 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 187.67 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,6765.768704,7762.542592,0.0,7367.294976,7351.94368,s,1,12.484943359375,12.484943359375,0.0,12.484943359375,12.484943359375,12.484943359375,12.484943359375,[12.484943359375],,kWh,0.00015955133010832774,1.7592247732297916e-05,5.0381151416003384e-05,0.00022752472925662904,,MB,1654.890496,8372.813824,0.0,7962.886144,7872.44544,s,10,3.217915252685547,0.3217915252685547,0.0013950329344355533,0.3218920440673828,0.32337008056640626,0.32344077758789064,0.3234973352050781,"[0.3185030212402344, 0.32086541748046876, 0.32094903564453126, 0.3215930480957031, 0.3233543701171875, 0.3225209045410156, 0.32187457275390624, 0.3219095153808594, 0.3228338928222656, 0.323511474609375]",tokens/s,795.5461219382715,kWh,9.364775526823147e-06,1.032679123397765e-06,6.21373760987501e-06,1.6611192260095923e-05,tokens/kWh,15411295.949838202,MB,1666.101248,8624.472064,0.0,8214.544384,8118.577152,s,10,27.102155029296874,2.7102155029296875,0.0027258359209844218,2.709692626953125,2.712966064453125,2.7143250732421875,2.7154122802734375,"[2.71568408203125, 2.710020263671875, 2.7065537109375, 2.708927978515625, 2.708125732421875, 2.706820556640625, 2.712405517578125, 2.7126640625, 2.709364990234375, 2.711588134765625]",tokens/s,23.245383967399746,kWh,7.911330669484301e-05,8.72638364765581e-06,5.2567253859325075e-05,0.0001404069442018239,tokens/kWh,448695.7561688863,,s,630,27.098995811462423,0.043014279065813336,0.00037987361758398897,0.04298665618896484,0.04340562782287598,0.04354385929107666,0.044067572555542,"[0.04339251327514648, 0.04290611267089844, 0.042633216857910154, 0.04266726303100586, 0.04283404922485352, 0.04267686462402344, 0.04281257629394531, 0.043326400756835935, 0.04269865417480469, 0.042773696899414064, 0.04270108795166016, 0.04273596954345703, 0.04285414505004883, 0.04289318466186524, 0.042850879669189455, 0.04271641540527344, 0.04280915069580078, 0.04295999908447266, 0.043157215118408206, 0.042928031921386715, 0.04373324966430664, 0.042939937591552735, 0.042885536193847655, 0.04328995132446289, 0.04309030532836914, 0.04312297439575195, 0.042874462127685545, 0.04292959976196289, 
0.04283491134643555, 0.04286054229736328, 0.04292607879638672, 0.04326399993896484, 0.04312995147705078, 0.0434246711730957, 0.04319232177734375, 0.04315750503540039, 0.042979328155517575, 0.043210014343261716, 0.043340511322021484, 0.04307353591918945, 0.043063297271728515, 0.043237377166748046, 0.04330825424194336, 0.04332998275756836, 0.043265472412109374, 0.04323612976074219, 0.04312409591674805, 0.0433191032409668, 0.04336304092407227, 0.04312841415405273, 0.0432624626159668, 0.04347097778320313, 0.043364673614501956, 0.043372222900390625, 0.04331958389282226, 0.04315017700195312, 0.043266654968261715, 0.04326019287109375, 0.04331315231323242, 0.043361793518066405, 0.04371916961669922, 0.0433172492980957, 0.043243743896484374, 0.043122783660888675, 0.04278262329101563, 0.04254515075683594, 0.04233420944213867, 0.042259807586669924, 0.04243059158325195, 0.04224873733520508, 0.04244895935058594, 0.042476478576660155, 0.04262400054931641, 0.04294451141357422, 0.04263705444335938, 0.04263935852050781, 0.04265497589111328, 0.04269801712036133, 0.04263497543334961, 0.04261798477172852, 0.04272995376586914, 0.04263468933105469, 0.04276649475097656, 0.042861278533935544, 0.04277193450927735, 0.043126815795898436, 0.043006561279296876, 0.04286435317993164, 0.042698879241943356, 0.04280876922607422, 0.04266876983642578, 0.042718494415283206, 0.04295139312744141, 0.043122432708740235, 0.04289152145385742, 0.042754207611083984, 0.04283564758300781, 0.0429323844909668, 0.042915233612060545, 0.04295363235473633, 0.04291657638549805, 0.04294345474243164, 0.043009151458740236, 0.04309491348266602, 0.04309811019897461, 0.04298115158081055, 0.043237598419189456, 0.04324147033691406, 0.04363673782348633, 0.04364716720581055, 0.0433928337097168, 0.044109825134277345, 0.04691558456420898, 0.04290259170532226, 0.043037151336669924, 0.04327881622314453, 0.04325312042236328, 0.04311011123657227, 0.04304790496826172, 0.04336019134521484, 0.043503456115722657, 0.043474815368652345, 0.04331862258911133, 0.04343190383911133, 0.0434442253112793, 0.04323984146118164, 0.04339212799072266, 0.042651905059814456, 0.04240614318847656, 0.042181087493896485, 0.04228496170043945, 0.0423765754699707, 0.04293222427368164, 0.0434054069519043, 0.04273001480102539, 0.04251238250732422, 0.04238655853271484, 0.04245939254760742, 0.042549888610839845, 0.042694591522216795, 0.04264556884765625, 0.042690464019775394, 0.04290569686889648, 0.04269875335693359, 0.042673728942871095, 0.04281388854980469, 0.04283353424072266, 0.04291945648193359, 0.04421283340454102, 0.04299174499511719, 0.04295897674560547, 0.04284620666503906, 0.04293632125854492, 0.04281865692138672, 0.04263753509521485, 0.04266463851928711, 0.04277043151855469, 0.04276838302612305, 0.04294374465942383, 0.0428223991394043, 0.04297119903564453, 0.04305500793457031, 0.04289750289916992, 0.04289056015014649, 0.04294105529785156, 0.042917728424072266, 0.04286223983764648, 0.04314729690551758, 0.04312063980102539, 0.043178462982177736, 0.0433983039855957, 0.04360665512084961, 0.04335638427734375, 0.04313087844848633, 0.04310835266113281, 0.043022560119628905, 0.04296384048461914, 0.04300601577758789, 0.04301091384887695, 0.043050430297851563, 0.04334988784790039, 0.04344083023071289, 0.04338483047485352, 0.04323904037475586, 0.043399551391601565, 0.043153408050537106, 0.04312396621704102, 0.0434672966003418, 0.04347312164306641, 0.04348108673095703, 0.04288409423828125, 0.042620990753173826, 0.042643455505371096, 0.042474559783935543, 0.042493824005126954, 
0.04222390365600586, 0.042982559204101566, 0.04247814559936523, 0.042264575958251956, 0.04261580657958984, 0.042482688903808595, 0.04269465637207031, 0.043224414825439456, 0.04280963134765625, 0.04275971221923828, 0.04259254455566406, 0.042891841888427734, 0.04274176025390625, 0.04267734527587891, 0.04297001647949219, 0.04314835357666016, 0.04338275146484375, 0.043109344482421874, 0.04299760055541992, 0.04289737701416016, 0.04270918273925781, 0.042690208435058594, 0.04307388687133789, 0.04259635162353516, 0.042633216857910154, 0.04281897735595703, 0.04297296142578125, 0.04292076873779297, 0.042799102783203126, 0.04293609619140625, 0.0428812141418457, 0.04285776138305664, 0.042869503021240235, 0.04304012680053711, 0.043036670684814454, 0.04393971252441406, 0.04331292724609375, 0.04334662246704102, 0.0436778564453125, 0.04368396759033203, 0.04348281478881836, 0.04328656005859375, 0.043423168182373045, 0.043213119506835936, 0.04329033660888672, 0.04339206314086914, 0.043089664459228516, 0.04297283172607422, 0.04301017761230469, 0.043087135314941405, 0.04310521697998047, 0.0430544319152832, 0.04329539108276367, 0.0432509765625, 0.043344097137451174, 0.04339548873901367, 0.043599967956542966, 0.043315361022949216, 0.04273196792602539, 0.04244416046142578, 0.0423985595703125, 0.04230579376220703, 0.04235033416748047, 0.04253900909423828, 0.04252492904663086, 0.04249484634399414, 0.04268064117431641, 0.04270153427124023, 0.04263673782348633, 0.0426530876159668, 0.04245743942260742, 0.04241052627563477, 0.04261471939086914, 0.04282294464111328, 0.04270336151123047, 0.042927936553955076, 0.043076416015625, 0.04304851150512695, 0.04296300888061524, 0.04290166473388672, 0.04305920028686523, 0.04289712142944336, 0.04295635223388672, 0.04283583831787109, 0.04289827346801758, 0.04281753540039063, 0.042799102783203126, 0.042774177551269534, 0.04262947082519531, 0.04281753540039063, 0.042995712280273435, 0.04292812728881836, 0.042981342315673826, 0.042939678192138675, 0.04293452835083008, 0.043326976776123044, 0.043121662139892575, 0.04309811019897461, 0.044436702728271486, 0.04318611145019531, 0.04359219360351563, 0.04317219161987305, 0.04318003082275391, 0.0431769905090332, 0.04339811325073242, 0.04340035247802734, 0.04325667190551758, 0.04330624008178711, 0.04327091217041015, 0.04319145584106445, 0.04305599975585937, 0.04314716720581055, 0.043165760040283205, 0.04337238311767578, 0.04333580780029297, 0.04356304168701172, 0.04319622421264648, 0.043141311645507815, 0.04329395294189453, 0.04348169708251953, 0.04351996612548828, 0.042734527587890626, 0.0424672966003418, 0.04235001754760742, 0.04251055908203125, 0.04264550399780274, 0.04276176071166992, 0.04258095932006836, 0.04257756805419922, 0.04234377670288086, 0.04252713775634766, 0.042531265258789065, 0.042461406707763674, 0.04265167999267578, 0.042692352294921875, 0.0427613754272461, 0.042576416015625, 0.04268012619018555, 0.042706497192382814, 0.04284425735473633, 0.042871646881103516, 0.042856449127197264, 0.04312844848632812, 0.04304115295410156, 0.04301728057861328, 0.04295961761474609, 0.042829856872558594, 0.04280073547363281, 0.04336838531494141, 0.042945152282714845, 0.042942272186279294, 0.042750175476074216, 0.042774494171142576, 0.04271104049682617, 0.04279276657104492, 0.04293036651611328, 0.042985183715820316, 0.04294646453857422, 0.042871166229248046, 0.043028480529785154, 0.04294451141357422, 0.04311859130859375, 0.043259552001953125, 0.043964126586914065, 0.043610305786132814, 0.04332793426513672, 0.04339199829101562, 
0.04333465576171875, 0.0432470703125, 0.043098464965820316, 0.04309811019897461, 0.043378273010253904, 0.043190113067626955, 0.04317068862915039, 0.04312255859375, 0.04317593765258789, 0.04312390518188477, 0.04307254409790039, 0.043183902740478515, 0.04321673583984375, 0.043149471282958984, 0.04325900650024414, 0.04354547119140625, 0.04343603134155274, 0.043044864654541014, 0.04262911987304688, 0.042618881225585936, 0.04242432022094727, 0.04251155090332031, 0.04255007934570312, 0.04272700881958008, 0.042664352416992186, 0.042675392150878906, 0.042658622741699216, 0.04277679824829102, 0.042898944854736325, 0.04281689453125, 0.042703777313232424, 0.0433438720703125, 0.04309161758422852, 0.04275439834594726, 0.04297926330566406, 0.04299782562255859, 0.042858558654785155, 0.043007232666015624, 0.04306208038330078, 0.0430365104675293, 0.043097248077392576, 0.042957118988037106, 0.042900032043457034, 0.042821632385253904, 0.04264755249023437, 0.042681888580322264, 0.04283391952514649, 0.04318435287475586, 0.04304054260253906, 0.042872512817382816, 0.04275609588623047, 0.04273017501831055, 0.042807392120361325, 0.04308777618408203, 0.04311843109130859, 0.04308812713623047, 0.04323680114746094, 0.04336627197265625, 0.04362924957275391, 0.04319427108764649, 0.0433787841796875, 0.04427775955200195, 0.04339244842529297, 0.04331577682495117, 0.043205760955810545, 0.04380070495605469, 0.043447040557861326, 0.043227134704589845, 0.04311859130859375, 0.04305100631713867, 0.04318790435791016, 0.04325203323364258, 0.043003902435302735, 0.04312441635131836, 0.04326041412353516, 0.04318598556518555, 0.04331267166137695, 0.043326976776123044, 0.04392035293579102, 0.04344841766357422, 0.04281520080566406, 0.04252467346191406, 0.04247836685180664, 0.04235212707519531, 0.04245532989501953, 0.042557823181152345, 0.042552513122558595, 0.04264432144165039, 0.042547168731689455, 0.04255744171142578, 0.042593441009521484, 0.042514686584472654, 0.04277104187011719, 0.04268147277832031, 0.04259929656982422, 0.04274748611450195, 0.042839969635009766, 0.042888702392578124, 0.04286975860595703, 0.04295206451416016, 0.042945152282714845, 0.043060543060302735, 0.042961631774902344, 0.042874462127685545, 0.0429714241027832, 0.04287088012695312, 0.043012096405029294, 0.04292227172851563, 0.042979198455810545, 0.04278572845458985, 0.04279004669189453, 0.04276172637939453, 0.042753280639648436, 0.04296214294433594, 0.04308505630493164, 0.04327062225341797, 0.04319347381591797, 0.04313932800292969, 0.04369887924194336, 0.04321279907226563, 0.043235294342041014, 0.04354188919067383, 0.04347561645507812, 0.043407615661621095, 0.043398944854736325, 0.04346262359619141, 0.0434442253112793, 0.04338687896728516, 0.04336608123779297, 0.04365548706054687, 0.04322844696044922, 0.043254016876220706, 0.043208351135253904, 0.04333663940429688, 0.04348483276367188, 0.04350950241088867, 0.043557342529296876, 0.04349747085571289, 0.043401214599609376, 0.04358473587036133, 0.043574047088623044, 0.04361833572387695, 0.043200416564941405, 0.04267948913574219, 0.04248617553710937, 0.04257388687133789, 0.04260892868041992, 0.042546688079833986, 0.042656383514404296, 0.042503807067871095, 0.04257628631591797, 0.04242147064208984, 0.042463134765625, 0.04256835174560547, 0.04272355270385742, 0.04278217697143555, 0.04293072128295899, 0.042805248260498044, 0.0428807373046875, 0.043251232147216795, 0.042922592163085936, 0.04285660934448242, 0.04308992004394531, 0.043175838470458985, 0.043081825256347656, 0.0432735366821289, 0.04299436950683594, 
0.042833759307861326, 0.04280131149291992, 0.04268841552734375, 0.042749248504638675, 0.04283689498901367, 0.04282969665527344, 0.04277967834472656, 0.04297808074951172, 0.04299766540527344, 0.042842174530029295, 0.04284438323974609, 0.04301801681518555, 0.04305667114257813, 0.04286124801635742, 0.04293632125854492, 0.04308515167236328, 0.043240097045898436, 0.04328646469116211, 0.04334393692016601, 0.04345187377929687, 0.04347123336791992, 0.043284160614013675, 0.043159839630126956, 0.04311648178100586, 0.044163326263427734, 0.04320857620239258, 0.04326518249511719, 0.04327657699584961, 0.043313343048095705, 0.04326383972167969, 0.043231903076171876, 0.043218238830566406, 0.04324739074707031, 0.043192607879638675, 0.043342464447021486, 0.0432988166809082, 0.043225086212158204, 0.04327324676513672, 0.04348108673095703, 0.042967041015625, 0.042674175262451174, 0.04249151992797852, 0.04243084716796875, 0.042240001678466796, 0.04242371368408203, 0.042477535247802733, 0.042566272735595705, 0.0425615348815918, 0.042909183502197266, 0.04448470306396484, 0.04250848007202149, 0.04247087860107422, 0.042474239349365235, 0.04253488159179687, 0.04274998474121094, 0.04283596801757812, 0.042894432067871094, 0.04296099090576172, 0.04316447830200195, 0.043119873046875, 0.043125503540039065, 0.04311859130859375, 0.0432125129699707, 0.04272911834716797, 0.042740352630615236, 0.042782718658447266, 0.04290505599975586, 0.04291619110107422, 0.04298553466796875, 0.04299993515014648, 0.04304256057739258, 0.042987777709960935, 0.042907615661621094, 0.04304899215698242, 0.042874881744384766, 0.04354048156738281, 0.043030143737792965, 0.043358592987060546, 0.043412830352783205, 0.04321267318725586, 0.04331302261352539, 0.043273120880126956, 0.043401214599609376, 0.04361612701416016, 0.04336579132080078, 0.04337148666381836, 0.04324121475219726, 0.04315702438354492, 0.042947040557861325, 0.04310806274414063, 0.043299423217773435, 0.04313631820678711, 0.04316128158569336, 0.04308819198608398, 0.04321446228027344, 0.043071231842041015, 0.0432281608581543, 0.04335520172119141, 0.043235679626464844, 0.043499393463134764, 0.04387311935424805]",tokens/s,23.24809392876178,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,8219.447296,11243.814912,0.0,10848.567296,10616.027648,s,1,14.3384755859375,14.3384755859375,0.0,14.3384755859375,14.3384755859375,14.3384755859375,14.3384755859375,[14.3384755859375],,kWh,0.00021537803253750856,2.375057297665813e-05,6.718449819199945e-05,0.00030631310370616617,,MB,3748.20864,11675.828224,0.0,11265.900544,11070.470656,s,10,3.8320139770507815,0.38320139770507816,0.0009032618281221941,0.38316490173339846,0.3842410583496094,0.3842586364746094,0.38427269897460936,"[0.38118585205078126, 0.3822434387207031, 0.38324179077148435, 0.3830880126953125, 0.3830860595703125, 0.3837288513183594, 0.383967041015625, 0.38295956420898436, 0.38427621459960937, 0.3842371520996094]",tokens/s,668.0560184099962,kWh,1.123143302435866e-05,1.2378677839720334e-06,7.456020922077341e-06,1.9925321730408035e-05,tokens/kWh,12847973.220393144,MB,3752.505344,11677.925376,0.0,11267.997696,11070.473216,s,10,29.213083251953126,2.921308325195313,0.005078015239350936,2.9213482666015627,2.928300830078125,2.9298215576171875,2.9310381396484377,"[2.913442138671875, 2.914806884765625, 2.91856884765625, 2.92129833984375, 2.921822509765625, 2.921398193359375, 2.920527587890625, 2.92191357421875, 2.927962890625, 2.93134228515625]",tokens/s,21.565679821142446,kWh,8.544600416355846e-05,9.425667577272549e-06,5.6726058201323676e-05,0.00015159772994215466,tokens/kWh,415573.5051180449,,s,630,29.210234874725337,0.046365452182103715,0.0005080789365927493,0.04631377410888672,0.04681200180053711,0.047042915725708005,0.0487489128112793,"[0.048790336608886715, 0.04646899032592773, 0.04569283294677735, 0.045532512664794925, 0.045716415405273436, 0.04563561630249023, 0.04543689727783203, 0.04720435333251953, 0.0456022720336914, 0.045744670867919925, 0.04590182495117188, 0.04625612640380859, 0.04597350311279297, 0.04576412963867187, 0.045816287994384766, 0.04609027099609375, 0.04579459381103516, 0.045824703216552735, 0.0462479362487793, 0.04623769760131836, 0.04613449478149414, 0.046492446899414064, 0.04641772842407227, 0.04601084899902344, 0.04596294403076172, 0.04625324630737305, 0.04609251022338867, 0.045832416534423825, 0.04616640090942383, 0.0460904655456543, 0.04602163314819336, 0.04585916900634766, 0.04627299118041992, 0.046105983734130856, 0.04595321655273438, 0.046145408630371094, 0.04643289566040039, 0.04617004776000976, 0.046257568359375, 0.046723648071289064, 0.04649168014526367, 0.046352161407470706, 0.046532833099365234, 0.04640335845947265, 0.04617443084716797, 0.04630220794677734, 0.04660889434814453, 0.046338558197021484, 0.04629459381103516, 0.04653100967407227, 0.04646092987060547, 0.04606083297729492, 0.046250720977783204, 0.0463474235534668, 0.04615584182739258, 0.04610332870483398, 0.04664700698852539, 0.04654265594482422, 0.04643068695068359, 0.04651987075805664, 0.04682387161254883, 0.04659823989868164, 0.04704655838012695, 
0.04849436950683594, 0.045856990814208985, 0.045760353088378905, 0.04563779067993164, 0.04557136154174805, 0.0453966064453125, 0.04576051330566406, 0.04577494430541992, 0.04589337539672852, 0.04617824172973633, 0.04609260940551758, 0.0460063362121582, 0.04573791885375977, 0.0458526725769043, 0.04605878448486328, 0.04585340881347656, 0.04607385635375977, 0.046284801483154295, 0.04641177749633789, 0.04589158248901367, 0.046339870452880856, 0.046549217224121094, 0.046089599609375, 0.046029312133789066, 0.046604415893554685, 0.04609843063354492, 0.04571136093139649, 0.046080001831054686, 0.04604927825927734, 0.04588937759399414, 0.04609244918823242, 0.04640547180175781, 0.04619484710693359, 0.04591820907592774, 0.04609638214111328, 0.0461844482421875, 0.046129150390625, 0.046112415313720706, 0.046440799713134764, 0.04639059066772461, 0.04634636688232422, 0.04662694549560547, 0.0466682243347168, 0.046292545318603516, 0.04635030364990234, 0.046590431213378906, 0.04654694366455078, 0.04628889465332031, 0.04647651290893555, 0.04639372634887695, 0.04637500762939453, 0.04646547317504883, 0.04658153533935547, 0.04627648162841797, 0.04639148712158203, 0.046540191650390625, 0.04642060852050781, 0.04643430328369141, 0.04674883270263672, 0.046596481323242185, 0.04646089553833008, 0.04818380737304687, 0.04646297454833984, 0.04876428985595703, 0.046567649841308595, 0.04588409423828125, 0.04565606307983398, 0.04578700637817383, 0.045811038970947265, 0.0456866569519043, 0.045822784423828124, 0.04606576156616211, 0.0460184326171875, 0.04595312118530273, 0.045953056335449216, 0.04581343841552735, 0.045603134155273437, 0.04593385696411133, 0.0460645751953125, 0.04620470428466797, 0.046022014617919924, 0.04642390441894531, 0.04616825485229492, 0.046156063079833984, 0.04656671905517578, 0.04694262313842774, 0.04645334243774414, 0.04596902465820313, 0.046036865234375, 0.04615756988525391, 0.0457665901184082, 0.0458535041809082, 0.04624595260620117, 0.04643600082397461, 0.04603017425537109, 0.04606662368774414, 0.0461578254699707, 0.04599427032470703, 0.0460715217590332, 0.0461844482421875, 0.046283008575439454, 0.046300926208496095, 0.046112159729003906, 0.04639599990844727, 0.046373119354248045, 0.04655487823486328, 0.0468823356628418, 0.046819808959960935, 0.046683265686035154, 0.04654751968383789, 0.046458976745605465, 0.04815027236938477, 0.046118465423583985, 0.046375839233398435, 0.0461475830078125, 0.04639664077758789, 0.04657625579833984, 0.04638022232055664, 0.04643324661254883, 0.0466165771484375, 0.046665950775146486, 0.04657888031005859, 0.04669091033935547, 0.046671871185302735, 0.04688057708740234, 0.0468988151550293, 0.047982593536376954, 0.04583212661743164, 0.045819713592529294, 0.04576623916625976, 0.045891807556152346, 0.045908416748046875, 0.045778942108154294, 0.045641727447509765, 0.045707263946533204, 0.045889537811279295, 0.04615695953369141, 0.04605414581298828, 0.04594073486328125, 0.04598147201538086, 0.04613561630249023, 0.04597964859008789, 0.046327457427978516, 0.04626617431640625, 0.046129409790039065, 0.04613891220092774, 0.04638956832885742, 0.04623353576660156, 0.046079872131347656, 0.04601715087890625, 0.046186496734619144, 0.04604313659667969, 0.04586697769165039, 0.04620233535766602, 0.046114398956298826, 0.04604140853881836, 0.04619676971435547, 0.04623593521118164, 0.046164321899414065, 0.04602460861206055, 0.0489780158996582, 0.04586700820922852, 0.04600831985473633, 0.04661862564086914, 0.04712582397460938, 0.04619251251220703, 0.04662748718261719, 0.04663056182861328, 
0.0463171501159668, 0.04629391860961914, 0.04646527862548828, 0.04650086212158203, 0.04625641632080078, 0.04618428802490234, 0.0464920654296875, 0.04635670471191406, 0.046386558532714846, 0.046545536041259765, 0.04799798583984375, 0.04644745635986328, 0.046678142547607424, 0.04610611343383789, 0.048622081756591794, 0.046265567779541016, 0.04658179092407227, 0.046567169189453125, 0.04706816101074219, 0.0469483528137207, 0.04675945663452148, 0.04882886505126953, 0.04620243072509766, 0.045801921844482424, 0.04576870346069336, 0.04572159957885742, 0.045706783294677734, 0.04565164947509766, 0.04824758529663086, 0.045402111053466795, 0.0462110710144043, 0.04657356643676758, 0.04604108810424805, 0.04570492935180664, 0.04580585479736328, 0.04612300872802735, 0.04591001510620117, 0.04602243041992188, 0.04643862533569336, 0.0463804817199707, 0.046113086700439454, 0.04674492645263672, 0.04651046371459961, 0.045932830810546874, 0.045879550933837894, 0.046179550170898434, 0.04605199813842773, 0.045873279571533206, 0.04630656051635742, 0.04609231948852539, 0.04791984176635742, 0.046088191986083986, 0.04621692657470703, 0.046051265716552735, 0.046008544921875, 0.04627628707885742, 0.04636102294921875, 0.04607183837890625, 0.046161441802978515, 0.04635692977905274, 0.04637654495239258, 0.04651459121704102, 0.046811134338378906, 0.04673715209960937, 0.04655100631713867, 0.04662643051147461, 0.04648313522338867, 0.04627062225341797, 0.04651295852661133, 0.04782694244384766, 0.045956287384033206, 0.046285377502441404, 0.04653286361694336, 0.04636467361450195, 0.046389183044433596, 0.04651424026489258, 0.0465629768371582, 0.046460990905761716, 0.04677824020385742, 0.04654735946655274, 0.04641487884521484, 0.0467465934753418, 0.04689113616943359, 0.04667174530029297, 0.049076255798339845, 0.046418975830078125, 0.047290943145751954, 0.0452694091796875, 0.04568262481689453, 0.04560294342041016, 0.04567830276489258, 0.04598601531982422, 0.045980960845947265, 0.04584499359130859, 0.04589206314086914, 0.04603398513793945, 0.045857471466064455, 0.04581171035766601, 0.04628227233886719, 0.04616649627685547, 0.045980926513671874, 0.04651084899902344, 0.04636444854736328, 0.046199039459228514, 0.04647727966308594, 0.04694940948486328, 0.04656768035888672, 0.04591036987304688, 0.04625801467895508, 0.046170654296875, 0.046047233581542966, 0.045943904876708984, 0.04630764770507813, 0.04627926254272461, 0.04607590484619141, 0.04590137481689453, 0.046219711303710935, 0.04603289413452148, 0.04607555389404297, 0.046546367645263674, 0.04630137634277344, 0.046203617095947266, 0.04643840026855469, 0.04635647964477539, 0.046383102416992186, 0.0466346549987793, 0.04706089782714844, 0.046680511474609374, 0.046583168029785155, 0.046516094207763675, 0.04643507385253906, 0.04624163055419922, 0.04672262573242188, 0.04647366333007812, 0.04632592010498047, 0.04637247848510742, 0.046615230560302735, 0.04656300735473633, 0.04637286376953125, 0.04642780685424805, 0.04661897659301758, 0.0466063346862793, 0.0470362548828125, 0.04669164657592773, 0.04658380889892578, 0.04698633575439453, 0.04720614242553711, 0.04878131103515625, 0.04636262512207031, 0.04572332763671875, 0.045598751068115236, 0.04585004806518555, 0.04594748687744141, 0.045784351348876956, 0.046113471984863284, 0.045926593780517576, 0.045942367553710936, 0.0459901123046875, 0.04608227157592774, 0.04585055923461914, 0.04611830520629883, 0.04619251251220703, 0.04632473754882813, 0.04633795166015625, 0.04605523300170898, 0.04623782348632813, 0.04636876678466797, 
0.046120960235595705, 0.04638508987426758, 0.046556480407714845, 0.04625075149536133, 0.045958656311035156, 0.04608256149291992, 0.04616396713256836, 0.04612505722045898, 0.04609843063354492, 0.046325759887695314, 0.04616396713256836, 0.046050846099853514, 0.04623203277587891, 0.04628684616088867, 0.04603289413452148, 0.046534400939941406, 0.04664140701293945, 0.04655104064941406, 0.04653875350952148, 0.04665753555297852, 0.04640134429931641, 0.04645497512817383, 0.046749855041503904, 0.046782047271728515, 0.04628828811645508, 0.04619452667236328, 0.04653276824951172, 0.04633481597900391, 0.046258113861083985, 0.04644358444213867, 0.046496768951416016, 0.04641299057006836, 0.04677510452270508, 0.04676403045654297, 0.04657356643676758, 0.046429664611816406, 0.04663059234619141, 0.04654489517211914, 0.046711647033691406, 0.04682137680053711, 0.04682080078125, 0.04681084823608399, 0.04669731140136719, 0.0480662727355957, 0.0462372817993164, 0.04597564697265625, 0.0458260498046875, 0.04557065582275391, 0.04574784088134766, 0.046035232543945315, 0.045902976989746096, 0.046201919555664064, 0.0462591667175293, 0.04632057571411133, 0.04592025756835937, 0.04595238494873047, 0.04604079818725586, 0.04600924682617188, 0.045946880340576174, 0.04645273590087891, 0.04635548782348633, 0.04605436706542969, 0.0463724479675293, 0.046548511505126955, 0.04615667343139648, 0.0463296012878418, 0.046319774627685543, 0.04617331314086914, 0.04604988861083984, 0.04626265716552734, 0.04628400039672852, 0.04620556640625, 0.046100639343261716, 0.04659404754638672, 0.04641523361206055, 0.046125473022460936, 0.0463218879699707, 0.04637081527709961, 0.04614144134521484, 0.04640768051147461, 0.04671897506713867, 0.046430206298828124, 0.04642598342895508, 0.04682099151611328, 0.04670105743408203, 0.04625408172607422, 0.046516063690185544, 0.046543006896972654, 0.04632156753540039, 0.046443809509277345, 0.04660921478271484, 0.04670230484008789, 0.04668592071533203, 0.04665401458740234, 0.04637696075439453, 0.04642406463623047, 0.04664934539794922, 0.04658585739135742, 0.04609344100952149, 0.04669289779663086, 0.04698556900024414, 0.04679065704345703, 0.04670259094238281, 0.0467672004699707, 0.046811038970947266, 0.046855167388916014, 0.04887347030639649, 0.046295455932617184, 0.04589753723144531, 0.045660926818847654, 0.045930240631103514, 0.04567270278930664, 0.04603903961181641, 0.046086143493652344, 0.04598566436767578, 0.04605964660644531, 0.046252033233642575, 0.046309375762939455, 0.04600831985473633, 0.04635359954833984, 0.04610307312011719, 0.04591030502319336, 0.046166015625, 0.04637007904052735, 0.046233375549316405, 0.04629190444946289, 0.04699955368041992, 0.04694182586669922, 0.04652671813964844, 0.04645465469360351, 0.04648905563354492, 0.046287647247314455, 0.04605952072143555, 0.04634982299804687, 0.046166526794433595, 0.04617375946044922, 0.04634259033203125, 0.046307582855224606, 0.04625081634521484, 0.04653091049194336, 0.046400096893310545, 0.04623769760131836, 0.04621516799926758, 0.046671775817871096, 0.04653884887695312, 0.046386302947998045, 0.04676492691040039, 0.04684185409545898, 0.04671241760253906, 0.04657398223876953, 0.04682732772827149, 0.046796062469482425, 0.04646601486206055, 0.04657670211791992, 0.04637760162353516, 0.046323966979980466, 0.046755840301513675, 0.046695838928222655, 0.04634048080444336, 0.04807113647460937, 0.04646169662475586, 0.04631039810180664, 0.04656899261474609, 0.046688735961914064, 0.046516223907470705, 0.046823135375976564, 0.04717391967773438, 0.0470384635925293, 
0.04715919876098633, 0.048711265563964844, 0.04620569610595703, 0.04592832183837891, 0.046012542724609376, 0.04587519836425781, 0.04585443115234375, 0.04607372665405273, 0.04580188751220703, 0.04584844970703125, 0.04600640106201172, 0.046723072052001956, 0.04641308975219727, 0.04597423934936523, 0.045956863403320315, 0.0464667854309082, 0.04749756622314453, 0.04595859146118164, 0.04626918411254883, 0.04628246307373047, 0.04636716842651367, 0.04692342376708984, 0.0466761589050293, 0.04644432067871094, 0.04650620651245117, 0.04657084655761719, 0.0461473617553711, 0.04612160110473633, 0.046217247009277346, 0.04620918273925781, 0.04617737579345703, 0.046556224822998045, 0.04630108642578125, 0.04623791885375977, 0.04615484619140625, 0.046932640075683596, 0.046529888153076175, 0.04633603286743164, 0.04660451126098633, 0.046502334594726566, 0.04640768051147461, 0.046683391571044924, 0.04759164810180664, 0.04658777618408203, 0.04670083236694336, 0.04724057769775391, 0.04684505462646484, 0.046696319580078124, 0.04652236938476562, 0.046581760406494144, 0.04631961441040039, 0.04665753555297852, 0.04655702209472656, 0.046473377227783205, 0.04669161605834961, 0.0466778564453125, 0.04694515228271484, 0.04668620681762695, 0.04698521423339844, 0.04693561553955078, 0.04694633483886719, 0.04706550216674805, 0.046941665649414065, 0.04687129592895508]",tokens/s,21.567782754979433,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if 
self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File 
""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code 
{isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,8806.985728,10152.18176,0.0,9749.659648,9714.717184,s,1,14.500876953125,14.500876953125,0.0,14.500876953125,14.500876953125,14.500876953125,14.500876953125,[14.500876953125],,kWh,0.0002100704608041648,2.3161825936238284e-05,6.408894015999533e-05,0.0002973212269003984,,MB,1873.637376,10580.000768,0.0,10162.798592,10037.824,s,10,4.6236782531738285,0.46236782531738274,0.0004647463172391825,0.46233811950683595,0.4630218353271484,0.46305596160888673,0.46308326263427735,"[0.46163937377929687, 0.46235504150390627, 0.463090087890625, 0.46210296630859377, 0.4626707458496094, 0.46301425170898436, 0.46232119750976564, 0.4622552185058594, 0.46255322265625, 0.4616761474609375]",tokens/s,553.6717435394086,kWh,1.3528553554355982e-05,1.4919736555904223e-06,9.02868146536356e-06,2.4049208675309965e-05,tokens/kWh,10644840.89502793,MB,1885.925376,10791.81312,0.0,10374.610944,10299.609088,s,10,34.45157177734375,3.4451571777343752,0.0029347249595274515,3.4456467285156247,3.448758935546875,3.4489090087890624,3.4490290673828126,"[3.440833740234375, 3.440480224609375, 3.44821142578125, 3.44553466796875, 3.44905908203125, 3.44396337890625, 3.44284619140625, 3.4457587890625, 3.4487255859375, 3.44615869140625]",tokens/s,18.28653868310021,kWh,0.0001005578825048101,1.1089824029380255e-05,6.664212907123668e-05,0.000178289835605427,tokens/kWh,353357.2162769011,,s,630,34.447690570831305,0.05467887392195444,0.0004015603324851577,0.05468820762634277,0.0551680305480957,0.05526932697296143,0.05553693641662598,"[0.05422867202758789, 0.05403680038452149, 0.053889022827148435, 0.05436969757080078, 0.05394729614257812, 0.0540013427734375, 0.05393337631225586, 0.05396960067749024, 0.054132095336914064, 0.0543238410949707, 0.05414281463623047, 0.05396319961547852, 0.05421635055541992, 0.05435776138305664, 0.054083904266357424, 0.05413283157348633, 0.05435382461547852, 0.05403987121582031, 0.05417644882202149, 0.054144832611083986, 0.054126270294189455, 0.05419641494750976, 0.05427347183227539, 0.054242176055908205, 0.053991073608398436, 0.054182239532470707, 0.054335487365722655, 0.054788097381591794, 0.05479423904418945, 0.05427414321899414, 0.054812576293945314, 0.05461401748657226, 0.054687679290771486, 0.054570079803466794, 0.05455152130126953, 0.054542335510253906, 0.05467340850830078, 0.05479171371459961, 0.054767902374267576, 0.054835391998291017, 0.05471171188354492, 0.0545715217590332, 0.0548037109375, 0.05501628875732422, 0.05492326354980469, 0.05503385543823242, 0.055075904846191404, 
0.05503478240966797, 0.05489433670043945, 0.05501571273803711, 0.05505862426757813, 0.05501318359375, 0.05729008102416992, 0.054873825073242184, 0.05520851135253906, 0.05571014404296875, 0.05506041717529297, 0.05511526489257813, 0.05516534423828125, 0.054934974670410155, 0.05519187164306641, 0.0551440315246582, 0.05511052703857422, 0.05452979278564453, 0.054069503784179684, 0.05406412887573242, 0.05388828659057617, 0.05407209777832031, 0.054017982482910155, 0.0539634895324707, 0.05396300888061523, 0.05396892929077148, 0.05406105422973633, 0.054247390747070315, 0.05419772720336914, 0.05432582473754883, 0.05438873672485352, 0.05432831954956055, 0.05408256149291992, 0.05439667129516602, 0.05438694381713867, 0.05415273666381836, 0.05425392150878906, 0.05429056167602539, 0.054636737823486325, 0.054169185638427736, 0.05411862564086914, 0.054042945861816405, 0.0547402229309082, 0.05473110580444336, 0.05432640075683594, 0.05428499221801758, 0.054504928588867185, 0.05441004943847656, 0.05456076812744141, 0.05482291030883789, 0.05465673446655273, 0.05474540710449219, 0.05442464065551758, 0.05468867111206055, 0.054725887298583985, 0.0544796142578125, 0.05485548782348633, 0.05480291366577148, 0.05477481460571289, 0.054815425872802734, 0.05493958282470703, 0.05487558364868164, 0.05462694549560547, 0.05485327911376953, 0.05483536148071289, 0.055124160766601565, 0.05516070556640625, 0.055140384674072264, 0.05512611389160156, 0.055183361053466794, 0.05506582260131836, 0.05506252670288086, 0.05516777420043945, 0.05552489471435547, 0.05520566558837891, 0.055392704010009765, 0.05529792022705078, 0.05515302276611328, 0.05511167907714844, 0.055283008575439455, 0.05487891387939453, 0.054320510864257814, 0.05424361419677735, 0.05436000061035156, 0.05403692626953125, 0.0542845458984375, 0.05420019149780273, 0.05389340972900391, 0.054220638275146484, 0.05421593475341797, 0.05417180633544922, 0.05410060882568359, 0.054269119262695314, 0.05423321533203125, 0.05419411087036133, 0.05419260787963867, 0.05469619369506836, 0.054798336029052735, 0.054599681854248044, 0.05461552047729492, 0.05455887985229492, 0.05444441604614258, 0.054763553619384765, 0.05457936096191406, 0.0545780143737793, 0.054526142120361325, 0.0546822395324707, 0.054507678985595706, 0.05463846588134766, 0.05465100860595703, 0.05472079849243164, 0.05477056121826172, 0.05475619125366211, 0.05477151870727539, 0.054732990264892575, 0.054799392700195314, 0.05491145706176758, 0.05478665542602539, 0.05494889450073242, 0.05495487976074219, 0.05500723266601563, 0.05493532943725586, 0.05484703826904297, 0.055112350463867185, 0.0549048957824707, 0.05468582534790039, 0.05478790283203125, 0.05514358520507812, 0.05515536117553711, 0.05496425628662109, 0.055167137145996095, 0.05528575897216797, 0.05487161636352539, 0.05527606582641602, 0.05547407913208008, 0.05535129547119141, 0.05512188720703125, 0.05523401641845703, 0.055173408508300784, 0.05508931350708008, 0.05532175827026367, 0.05506351852416992, 0.05526528167724609, 0.054561279296875, 0.05416803359985352, 0.05452137756347656, 0.05408406448364258, 0.05402828979492187, 0.054196224212646485, 0.05430476760864258, 0.05415932846069336, 0.05404249572753906, 0.05420048141479492, 0.054178081512451175, 0.05418307113647461, 0.0540513916015625, 0.05433135986328125, 0.054165695190429686, 0.05416739273071289, 0.05414912033081055, 0.054179840087890625, 0.05439712142944336, 0.05451865768432617, 0.05475219345092774, 0.05465305709838867, 0.054534015655517576, 0.05444723129272461, 0.055089473724365234, 0.05471398544311523, 
0.05463260650634766, 0.054733600616455075, 0.05460969543457031, 0.05443587112426758, 0.05481059265136719, 0.054593856811523435, 0.05472451019287109, 0.05462835311889649, 0.054596702575683595, 0.05482998275756836, 0.05470316696166992, 0.05495868682861328, 0.05511612701416015, 0.05501705551147461, 0.05501724624633789, 0.054977153778076174, 0.05511782455444336, 0.05480652618408203, 0.05489984130859375, 0.054930145263671876, 0.054943904876708985, 0.055005184173583986, 0.05479423904418945, 0.05497651290893555, 0.054967647552490236, 0.055076641082763673, 0.05514704132080078, 0.05495820617675781, 0.05513811111450195, 0.055070560455322264, 0.05519571304321289, 0.05525705718994141, 0.05512860870361328, 0.05526483154296875, 0.05518819046020508, 0.055076160430908204, 0.05499871826171875, 0.05475788879394531, 0.05428838348388672, 0.054073345184326174, 0.0543744010925293, 0.05444124984741211, 0.05418057632446289, 0.05402582550048828, 0.05440457534790039, 0.05408428955078125, 0.05410022354125977, 0.05410508728027344, 0.05418937683105469, 0.054425281524658205, 0.05421612930297852, 0.05429510498046875, 0.054341087341308596, 0.05445788955688476, 0.05505513763427734, 0.05453155136108399, 0.054469310760498046, 0.05445228958129883, 0.05475532913208008, 0.054617729187011715, 0.05439936065673828, 0.05480448150634765, 0.05475459289550781, 0.05444796752929688, 0.05467014312744141, 0.054675201416015624, 0.054565185546875, 0.054687744140625, 0.05468716812133789, 0.05485193634033203, 0.05464863967895508, 0.05489673614501953, 0.05481292724609375, 0.054906143188476565, 0.05482086563110351, 0.054919681549072265, 0.05514473724365234, 0.054973567962646484, 0.055081855773925784, 0.05508505630493164, 0.054908512115478515, 0.0549851188659668, 0.05502975845336914, 0.05497635269165039, 0.054884449005126956, 0.05501139068603516, 0.05503315353393555, 0.05488505554199219, 0.05498783874511719, 0.05519279861450195, 0.055131744384765625, 0.055103488922119144, 0.055050048828125, 0.05523072052001953, 0.0552817268371582, 0.05514767837524414, 0.05559487915039062, 0.05527616119384766, 0.05522876739501953, 0.055244800567626956, 0.0548636474609375, 0.05409571075439453, 0.054019329071044925, 0.05399849700927734, 0.05418598556518555, 0.05427977752685547, 0.05416592025756836, 0.05439897537231445, 0.05431296157836914, 0.05428838348388672, 0.05447679901123047, 0.054252830505371094, 0.05438723373413086, 0.0544134407043457, 0.05428025436401367, 0.05436985778808594, 0.054258113861083986, 0.05426499176025391, 0.0545145263671875, 0.05446665573120117, 0.054494239807128905, 0.054379390716552733, 0.05434777450561523, 0.05465087890625, 0.054583297729492185, 0.054583297729492185, 0.054291679382324216, 0.054429664611816406, 0.05454726409912109, 0.05448198318481445, 0.05450643157958984, 0.0544747200012207, 0.054695968627929685, 0.054728607177734374, 0.05450486373901367, 0.054598495483398436, 0.05472857666015625, 0.054757377624511716, 0.05475052642822266, 0.0548267822265625, 0.05504499053955078, 0.05487129592895508, 0.054745025634765625, 0.05465740966796875, 0.054751201629638674, 0.05481468963623047, 0.05480089569091797, 0.05505340957641602, 0.055094432830810544, 0.0552564811706543, 0.05487849426269531, 0.05515433502197266, 0.05529958343505859, 0.05523305511474609, 0.055201793670654295, 0.05501776123046875, 0.05498860931396484, 0.05498268890380859, 0.05513628768920899, 0.054937824249267575, 0.05505843353271484, 0.055373825073242185, 0.0554967041015625, 0.054488895416259765, 0.05436643218994141, 0.05398233413696289, 0.05410639953613281, 
0.054319839477539066, 0.05415449523925781, 0.05394476699829102, 0.0542496337890625, 0.05439078521728516, 0.054101215362548825, 0.054126529693603515, 0.0545513916015625, 0.054166622161865234, 0.054540542602539065, 0.05425219345092774, 0.054197502136230466, 0.05405756759643555, 0.05424991989135742, 0.05428396987915039, 0.0543047981262207, 0.05437055969238281, 0.054626049041748045, 0.05439683151245117, 0.05430089569091797, 0.05444905471801758, 0.0543590087890625, 0.05442348861694336, 0.05462432098388672, 0.05442995071411133, 0.054488929748535156, 0.05444588851928711, 0.05457929611206055, 0.054467742919921874, 0.05461030578613281, 0.054821342468261716, 0.05444598388671875, 0.054719841003417966, 0.05487094497680664, 0.0552077751159668, 0.05505023956298828, 0.054724094390869144, 0.055001823425292966, 0.05473462295532226, 0.054863872528076174, 0.054871585845947264, 0.05499542236328125, 0.05492700958251953, 0.054745246887207034, 0.054898880004882813, 0.05495548629760742, 0.05500163269042969, 0.05486796951293945, 0.0549747200012207, 0.05493119812011719, 0.05503395080566406, 0.05523436737060547, 0.05554185485839844, 0.05527104187011719, 0.05526528167724609, 0.05541926574707031, 0.05526131057739258, 0.05505356979370117, 0.05541542434692383, 0.054540542602539065, 0.05407731246948242, 0.05421433639526367, 0.054038177490234374, 0.05396115112304688, 0.05423235321044922, 0.05424851226806641, 0.054321025848388674, 0.05421811294555664, 0.05415385437011719, 0.054658241271972656, 0.05447161483764648, 0.05420854568481445, 0.05442521667480469, 0.054403297424316405, 0.05436620712280273, 0.05441641616821289, 0.054313793182373046, 0.05418627166748047, 0.05416934585571289, 0.05446886444091797, 0.054682945251464846, 0.0543012466430664, 0.05470003128051758, 0.05468979263305664, 0.05455257415771484, 0.054568225860595704, 0.054488895416259765, 0.054801055908203125, 0.05481510543823242, 0.054725631713867184, 0.05454342269897461, 0.054787487030029294, 0.054599422454833985, 0.054647262573242185, 0.05469427108764648, 0.054812480926513675, 0.05462015914916992, 0.054880256652832034, 0.0547690544128418, 0.054741600036621096, 0.05480857467651367, 0.05469510269165039, 0.054839649200439454, 0.054626785278320315, 0.054691841125488284, 0.05504719924926758, 0.055167007446289065, 0.054895294189453124, 0.05513593673706055, 0.05500163269042969, 0.055760929107666016, 0.05532223892211914, 0.05503219223022461, 0.05518163299560547, 0.055128864288330075, 0.05511209487915039, 0.05508761596679688, 0.0554886703491211, 0.055000896453857424, 0.055107616424560545, 0.055387359619140625, 0.05535004806518555, 0.0547627182006836, 0.054207134246826175, 0.054159038543701174, 0.05399728012084961, 0.054238239288330076, 0.05430662536621094, 0.05446995162963867, 0.054715072631835934, 0.054279296875, 0.05429094314575195, 0.05422732925415039, 0.05437849426269531, 0.05457619094848633, 0.0545863037109375, 0.05452339172363281, 0.05420051193237305, 0.05446073532104492, 0.05459987258911133, 0.05426764678955078, 0.05463065719604492, 0.05453392028808594, 0.05453337478637695, 0.054507518768310545, 0.05434758377075195, 0.054811614990234375, 0.054591487884521485, 0.05446246337890625, 0.054755233764648435, 0.054685791015625, 0.05441151809692383, 0.054596641540527346, 0.054739456176757816, 0.05488662338256836, 0.05462953567504883, 0.055607872009277345, 0.05470566558837891, 0.05504415893554687, 0.05482364654541016, 0.05493468856811524, 0.05493251037597656, 0.05472204971313477, 0.05488393783569336, 0.05503664016723633, 0.054873855590820315, 0.05483750534057617, 
0.054834175109863284, 0.0550463981628418, 0.055098110198974606, 0.054798336029052735, 0.05486796951293945, 0.05496937561035156, 0.055247840881347654, 0.055139583587646486, 0.05506233596801758, 0.05506963348388672, 0.05492940902709961, 0.05496319961547851, 0.05514518356323242, 0.05518159866333008, 0.055145633697509765, 0.055433311462402345, 0.055188255310058595, 0.055441375732421874, 0.05469516754150391, 0.05441126251220703, 0.0555975341796875, 0.05420800018310547, 0.0541561279296875, 0.054037567138671874, 0.053887966156005856, 0.05432115173339844, 0.05425139236450195, 0.054179969787597655, 0.05408534240722656, 0.054173534393310546, 0.05427449417114258, 0.05431449508666992, 0.054456832885742185, 0.054599681854248044, 0.054474750518798826, 0.05450342559814453, 0.05444607925415039, 0.05459487915039062, 0.0546044807434082, 0.05462393569946289, 0.054253089904785154, 0.05457180786132813, 0.054740993499755856, 0.05520582580566406, 0.05418399810791016, 0.054476001739501956, 0.054627105712890624, 0.05456617736816406, 0.054489505767822265, 0.054427391052246095, 0.05457158279418945, 0.05467356872558594, 0.05491017532348633, 0.05498470306396484, 0.054825599670410154, 0.054712352752685545, 0.05476665496826172, 0.05467228698730469, 0.05457920074462891, 0.05467136001586914, 0.05513651275634766, 0.05512371063232422, 0.05459715270996094, 0.055002784729003905, 0.054712703704833984, 0.05477830505371094, 0.05497158432006836, 0.0549343376159668, 0.05483107376098633, 0.054978591918945316, 0.05523244857788086, 0.055170337677001954, 0.05513296127319336, 0.05526723098754883, 0.05528995132446289, 0.05498793411254883, 0.055022369384765624, 0.05521414566040039, 0.05546416091918945, 0.05501721572875977, 0.05511734390258789]",tokens/s,18.28859901956547,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 
492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File 
""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,4975.235072,7338.917888,0.0,6943.670272,6539.1744,s,1,11.480953125,11.480953125,0.0,11.480953125,11.480953125,11.480953125,11.480953125,[11.480953125],,kWh,0.00012999175551251484,1.4331755563570499e-05,4.0486421278004836e-05,0.00018480993235409017,,MB,5060.62848,7634.61632,0.0,7224.68864,6917.39904,s,10,2.0691580657958983,0.20691580657958983,0.0004209720328200674,0.20692723083496095,0.20733978881835938,0.20746485443115234,0.2075649069213867,"[0.20605894470214844, 0.20640179443359374, 0.20696316528320313, 0.2075899200439453, 0.20731199645996093, 0.20689129638671874, 0.2068658905029297, 0.2072939453125, 0.20697042846679686, 0.20681068420410156]",tokens/s,1237.2181914557118,kWh,6.0760903196431905e-06,6.700888447467249e-07,4.044135888367268e-06,1.0790315052757185e-05,tokens/kWh,23724979.182566673,MB,5063.942144,7636.713472,0.0,7226.785792,6917.4016,s,10,21.998330322265623,2.1998330322265622,0.014466196460380025,2.1997823486328123,2.2205612060546875,2.2218045288085935,2.2227991870117187,"[2.18112548828125, 2.211341064453125, 2.181974609375, 2.200316650390625, 2.182593994140625, 2.1949794921875, 2.199248046875, 2.203418212890625, 2.220284912109375, 2.2230478515625]",tokens/s,28.63853714217325,kWh,6.324594263535734e-05,6.9759540446782756e-06,4.201862318403299e-05,0.00011224051986406862,tokens/kWh,561294.6204837393,,s,630,21.99556670379637,0.034913597942533944,0.0006599943159116003,0.034829471588134764,0.03534640693664551,0.03560117568969727,0.037297059745788605,"[0.03521535873413086, 0.03477872085571289, 0.034703296661376955, 0.03445158386230469, 0.034527584075927736, 0.034576095581054685, 0.034578720092773435, 0.03460086441040039, 0.034791519165039066, 0.034371166229248046, 0.03463151931762695, 0.03455648040771484, 0.0345354232788086, 0.034764766693115234, 0.03461123275756836, 0.03454771041870117, 0.03483967971801758, 0.03497987365722656, 0.034666591644287106, 0.03485567855834961, 0.034756256103515626, 0.034814304351806644, 0.03549593734741211, 0.03465593719482422, 0.03440671920776367, 0.0343818244934082, 0.034409889221191405, 0.034385631561279294, 0.034474048614501956, 0.034466014862060544, 0.03435299301147461, 0.03466726303100586, 0.03453033447265625, 0.03442787170410156, 0.03449651336669922, 0.03444863891601563, 0.03444607925415039, 0.03432377624511719, 0.034152225494384764, 0.03429059219360352, 0.03423436737060547, 
0.03440412902832031, 0.0343732795715332, 0.03443123245239258, 0.03444358444213867, 0.03440390396118164, 0.034601024627685543, 0.03455132675170899, 0.03453628921508789, 0.034699230194091796, 0.03454313659667969, 0.0345687026977539, 0.03464601516723633, 0.03489791870117188, 0.03759241485595703, 0.034898849487304685, 0.034562911987304684, 0.03452143859863281, 0.034504768371582034, 0.034347518920898434, 0.03448825454711914, 0.034359294891357424, 0.03432044982910156, 0.035456417083740234, 0.03482896041870117, 0.03462921524047852, 0.034581214904785156, 0.0345247688293457, 0.03528335952758789, 0.035276321411132815, 0.03515235137939453, 0.034928638458251955, 0.03512115097045899, 0.03500236892700195, 0.03514777755737305, 0.035471359252929685, 0.03536076736450195, 0.03499212646484375, 0.034920448303222655, 0.03488927841186523, 0.034955711364746095, 0.03481727981567383, 0.035074817657470704, 0.03494297790527344, 0.035004417419433595, 0.03753350448608399, 0.035110782623291016, 0.034832191467285153, 0.0350274543762207, 0.03488515090942383, 0.03483081436157227, 0.03486310577392578, 0.035043327331542966, 0.0350346565246582, 0.0348713264465332, 0.034871742248535155, 0.03515379333496094, 0.035268001556396485, 0.0349969596862793, 0.034981727600097656, 0.03513100814819336, 0.03512307357788086, 0.03504604721069336, 0.035168254852294925, 0.036519935607910156, 0.03518259048461914, 0.035092479705810545, 0.03504460906982422, 0.03512393569946289, 0.035084320068359376, 0.035141632080078124, 0.03500543975830078, 0.03513651275634765, 0.03497107315063477, 0.034954975128173825, 0.03515068817138672, 0.03570278549194336, 0.0351819839477539, 0.03493334579467773, 0.034933856964111325, 0.03511145782470703, 0.03499808120727539, 0.03495379257202148, 0.03491839981079101, 0.034936832427978515, 0.03486515045166016, 0.03579616165161133, 0.03493561553955078, 0.03487948989868164, 0.0348012809753418, 0.03468896102905274, 0.034781631469726564, 0.03470905685424805, 0.0347360954284668, 0.035027423858642576, 0.034756607055664065, 0.03456979370117187, 0.03468495941162109, 0.0343842887878418, 0.034364990234375, 0.03438572692871094, 0.034189151763916015, 0.03418396759033203, 0.03417059326171875, 0.03414163208007812, 0.03418812942504883, 0.034369598388671874, 0.03435843276977539, 0.034231071472167966, 0.03454489517211914, 0.03430476760864258, 0.03423846435546875, 0.03464191818237305, 0.03484649658203125, 0.03477731323242188, 0.03525212860107422, 0.03492572784423828, 0.03495558547973633, 0.03499264144897461, 0.03487091064453125, 0.03488614273071289, 0.034737438201904294, 0.034662593841552736, 0.034544158935546875, 0.03468902587890625, 0.0346951675415039, 0.034664447784423826, 0.03450182342529297, 0.03440313720703125, 0.034289600372314454, 0.034643199920654295, 0.034614078521728514, 0.03444326400756836, 0.03446169662475586, 0.0344268798828125, 0.03450454330444336, 0.03466233444213867, 0.034445537567138675, 0.03462883377075195, 0.03464707183837891, 0.034349822998046876, 0.03457356643676758, 0.03457129669189453, 0.034949569702148436, 0.03492496109008789, 0.034822017669677734, 0.03482998275756836, 0.03465660858154297, 0.03480303955078125, 0.03550419235229492, 0.03490553665161133, 0.0349639663696289, 0.03454972839355469, 0.03608992004394531, 0.03459081649780273, 0.03458038330078125, 0.03466153717041016, 0.03497046279907227, 0.03467468643188477, 0.03502489471435547, 0.03617792129516602, 0.034904064178466795, 0.03671817779541016, 0.034744384765625, 0.034566497802734374, 0.03504067230224609, 0.03584470367431641, 0.0352624626159668, 
0.034907520294189455, 0.034780799865722654, 0.03473100662231445, 0.03508803176879883, 0.034533729553222654, 0.034604385375976564, 0.03453744125366211, 0.034622142791748044, 0.034492416381835936, 0.03444121551513672, 0.03470745468139649, 0.03444736099243164, 0.034442878723144534, 0.03438406372070312, 0.034350975036621094, 0.03442313766479492, 0.03454563140869141, 0.03432447814941406, 0.03498188781738281, 0.03460710525512695, 0.03444697570800781, 0.034786750793457034, 0.042973407745361326, 0.03458736038208008, 0.034492416381835936, 0.0343895378112793, 0.03483286285400391, 0.03556700897216797, 0.034654815673828124, 0.034633438110351564, 0.03458070373535156, 0.0347213134765625, 0.034800159454345704, 0.03468492889404297, 0.03473721694946289, 0.03479180908203125, 0.03454435348510742, 0.034534912109375, 0.0345211181640625, 0.03454790496826172, 0.03468505477905273, 0.03456972885131836, 0.03453593444824219, 0.03470876693725586, 0.03544473648071289, 0.03471142578125, 0.034687103271484374, 0.03440639877319336, 0.03443302536010742, 0.03487539291381836, 0.03506380844116211, 0.03456409454345703, 0.03483766555786133, 0.03451171112060547, 0.034393630981445315, 0.0345432014465332, 0.03451279830932617, 0.03457942581176758, 0.03454556655883789, 0.03456742477416992, 0.034581344604492185, 0.03465824127197266, 0.03459619140625, 0.03457904052734375, 0.03448025512695312, 0.03452467346191406, 0.03460966491699219, 0.034659713745117185, 0.034734718322753905, 0.034849918365478516, 0.03468991851806641, 0.03492240142822266, 0.03468092727661133, 0.034797569274902344, 0.03452630233764648, 0.03454249572753906, 0.034516990661621096, 0.034353153228759765, 0.034482177734375, 0.03464524841308594, 0.034627647399902345, 0.03476889419555664, 0.03471548843383789, 0.03447407913208008, 0.034448127746582034, 0.03452099227905273, 0.034627681732177736, 0.03467468643188477, 0.03459612655639648, 0.0345807991027832, 0.03457788848876953, 0.03462649536132813, 0.03467654418945312, 0.03466463851928711, 0.03471155166625976, 0.03466239929199219, 0.03460095977783203, 0.034514942169189454, 0.03473612976074219, 0.03481151962280273, 0.03454399871826172, 0.03488668823242187, 0.034982879638671874, 0.03446742248535156, 0.034447742462158204, 0.03462742233276367, 0.034619583129882815, 0.03534592056274414, 0.034777599334716795, 0.034492416381835936, 0.034471359252929684, 0.03468291091918945, 0.03468534469604492, 0.03465017700195312, 0.03460835266113281, 0.03465407943725586, 0.03477542495727539, 0.03451555252075195, 0.03464361572265625, 0.03447129440307617, 0.034514942169189454, 0.034581470489501956, 0.03466035079956055, 0.034510593414306644, 0.03468835067749024, 0.03451580810546875, 0.03452035140991211, 0.034558303833007814, 0.034558048248291014, 0.03428908920288086, 0.03477596664428711, 0.03466035079956055, 0.0344268798828125, 0.035315711975097655, 0.034523136138916014, 0.034476032257080076, 0.03460300827026367, 0.034530433654785156, 0.03437862396240234, 0.034377727508544925, 0.0348768310546875, 0.03615804672241211, 0.03508844757080078, 0.03473196792602539, 0.03461072158813477, 0.035152191162109374, 0.03448758316040039, 0.03436537551879883, 0.03442988967895508, 0.03431161499023438, 0.03433324813842773, 0.034485408782958984, 0.03441955184936524, 0.034354911804199216, 0.035159934997558595, 0.0342020149230957, 0.0343164176940918, 0.03439807891845703, 0.03435868835449219, 0.034855518341064456, 0.04340140914916992, 0.03504518508911133, 0.03523993682861328, 0.03497091293334961, 0.03571376037597656, 0.03504127883911133, 0.03502191925048828, 
0.03481670379638672, 0.03511225509643555, 0.03501558303833008, 0.03563724899291992, 0.0348521614074707, 0.03474684906005859, 0.03461491012573242, 0.03471420669555664, 0.03482771301269531, 0.034885440826416016, 0.034751102447509764, 0.034516990661621096, 0.03445772933959961, 0.034402305603027344, 0.03466761779785156, 0.03486521530151367, 0.034681121826171876, 0.03464249420166016, 0.03463987350463867, 0.03508329772949219, 0.03465315246582031, 0.03468259048461914, 0.034936832427978515, 0.03454185485839844, 0.034582080841064455, 0.03433840179443359, 0.03450556945800781, 0.034408447265625, 0.034418689727783204, 0.03465331268310547, 0.034833278656005856, 0.03484262466430664, 0.03498393630981445, 0.03497267150878906, 0.03495219039916992, 0.035043327331542966, 0.03511270523071289, 0.03506982421875, 0.03540224075317383, 0.03508211135864258, 0.03506393432617187, 0.03507187271118164, 0.034938880920410156, 0.034904064178466795, 0.03587481689453125, 0.03533795166015625, 0.03521292877197266, 0.03518531036376953, 0.03523993682861328, 0.03520512008666992, 0.03498780822753906, 0.03486537551879883, 0.03490140914916992, 0.03491900634765625, 0.035158271789550784, 0.036060928344726566, 0.034985984802246094, 0.03489791870117188, 0.034864158630371095, 0.0349766731262207, 0.035098464965820315, 0.03474163055419922, 0.03474723052978516, 0.03498140716552734, 0.03487100982666016, 0.03484748840332031, 0.03605136108398437, 0.036176990509033204, 0.03821660614013672, 0.035208480834960934, 0.035055553436279294, 0.03508099365234375, 0.03508006286621094, 0.035299457550048825, 0.034958560943603514, 0.03519772720336914, 0.034988033294677735, 0.03514080047607422, 0.0348221435546875, 0.034947265625, 0.03466656112670898, 0.034934337615966794, 0.03475500869750976, 0.034900478363037106, 0.03463993453979492, 0.034547168731689455, 0.03449910354614258, 0.03466854476928711, 0.03477913665771484, 0.03516387176513672, 0.03607759857177734, 0.03535078430175781, 0.03493273544311523, 0.0346561279296875, 0.034861183166503905, 0.035628223419189455, 0.03563788986206055, 0.03515615844726563, 0.0347955207824707, 0.03473158264160156, 0.03474460983276367, 0.034739967346191405, 0.03447235107421875, 0.03468902587890625, 0.03438095855712891, 0.03455897521972656, 0.03439187240600586, 0.03482422256469726, 0.03461705780029297, 0.0346319694519043, 0.034678558349609374, 0.034656478881835935, 0.03483852767944336, 0.034709087371826174, 0.0346929931640625, 0.034544158935546875, 0.03430310440063476, 0.03460800170898438, 0.03525823974609375, 0.038235649108886716, 0.03474905776977539, 0.034457599639892575, 0.03462348937988281, 0.034770942687988284, 0.034523136138916014, 0.034799232482910156, 0.034449790954589846, 0.034245887756347654, 0.03438614273071289, 0.03605196762084961, 0.03536489486694336, 0.035118049621582034, 0.03517779159545899, 0.0351459846496582, 0.035400127410888674, 0.03498393630981445, 0.03503923034667969, 0.03498745727539063, 0.035326526641845706, 0.03511500930786133, 0.03508643341064453, 0.03542416000366211, 0.035221214294433596, 0.03507843017578125, 0.03522150421142578, 0.034977790832519534, 0.03553500747680664, 0.03514761734008789, 0.03560857772827149, 0.035098400115966794, 0.03524630355834961, 0.03494297790527344, 0.034854911804199216, 0.035067680358886716, 0.03512137603759766, 0.03496345520019531, 0.034961406707763674, 0.03523321533203125, 0.035111488342285155, 0.035194881439208986, 0.0349648323059082, 0.03514835357666016, 0.03498166275024414, 0.03512351989746094, 0.035149822235107424, 0.03530710220336914, 0.035155681610107424, 
0.03498851013183594, 0.03592822265625, 0.03558931350708008, 0.035248897552490235, 0.03516761779785156, 0.03527552032470703, 0.035417823791503905, 0.035641502380371094, 0.03513763046264649, 0.035697792053222654, 0.03559212875366211, 0.03523433685302734, 0.035531200408935544, 0.035373054504394534, 0.035227649688720705, 0.03541846466064453, 0.03502851104736328, 0.035201152801513674, 0.035209217071533204, 0.03525632095336914, 0.035337345123291015, 0.03540057754516602, 0.035216480255126956, 0.03517737579345703, 0.035074047088623043, 0.03572947311401367, 0.035259166717529294, 0.035192001342773435, 0.03518288040161133, 0.03522361755371094, 0.03553308868408203, 0.035031070709228514, 0.03539148712158203, 0.03551216125488281, 0.035284385681152344, 0.035199745178222656, 0.03527715301513672, 0.03506857681274414, 0.035097599029541016, 0.035110912322998046, 0.03540079879760742, 0.03515075302124023, 0.0351723518371582, 0.03514486312866211, 0.03523478317260742, 0.03523366546630859, 0.03516976165771484, 0.03505411148071289, 0.035281982421875, 0.0351690559387207, 0.03494659042358399, 0.03512998580932617, 0.035074047088623043, 0.035227649688720705, 0.035225601196289064, 0.03506790542602539, 0.035084320068359376, 0.03491632080078125, 0.035416065216064455, 0.03510067367553711, 0.03506175994873047, 0.03547312164306641, 0.03528729629516601, 0.03527478408813477, 0.034909313201904296, 0.03487814331054687, 0.03505376052856445, 0.035210784912109376, 0.03772777557373047, 0.03585263824462891, 0.03502569580078125, 0.03530118560791016, 0.03526860809326172, 0.03512633514404297, 0.03526332855224609, 0.03515811157226562, 0.035296607971191406, 0.03535529708862305, 0.03523379135131836, 0.035229057312011716, 0.03527743911743164, 0.035532417297363283, 0.03547926330566406, 0.03544316864013672, 0.035246273040771485, 0.03509964752197266, 0.03539209747314453, 0.03591004943847656]",tokens/s,28.642135412281213,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,11136.073728,12227.3792,0.0,11848.9088,11814.752256,s,1,16.2592177734375,16.2592177734375,0.0,16.2592177734375,16.2592177734375,16.2592177734375,16.2592177734375,[16.2592177734375],,kWh,0.00026480793421250536,2.9202987075294415e-05,8.6119513340005e-05,0.0003801304346278048,,MB,2084.450304,14033.027072,0.0,13625.196544,13298.00192,s,10,23.3345458984375,2.33345458984375,0.0006364246180502187,2.3333216552734375,2.33433662109375,2.3344462646484376,2.3345339794921878,"[2.33269580078125, 2.333192138671875, 2.332452392578125, 2.334312255859375, 2.334555908203125, 2.333282958984375, 2.333640625, 2.333096923828125, 2.33395654296875, 2.3333603515625]",tokens/s,109.70858447994995,kWh,6.801452597916902e-05,7.501730836656543e-06,4.510703608559819e-05,0.00012062329290142372,tokens/kWh,2122309.8279136634,MB,2088.77568,14184.022016,0.0,13776.191488,13689.859584,s,10,1365.1401875000001,136.51401875,0.043220834864750195,136.508390625,136.56333750000002,136.5687234375,136.57303218750002,"[136.474234375, 136.480890625, 136.51365625, 136.574109375, 136.554078125, 136.443140625, 136.558046875, 136.562140625, 136.476765625, 136.503125]",tokens/s,0.4614910657298337,kWh,0.003982938044971668,0.0004393487290448445,0.0026491697860008046,0.0070714565600173175,tokens/kWh,8909.05564720682,,s,630,1365.1345581054693,2.1668802509610616,0.001184149768945829,2.1668848876953124,2.1684020751953126,2.1688128173828125,2.1695393676757813,"[2.16586181640625, 2.165533447265625, 2.16496337890625, 2.164727783203125, 2.166044677734375, 2.166136474609375, 2.165833251953125, 2.164509033203125, 2.164980224609375, 2.16477294921875, 2.1657529296875, 2.165496826171875, 2.164805419921875, 2.1644208984375, 2.1651064453125, 2.166121826171875, 2.16661279296875, 2.165902587890625, 2.1653994140625, 2.167724609375, 2.1663828125, 2.166310791015625, 2.165684326171875, 2.16618798828125, 2.166972412109375, 2.165771240234375, 2.16618017578125, 2.16656689453125, 2.16574853515625, 2.16681884765625, 2.166228515625, 2.166172119140625, 2.166724609375, 2.167048095703125, 2.165712158203125, 2.165650146484375, 2.16532275390625, 2.166369140625, 2.167191650390625, 2.167224365234375, 2.167033935546875, 2.1665341796875, 2.16774658203125, 2.167560302734375, 2.16664892578125, 2.16663671875, 2.16684326171875, 2.167447509765625, 2.167486328125, 2.16671044921875, 2.167170166015625, 2.16604541015625, 2.1677705078125, 2.166477783203125, 2.166822265625, 2.1664443359375, 2.16675537109375, 2.166044189453125, 2.166268310546875, 2.16554052734375, 2.1667373046875, 2.16703173828125, 2.166902099609375, 2.164823974609375, 2.16512109375, 2.166529296875, 2.166343994140625, 2.16569873046875, 2.166558837890625, 2.167146484375, 2.165773681640625, 2.166070068359375, 2.16474169921875, 2.165238037109375, 2.166134521484375, 2.165922119140625, 2.16620849609375, 2.16500732421875, 
2.166781005859375, 2.165747802734375, 2.166453369140625, 2.165948974609375, 2.16657958984375, 2.166655029296875, 2.166045654296875, 2.165488525390625, 2.1654794921875, 2.166298583984375, 2.16550146484375, 2.1659345703125, 2.165116455078125, 2.16589892578125, 2.166954833984375, 2.16605224609375, 2.167164794921875, 2.166157958984375, 2.165796875, 2.16744970703125, 2.1675908203125, 2.166740966796875, 2.166281494140625, 2.16719140625, 2.1674482421875, 2.166792236328125, 2.167214111328125, 2.166931396484375, 2.16813525390625, 2.166753173828125, 2.1673251953125, 2.166135009765625, 2.166437744140625, 2.1670419921875, 2.165987060546875, 2.16810302734375, 2.1652685546875, 2.16776904296875, 2.166192138671875, 2.1670849609375, 2.16669775390625, 2.16688671875, 2.16671630859375, 2.1665361328125, 2.166640625, 2.166073486328125, 2.165718994140625, 2.165845947265625, 2.1654443359375, 2.16521337890625, 2.1656064453125, 2.164690185546875, 2.165152099609375, 2.165153564453125, 2.166433349609375, 2.16533056640625, 2.16590087890625, 2.165445068359375, 2.16591162109375, 2.166353515625, 2.166974853515625, 2.166444091796875, 2.16614501953125, 2.1657333984375, 2.166519775390625, 2.165841552734375, 2.165972412109375, 2.165611572265625, 2.165755615234375, 2.16555322265625, 2.16642724609375, 2.165973388671875, 2.166176025390625, 2.167818359375, 2.16762939453125, 2.167644287109375, 2.16604296875, 2.167754638671875, 2.167920166015625, 2.167478759765625, 2.16732421875, 2.167259765625, 2.168190185546875, 2.167671630859375, 2.167791015625, 2.167031982421875, 2.168864990234375, 2.1674599609375, 2.167080810546875, 2.16809423828125, 2.16811328125, 2.1680869140625, 2.167527587890625, 2.167656005859375, 2.167390869140625, 2.166988037109375, 2.168318603515625, 2.167972900390625, 2.1684541015625, 2.167140380859375, 2.168670166015625, 2.1695263671875, 2.16701123046875, 2.168231201171875, 2.167736572265625, 2.16655078125, 2.166257568359375, 2.166305908203125, 2.166731689453125, 2.167017578125, 2.16652099609375, 2.164148193359375, 2.165614501953125, 2.16821533203125, 2.167324951171875, 2.166708251953125, 2.1669375, 2.16767919921875, 2.1668359375, 2.1665556640625, 2.16671630859375, 2.1664638671875, 2.167177978515625, 2.167428955078125, 2.1676435546875, 2.16705712890625, 2.166687744140625, 2.16604052734375, 2.167203857421875, 2.166679443359375, 2.167146484375, 2.16696142578125, 2.167349365234375, 2.1678125, 2.167871826171875, 2.167183349609375, 2.16703173828125, 2.16832421875, 2.1674189453125, 2.167361572265625, 2.168323974609375, 2.167330810546875, 2.167510009765625, 2.167006103515625, 2.167711669921875, 2.1676904296875, 2.1678779296875, 2.1690947265625, 2.16873583984375, 2.168320068359375, 2.16975146484375, 2.1685224609375, 2.169452880859375, 2.169634521484375, 2.169194580078125, 2.168541259765625, 2.169198974609375, 2.168323974609375, 2.16812890625, 2.169175048828125, 2.168743408203125, 2.16881201171875, 2.166730712890625, 2.1686845703125, 2.16939892578125, 2.168643798828125, 2.169746826171875, 2.168140625, 2.168422119140625, 2.167958740234375, 2.16957373046875, 2.168743896484375, 2.16764599609375, 2.169199462890625, 2.1681357421875, 2.165767822265625, 2.166407470703125, 2.167010986328125, 2.1682080078125, 2.167797607421875, 2.1663662109375, 2.16692333984375, 2.166476806640625, 2.166065185546875, 2.16653759765625, 2.167151123046875, 2.167858154296875, 2.166207275390625, 2.16652197265625, 2.167060302734375, 2.16585009765625, 2.16644970703125, 2.16733544921875, 2.166739013671875, 2.16699853515625, 2.166995361328125, 2.1677744140625, 
2.166822998046875, 2.166467529296875, 2.16658935546875, 2.166917236328125, 2.167504638671875, 2.166527587890625, 2.1661435546875, 2.167434326171875, 2.16802099609375, 2.1672333984375, 2.166693359375, 2.166917724609375, 2.166367431640625, 2.167483154296875, 2.167582763671875, 2.167276611328125, 2.167583251953125, 2.168227294921875, 2.16863818359375, 2.168342529296875, 2.167855224609375, 2.1689814453125, 2.169544677734375, 2.16923876953125, 2.1683740234375, 2.167326416015625, 2.168219482421875, 2.168541748046875, 2.16815771484375, 2.168617431640625, 2.168645751953125, 2.169489013671875, 2.167476806640625, 2.1682646484375, 2.168379150390625, 2.1695771484375, 2.1693896484375, 2.16714453125, 2.16802099609375, 2.168919677734375, 2.1644375, 2.16413525390625, 2.16406494140625, 2.164509765625, 2.165660400390625, 2.167010986328125, 2.16608935546875, 2.165963623046875, 2.16654248046875, 2.164913818359375, 2.164343017578125, 2.16390869140625, 2.163916748046875, 2.164716796875, 2.165203125, 2.164300048828125, 2.16482861328125, 2.16473583984375, 2.165071044921875, 2.16362890625, 2.1636845703125, 2.16490380859375, 2.165056396484375, 2.16562841796875, 2.163931640625, 2.164023193359375, 2.165086181640625, 2.165473388671875, 2.165923828125, 2.165321533203125, 2.166739013671875, 2.16607958984375, 2.1651669921875, 2.167163818359375, 2.16523974609375, 2.1653134765625, 2.16590478515625, 2.165904052734375, 2.166857177734375, 2.167341552734375, 2.16569873046875, 2.167005126953125, 2.165802001953125, 2.165296142578125, 2.16627001953125, 2.166312744140625, 2.166623779296875, 2.166095947265625, 2.16608740234375, 2.166962890625, 2.1675576171875, 2.166712890625, 2.166293701171875, 2.16719580078125, 2.167150634765625, 2.167144775390625, 2.16835498046875, 2.166561279296875, 2.16798828125, 2.1664521484375, 2.166530029296875, 2.166826904296875, 2.1669375, 2.166547607421875, 2.16634033203125, 2.16666650390625, 2.16660986328125, 2.167008056640625, 2.166205810546875, 2.166114990234375, 2.166304931640625, 2.166407958984375, 2.167910888671875, 2.16789453125, 2.16703564453125, 2.1695634765625, 2.167732177734375, 2.166179931640625, 2.16768017578125, 2.167562744140625, 2.167038330078125, 2.16815380859375, 2.166386962890625, 2.1661962890625, 2.1672734375, 2.166528076171875, 2.16612451171875, 2.16584814453125, 2.16689208984375, 2.167228759765625, 2.167357421875, 2.16840185546875, 2.168848388671875, 2.168018798828125, 2.1668857421875, 2.1680400390625, 2.167444580078125, 2.166825927734375, 2.166277587890625, 2.1680703125, 2.16814208984375, 2.16769873046875, 2.16819287109375, 2.168285888671875, 2.168908935546875, 2.16764697265625, 2.168127197265625, 2.168122802734375, 2.168683349609375, 2.16888330078125, 2.168537109375, 2.168404052734375, 2.168764404296875, 2.1684013671875, 2.166385009765625, 2.16930908203125, 2.168162353515625, 2.1688134765625, 2.16937255859375, 2.167416748046875, 2.16837744140625, 2.169112548828125, 2.1670537109375, 2.16781884765625, 2.16823193359375, 2.167018798828125, 2.166949951171875, 2.1668740234375, 2.16616357421875, 2.166748291015625, 2.167088134765625, 2.167986083984375, 2.16764599609375, 2.1668125, 2.167314697265625, 2.166613525390625, 2.16709912109375, 2.16738037109375, 2.167388427734375, 2.1675126953125, 2.16755224609375, 2.167644287109375, 2.166744384765625, 2.167925048828125, 2.168575927734375, 2.1672841796875, 2.16865771484375, 2.168525146484375, 2.1678818359375, 2.16807421875, 2.167703369140625, 2.16839794921875, 2.169016357421875, 2.169155517578125, 2.16738720703125, 2.1685380859375, 
2.167985107421875, 2.1684755859375, 2.168439453125, 2.16862255859375, 2.16863818359375, 2.167289794921875, 2.167142333984375, 2.16776708984375, 2.16764013671875, 2.1680947265625, 2.168289306640625, 2.167946533203125, 2.1689287109375, 2.168463623046875, 2.16720703125, 2.16830029296875, 2.16787548828125, 2.1665458984375, 2.167499267578125, 2.166310791015625, 2.168385498046875, 2.166969970703125, 2.1669541015625, 2.1665302734375, 2.167193115234375, 2.166907470703125, 2.168440673828125, 2.166923095703125, 2.166546875, 2.16703564453125, 2.168654052734375, 2.167615478515625, 2.167334716796875, 2.165754150390625, 2.16547900390625, 2.165604248046875, 2.165964599609375, 2.164871337890625, 2.1648740234375, 2.165676025390625, 2.166201416015625, 2.16602734375, 2.165337646484375, 2.166315185546875, 2.164508544921875, 2.164201904296875, 2.1656923828125, 2.165769775390625, 2.165017333984375, 2.1645595703125, 2.16470947265625, 2.165456787109375, 2.165345703125, 2.165402099609375, 2.166044677734375, 2.16509814453125, 2.166001953125, 2.165135498046875, 2.16656884765625, 2.164768798828125, 2.165671875, 2.166693359375, 2.166614501953125, 2.168379150390625, 2.167519287109375, 2.166951904296875, 2.16780712890625, 2.166045654296875, 2.16529296875, 2.16745166015625, 2.166091064453125, 2.16620068359375, 2.16697900390625, 2.16651171875, 2.167469970703125, 2.167701416015625, 2.1664912109375, 2.165646728515625, 2.166743408203125, 2.16632763671875, 2.16810693359375, 2.168990966796875, 2.1682939453125, 2.168727783203125, 2.1678369140625, 2.16667333984375, 2.167680908203125, 2.167146728515625, 2.1657333984375, 2.1679345703125, 2.165698974609375, 2.166783203125, 2.1666943359375, 2.165668212890625, 2.16674609375, 2.16652001953125, 2.164810302734375, 2.1640908203125, 2.16560009765625, 2.16654833984375, 2.16613720703125, 2.166560546875, 2.165886962890625, 2.165761474609375, 2.16423388671875, 2.164981689453125, 2.16667431640625, 2.16605078125, 2.166181884765625, 2.16501806640625, 2.166455078125, 2.165868408203125, 2.166765625, 2.167066650390625, 2.1680078125, 2.1658408203125, 2.166343505859375, 2.1672490234375, 2.167269287109375, 2.1657314453125, 2.16593212890625, 2.166884033203125, 2.166148681640625, 2.167370361328125, 2.166601806640625, 2.1672099609375, 2.1672568359375, 2.167049560546875, 2.16719384765625, 2.16667822265625, 2.167221923828125, 2.16730859375, 2.167490478515625, 2.166222412109375, 2.167810302734375, 2.166908935546875, 2.167332275390625, 2.166823486328125, 2.167975830078125, 2.16768408203125, 2.168091064453125, 2.167548583984375, 2.16677783203125, 2.166906494140625, 2.16699951171875, 2.16765966796875, 2.167762939453125, 2.166803466796875, 2.1678857421875, 2.166867919921875, 2.16678515625, 2.167721923828125, 2.166592529296875, 2.1664208984375, 2.167431884765625, 2.166370361328125, 2.16732861328125, 2.166756591796875, 2.16758544921875]",tokens/s,0.46149296877687485,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,3554.562048,4495.179776,0.0,4116.709376,3980.386816,s,1,9.8548896484375,9.8548896484375,0.0,9.8548896484375,9.8548896484375,9.8548896484375,9.8548896484375,[9.8548896484375],,kWh,9.09966390958336e-05,1.0026190653853335e-05,3.093585808200405e-05,0.000131958687831691,,MB,3485.7984,4826.529792,0.0,4418.699264,4245.89568,s,10,6.620557922363281,0.662055792236328,0.000751265648237151,0.6620900573730468,0.6628945556640625,0.6629605834960938,0.6630134057617187,"[0.6616687622070313, 0.6617026977539062, 0.663026611328125, 0.66051611328125, 0.6628349609375, 0.6612864379882812, 0.6621630859375, 0.6620170288085937, 0.6624623413085937, 0.6628798828125]",tokens/s,386.67436038172747,kWh,1.9296033496614424e-05,2.1279812942374758e-06,1.2802249825124906e-05,3.42262646159768e-05,tokens/kWh,7479635.971741402,MB,3489.87392,4837.015552,0.0,4429.185024,4245.89824,s,10,385.57617187499994,38.5576171875,0.010987278465840647,38.560576171875,38.567292968749996,38.567697265625,38.568020703125,"[38.53258984375, 38.54215234375, 38.55772265625, 38.5681015625, 38.5658203125, 38.5649296875, 38.5565, 38.560171875, 38.56098046875, 
38.567203125]",tokens/s,1.6339183952587188,kWh,0.001124747326490886,0.00012406806433800434,0.0007475806918140751,0.0019963960826429655,tokens/kWh,31556.86416524936,,s,630,385.5724367675785,0.6120197409009176,0.0004554953714293266,0.6120278625488281,0.6126125122070313,0.6127261108398437,0.6130161370849609,"[0.6121142578125, 0.6112144165039063, 0.612133056640625, 0.6109717407226563, 0.6118790283203125, 0.6111687622070312, 0.6118563842773438, 0.6111764526367187, 0.611968994140625, 0.6111453247070312, 0.6115288696289063, 0.6115491333007812, 0.61184814453125, 0.6115392456054688, 0.6113744506835938, 0.6116513061523438, 0.6119515380859375, 0.6112620239257812, 0.6114207763671875, 0.6118275146484375, 0.61140771484375, 0.6112462158203125, 0.6116078491210938, 0.6112278442382812, 0.6117545166015625, 0.6113173828125, 0.6115408325195313, 0.6119054565429688, 0.61109716796875, 0.611557373046875, 0.6114898071289062, 0.6120910034179687, 0.6114508666992188, 0.6112526245117188, 0.6114199829101562, 0.6118121337890625, 0.6114620361328125, 0.61165380859375, 0.6115807495117187, 0.6114669799804687, 0.6115678100585937, 0.6115462036132813, 0.6115228881835938, 0.6111729736328125, 0.611970703125, 0.6116990356445312, 0.6120406494140626, 0.611641357421875, 0.6119807739257812, 0.6115968017578125, 0.6114283447265625, 0.6122384643554688, 0.6113780517578125, 0.612173828125, 0.6114588012695312, 0.6116984252929687, 0.6117297973632813, 0.61170703125, 0.6120755004882813, 0.61180126953125, 0.6120068359375, 0.612145263671875, 0.611707763671875, 0.611455078125, 0.6119686889648438, 0.6116396484375, 0.611403564453125, 0.6110066528320313, 0.61208984375, 0.6113250732421875, 0.6116536254882813, 0.6114210815429687, 0.611715087890625, 0.6115339965820312, 0.6111299438476563, 0.6121720581054687, 0.6109406127929687, 0.6123619384765625, 0.6113341064453125, 0.6119102783203125, 0.6113526000976562, 0.611614013671875, 0.6116974487304687, 0.611786376953125, 0.6113773803710938, 0.6118853149414063, 0.611208984375, 0.6120120239257812, 0.6114317626953125, 0.6120004272460937, 0.6119423828125, 0.6115693969726562, 0.6115078735351562, 0.6117171020507812, 0.6123374633789063, 0.6121275024414062, 0.6108787231445313, 0.6121347045898438, 0.6113515014648437, 0.61224267578125, 0.6114926147460937, 0.6115780029296874, 0.611550537109375, 0.6118218383789062, 0.61205126953125, 0.6117457885742188, 0.6117621459960938, 0.6116024169921875, 0.6124619140625, 0.6115656127929687, 0.6119385986328125, 0.6118463134765625, 0.612028564453125, 0.6117929077148437, 0.6125826416015625, 0.6117527465820313, 0.6122456665039062, 0.6116406860351562, 0.6126637573242187, 0.6123397216796875, 0.6117594604492187, 0.6127886352539063, 0.6115760498046875, 0.6121256713867187, 0.6121380004882813, 0.6117232666015625, 0.6116925659179687, 0.6115901489257812, 0.6112005615234375, 0.6118138427734375, 0.6111497192382812, 0.6115753173828125, 0.6116255493164062, 0.6120460205078125, 0.6120271606445312, 0.6112542724609376, 0.6120878295898438, 0.6115795288085938, 0.6120797729492188, 0.6120482788085938, 0.6121416015625, 0.6114710693359375, 0.6121723022460938, 0.6112501831054687, 0.6116390991210937, 0.6114646606445312, 0.6126207275390625, 0.6116801147460937, 0.6122869873046874, 0.61224755859375, 0.6117556762695312, 0.6119776000976562, 0.6119215698242187, 0.6119111938476562, 0.61240380859375, 0.6120157470703125, 0.612395751953125, 0.6117763061523438, 0.6126807861328125, 0.61121630859375, 0.6124871826171875, 0.6119192504882812, 0.6123485717773437, 0.612099365234375, 0.6127904663085938, 0.611822021484375, 
0.6121922607421875, 0.6124646606445312, 0.6118911743164063, 0.61201416015625, 0.6117307739257812, 0.612303466796875, 0.61236962890625, 0.6120701904296875, 0.612619384765625, 0.6118327026367187, 0.612288330078125, 0.612212890625, 0.61239013671875, 0.6121643676757812, 0.6127042846679688, 0.61183349609375, 0.6126124877929687, 0.6126674194335937, 0.611536376953125, 0.6123505249023438, 0.612005859375, 0.6123560791015625, 0.6124705200195313, 0.6131610717773438, 0.6112392578125, 0.6117443237304687, 0.6113894653320312, 0.6120325317382812, 0.6118903198242187, 0.6113043212890625, 0.6120017700195313, 0.6121266479492188, 0.61219775390625, 0.6118837280273437, 0.6123126831054687, 0.6115352172851563, 0.6124722290039063, 0.611629638671875, 0.612042724609375, 0.6121021118164063, 0.6123438110351562, 0.6116590576171875, 0.6119451293945313, 0.6118370971679687, 0.6123292846679688, 0.6120560302734375, 0.6118502197265625, 0.612443115234375, 0.6120396728515625, 0.6123577880859375, 0.6119038696289063, 0.6128283081054687, 0.6119330444335938, 0.612441650390625, 0.6119649047851563, 0.6122930908203125, 0.6122843627929687, 0.6123295288085937, 0.6125045776367187, 0.6127236938476562, 0.6114489135742187, 0.61211572265625, 0.6123500366210938, 0.6122659301757812, 0.6124222412109375, 0.6124031982421875, 0.6122719116210937, 0.6120572509765625, 0.6123601684570312, 0.61216064453125, 0.6124381713867187, 0.6126107788085937, 0.6120919189453125, 0.6125908203125, 0.6119821166992188, 0.6124503173828125, 0.6126817016601562, 0.6120588989257812, 0.6132062377929688, 0.6120140991210937, 0.6124320068359375, 0.6122822265625, 0.6124273071289063, 0.612599365234375, 0.6120205078125, 0.61283935546875, 0.6122119140625, 0.6117545166015625, 0.611811279296875, 0.6120430908203125, 0.611078125, 0.61248876953125, 0.6120595703125, 0.612294677734375, 0.6119854125976563, 0.6123458862304687, 0.6114295043945313, 0.611914794921875, 0.612179443359375, 0.6117108154296875, 0.6124776611328125, 0.6116636962890625, 0.6123334350585937, 0.6115018310546875, 0.6118132934570313, 0.612077392578125, 0.6123218994140625, 0.6119627685546875, 0.6126206665039062, 0.6122882690429687, 0.6115143432617187, 0.6121533203125, 0.6119895629882812, 0.6120072021484375, 0.6114556884765625, 0.6126775512695313, 0.61162890625, 0.6123578491210937, 0.6120326538085937, 0.6123626708984375, 0.6125403442382813, 0.612054443359375, 0.6119348754882813, 0.6121572265625, 0.6124013671875, 0.612495361328125, 0.6122327880859375, 0.6121760864257813, 0.6119195556640625, 0.6124318237304688, 0.6122091674804687, 0.6119915771484375, 0.6124253540039063, 0.6124895629882813, 0.6127388916015625, 0.6125733642578125, 0.6124769287109375, 0.6121710815429687, 0.6117905883789062, 0.6126417236328126, 0.6126325073242187, 0.6121554565429688, 0.6125913696289063, 0.61184228515625, 0.6124534912109375, 0.6122402954101562, 0.6122333374023438, 0.612042236328125, 0.6128357543945312, 0.612595703125, 0.6127513427734375, 0.6109691162109375, 0.6123616943359375, 0.6117359619140625, 0.612026611328125, 0.6123339233398437, 0.610981689453125, 0.6121719970703124, 0.6120570678710937, 0.61194677734375, 0.6122832641601562, 0.611919921875, 0.6118634033203125, 0.6124083862304688, 0.6119985961914063, 0.61241357421875, 0.6116821899414062, 0.61247900390625, 0.611831787109375, 0.611758056640625, 0.6124605712890625, 0.61218408203125, 0.6115502319335937, 0.6121154174804687, 0.6121103515625, 0.6117601318359375, 0.6125892333984375, 0.61183349609375, 0.6121336669921875, 0.6117366943359375, 0.612065185546875, 0.61258837890625, 0.6118395385742188, 
0.6126959838867188, 0.6116390380859374, 0.6129447021484375, 0.6121799926757813, 0.6119232788085938, 0.611804931640625, 0.6121580810546875, 0.6127963256835938, 0.6115818481445312, 0.6126958618164062, 0.6120201416015625, 0.6121695556640625, 0.61252197265625, 0.6119035034179687, 0.6121950073242187, 0.6117481079101562, 0.6126157836914062, 0.6124855346679687, 0.6115591430664062, 0.6131427612304687, 0.6119464721679687, 0.612766845703125, 0.6125670776367188, 0.61140869140625, 0.6127734985351563, 0.6117210083007812, 0.6126946411132812, 0.6123840942382812, 0.6119880981445313, 0.611838134765625, 0.6118623657226563, 0.6119669189453125, 0.6112620849609375, 0.6125039672851562, 0.6112794189453125, 0.6130339965820313, 0.6112164916992188, 0.6126127319335938, 0.6115816040039063, 0.6117484741210938, 0.61230078125, 0.61129931640625, 0.6126282348632812, 0.6112975463867187, 0.6116823120117187, 0.6121248168945312, 0.61120703125, 0.6125687255859374, 0.6120260620117187, 0.6120589599609375, 0.6121654052734375, 0.6110865478515625, 0.6126713256835937, 0.6112509155273438, 0.6119139404296875, 0.6123639526367187, 0.6120186157226563, 0.6126338500976563, 0.6111279907226562, 0.6127632446289063, 0.6112950439453125, 0.6119976196289062, 0.6120557250976563, 0.6124134521484375, 0.6123069458007813, 0.6116843872070312, 0.6122720336914063, 0.611460693359375, 0.612424072265625, 0.6122516479492187, 0.612010009765625, 0.6124620361328125, 0.6112745361328125, 0.6120944213867188, 0.6120062255859375, 0.6122250366210937, 0.6117908325195313, 0.6119178466796875, 0.6122025146484374, 0.6119874267578125, 0.611997802734375, 0.6123902587890625, 0.6122452392578125, 0.6121665649414062, 0.6117449951171875, 0.6119155883789062, 0.6119039306640625, 0.6129322509765625, 0.6121162719726563, 0.612421630859375, 0.6124832763671875, 0.6116205444335937, 0.6125343627929688, 0.6110094604492188, 0.6124031982421875, 0.6119382934570312, 0.611999755859375, 0.61218603515625, 0.610879150390625, 0.61291259765625, 0.6114223022460937, 0.6118695068359375, 0.6122926635742187, 0.611399658203125, 0.6122921752929688, 0.6110989379882813, 0.6120439453125, 0.6116995239257812, 0.6118563842773438, 0.6123151245117188, 0.6112803955078125, 0.612638427734375, 0.6121705322265625, 0.611858154296875, 0.6119235229492187, 0.6121417236328125, 0.6124308471679687, 0.6114147338867187, 0.6120963134765625, 0.6115348510742188, 0.6124412231445312, 0.6118445434570312, 0.6117661743164062, 0.612811279296875, 0.6112337646484375, 0.6126388549804688, 0.6118950805664063, 0.612406982421875, 0.6118363037109374, 0.6121062622070312, 0.6122528686523437, 0.6123179931640625, 0.6118911743164063, 0.6126052856445312, 0.6117557373046875, 0.612244140625, 0.612090087890625, 0.6121342163085938, 0.612966552734375, 0.6113693237304687, 0.6125150146484375, 0.611779541015625, 0.61260546875, 0.6122430419921875, 0.6119534301757813, 0.6121980590820313, 0.612149169921875, 0.6125381469726563, 0.6121417846679688, 0.612169677734375, 0.61263671875, 0.611600341796875, 0.6127117919921875, 0.6122625732421875, 0.6120914916992187, 0.6121656494140625, 0.6111639404296875, 0.6126246337890625, 0.6118850708007812, 0.6122250366210937, 0.61163134765625, 0.6121688842773437, 0.6111559448242188, 0.6125655517578125, 0.6118174438476562, 0.6122291259765625, 0.6118236083984375, 0.6115594482421876, 0.611704833984375, 0.6121345825195312, 0.6124312744140625, 0.6116069946289062, 0.6121016235351563, 0.6124471435546875, 0.611276123046875, 0.6121101684570313, 0.6114844970703125, 0.6124954833984375, 0.6115463256835938, 0.6123585205078125, 
0.612188232421875, 0.6113446655273438, 0.6121123657226563, 0.6114365234375, 0.6124310302734375, 0.6119739379882813, 0.6115614624023438, 0.6124237060546875, 0.61134814453125, 0.6127559204101563, 0.6119259033203125, 0.6119415893554687, 0.61205712890625, 0.6115681762695313, 0.612296875, 0.6123171997070312, 0.61203662109375, 0.6125028686523437, 0.6117075805664063, 0.6124912719726563, 0.6118809814453126, 0.6126448364257813, 0.61213037109375, 0.6118486328125, 0.6125650024414062, 0.6119321899414063, 0.6125468139648438, 0.6120425415039062, 0.6125135498046875, 0.6115678100585937, 0.61297216796875, 0.6116644287109375, 0.612972412109375, 0.6127227172851563, 0.6117510986328125, 0.6131720581054687, 0.6115921630859374, 0.6129044799804687, 0.6119600830078125, 0.6125370483398438, 0.61171435546875, 0.6126210327148438, 0.6113599243164063, 0.6115885620117187, 0.6133087158203125, 0.6110249633789062, 0.6127280883789062, 0.6117313842773437, 0.6121046142578125, 0.61195849609375, 0.6116296997070313, 0.6124127197265625, 0.6114652099609375, 0.61193701171875, 0.612396728515625, 0.6118967895507812, 0.6119945068359375, 0.6115444946289063, 0.6119818725585937, 0.6122965698242188, 0.6124524536132813, 0.6119517211914063, 0.6124677734375, 0.611462158203125, 0.6120108642578125, 0.612780029296875, 0.6114529418945313, 0.6124046630859376, 0.611566162109375, 0.6125131225585938, 0.6121314697265625, 0.6122491455078125, 0.6122808227539063, 0.6119035034179687, 0.6121082763671875, 0.611983154296875, 0.6118319702148437, 0.6126550903320312, 0.612560791015625, 0.6118763427734375, 0.6122172241210937, 0.6117850341796875, 0.6125855712890625, 0.6121488647460938, 0.6121957397460938, 0.6129197387695312, 0.611925537109375, 0.612774658203125, 0.6122659912109375, 0.6121143798828125, 0.6127218627929687, 0.6118162841796875, 0.6126605224609375, 0.6120098266601562, 0.6125699462890625, 0.6128468627929687, 0.6122276000976562, 0.6131796875, 0.61227197265625, 0.612358154296875, 0.6123499755859375]",tokens/s,1.6339342233110976,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,5180.358656,5584.584704,0.0,5182.062592,5181.108736,s,1,11.4033701171875,11.4033701171875,0.0,11.4033701171875,11.4033701171875,11.4033701171875,11.4033701171875,[11.4033701171875],,kWh,0.00011793607067084699,1.2999488384072235e-05,3.546447281600057e-05,0.00016640003187091978,,MB,5158.52288,5739.773952,0.0,5322.571776,5283.621376,s,10,2.5200260314941407,0.2520026031494141,0.00023730096354918385,0.25199404907226564,0.2523175537109375,0.2523290344238281,0.2523382189941406,"[0.2520255432128906, 0.2518825225830078, 0.25204130554199217, 0.2519625549316406, 0.25176133728027345, 0.25231500244140626, 0.2515979766845703, 0.25181840515136716, 0.25234051513671873, 
0.2522808685302734]",tokens/s,1015.862522055837,kWh,7.4315017853130404e-06,8.193028827532785e-07,4.914594209450007e-06,1.3165398877516325e-05,tokens/kWh,19444910.28199632,MB,5164.875776,5756.551168,0.0,5339.348992,5283.623936,s,10,28.434452392578123,2.8434452392578122,0.00992853494976894,2.84279150390625,2.860056396484375,2.8606650390625,2.861151953125,"[2.83762939453125, 2.835802978515625, 2.8265859375, 2.8394404296875, 2.845128173828125, 2.842534912109375, 2.843087646484375, 2.861273681640625, 2.859921142578125, 2.843048095703125]",tokens/s,22.15622060526971,kWh,8.189476586593759e-05,9.033323295490017e-06,5.2827674206550046e-05,0.00014375576336797768,tokens/kWh,438243.2990789819,,s,630,28.43129645919799,0.045129041998726985,0.0007832032918577279,0.04501094436645508,0.04575907897949219,0.04640671424865722,0.0487604891204834,"[0.04511142349243164, 0.04876540756225586, 0.044556289672851565, 0.044652542114257815, 0.044267520904541016, 0.04396169662475586, 0.04398342514038086, 0.044265567779541014, 0.04417257690429687, 0.044664638519287106, 0.04425001525878906, 0.04437811279296875, 0.0442749137878418, 0.04409833526611328, 0.04412416076660156, 0.04421836853027344, 0.04508176040649414, 0.04489625549316406, 0.044993217468261716, 0.044964256286621096, 0.04498191833496094, 0.044853343963623046, 0.04430352020263672, 0.044921695709228514, 0.04972748947143555, 0.044592479705810546, 0.04484777450561524, 0.04470988845825195, 0.04430438232421875, 0.04458438491821289, 0.0451610221862793, 0.04432691192626953, 0.046862335205078126, 0.04504780960083008, 0.04423680114746094, 0.04426137542724609, 0.04474265670776367, 0.04437744140625, 0.044423839569091794, 0.044420673370361326, 0.04445228958129883, 0.04468255996704101, 0.04495840072631836, 0.04483891296386719, 0.04461308670043945, 0.048876064300537106, 0.044912094116210936, 0.0494699821472168, 0.04523417663574219, 0.04506828689575195, 0.04490444946289063, 0.04526006317138672, 0.04503567886352539, 0.04506246566772461, 0.04615913772583008, 0.04510819244384766, 0.045551616668701174, 0.044980224609375, 0.04526851272583008, 0.04512201690673828, 0.04493628692626953, 0.0447210578918457, 0.044746753692626956, 0.04530176162719726, 0.04853145599365234, 0.046018558502197264, 0.04595503997802734, 0.044862590789794925, 0.044886943817138675, 0.044678558349609376, 0.04535766220092773, 0.04539187240600586, 0.04436377716064453, 0.04483283233642578, 0.04476102447509766, 0.04481024169921875, 0.04478739166259765, 0.04480646514892578, 0.04476089477539062, 0.04453753662109375, 0.044814273834228514, 0.04549075317382813, 0.04516556930541992, 0.045071361541748046, 0.04525161743164063, 0.04465353775024414, 0.04475235366821289, 0.044945953369140625, 0.04476518249511719, 0.045348705291748045, 0.04523212814331055, 0.045086879730224606, 0.044928863525390626, 0.047558815002441406, 0.04566758346557617, 0.04547660827636719, 0.04453116989135742, 0.04513232040405273, 0.04492697525024414, 0.04471398544311524, 0.044482398986816406, 0.044843006134033206, 0.0445786247253418, 0.044198238372802734, 0.0446376953125, 0.04495001602172852, 0.04490864181518555, 0.04480195236206055, 0.04454105758666992, 0.04440281677246094, 0.04456320190429688, 0.045888832092285156, 0.04425759887695312, 0.044519680023193356, 0.044917919158935546, 0.045771297454833985, 0.04486374282836914, 0.044626113891601565, 0.044604480743408205, 0.0444339828491211, 0.04473484802246094, 0.044761089324951174, 0.04465459060668946, 0.04500396728515625, 0.044730430603027345, 0.04467993545532226, 0.0457542724609375, 0.0451478385925293, 
0.0454925422668457, 0.04494649505615234, 0.04456499099731445, 0.044507583618164065, 0.044799999237060545, 0.04490854263305664, 0.04548339080810547, 0.04475334548950195, 0.04480633544921875, 0.04490854263305664, 0.04484873580932617, 0.04451084899902344, 0.04442297744750977, 0.04507900619506836, 0.04515891265869141, 0.04505187225341797, 0.044940353393554684, 0.04486857604980469, 0.04583628845214844, 0.0448573112487793, 0.0443408317565918, 0.044407230377197265, 0.04524758529663086, 0.04523865509033203, 0.04502924728393555, 0.044894046783447265, 0.045316032409667965, 0.045115711212158204, 0.044698177337646486, 0.04444566345214844, 0.04507036972045898, 0.0449117431640625, 0.04480908966064453, 0.04437811279296875, 0.04441088104248047, 0.044199935913085936, 0.04417443084716797, 0.04414556884765625, 0.044520641326904295, 0.045034305572509765, 0.044797409057617185, 0.04477596664428711, 0.0449637451171875, 0.044642208099365234, 0.044765377044677736, 0.04444287872314453, 0.04498009490966797, 0.04566515350341797, 0.04498227310180664, 0.04505395126342773, 0.044859390258789066, 0.0447281265258789, 0.044365215301513675, 0.044506977081298825, 0.04533942413330078, 0.045252769470214844, 0.045350910186767575, 0.045080577850341794, 0.04458838272094726, 0.044300960540771483, 0.04482358551025391, 0.04580352020263672, 0.04477337646484375, 0.04431788635253906, 0.04547257614135742, 0.04592844772338867, 0.04501913452148437, 0.045195262908935545, 0.04874844741821289, 0.04474249649047852, 0.04447660827636719, 0.04474211120605469, 0.045185150146484374, 0.045218208312988284, 0.04509702301025391, 0.04497139358520508, 0.04468377685546875, 0.044687488555908206, 0.04446822357177734, 0.044322017669677735, 0.045093406677246095, 0.045053665161132815, 0.04486608123779297, 0.04482608032226563, 0.04453020858764648, 0.044128257751464846, 0.04404592132568359, 0.044275936126708985, 0.04636896133422851, 0.044971969604492186, 0.045117504119873045, 0.04463564682006836, 0.044466686248779294, 0.044396190643310546, 0.04438438415527344, 0.04472444915771484, 0.045149215698242186, 0.04523926544189453, 0.044843006134033206, 0.044783519744873046, 0.04463216018676758, 0.04445552062988281, 0.04451484680175781, 0.04462067031860351, 0.0449486083984375, 0.044948352813720706, 0.045017055511474606, 0.04465206527709961, 0.046141952514648435, 0.04458700942993164, 0.04519116973876953, 0.04484864044189453, 0.04518924713134766, 0.045695358276367185, 0.045503742218017576, 0.044444416046142576, 0.044576416015625, 0.044774753570556644, 0.045116447448730466, 0.04501641464233398, 0.04526144027709961, 0.044969249725341796, 0.04571609497070313, 0.05057689666748047, 0.04542879867553711, 0.04668937683105469, 0.04508348846435547, 0.04530387115478516, 0.04506009674072266, 0.04458700942993164, 0.04435103988647461, 0.04472051239013672, 0.04510521697998047, 0.04546355056762695, 0.04470783996582031, 0.044930049896240234, 0.044783615112304685, 0.04465151977539063, 0.04460508728027344, 0.04486563110351562, 0.045007102966308596, 0.04523212814331055, 0.044865535736083983, 0.04744540786743164, 0.04569497680664063, 0.045265472412109375, 0.04458425521850586, 0.044969791412353514, 0.04724124908447266, 0.046457534790039064, 0.0454453125, 0.04502460861206055, 0.04496166229248047, 0.044639007568359375, 0.044862590789794925, 0.04495232009887695, 0.04531008148193359, 0.04534815979003906, 0.04500051116943359, 0.044880416870117186, 0.044451423645019535, 0.044262142181396485, 0.04415283203125, 0.0444951057434082, 0.045340160369873046, 0.045187328338623045, 0.045023231506347655, 
0.0446484489440918, 0.04437811279296875, 0.04467251205444336, 0.04466739273071289, 0.049608543395996095, 0.0457811508178711, 0.04580979156494141, 0.045117313385009766, 0.04599603271484375, 0.04462387084960937, 0.04468851089477539, 0.04475379180908203, 0.04506828689575195, 0.045090816497802735, 0.045279232025146485, 0.04514815902709961, 0.04476038360595703, 0.04489049530029297, 0.04462575912475586, 0.04478326416015625, 0.046629150390625, 0.04507881546020508, 0.04503718566894531, 0.04490646362304687, 0.04458118438720703, 0.045281375885009766, 0.04514815902709961, 0.04491657638549805, 0.045096736907958984, 0.04486297607421875, 0.04465702438354492, 0.04448716735839844, 0.04469760131835938, 0.045080257415771485, 0.04487200164794922, 0.04469548797607422, 0.04497375869750977, 0.044832672119140625, 0.04468988800048828, 0.04450835037231445, 0.04464672088623047, 0.0450334701538086, 0.04495001602172852, 0.04496588897705078, 0.04509286499023438, 0.04533772659301758, 0.04483567810058594, 0.044402721405029294, 0.04480220794677734, 0.044871040344238285, 0.045123455047607425, 0.04500515365600586, 0.04485555267333984, 0.044886016845703126, 0.04499792098999023, 0.04506492614746094, 0.045235294342041016, 0.04467292785644531, 0.04501164627075195, 0.04482284927368164, 0.0449536018371582, 0.044795745849609374, 0.04452675247192383, 0.044542976379394535, 0.04515382385253906, 0.045074817657470706, 0.04515030288696289, 0.04511539077758789, 0.04519935989379883, 0.04501094436645508, 0.045238273620605465, 0.04523417663574219, 0.045088768005371094, 0.045190654754638675, 0.045275646209716795, 0.04545497512817383, 0.04999724960327148, 0.04592326354980469, 0.045758464813232425, 0.04556390380859375, 0.04527228927612305, 0.045605281829833984, 0.045457790374755856, 0.04712572860717774, 0.046021087646484375, 0.04556185531616211, 0.045879295349121094, 0.04540729522705078, 0.045953983306884764, 0.04591820907592774, 0.045176830291748044, 0.0455035514831543, 0.04509286499023438, 0.04515264129638672, 0.045365825653076175, 0.04493926239013672, 0.04514003372192383, 0.04554940795898438, 0.045273185729980465, 0.04518672180175781, 0.04526671981811523, 0.044993087768554686, 0.04627395248413086, 0.046432865142822265, 0.04520345687866211, 0.04528271865844727, 0.04697727966308594, 0.04511164855957031, 0.045483104705810545, 0.04564406585693359, 0.045199329376220704, 0.04541872024536133, 0.044896705627441406, 0.0447957763671875, 0.04462195205688477, 0.04433715057373047, 0.04459628677368164, 0.04487263870239258, 0.044803199768066404, 0.04488796615600586, 0.044553184509277345, 0.04435353469848633, 0.04423680114746094, 0.04415667343139648, 0.04439648056030274, 0.04478598403930664, 0.04522393417358399, 0.045142017364501956, 0.04612473678588867, 0.044568702697753905, 0.044597438812255856, 0.04452761459350586, 0.045385726928710936, 0.044974079132080076, 0.04501504135131836, 0.045063488006591795, 0.044726974487304685, 0.04430847930908203, 0.04420956802368164, 0.04416969680786133, 0.044603519439697266, 0.044771072387695315, 0.0449169921875, 0.04492281723022461, 0.045217857360839844, 0.044539905548095705, 0.04566134262084961, 0.04495167922973633, 0.04460918426513672, 0.04502617645263672, 0.04463411331176758, 0.044371967315673826, 0.044267520904541016, 0.04431257629394531, 0.04558396911621094, 0.04442323303222656, 0.047282176971435545, 0.0449969596862793, 0.04481833648681641, 0.0448328628540039, 0.04500275039672851, 0.04530153656005859, 0.0451995849609375, 0.045158401489257816, 0.04637475204467773, 0.04511350250244141, 0.0454389762878418, 
0.044969791412353514, 0.04503571319580078, 0.045125633239746096, 0.04528643035888672, 0.04550857543945312, 0.045489055633544925, 0.04565001678466797, 0.04521564865112305, 0.04527299118041992, 0.04515190505981445, 0.045240863800048825, 0.04516659164428711, 0.045402015686035156, 0.04528889465332031, 0.048081024169921875, 0.04557263946533203, 0.04511673736572266, 0.04535532760620117, 0.04586739349365234, 0.04691763305664062, 0.045281280517578126, 0.04632153701782227, 0.04671062469482422, 0.045299999237060545, 0.047202144622802734, 0.04509014511108399, 0.04582073593139648, 0.04575436782836914, 0.046952449798583984, 0.045100639343261716, 0.0449192008972168, 0.044701694488525394, 0.0444026870727539, 0.044214080810546875, 0.04529097747802734, 0.04500143814086914, 0.045258209228515624, 0.04543952178955078, 0.04529148864746094, 0.04507036972045898, 0.04520140838623047, 0.048330753326416016, 0.047104320526123046, 0.045239936828613284, 0.04628313446044922, 0.048363521575927736, 0.0451932144165039, 0.04600012969970703, 0.04501094436645508, 0.04552908706665039, 0.04534611129760742, 0.04507686233520508, 0.04508089447021484, 0.04514406585693359, 0.0452567024230957, 0.045107200622558595, 0.04500275039672851, 0.045764606475830076, 0.04653875350952148, 0.04524979019165039, 0.04511616134643555, 0.04517068862915039, 0.04538163375854492, 0.04524236679077148, 0.04501628875732422, 0.04501174545288086, 0.04534476852416992, 0.045238273620605465, 0.04549836730957031, 0.04582783889770508, 0.04542851257324219, 0.04516707229614258, 0.04504304122924805, 0.04544169616699219, 0.045142017364501956, 0.04522598266601562, 0.04520959854125976, 0.045049854278564457, 0.04514406585693359, 0.045174976348876954, 0.04493894577026367, 0.045246593475341795, 0.04531315231323242, 0.045138622283935545, 0.04498041534423828, 0.04532428741455078, 0.04511948776245117, 0.045192798614501956, 0.045217952728271484, 0.0452402229309082, 0.04529391860961914, 0.04562496185302734, 0.04542892837524414, 0.04530531311035156, 0.045378273010253906, 0.04525801467895508, 0.045144798278808594, 0.04526185607910156, 0.04512246322631836, 0.04514003372192383, 0.044969406127929684, 0.04528745651245117, 0.04681372833251953, 0.045284385681152346, 0.045429054260253905, 0.04580966567993164, 0.04506828689575195, 0.04575414276123047, 0.04485894393920899, 0.04458089447021484, 0.045152542114257815, 0.04488131332397461, 0.04488636779785156, 0.04474303817749024, 0.04470937728881836, 0.044735198974609376, 0.044834815979003906, 0.044566463470458985, 0.044507198333740235, 0.04528287887573242, 0.045252384185791014, 0.044867294311523434, 0.044548545837402344, 0.044964351654052735, 0.044644351959228515, 0.04492287826538086, 0.044380126953125, 0.0450662727355957, 0.04528857421875, 0.04559740829467773, 0.04514831924438477, 0.04530176162719726, 0.045395969390869144, 0.045080257415771485, 0.045357376098632815, 0.04534220886230469, 0.04555417633056641, 0.045254657745361325, 0.045176830291748044, 0.04502937698364258, 0.04580556869506836, 0.04537548828125, 0.04529340744018555, 0.048365726470947265, 0.04534457778930664, 0.04582419204711914, 0.045006366729736326, 0.04493155288696289, 0.044834686279296876, 0.04494348907470703, 0.04480195236206055, 0.04480790328979492, 0.045419937133789064, 0.04494409561157227, 0.04646937561035156, 0.04489011383056641, 0.044844062805175784, 0.044781600952148434, 0.044776382446289065, 0.04470742416381836, 0.045623233795166016, 0.045260478973388675, 0.044772129058837894, 0.04487699127197266, 0.04464060974121094, 0.04496022415161133, 0.045197311401367186, 
0.04469964981079102]",tokens/s,22.158679992103718,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,873.742336,655.294464,0.0,260.046848,258.555392,s,1,7.44559619140625,7.44559619140625,0.0,7.44559619140625,7.44559619140625,7.44559619140625,7.44559619140625,[7.44559619140625],,kWh,1.4648123000000623e-05,1.6085814022668542e-06,4.526670288003409e-06,2.0783374690270886e-05,,MB,1328.021504,751.763456,0.0,341.835776,317.950464,s,14,0.19269519901275634,0.013763942786625455,0.0004346000602619136,0.013681695938110352,0.013778102111816407,0.01431407971382141,0.01510181740760803,"[0.0137838716506958, 0.013700896263122559, 0.013743647575378418, 0.013534432411193847, 0.013509984016418458, 0.013537280082702637, 0.015298751831054688, 0.013539520263671875, 0.013588768005371094, 0.013641023635864257, 0.01368899154663086, 0.013764639854431153, 0.013680255889892579, 0.013683135986328126]",tokens/s,18599.321718247586,kWh,4.0062819423870005e-07,4.4182161689928466e-08,2.0989309582167692e-07,6.547034517503055e-07,tokens/kWh,391016725.6879451,MB,1367.61344,779.026432,0.0,369.098752,317.953024,s,14,9.901269592285155,0.7072335423060825,0.004373440602804263,0.7081146545410156,0.712881365966797,0.7140843505859376,0.7146861083984375,"[0.7070420532226562, 0.7039707641601562, 0.7092022094726562, 0.709187255859375, 0.7008070678710937, 0.704184814453125, 0.70412060546875, 0.7023123779296875, 0.7011494750976562, 0.7095607299804687, 0.7148365478515625, 0.7101968994140625, 0.7110194702148438, 0.7136793212890625]",tokens/s,89.07948539116988,kWh,2.068407336409532e-05,2.281084263019303e-06,7.4988829124638515e-06,3.0464040539578474e-05,tokens/kWh,2068011.9539018879,,s,882,9.894378210067753,0.011218115884430556,0.00020622448943279577,0.01117680025100708,0.011354713726043702,0.011455970811843873,0.012109231624603268,"[0.011076095581054688, 0.011262368202209473, 0.011347519874572753, 0.011610560417175293, 0.011513855934143067, 0.01123641586303711, 0.011499584197998047, 0.011205504417419434, 0.011307007789611816, 0.011177984237670899, 0.011100255966186523, 0.011169280052185059, 0.011192319869995117, 0.011210271835327149, 0.011164192199707032, 0.011237215995788575, 0.01117849636077881, 0.011163647651672364, 0.011311167716979981, 0.011204544067382813, 0.011051008224487305, 0.011173343658447266, 0.011166239738464355, 0.011132927894592285, 0.011118240356445312, 0.011170144081115723, 0.011259903907775879, 0.011276288032531738, 0.011118656158447265, 0.011115455627441406, 0.011142304420471191, 0.011261792182922363, 0.011211968421936036, 0.011199007987976075, 0.011227423667907716, 0.011177984237670899, 0.011155360221862793, 0.01117807960510254, 0.011374784469604491, 0.011120608329772949, 0.01115664005279541, 0.011430624008178712, 0.011270272254943847, 0.011229023933410645, 0.011132415771484374, 0.0111211519241333, 0.011161151885986329, 0.011190303802490234, 0.011147744178771972, 
0.01112451171875, 0.011155872344970704, 0.01120025634765625, 0.011302111625671386, 0.011239263534545898, 0.01121615982055664, 0.011128479957580567, 0.011225088119506836, 0.011241472244262696, 0.01126195240020752, 0.011247008323669434, 0.011185791969299317, 0.011212032318115234, 0.011197919845581055, 0.011031968116760254, 0.011237471580505372, 0.011267775535583497, 0.01126255989074707, 0.011224960327148438, 0.011163999557495117, 0.01113702392578125, 0.011163359642028808, 0.011116671562194824, 0.01107158374786377, 0.011173888206481934, 0.011159968376159669, 0.011200160026550292, 0.011179871559143066, 0.011102368354797363, 0.011190272331237794, 0.011173024177551269, 0.01112559986114502, 0.011188223838806152, 0.011085887908935547, 0.011153056144714355, 0.011153696060180664, 0.011146944046020507, 0.011168416023254395, 0.01114896011352539, 0.011279552459716798, 0.011195520401000976, 0.011116064071655274, 0.011071647644042969, 0.011290592193603516, 0.011249088287353515, 0.011139776229858398, 0.011100064277648926, 0.0110797119140625, 0.011116512298583984, 0.011142175674438476, 0.011146112442016602, 0.011159903526306152, 0.011112192153930663, 0.011161600112915039, 0.011181407928466797, 0.011204768180847167, 0.011188032150268555, 0.011181056022644043, 0.011270048141479493, 0.01114668846130371, 0.011120320320129395, 0.01110524845123291, 0.011148480415344239, 0.011242336273193359, 0.011175871849060059, 0.011154399871826173, 0.011168512344360351, 0.011154848098754883, 0.011149920463562012, 0.011204031944274903, 0.011136832237243653, 0.011098015785217285, 0.011172703742980957, 0.011195551872253419, 0.011279264450073241, 0.011250975608825684, 0.01114793586730957, 0.010966143608093261, 0.011238271713256837, 0.011218943595886231, 0.011265567779541016, 0.011244000434875489, 0.011204159736633301, 0.011133567810058594, 0.011419455528259277, 0.011125920295715332, 0.011131936073303223, 0.011095423698425293, 0.011121088027954102, 0.011118880271911621, 0.0111626558303833, 0.01117289638519287, 0.011117471694946288, 0.011052831649780274, 0.011135968208312988, 0.011195615768432617, 0.011149248123168946, 0.011452383995056152, 0.011191328048706055, 0.011161727905273438, 0.011196127891540527, 0.011161888122558594, 0.011173312187194823, 0.011148639678955078, 0.011248576164245605, 0.011171551704406738, 0.011141152381896972, 0.01109331226348877, 0.011336640357971191, 0.011114496231079102, 0.01337660789489746, 0.01354543972015381, 0.011299936294555665, 0.011167584419250488, 0.011184127807617187, 0.011161184310913086, 0.011164064407348634, 0.01109347152709961, 0.011108896255493165, 0.011145152091979981, 0.011206208229064941, 0.011151455879211425, 0.01110262393951416, 0.011060640335083008, 0.011184639930725097, 0.011251520156860352, 0.01117737579345703, 0.011222240447998046, 0.011107263565063476, 0.011141856193542481, 0.01123356819152832, 0.0112391357421875, 0.011270144462585448, 0.011194368362426758, 0.011272095680236816, 0.011245120048522949, 0.011156000137329102, 0.011124735832214355, 0.011080896377563477, 0.01120524787902832, 0.01093996810913086, 0.011124959945678711, 0.011298208236694337, 0.011147775650024415, 0.011173983573913575, 0.011179840087890625, 0.011210687637329101, 0.01118671989440918, 0.011111807823181153, 0.011077952384948731, 0.011165727615356445, 0.01123532772064209, 0.011938207626342774, 0.012149951934814452, 0.011880096435546874, 0.011303680419921875, 0.011206975936889648, 0.011251423835754394, 0.011130847930908204, 0.011239423751831054, 0.011309056282043458, 0.011202783584594726, 0.011157279968261718, 
0.011116288185119628, 0.011241408348083496, 0.011538496017456054, 0.011747808456420898, 0.011462431907653808, 0.011300864219665528, 0.011228832244873047, 0.011338080406188965, 0.011382399559020996, 0.011356063842773437, 0.011386879920959473, 0.011172320365905762, 0.011101216316223144, 0.011099103927612305, 0.011157343864440918, 0.011126688003540039, 0.011135231971740723, 0.011134783744812011, 0.011132575988769532, 0.011137568473815918, 0.011175647735595703, 0.01107808017730713, 0.011279935836791993, 0.011210399627685547, 0.01121548843383789, 0.011169792175292969, 0.011081727981567382, 0.011184127807617187, 0.011134976387023926, 0.011201696395874023, 0.01105174446105957, 0.011046079635620118, 0.011806816101074218, 0.011182944297790527, 0.011213055610656738, 0.011204128265380859, 0.011132351875305176, 0.011158304214477539, 0.011150848388671876, 0.011131391525268555, 0.010863072395324707, 0.011168864250183106, 0.011160479545593262, 0.011077024459838868, 0.011189184188842773, 0.011129792213439942, 0.011118464469909668, 0.011073535919189453, 0.01107158374786377, 0.011113216400146484, 0.011132927894592285, 0.011313216209411621, 0.011034560203552247, 0.011116031646728516, 0.011315263748168946, 0.011167807579040527, 0.011116352081298828, 0.011067744255065917, 0.011110336303710937, 0.011093503952026367, 0.011084575653076172, 0.011000991821289063, 0.011123583793640137, 0.011098079681396485, 0.011044992446899413, 0.011036512374877929, 0.011085984230041503, 0.011140992164611816, 0.011111807823181153, 0.011158304214477539, 0.01111638355255127, 0.011155263900756836, 0.011221311569213866, 0.01117081642150879, 0.01122764778137207, 0.011155296325683593, 0.011166239738464355, 0.011141119956970215, 0.01111244773864746, 0.011101920127868653, 0.011090208053588867, 0.01109763240814209, 0.011132991790771485, 0.011082400321960449, 0.011036416053771973, 0.011139072418212891, 0.011122688293457032, 0.011053152084350586, 0.011032416343688964, 0.011058624267578126, 0.011375231742858887, 0.011196415901184082, 0.011112607955932618, 0.011081567764282227, 0.011120991706848144, 0.011097760200500488, 0.011055071830749512, 0.01104435157775879, 0.011052607536315918, 0.011096192359924316, 0.01126863956451416, 0.011036479949951172, 0.011039199829101563, 0.011505023956298828, 0.011086144447326661, 0.011063615798950195, 0.011175775527954101, 0.0111494722366333, 0.01114851188659668, 0.011078432083129882, 0.01113702392578125, 0.01133516788482666, 0.011422143936157227, 0.01111251163482666, 0.011115615844726562, 0.011143487930297851, 0.011248224258422852, 0.011177984237670899, 0.011104255676269532, 0.011069439888000488, 0.011134976387023926, 0.011120832443237305, 0.011114303588867187, 0.011102335929870605, 0.011165568351745606, 0.011216896057128906, 0.011184127807617187, 0.011151040077209472, 0.011112832069396972, 0.011198240280151367, 0.011144831657409668, 0.011121343612670899, 0.011129920005798339, 0.011050848007202149, 0.0111212158203125, 0.011121088027954102, 0.011099424362182617, 0.011043071746826172, 0.011063712120056152, 0.011259903907775879, 0.011105888366699219, 0.011126175880432129, 0.011056127548217774, 0.011358207702636718, 0.011191871643066406, 0.01113475227355957, 0.011155167579650878, 0.01110758399963379, 0.011153311729431152, 0.011134847640991211, 0.011098015785217285, 0.011132479667663574, 0.011072256088256837, 0.011171072006225587, 0.011216608047485352, 0.011193152427673339, 0.011216799736022949, 0.0111627197265625, 0.011145600318908691, 0.011142911911010743, 0.011279135704040528, 0.011077216148376465, 
0.011123104095458984, 0.011176063537597656, 0.011224800109863282, 0.011966624259948731, 0.011808511734008788, 0.01118832015991211, 0.011069600105285644, 0.011231231689453124, 0.011163552284240723, 0.011174176216125488, 0.011097184181213379, 0.011164128303527832, 0.011141375541687011, 0.011189472198486329, 0.011144191741943359, 0.01112656021118164, 0.011161600112915039, 0.01144761562347412, 0.011147968292236328, 0.011082847595214844, 0.01121177577972412, 0.011138879776000976, 0.011077280044555665, 0.011066880226135254, 0.01104582405090332, 0.011315199851989746, 0.011132672309875488, 0.011165280342102051, 0.011123359680175781, 0.011280320167541503, 0.011069503784179688, 0.011101247787475586, 0.01113491153717041, 0.011019264221191406, 0.010992799758911133, 0.011135968208312988, 0.011116415977478027, 0.011171456336975097, 0.011161984443664551, 0.011122688293457032, 0.011292896270751952, 0.011304991722106934, 0.011419679641723633, 0.011161439895629883, 0.011034496307373047, 0.011208703994750976, 0.011116543769836425, 0.011397151947021485, 0.011079744338989258, 0.011035584449768066, 0.01118511962890625, 0.011155455589294434, 0.011366592407226563, 0.011513664245605468, 0.011163552284240723, 0.011213215827941894, 0.011220831871032715, 0.011170944213867188, 0.01108563232421875, 0.01103279972076416, 0.011076288223266601, 0.01105510425567627, 0.011075584411621094, 0.01100595188140869, 0.01112063980102539, 0.01113702392578125, 0.01108902359008789, 0.010907103538513183, 0.011314784049987793, 0.011164608001708985, 0.011249664306640626, 0.011298368453979492, 0.011282719612121583, 0.011314911842346191, 0.011196096420288086, 0.011092512130737305, 0.011155679702758789, 0.011147263526916504, 0.011177184104919434, 0.011105055809020997, 0.011063615798950195, 0.011162400245666504, 0.011164544105529785, 0.011136992454528809, 0.011029727935791015, 0.011107168197631835, 0.011186400413513183, 0.011175935745239257, 0.011142368316650391, 0.0111942720413208, 0.011194751739501954, 0.011288640022277831, 0.01113916778564453, 0.01108950424194336, 0.011071423530578613, 0.01110598373413086, 0.01116204833984375, 0.011046527862548828, 0.011065855979919433, 0.01108579158782959, 0.01112054443359375, 0.011139552116394042, 0.01107968044281006, 0.01108790397644043, 0.011349696159362793, 0.01118015956878662, 0.011167903900146485, 0.011018048286437989, 0.011071680068969726, 0.011235424041748047, 0.011138976097106934, 0.011073535919189453, 0.01103667163848877, 0.011122688293457032, 0.011087871551513672, 0.01115664005279541, 0.011027487754821777, 0.011109760284423828, 0.011119359970092774, 0.011116512298583984, 0.011089632034301758, 0.011040767669677735, 0.011094016075134277, 0.011177311897277832, 0.011127552032470704, 0.011136927604675294, 0.011104191780090332, 0.011220704078674317, 0.011241824150085448, 0.011138208389282226, 0.010908384323120118, 0.011159616470336914, 0.011165696144104004, 0.01115340805053711, 0.011192319869995117, 0.011218976020812989, 0.01112700843811035, 0.011105216026306153, 0.011131711959838867, 0.011130816459655761, 0.01114527988433838, 0.011108351707458495, 0.011064352035522461, 0.011146080017089844, 0.011208831787109375, 0.01113868808746338, 0.011125408172607421, 0.011083488464355468, 0.011105343818664551, 0.011084704399108887, 0.01109216022491455, 0.011091327667236328, 0.011082207679748535, 0.011133119583129883, 0.011097920417785645, 0.01099731159210205, 0.011098560333251952, 0.011212800025939941, 0.011096063613891602, 0.011041088104248047, 0.011042495727539063, 0.01113491153717041, 0.011206720352172852, 
0.011126784324645997, 0.011024383544921875, 0.011096384048461914, 0.011095744132995605, 0.011095135688781739, 0.011105216026306153, 0.011025376319885254, 0.011086848258972168, 0.011115551948547363, 0.011121536254882812, 0.011068991661071777, 0.011235872268676757, 0.011185503959655762, 0.011109024047851563, 0.01108137607574463, 0.011168224334716797, 0.011136896133422851, 0.011165696144104004, 0.011222271919250489, 0.011217663764953614, 0.011200032234191894, 0.01117846393585205, 0.011150783538818359, 0.011092543601989746, 0.011091263771057128, 0.011127488136291504, 0.011079232215881347, 0.011140735626220703, 0.011143808364868163, 0.011102272033691405, 0.012161312103271484, 0.012273664474487305, 0.011263744354248047, 0.011227680206298827, 0.011314463615417481, 0.011237536430358887, 0.011233152389526367, 0.011157695770263671, 0.011190496444702148, 0.011065343856811523, 0.011231231689453124, 0.01112883186340332, 0.011180031776428222, 0.01112883186340332, 0.0110632963180542, 0.011097503662109374, 0.011168064117431641, 0.011129119873046874, 0.01114521598815918, 0.011099200248718261, 0.011150272369384765, 0.011188575744628906, 0.011142687797546387, 0.011032575607299805, 0.011118720054626465, 0.011118528366088867, 0.011081888198852538, 0.011152671813964845, 0.011080320358276368, 0.01113702392578125, 0.011149439811706543, 0.01114896011352539, 0.011104479789733886, 0.011177984237670899, 0.011132224082946778, 0.011179743766784668, 0.011103551864624023, 0.011110239982604981, 0.011153216361999512, 0.011177215576171875, 0.011216768264770508, 0.01143513584136963, 0.011545472145080567, 0.011807519912719727, 0.011300959587097169, 0.011218943595886231, 0.011212608337402345, 0.01118227195739746, 0.011276288032531738, 0.011244640350341797, 0.011350048065185548, 0.0113887996673584, 0.011833791732788085, 0.011519871711730956, 0.011254464149475098, 0.011156831741333008, 0.011253631591796875, 0.011227935791015624, 0.011280384063720703, 0.011255807876586914, 0.0113154878616333, 0.011280096054077149, 0.011364352226257325, 0.010883904457092286, 0.011250592231750489, 0.01125222396850586, 0.011202272415161132, 0.01121459197998047, 0.011183296203613281, 0.011247008323669434, 0.011286591529846192, 0.011441599845886231, 0.011355008125305176, 0.01125699234008789, 0.011212736129760743, 0.011317215919494628, 0.011311103820800781, 0.011316160202026367, 0.011280223846435547, 0.011380895614624024, 0.011268095970153809, 0.011390624046325683, 0.011416095733642578, 0.011365407943725587, 0.01129747200012207, 0.011594079971313476, 0.01141500759124756, 0.011315327644348144, 0.011350175857543946, 0.011337087631225587, 0.011407903671264649, 0.011331680297851563, 0.011286527633666991, 0.011272128105163575, 0.011268320083618165, 0.011337759971618653, 0.01130844783782959, 0.011215104103088378, 0.01117568016052246, 0.01127667236328125, 0.011274271965026856, 0.01133568000793457, 0.011290623664855956, 0.011243519783020019, 0.011265279769897461, 0.011279104232788086, 0.011283552169799805, 0.011302016258239745, 0.011251487731933595, 0.011150367736816406, 0.011314399719238281, 0.011396863937377929, 0.011318431854248048, 0.01128758430480957, 0.011228351593017579, 0.011156000137329102, 0.01125385570526123, 0.01199078369140625, 0.013234175682067872, 0.011904895782470703, 0.011393407821655273, 0.011343615531921387, 0.011288415908813477, 0.011235744476318359, 0.011180031776428222, 0.01113923168182373, 0.011018239974975585, 0.011105824470520019, 0.011176416397094727, 0.011188223838806152, 0.01123465633392334, 0.011199199676513671, 
0.011159647941589355, 0.011240832328796387, 0.011211071968078613, 0.011261856079101563, 0.01117414379119873, 0.01112883186340332, 0.01145036792755127, 0.011197823524475098, 0.01116812801361084, 0.011135231971740723, 0.011106623649597167, 0.01111945629119873, 0.011178048133850097, 0.011193087577819824, 0.011187616348266602, 0.01114192008972168, 0.011220831871032715, 0.011202239990234375, 0.011241791725158691, 0.01114521598815918, 0.011069439888000488, 0.011203680038452148, 0.011197279930114747, 0.011151424407958984, 0.011168928146362304, 0.01112067222595215, 0.011201343536376952, 0.011333632469177245, 0.011187616348266602, 0.011180447578430177, 0.011203904151916504, 0.01118502426147461, 0.011179360389709472, 0.011168448448181153, 0.011125856399536133, 0.011146112442016602, 0.011206015586853028, 0.011204511642456054, 0.011213631629943847, 0.01115135955810547, 0.01116096019744873, 0.011191007614135742, 0.012163071632385255, 0.012752703666687012, 0.01135206413269043, 0.012099679946899413, 0.011315103530883788, 0.011405311584472656, 0.01136844825744629, 0.011192319869995117, 0.01131929588317871, 0.011407360076904297, 0.01141385555267334, 0.011351712226867676, 0.011298368453979492, 0.011217344284057617, 0.011321344375610352, 0.01100217628479004, 0.01123401641845703, 0.011242303848266601, 0.011219136238098145, 0.011225407600402831, 0.011216544151306152, 0.011128255844116211, 0.011269760131835938, 0.011223679542541503, 0.011233856201171875, 0.011245120048522949, 0.011219136238098145, 0.011243519783020019, 0.011286527633666991, 0.011376383781433106, 0.011336064338684082, 0.011370368003845215, 0.011194368362426758, 0.011198047637939454, 0.011262368202209473, 0.011236448287963868, 0.011231295585632324, 0.011147199630737304, 0.011567551612854004, 0.011302495956420898, 0.011270272254943847, 0.011235487937927247, 0.011159199714660645, 0.01115231990814209, 0.011304767608642578, 0.011439807891845703, 0.011469311714172363, 0.011415552139282227, 0.011333696365356445, 0.011376704216003418, 0.011323264122009277, 0.011560959815979004, 0.011286751747131347, 0.011271552085876465, 0.011264512062072754, 0.011220895767211914, 0.011310943603515625, 0.011438207626342773, 0.011380767822265625, 0.011285728454589843, 0.011285311698913574, 0.011253727912902831, 0.011280384063720703, 0.011287903785705567, 0.011309727668762207, 0.011271488189697266, 0.011267871856689453, 0.011264415740966796, 0.011291135787963867, 0.011268095970153809, 0.011257856369018555, 0.01122697639465332, 0.011337216377258302, 0.011264927864074707, 0.011236384391784668, 0.011285216331481933, 0.01122326374053955, 0.011181856155395508, 0.010975008010864257, 0.011221504211425782, 0.011208864212036132, 0.011300671577453613, 0.011297151565551758, 0.011308863639831543, 0.011216575622558594, 0.012291680335998536, 0.011283136367797852, 0.011202272415161132, 0.011284992218017579, 0.01133561611175537, 0.01123423957824707, 0.011239456176757813, 0.01119324779510498, 0.011208800315856934, 0.011213824272155762, 0.011196415901184082, 0.011195199966430664, 0.011102304458618165, 0.011202560424804688, 0.011193408012390136, 0.011226048469543457, 0.01127030372619629, 0.011108192443847656, 0.011165696144104004, 0.011198431968688964, 0.011208767890930177, 0.011136799812316895, 0.011097824096679688, 0.01131340789794922, 0.011186495780944824, 0.011238975524902344, 0.011170368194580078, 0.011097567558288574, 0.011175583839416504, 0.011217568397521973, 0.011306143760681153, 0.011449055671691894, 0.011587712287902832, 0.011535455703735351, 0.01160700798034668, 
0.01141708755493164, 0.01141097640991211, 0.011284704208374024, 0.011248671531677246, 0.011232959747314452, 0.011296992301940917, 0.011492159843444824, 0.011616959571838378, 0.011693599700927735, 0.011696864128112792, 0.011456159591674805, 0.0113155517578125, 0.011251999855041503, 0.011301888465881347, 0.01131107234954834, 0.011406399726867677, 0.011568832397460937, 0.011503647804260254, 0.011517056465148926, 0.011509951591491699, 0.011436256408691406]",tokens/s,89.14152878273295,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,4293.218304,4878.958592,0.0,4483.710976,4465.672704,s,1,10.81604296875,10.81604296875,0.0,10.81604296875,10.81604296875,10.81604296875,10.81604296875,[10.81604296875],,kWh,0.00010374368459583061,1.1436280005859159e-05,3.232752586200538e-05,0.00014750749046369517,,MB,2153.508864,5302.583296,0.0,4892.655616,4841.339904,s,10,1.967754409790039,0.1967754409790039,0.0007356678215833901,0.19680464935302733,0.19747964477539062,0.1976221923828125,0.19773623046875,"[0.1949957733154297, 0.19676002502441406, 0.1961219482421875, 0.19675791931152345, 0.19684927368164062, 0.19744796752929689, 0.19728854370117188, 0.19710079956054688, 0.19666741943359375, 0.19776473999023436]",tokens/s,1300.97535915224,kWh,5.7379459151963324e-06,6.325698364105242e-07,3.81651176780382e-06,1.0187027519410678e-05,tokens/kWh,25129999.846590154,MB,2159.16544,5470.355456,0.0,5060.427776,5012.931584,s,10,18.726147827148434,1.8726147827148438,0.00503696046827301,1.8736304321289063,1.8771864624023438,1.8791170349121094,1.880661492919922,"[1.8708316650390624, 1.881047607421875, 1.8662344970703124, 1.8717244873046874, 1.872715576171875, 1.8745452880859375, 1.8745848388671875, 1.8767574462890626, 1.8753662109375, 1.8623402099609374]",tokens/s,33.6427975371769,kWh,5.4658738813137084e-05,6.028875793492107e-06,3.62540202885962e-05,9.69416348952254e-05,tokens/kWh,649875.5675834274,,s,630,18.72291741943359,0.02971891653878348,0.0005142623723293799,0.029654208183288573,0.03003931541442871,0.030243571472167968,0.0314014969444275,"[0.030463424682617188, 0.02997715187072754, 0.029487104415893556, 0.029668352127075196, 0.029520896911621092, 0.02987411117553711, 0.029372447967529296, 0.029342784881591796, 0.029365215301513672, 0.029259775161743166, 0.029061119079589845, 0.02939244842529297, 0.02922540855407715, 0.0293787841796875, 0.029474592208862303, 0.029542400360107423, 0.02950262451171875, 0.029974592208862304, 0.02957596778869629, 0.029585407257080077, 0.029521087646484374, 0.029368831634521485, 0.029324928283691407, 0.02948780822753906, 0.029716480255126954, 0.02940108871459961, 0.029578912734985353, 0.029714784622192382, 0.029619359970092775, 0.02945110321044922, 0.029661184310913087, 0.030052352905273437, 0.029980064392089844, 0.029922943115234375, 0.029886911392211914, 0.029596416473388672, 0.02959119987487793, 0.029589632034301757, 0.02946601676940918, 
0.029612640380859374, 0.029789663314819335, 0.02962076759338379, 0.029616128921508788, 0.0297205753326416, 0.029644800186157227, 0.029851648330688478, 0.029791711807250976, 0.029909536361694335, 0.029647008895874023, 0.029645919799804688, 0.029611904144287108, 0.029639392852783202, 0.029651103973388673, 0.0299683837890625, 0.029793920516967772, 0.02988889694213867, 0.029861888885498046, 0.02996428871154785, 0.030437023162841796, 0.030390623092651368, 0.0302259521484375, 0.03008355140686035, 0.030171295166015626, 0.03016499137878418, 0.0297574405670166, 0.02948067283630371, 0.02966352081298828, 0.02950783920288086, 0.029498655319213866, 0.03165232086181641, 0.030357503890991212, 0.029487104415893556, 0.02939904022216797, 0.029232736587524413, 0.029388608932495116, 0.029538719177246094, 0.029498912811279296, 0.029464767456054686, 0.029190303802490235, 0.02938902473449707, 0.02940732765197754, 0.029429471969604493, 0.029329311370849608, 0.030562688827514648, 0.029421567916870117, 0.03015235137939453, 0.029454687118530273, 0.02939289665222168, 0.029599647521972656, 0.029658271789550782, 0.029379520416259765, 0.029703903198242187, 0.029581600189208985, 0.02959494400024414, 0.03010630416870117, 0.03567001724243164, 0.02977996826171875, 0.030162975311279296, 0.02968556785583496, 0.02983923149108887, 0.0302840633392334, 0.02997452735900879, 0.029942815780639648, 0.030874528884887696, 0.02984556770324707, 0.029734527587890625, 0.02974883270263672, 0.029854496002197264, 0.029927200317382812, 0.029898815155029297, 0.029691743850708007, 0.029566688537597655, 0.02968022346496582, 0.02954854393005371, 0.02961395263671875, 0.029661312103271484, 0.029699167251586913, 0.029577695846557617, 0.02983475112915039, 0.029743936538696288, 0.029734432220458986, 0.02995599937438965, 0.030107999801635744, 0.029877983093261718, 0.030009151458740235, 0.029821760177612306, 0.030220544815063477, 0.029968000411987303, 0.029748928070068358, 0.02934351921081543, 0.029317312240600586, 0.029285087585449218, 0.02937446403503418, 0.029433727264404297, 0.029251712799072266, 0.029417472839355467, 0.02936422348022461, 0.029336896896362305, 0.029158079147338867, 0.029367712020874022, 0.02915705680847168, 0.02933145523071289, 0.02951817512512207, 0.02954297637939453, 0.02937446403503418, 0.02937766456604004, 0.029369216918945312, 0.02937651252746582, 0.029489152908325194, 0.029519712448120117, 0.029554847717285157, 0.029666303634643554, 0.029844640731811523, 0.029589344024658203, 0.029580703735351564, 0.02998918342590332, 0.029819168090820313, 0.029626367568969726, 0.02977177619934082, 0.029587200164794922, 0.029681663513183593, 0.029933631896972655, 0.029696191787719727, 0.02948691177368164, 0.029687999725341797, 0.029479936599731447, 0.02957200050354004, 0.029655136108398438, 0.029679616928100585, 0.02964406394958496, 0.02976201629638672, 0.029610240936279297, 0.02959516716003418, 0.029587936401367188, 0.029462528228759766, 0.02960758399963379, 0.02954070472717285, 0.029824480056762696, 0.029671167373657225, 0.029816768646240235, 0.029666080474853515, 0.02974959945678711, 0.02971343994140625, 0.029887071609497072, 0.030062496185302736, 0.030080448150634764, 0.0299748477935791, 0.03041868782043457, 0.02974176025390625, 0.030338783264160157, 0.029975135803222655, 0.029693952560424806, 0.029526079177856445, 0.029370304107666015, 0.029419456481933594, 0.02934121513366699, 0.02937091255187988, 0.029210432052612305, 0.029315263748168945, 0.029294559478759766, 0.029294015884399414, 0.02924995231628418, 0.02929635238647461, 
0.02936675262451172, 0.029458431243896483, 0.02953545570373535, 0.029438144683837892, 0.029368928909301758, 0.02937651252746582, 0.029474815368652343, 0.02953215980529785, 0.029687328338623045, 0.029809120178222657, 0.029884416580200194, 0.029740415573120117, 0.029770143508911134, 0.029775999069213868, 0.02981488037109375, 0.0298024959564209, 0.029886240005493163, 0.02981091117858887, 0.02994748878479004, 0.030706079483032226, 0.029882368087768556, 0.029784128189086913, 0.02962761688232422, 0.02969468879699707, 0.02982089614868164, 0.029847583770751952, 0.029740575790405274, 0.029784032821655274, 0.02967807960510254, 0.02968550491333008, 0.02974745559692383, 0.029691743850708007, 0.029719999313354492, 0.02967625617980957, 0.029716480255126954, 0.02954035186767578, 0.029464000701904296, 0.029786687850952148, 0.029861888885498046, 0.029764896392822267, 0.030147296905517578, 0.029947776794433594, 0.0299234561920166, 0.030175199508666994, 0.029936832427978517, 0.029846368789672853, 0.02987014389038086, 0.029953344345092774, 0.030071168899536132, 0.03019980812072754, 0.03004524803161621, 0.029596607208251954, 0.02947452735900879, 0.02925596809387207, 0.029263168334960937, 0.029259967803955077, 0.02928201675415039, 0.03066540718078613, 0.030205856323242186, 0.02945350456237793, 0.029455360412597657, 0.02936627197265625, 0.029431039810180665, 0.02947465515136719, 0.02964374351501465, 0.02973075294494629, 0.029511680603027345, 0.029404800415039064, 0.029428096771240236, 0.029472768783569334, 0.029739007949829102, 0.029495296478271486, 0.02940108871459961, 0.02942073631286621, 0.029562911987304687, 0.029500192642211914, 0.029483007431030273, 0.029526016235351563, 0.029822368621826172, 0.031152736663818358, 0.03197337532043457, 0.030089088439941406, 0.029808256149291994, 0.02969584083557129, 0.029796735763549805, 0.0296058235168457, 0.029578847885131834, 0.029682432174682617, 0.02958745574951172, 0.029705663681030274, 0.0295503044128418, 0.029843328475952148, 0.029875167846679686, 0.02973801612854004, 0.029682655334472657, 0.029601791381835937, 0.02959116744995117, 0.029606239318847656, 0.029809823989868166, 0.029680511474609376, 0.02960383987426758, 0.029672447204589843, 0.029692928314208986, 0.029892608642578124, 0.029962240219116212, 0.029845504760742186, 0.029849599838256836, 0.029868032455444334, 0.029661184310913087, 0.029749248504638674, 0.029626367568969726, 0.029787872314453127, 0.030157215118408204, 0.029927167892456055, 0.029685375213623046, 0.029537408828735352, 0.029422527313232423, 0.02955923271179199, 0.02944041633605957, 0.02978553581237793, 0.02952016067504883, 0.029628704071044922, 0.029791711807250976, 0.02954044723510742, 0.029623743057250976, 0.029671968460083006, 0.02961984062194824, 0.02972115135192871, 0.029540479660034178, 0.029546655654907227, 0.02968899154663086, 0.02990729522705078, 0.029538047790527343, 0.029409088134765626, 0.029567935943603515, 0.029601568222045897, 0.029661407470703127, 0.029683935165405274, 0.029651840209960936, 0.029665983200073243, 0.030025760650634767, 0.029853887557983398, 0.029638656616210936, 0.030006944656372072, 0.030037631988525392, 0.03003865623474121, 0.029636703491210937, 0.029767295837402345, 0.02947929573059082, 0.029549728393554686, 0.029765727996826172, 0.030672895431518556, 0.02994374465942383, 0.029848384857177734, 0.02997248077392578, 0.02984671974182129, 0.02972502326965332, 0.029561151504516603, 0.029574432373046876, 0.030085599899291993, 0.0296942081451416, 0.02949488067626953, 0.029628576278686522, 0.02966364860534668, 
0.029800447463989257, 0.029765920639038088, 0.02967932891845703, 0.029655040740966795, 0.030007295608520508, 0.029787296295166014, 0.02979088020324707, 0.02997881507873535, 0.030000511169433593, 0.03023948860168457, 0.029874048233032226, 0.03025779151916504, 0.029765888214111327, 0.02946409606933594, 0.029403615951538085, 0.029388799667358398, 0.029396991729736328, 0.02940108871459961, 0.02939084815979004, 0.030436704635620117, 0.029624576568603515, 0.02939126396179199, 0.02950953674316406, 0.02954457664489746, 0.02939695930480957, 0.02944112014770508, 0.029403520584106446, 0.02946124839782715, 0.029705408096313477, 0.029509311676025392, 0.029499391555786132, 0.030738784790039064, 0.03150310325622559, 0.029554496765136717, 0.02950553512573242, 0.029439424514770506, 0.030032032012939452, 0.030224992752075196, 0.029632320404052736, 0.029750463485717773, 0.029819711685180664, 0.02981068801879883, 0.02978611183166504, 0.029845279693603517, 0.029771455764770506, 0.029626848220825196, 0.029556800842285156, 0.030461439132690428, 0.029742975234985352, 0.029591936111450196, 0.029703487396240236, 0.02950035285949707, 0.02961020851135254, 0.029734687805175783, 0.029650943756103516, 0.02982863998413086, 0.02981936073303223, 0.029640703201293944, 0.029689855575561523, 0.029664384841918946, 0.02975993537902832, 0.030023487091064453, 0.029694591522216797, 0.029731840133666993, 0.029682687759399414, 0.02981385612487793, 0.02986422348022461, 0.03003865623474121, 0.03014656066894531, 0.029828384399414064, 0.029839296340942383, 0.03017398452758789, 0.029609535217285158, 0.02949920082092285, 0.030246912002563478, 0.029726720809936522, 0.029381727218627928, 0.02916854476928711, 0.02918809509277344, 0.029148576736450195, 0.029153888702392577, 0.029728511810302734, 0.029505056381225588, 0.02942639923095703, 0.029336959838867187, 0.0294017276763916, 0.029336767196655275, 0.02924012756347656, 0.02956492805480957, 0.029485055923461914, 0.02950137519836426, 0.029974592208862304, 0.0315043830871582, 0.029329408645629884, 0.02969183921813965, 0.02966534423828125, 0.029288063049316405, 0.02972915267944336, 0.02972003173828125, 0.029559328079223634, 0.029476863861083984, 0.02941244888305664, 0.02957814407348633, 0.029460479736328125, 0.029335552215576172, 0.030482431411743165, 0.029553760528564454, 0.029527231216430663, 0.029591264724731444, 0.029560319900512694, 0.02962483215332031, 0.029701919555664064, 0.029538112640380858, 0.029566719055175782, 0.029429952621459962, 0.02949718475341797, 0.029917823791503907, 0.029887775421142578, 0.02993388748168945, 0.02995756721496582, 0.02964784049987793, 0.029601791381835937, 0.029657087326049804, 0.02972211265563965, 0.029694400787353515, 0.029782079696655275, 0.02979430389404297, 0.02979430389404297, 0.02999295997619629, 0.02995622444152832, 0.030277664184570313, 0.030340959548950195, 0.03747840118408203, 0.02978179168701172, 0.029559007644653322, 0.029623359680175782, 0.029634559631347656, 0.03012393569946289, 0.0296092472076416, 0.02927804756164551, 0.02937651252746582, 0.029578432083129883, 0.02956867218017578, 0.029720703125, 0.02976153564453125, 0.02959699249267578, 0.029571775436401368, 0.029493247985839844, 0.029324800491333007, 0.029482656478881836, 0.02980521583557129, 0.029761728286743165, 0.02978767967224121, 0.02975699234008789, 0.029624799728393554, 0.029514144897460938, 0.02951580810546875, 0.029648096084594726, 0.02998147201538086, 0.029929471969604493, 0.029378175735473633, 0.031972063064575194, 0.02960758399963379, 0.0294420166015625, 0.02950761604309082, 
0.02973695945739746, 0.02973695945739746, 0.029633920669555665, 0.029600383758544922, 0.029665279388427734, 0.030051488876342774, 0.02972870445251465, 0.02974198341369629, 0.029487104415893556, 0.029531167984008788, 0.02965760040283203, 0.030472415924072266, 0.029853055953979492, 0.029893503189086915, 0.02978611183166504, 0.029616128921508788, 0.02960588836669922, 0.02954444885253906, 0.029560831069946288, 0.029695264816284178, 0.02985763168334961, 0.030048831939697266, 0.029929792404174805, 0.02985887908935547, 0.02983203125, 0.029681535720825197, 0.030808544158935545, 0.029704992294311523, 0.0297706241607666, 0.02983247947692871, 0.029751327514648436, 0.029704992294311523, 0.029666847229003906, 0.02954083251953125, 0.030657567977905274, 0.030111263275146485, 0.029600223541259765, 0.029256927490234376, 0.02918070411682129, 0.029466272354125977, 0.029208927154541015, 0.029173759460449217, 0.02924742317199707, 0.029104192733764647, 0.029269472122192382, 0.029223455429077148, 0.02936832046508789, 0.029462528228759766, 0.030314367294311525, 0.02930240058898926, 0.02929100799560547, 0.029283903121948243, 0.02921721649169922, 0.02998636817932129, 0.030140800476074217, 0.029898815155029297, 0.02952720069885254, 0.02938697624206543, 0.02930956840515137, 0.029259775161743166, 0.02918809509277344, 0.029489152908325194, 0.029378559112548826, 0.029509632110595704, 0.029530208587646486, 0.029673376083374024, 0.029671039581298828, 0.02971891212463379, 0.029877504348754882, 0.030225151062011717, 0.02975667190551758, 0.02962713623046875, 0.029664928436279298, 0.029505887985229493, 0.02953011131286621, 0.02958131217956543, 0.02954444885253906, 0.029593599319458007, 0.02949497604370117, 0.029495616912841797, 0.029560831069946288, 0.02977916717529297, 0.02965337562561035, 0.02960742378234863, 0.029608863830566406, 0.029591552734375, 0.029585407257080077, 0.029677568435668947, 0.02958060836791992, 0.029636831283569337, 0.029591936111450196, 0.02961008071899414, 0.029714431762695313, 0.029587392807006837, 0.029589567184448242, 0.02960383987426758, 0.02959769630432129, 0.029845312118530275]",tokens/s,33.64860218558069,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,7137.8944,7948.075008,0.0,7545.552896,7295.865344,s,1,12.95448046875,12.95448046875,0.0,12.95448046875,12.95448046875,12.95448046875,12.95448046875,[12.95448046875],,kWh,0.00016743833187080856,1.8462341399461568e-05,5.111948534000654e-05,0.00023702015861027666,,MB,3037.04064,8264.74496,0.0,7847.542784,7548.649984,s,10,3.316819793701172,0.3316819793701171,0.0008051522959342814,0.33153733825683596,0.33236585998535156,0.33292252349853513,0.33336785430908206,"[0.33030712890625, 0.33110287475585937, 0.3312548217773438, 0.3316152954101563, 0.33145938110351564, 0.33127334594726565, 0.3322001953125, 0.3318854064941406, 0.3322421569824219, 
0.33347918701171875]",tokens/s,771.8236621903865,kWh,9.675818259408333e-06,1.067061854758999e-06,6.422621625548182e-06,1.7165501739715516e-05,tokens/kWh,14913633.395736832,MB,3046.72768,8558.34624,0.0,8141.144064,7829.444096,s,10,27.00395458984375,2.7003954589843753,0.00434515275438668,2.6991160888671875,2.7055049072265627,2.7059224975585936,2.7062565698242187,"[2.694590087890625, 2.69827490234375, 2.694842041015625, 2.699887939453125, 2.69834423828125, 2.696543212890625, 2.704648193359375, 2.706340087890625, 2.705412109375, 2.70507177734375]",tokens/s,23.329916287036877,kWh,7.888963691225842e-05,8.701633787767827e-06,5.2398730987052615e-05,0.0001399900016870789,tokens/kWh,450032.1397296969,,s,630,26.99833569717408,0.04285450110662552,0.00032490259272692214,0.042846975326538084,0.04316139678955078,0.04328394660949707,0.04414307640075684,"[0.04259209442138672, 0.04243865585327149, 0.04210483169555664, 0.04221747207641602, 0.04262604904174805, 0.04277116775512695, 0.042283294677734375, 0.042336254119873046, 0.042471424102783206, 0.04245913696289062, 0.042625022888183595, 0.04238131332397461, 0.04271420669555664, 0.0428388786315918, 0.043075519561767577, 0.042425662994384765, 0.043162433624267575, 0.042702465057373046, 0.04269279861450195, 0.04245443344116211, 0.042522911071777345, 0.04266649627685547, 0.04287670516967773, 0.04273907089233398, 0.042775390625, 0.04262448120117188, 0.042409664154052736, 0.04240060806274414, 0.04281753540039063, 0.042790912628173826, 0.0429749755859375, 0.04282803344726562, 0.04314726257324219, 0.04255702209472656, 0.04254966354370117, 0.042881023406982424, 0.04253900909423828, 0.04271664047241211, 0.042834400177001956, 0.042942527770996095, 0.04270048141479492, 0.04282988739013672, 0.04271948623657226, 0.04274166488647461, 0.04263081741333008, 0.04264595031738281, 0.04302643203735351, 0.042972320556640624, 0.042833793640136716, 0.043020320892333985, 0.043036670684814454, 0.0429634895324707, 0.04295516967773438, 0.04291923141479492, 0.04295948791503906, 0.04315331268310547, 0.04300163269042969, 0.042987903594970706, 0.043038719177246096, 0.043089313507080076, 0.04316543960571289, 0.04319523239135742, 0.043030529022216796, 0.0427586555480957, 0.04243865585327149, 0.04224204635620117, 0.042444801330566405, 0.04242432022094727, 0.042551296234130856, 0.04247552108764648, 0.04302643203735351, 0.042590145111083985, 0.04233631896972656, 0.04230752182006836, 0.04257798385620117, 0.04260851287841797, 0.04269068908691406, 0.042753406524658207, 0.042601089477539066, 0.04261999893188476, 0.042861473083496096, 0.04269776153564453, 0.042392543792724606, 0.042323841094970706, 0.042657920837402344, 0.04288265609741211, 0.04262934494018555, 0.04261088180541992, 0.04277657699584961, 0.04263727951049805, 0.04253699111938476, 0.042945598602294924, 0.04287993621826172, 0.04463206481933594, 0.043433982849121096, 0.04255478286743164, 0.042543712615966796, 0.042813438415527344, 0.042671585083007814, 0.042861087799072266, 0.04299763107299805, 0.04273139190673828, 0.04280960083007813, 0.04319232177734375, 0.043055103302001956, 0.042889217376708984, 0.04282572937011719, 0.042625022888183595, 0.04300595092773438, 0.043069438934326174, 0.04296636962890625, 0.04298400115966797, 0.042815582275390625, 0.04266774368286133, 0.04296323013305664, 0.04298137664794922, 0.04355865478515625, 0.04296934509277344, 0.04299305725097656, 0.04303523254394531, 0.04293222427368164, 0.04301004791259765, 0.04312630462646484, 0.04359215927124024, 0.043053054809570314, 0.043186046600341796, 0.04250435256958008, 
0.04232809448242188, 0.04237516784667969, 0.0425164794921875, 0.042534912109375, 0.04245503997802735, 0.042274494171142575, 0.04260076904296875, 0.04236083221435547, 0.042262527465820314, 0.042423519134521484, 0.04256208038330078, 0.04277478408813477, 0.04284153747558594, 0.04240851211547852, 0.04251372909545898, 0.042603199005126956, 0.04265926361083985, 0.04277920150756836, 0.04269055938720703, 0.04256051254272461, 0.04276044845581055, 0.042574592590332035, 0.042543102264404296, 0.042856449127197264, 0.042491233825683594, 0.04259088134765625, 0.042968990325927735, 0.042643104553222656, 0.0427047348022461, 0.042699359893798826, 0.0425098876953125, 0.04254585647583008, 0.04298112106323242, 0.04298060989379883, 0.042912513732910156, 0.04252467346191406, 0.042561153411865234, 0.04274214553833008, 0.042858238220214846, 0.04294255828857422, 0.042893470764160155, 0.04271091079711914, 0.0429442253112793, 0.04317839813232422, 0.04290969467163086, 0.04284956741333008, 0.04261552047729492, 0.04298137664794922, 0.04302438354492188, 0.04291923141479492, 0.042840766906738284, 0.04272073745727539, 0.042952606201171875, 0.04302707290649414, 0.042968734741210934, 0.0430978889465332, 0.04304767990112305, 0.04316128158569336, 0.04304089736938477, 0.044859390258789066, 0.042987518310546875, 0.04328348922729492, 0.04323984146118164, 0.04264550399780274, 0.04243999862670898, 0.042472129821777345, 0.04244070434570312, 0.04225228881835937, 0.042412033081054686, 0.0423026237487793, 0.0423199348449707, 0.042575904846191406, 0.04244351959228516, 0.04280115127563477, 0.04265977478027344, 0.04249353790283203, 0.04283168029785156, 0.042423999786376954, 0.04256582260131836, 0.0425494384765625, 0.04248380661010742, 0.04263731384277344, 0.042847774505615235, 0.04264604949951172, 0.04291628646850586, 0.043030529022216796, 0.04295884704589844, 0.04293632125854492, 0.04269055938720703, 0.042626335144042966, 0.04270902252197266, 0.04263801574707031, 0.042858497619628906, 0.04265369415283203, 0.042897407531738284, 0.042979328155517575, 0.04307263946533203, 0.04296086502075196, 0.04296166229248047, 0.04318624114990234, 0.04391289520263672, 0.04291587066650391, 0.04295923233032226, 0.04306249618530274, 0.04304348754882813, 0.04318988800048828, 0.0431907844543457, 0.043147136688232425, 0.04302764892578125, 0.042965953826904296, 0.04276838302612305, 0.04291340637207031, 0.042838401794433593, 0.04301824188232422, 0.04310220718383789, 0.043019584655761715, 0.042994174957275394, 0.04302048110961914, 0.04301372909545898, 0.043090335845947264, 0.042967041015625, 0.04330905532836914, 0.043216766357421874, 0.04323331069946289, 0.043033790588378903, 0.04243539047241211, 0.04284415817260742, 0.042759681701660154, 0.04231423950195313, 0.04243014526367188, 0.04249222564697266, 0.042684417724609375, 0.042336254119873046, 0.042627071380615236, 0.042622783660888675, 0.04266355133056641, 0.042846145629882815, 0.042689151763916015, 0.042590015411376955, 0.042689727783203124, 0.0424376335144043, 0.04258816146850586, 0.04245913696289062, 0.04259603118896484, 0.042794689178466794, 0.042786529541015625, 0.04298640060424805, 0.04268422317504883, 0.04270918273925781, 0.042618881225585936, 0.042840065002441405, 0.04276838302612305, 0.04290707015991211, 0.04290822219848633, 0.04355820846557617, 0.04254966354370117, 0.04276867294311523, 0.042567550659179686, 0.04276435089111328, 0.04303878402709961, 0.04291584014892578, 0.042823680877685545, 0.04265740966796875, 0.042574207305908204, 0.042800769805908204, 0.04286832046508789, 0.04269340896606445, 
0.042955936431884764, 0.043079776763916014, 0.04308556747436523, 0.042929153442382816, 0.04288716888427734, 0.04276019287109375, 0.042823680877685545, 0.043046432495117186, 0.043006431579589846, 0.04306124877929687, 0.04355184173583984, 0.04293929672241211, 0.04291788864135742, 0.04271718215942383, 0.04290079879760742, 0.043366302490234376, 0.04304361724853516, 0.04306534576416016, 0.043093505859375, 0.043004417419433595, 0.043072990417480465, 0.04245139312744141, 0.042526111602783204, 0.042412384033203125, 0.0425536003112793, 0.04248371124267578, 0.04250624084472656, 0.04250419235229492, 0.04248310470581055, 0.0424699821472168, 0.042438014984130856, 0.043200542449951175, 0.04274995040893555, 0.04275436782836914, 0.04252700805664063, 0.042401439666748045, 0.042586463928222656, 0.042534912109375, 0.042893310546875, 0.04268803024291992, 0.04270332717895508, 0.04262297439575195, 0.04258816146850586, 0.04250771331787109, 0.04239007949829102, 0.04257948684692383, 0.042641887664794924, 0.04286003112792969, 0.04259481430053711, 0.042804672241210935, 0.042574337005615234, 0.04261040115356445, 0.0429488639831543, 0.04281967926025391, 0.04268851089477539, 0.04265369415283203, 0.04290758514404297, 0.042958911895751954, 0.042831871032714845, 0.04272700881958008, 0.04275241470336914, 0.04283580780029297, 0.042935871124267576, 0.04324553680419922, 0.04303046417236328, 0.042922687530517575, 0.04308755111694336, 0.04294688034057617, 0.043014144897460936, 0.04290137481689453, 0.04294259262084961, 0.04312063980102539, 0.04315955352783203, 0.04312063980102539, 0.04284620666503906, 0.042843425750732425, 0.042912479400634765, 0.042931743621826175, 0.0431313591003418, 0.04334592056274414, 0.043474945068359375, 0.04318822479248047, 0.042974624633789066, 0.043262561798095706, 0.04300262451171875, 0.04274176025390625, 0.04286873626708984, 0.042614814758300784, 0.042444766998291014, 0.04254924774169922, 0.04248896026611328, 0.04258272171020508, 0.042520767211914064, 0.04319334411621094, 0.042761249542236326, 0.04418332672119141, 0.04269622421264648, 0.04255952072143555, 0.04264204788208008, 0.04289516830444336, 0.04261248016357422, 0.042660289764404294, 0.042763935089111325, 0.042979679107666015, 0.04302438354492188, 0.04300799942016602, 0.04306739044189453, 0.042960830688476566, 0.044146751403808596, 0.04297478485107422, 0.04272377777099609, 0.04254515075683594, 0.042657791137695314, 0.04282089614868164, 0.04274777603149414, 0.04285935974121094, 0.042782718658447266, 0.04282694244384765, 0.042842945098876956, 0.04269036865234375, 0.04276444625854492, 0.042641441345214845, 0.042971134185791016, 0.04263315200805664, 0.042951904296875, 0.04286105728149414, 0.042907840728759764, 0.043067550659179686, 0.042858497619628906, 0.04287849426269531, 0.04306787109375, 0.04315865707397461, 0.043133342742919925, 0.04297280120849609, 0.04300790405273437, 0.04282400131225586, 0.042756641387939456, 0.04309718322753906, 0.04312985610961914, 0.043270145416259766, 0.04313433456420898, 0.04291801452636719, 0.042954399108886716, 0.0442720947265625, 0.04285628890991211, 0.0434672966003418, 0.04328432083129883, 0.04257791900634766, 0.04266796875, 0.042522689819335935, 0.0425533447265625, 0.04309401702880859, 0.04281139373779297, 0.0425984001159668, 0.04239974212646484, 0.04247119903564453, 0.04275836944580078, 0.042813438415527344, 0.0429486083984375, 0.042592254638671875, 0.042710529327392575, 0.04277289581298828, 0.04259849548339844, 0.04284758377075195, 0.04297795104980469, 0.04287823867797851, 0.04280393600463867, 
0.04290339279174805, 0.04290780639648437, 0.04261628723144531, 0.0426767692565918, 0.04278825759887695, 0.04297513580322266, 0.04305088043212891, 0.043076416015625, 0.04297225570678711, 0.0429488639831543, 0.04288560104370117, 0.04290755081176758, 0.04271542358398438, 0.04280275344848633, 0.043055553436279294, 0.04283596801757812, 0.042971134185791016, 0.04305920028686523, 0.043122528076171875, 0.043067550659179686, 0.04325344085693359, 0.04299756622314453, 0.04298739242553711, 0.043010688781738284, 0.043140670776367185, 0.04314694213867187, 0.04294527816772461, 0.04308582305908203, 0.04307539367675781, 0.04314540863037109, 0.04363884735107422, 0.043247550964355466, 0.04308992004394531, 0.04310835266113281, 0.043087646484375, 0.04332479858398437, 0.04341622543334961, 0.04319353485107422, 0.04312575912475586, 0.043278209686279295, 0.04319859313964844, 0.043230369567871095, 0.04343014526367187, 0.042926048278808596, 0.04276271820068359, 0.04336470413208008, 0.04287641525268555, 0.04277008056640625, 0.04242243194580078, 0.04255753707885742, 0.0425494384765625, 0.04257759857177734, 0.04276297760009766, 0.042534912109375, 0.042600223541259766, 0.042633438110351564, 0.04287692642211914, 0.04273356628417969, 0.042759807586669925, 0.043002239227294924, 0.04283801651000976, 0.04268851089477539, 0.042842113494873046, 0.04276428985595703, 0.04286873626708984, 0.04273971176147461, 0.042808513641357425, 0.04280543899536133, 0.04265776062011719, 0.04251465606689453, 0.04262527847290039, 0.04290911865234375, 0.04302214431762695, 0.04294079971313477, 0.04359990310668945, 0.04347343826293945, 0.04290764617919922, 0.042581439971923825, 0.042981952667236326, 0.04310796737670899, 0.04312918472290039, 0.04294863891601562, 0.0429035530090332, 0.04291171264648438, 0.04413407897949219, 0.042893600463867185, 0.04306694412231445, 0.0430546875, 0.0429925422668457, 0.04302755355834961, 0.0430294075012207, 0.04296831893920899, 0.04304111862182617, 0.043008415222167966, 0.04295244979858399, 0.04347724914550781, 0.04287692642211914, 0.04302995300292969, 0.04290617752075195, 0.043050334930419924, 0.04303529739379883, 0.043278335571289066, 0.04317184066772461, 0.04355632019042969, 0.043133472442626955, 0.04307689666748047, 0.04288355255126953, 0.04287916946411133, 0.04247689437866211, 0.042562206268310546, 0.042314785003662106, 0.042664447784423826, 0.042543582916259766, 0.042510337829589843, 0.04256972885131836, 0.042638782501220704, 0.04298124694824219, 0.04281155014038086, 0.04262351989746094, 0.04256972885131836, 0.04281958389282227, 0.042651649475097655, 0.042655071258544924, 0.04274448013305664, 0.04295884704589844, 0.04271718215942383, 0.04282777786254883, 0.04282572937011719, 0.0429936637878418, 0.04270006561279297, 0.042939231872558596, 0.04293209457397461, 0.04271500778198242, 0.042673694610595704, 0.042682975769042966, 0.04257177734375, 0.04295254516601563, 0.04284636688232422, 0.04426444625854492, 0.045511711120605466, 0.042864639282226565, 0.04264956665039062, 0.04294041442871094, 0.04292806243896485, 0.04314300918579102, 0.0429136962890625, 0.04278844833374024, 0.04291167831420899, 0.04289177703857422, 0.0430489616394043, 0.04306304168701172, 0.043016735076904296, 0.0429486083984375, 0.043046913146972655, 0.04293807983398437, 0.04285673522949219, 0.043068862915039065, 0.0429918098449707, 0.04312031936645508, 0.04319302368164062, 0.043218944549560545, 0.04326979064941406, 0.04307523345947266, 0.042816158294677734, 0.04283321762084961, 0.043309791564941406, 0.043292671203613284, 0.043320831298828126, 
0.043170143127441406]",tokens/s,23.33477170838876,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,4033.65888,4592.631808,0.0,4190.109696,3918.037504,s,1,10.68051171875,10.68051171875,0.0,10.68051171875,10.68051171875,10.68051171875,10.68051171875,[10.68051171875],,kWh,9.766174387082174e-05,1.0764771314119688e-05,2.902446766399336e-05,0.0001374509828489348,,MB,1760.309248,4678.61504,0.0,4261.412864,4088.623616,s,10,1.8373931121826172,0.18373931121826173,0.0003427441389007619,0.18365309143066405,0.1842168212890625,0.18422572021484374,0.18423283935546875,"[0.18359295654296875, 0.18353219604492188, 0.18401837158203124, 0.1830732421875, 0.18392988586425782, 0.1837005157470703, 0.1834908142089844, 0.184234619140625, 0.18421484375, 0.18360566711425783]",tokens/s,1393.2783262472376,kWh,5.378707559696618e-06,5.929345234782097e-07,3.5481190001090706e-06,9.519761083283898e-06,tokens/kWh,26891431.17777608,MB,1772.158976,4804.44416,0.0,4387.241984,4262.434304,s,10,16.655712646484375,1.6655712646484375,0.002937438722640952,1.665074462890625,1.667884619140625,1.6703179809570312,1.6722646704101563,"[1.6673438720703124, 1.6635850830078125, 1.661226318359375, 1.664418701171875, 1.6727513427734375, 1.6659691162109376, 1.665730224609375, 1.663897705078125, 1.663913330078125, 1.666876953125]",tokens/s,37.82486005682729,kWh,4.829679545780343e-05,5.327144355426299e-06,3.2053437258890594e-05,8.567737707212033e-05,tokens/kWh,735316.6279468234,,s,630,16.649963211059575,0.02642851303342789,0.00028079069042091835,0.026404399871826172,0.026632957458496094,0.026726558780670165,0.027493589057922384,"[0.02642099189758301, 0.026427648544311524, 0.02778940773010254, 0.02677356719970703, 0.026117599487304688, 0.026194656372070312, 0.026247167587280275, 0.026224416732788088, 0.026218719482421875, 0.026219583511352538, 0.026135456085205077, 0.026142751693725586, 0.0261529598236084, 0.026253055572509766, 0.026147071838378905, 0.026169343948364256, 0.026396671295166017, 0.02622870445251465, 0.02630784034729004, 0.026275903701782226, 0.026360544204711914, 0.026369823455810546, 0.02635158348083496, 0.026343679428100585, 0.026429439544677736, 0.026703872680664063, 0.026273632049560548, 0.02643779182434082, 0.026482688903808595, 0.02640246391296387, 0.026360128402709963, 0.026310400009155275, 0.026558752059936522, 0.02634956741333008, 0.026289983749389647, 0.026425535202026368, 0.026595327377319337, 0.026606975555419923, 0.026528383255004884, 0.026320287704467774, 0.02662166404724121, 0.026440576553344728, 0.02645583915710449, 0.02641481590270996, 0.02652191925048828, 0.02639072036743164, 0.026537984848022462, 0.026501056671142578, 0.02645315170288086, 0.026432416915893556, 0.026365375518798827, 0.026462783813476564, 0.026482112884521486, 0.02653001594543457, 0.026447359085083007, 0.02646307182312012, 0.026435583114624024, 0.02651136016845703, 0.026494688034057617, 0.026607391357421874, 
0.028824064254760744, 0.02669152069091797, 0.026503231048583983, 0.026404415130615234, 0.026458879470825196, 0.026187231063842773, 0.026139263153076173, 0.02617296028137207, 0.026345247268676757, 0.026245824813842772, 0.026245119094848633, 0.026415103912353514, 0.026441728591918946, 0.026238975524902345, 0.0262873592376709, 0.026288799285888672, 0.026349664688110352, 0.02631884765625, 0.026376192092895507, 0.026285696029663085, 0.02646668815612793, 0.026402816772460938, 0.02643667221069336, 0.026282400131225587, 0.026241567611694334, 0.02629987144470215, 0.02620879936218262, 0.026402208328247072, 0.02647305679321289, 0.026414495468139648, 0.026371936798095703, 0.026305280685424804, 0.026296192169189454, 0.02644495964050293, 0.02633964729309082, 0.02636835289001465, 0.026247488021850587, 0.02627324867248535, 0.026290719985961913, 0.02648579216003418, 0.02639561653137207, 0.026425344467163086, 0.026359487533569335, 0.026495296478271483, 0.026613759994506835, 0.02650931167602539, 0.02648624038696289, 0.026484384536743164, 0.026500991821289063, 0.026565631866455077, 0.02634752082824707, 0.02632908821105957, 0.026421247482299806, 0.026443775177001954, 0.026521600723266602, 0.02657689666748047, 0.026468128204345704, 0.02655014419555664, 0.02663235282897949, 0.02650115203857422, 0.026503231048583983, 0.0266200008392334, 0.026610944747924806, 0.0265611515045166, 0.026560064315795898, 0.02650783920288086, 0.02635811233520508, 0.026245599746704103, 0.026158208847045897, 0.026088319778442382, 0.02595840072631836, 0.026013696670532226, 0.025954208374023437, 0.02599932861328125, 0.02597644805908203, 0.02607974433898926, 0.026244319915771485, 0.0260382080078125, 0.025954944610595703, 0.02597216033935547, 0.02615171241760254, 0.02608742332458496, 0.026090656280517578, 0.026190271377563478, 0.026144704818725585, 0.026111936569213866, 0.026014240264892578, 0.026066719055175783, 0.02612246322631836, 0.026310400009155275, 0.026196224212646484, 0.026234880447387695, 0.02627174377441406, 0.026388320922851562, 0.026433183670043946, 0.026345983505249023, 0.026425344467163086, 0.02636739158630371, 0.02623753547668457, 0.02635366439819336, 0.02642076873779297, 0.026588800430297852, 0.02654412841796875, 0.026573503494262695, 0.02647238349914551, 0.02638051223754883, 0.027076608657836915, 0.028508159637451173, 0.02640870475769043, 0.026351871490478514, 0.026397920608520507, 0.026323295593261718, 0.026322912216186524, 0.026442207336425782, 0.02654345512390137, 0.02660419273376465, 0.026438943862915038, 0.02640121650695801, 0.026501407623291017, 0.026472448348999023, 0.026484256744384767, 0.026560096740722655, 0.026469247817993164, 0.02671820831298828, 0.02650931167602539, 0.026570751190185548, 0.026529184341430666, 0.02656265640258789, 0.0264135684967041, 0.02648678398132324, 0.026406719207763673, 0.026222240447998046, 0.02622313690185547, 0.026174848556518554, 0.02624985694885254, 0.026326496124267577, 0.02637059211730957, 0.02623868751525879, 0.026453344345092774, 0.026180160522460937, 0.02617545509338379, 0.026317216873168944, 0.026261503219604493, 0.026291423797607422, 0.02634012794494629, 0.026217567443847657, 0.026174367904663084, 0.026120031356811523, 0.026265567779541015, 0.02640438461303711, 0.0263474235534668, 0.026346111297607423, 0.026406335830688476, 0.02630067253112793, 0.026245567321777345, 0.026605567932128905, 0.02627993583679199, 0.026380287170410157, 0.026359807968139647, 0.026241024017333983, 0.026294271469116212, 0.026273088455200197, 0.02629088020324707, 0.026262943267822265, 
0.02661027145385742, 0.02647020721435547, 0.026548416137695312, 0.026539167404174804, 0.026527584075927733, 0.026567455291748046, 0.026597600936889648, 0.02689200019836426, 0.026726688385009766, 0.026505216598510743, 0.026606592178344726, 0.026477439880371094, 0.0265032958984375, 0.026458112716674805, 0.026537919998168947, 0.026527488708496094, 0.026492799758911133, 0.026440000534057616, 0.026363391876220704, 0.026351808547973633, 0.026452415466308592, 0.026611263275146485, 0.026571199417114257, 0.026615808486938477, 0.026658336639404298, 0.02665705680847168, 0.026598783493041993, 0.026577728271484375, 0.026736640930175783, 0.026779584884643555, 0.026286144256591797, 0.026406591415405273, 0.026261823654174805, 0.026279199600219728, 0.02622127914428711, 0.026249120712280274, 0.02618707275390625, 0.02607798385620117, 0.026336896896362306, 0.026351999282836915, 0.026382335662841795, 0.026815935134887694, 0.026290624618530274, 0.026281152725219727, 0.026205120086669923, 0.026134464263916017, 0.026155040740966796, 0.026211679458618162, 0.026267871856689454, 0.02636025619506836, 0.026368032455444335, 0.026439680099487304, 0.026394559860229493, 0.026525760650634767, 0.026763263702392577, 0.026482688903808595, 0.02627174377441406, 0.02634547233581543, 0.026375839233398438, 0.026379743576049806, 0.026275808334350587, 0.02840812873840332, 0.026446271896362304, 0.02655436706542969, 0.026556575775146484, 0.026875904083251953, 0.02943519973754883, 0.027660768508911134, 0.026439104080200195, 0.02662995147705078, 0.026503263473510744, 0.0265798397064209, 0.02653593635559082, 0.026424640655517577, 0.026610368728637694, 0.026755071640014647, 0.026611328125, 0.026521984100341796, 0.026666336059570313, 0.02655299186706543, 0.02651670455932617, 0.02652035140991211, 0.02636595153808594, 0.026431488037109374, 0.026373600006103514, 0.026468639373779298, 0.026603008270263673, 0.02662419128417969, 0.02650374412536621, 0.026793664932250976, 0.026495071411132814, 0.026568063735961912, 0.026611455917358397, 0.026370367050170897, 0.026185504913330077, 0.026157920837402343, 0.02614271926879883, 0.02609561538696289, 0.0261693115234375, 0.026148895263671874, 0.026275487899780275, 0.026382463455200195, 0.026406496047973634, 0.02638502311706543, 0.02634137535095215, 0.02638640022277832, 0.026593311309814453, 0.026739871978759766, 0.02643235206604004, 0.026423295974731444, 0.02629955291748047, 0.026288991928100587, 0.02625334358215332, 0.026334720611572264, 0.02624332809448242, 0.02638368034362793, 0.026399648666381836, 0.02636595153808594, 0.026346559524536132, 0.026385343551635743, 0.02635759925842285, 0.0263472957611084, 0.026422719955444336, 0.026350528717041015, 0.02627529525756836, 0.026216352462768554, 0.02622480010986328, 0.026398464202880858, 0.02641494369506836, 0.02634841537475586, 0.02643721580505371, 0.02639094352722168, 0.026646528244018555, 0.02670182418823242, 0.026394624710083008, 0.026400480270385742, 0.026449823379516603, 0.026648767471313478, 0.026638399124145506, 0.026521215438842772, 0.02654617691040039, 0.02651747131347656, 0.02650771141052246, 0.02662819290161133, 0.02674630355834961, 0.026567232131958007, 0.026529151916503905, 0.026597919464111327, 0.026555679321289063, 0.02670470428466797, 0.02666700744628906, 0.026657791137695314, 0.026498048782348634, 0.02654412841796875, 0.02652169609069824, 0.026302944183349608, 0.02603228759765625, 0.02614886474609375, 0.02613657569885254, 0.02612633514404297, 0.026025983810424806, 0.026019584655761718, 0.026185888290405274, 0.02628166389465332, 
0.026136480331420898, 0.02624287986755371, 0.026243776321411134, 0.026421247482299806, 0.026398719787597655, 0.02641641616821289, 0.026288639068603514, 0.02634137535095215, 0.02628611183166504, 0.02641756820678711, 0.02808297538757324, 0.026218528747558593, 0.026125280380249024, 0.026359455108642578, 0.026357215881347658, 0.026376800537109377, 0.026260799407958984, 0.02624947166442871, 0.026327423095703125, 0.026328575134277343, 0.026362560272216798, 0.026328447341918946, 0.026272544860839842, 0.026330591201782227, 0.02635775947570801, 0.026564895629882814, 0.026450176239013672, 0.02647772789001465, 0.026440160751342773, 0.026495359420776368, 0.026566656112670898, 0.02640239906311035, 0.026302175521850588, 0.026378944396972658, 0.026371328353881836, 0.026413824081420897, 0.026715167999267576, 0.026667999267578124, 0.026648576736450196, 0.02658070373535156, 0.026408544540405275, 0.026466304779052735, 0.026581695556640625, 0.02672640037536621, 0.026466304779052735, 0.02653183937072754, 0.026626047134399415, 0.027074560165405274, 0.027084287643432618, 0.026582975387573243, 0.026767391204833985, 0.026571296691894532, 0.026656768798828126, 0.026639616012573242, 0.02657766342163086, 0.02646563148498535, 0.0262191047668457, 0.026138080596923827, 0.026295936584472657, 0.026077632904052735, 0.026100255966186522, 0.02603183937072754, 0.026120128631591796, 0.02629462432861328, 0.026253087997436524, 0.026378143310546876, 0.02632035255432129, 0.0263985595703125, 0.026473312377929686, 0.026308767318725584, 0.026226688385009765, 0.026162879943847656, 0.026290496826171874, 0.02611404800415039, 0.02608742332458496, 0.026103296279907227, 0.026472959518432617, 0.026525440216064452, 0.026743040084838868, 0.026226943969726562, 0.02613542366027832, 0.026105728149414063, 0.026331743240356444, 0.026273632049560548, 0.026364095687866212, 0.026393983840942385, 0.026489856719970704, 0.02628540802001953, 0.026393247604370118, 0.026435136795043945, 0.02663225555419922, 0.026564992904663087, 0.026527711868286133, 0.026474111557006835, 0.02634998321533203, 0.026334400177001952, 0.02673731231689453, 0.026482847213745116, 0.026420608520507812, 0.026427616119384767, 0.026521343231201172, 0.026619968414306642, 0.02653785514831543, 0.026376895904541016, 0.026561664581298827, 0.026629024505615235, 0.026687488555908204, 0.0267325439453125, 0.02669158363342285, 0.026649728775024414, 0.0264979190826416, 0.026468223571777343, 0.026627296447753905, 0.02664678382873535, 0.026535839080810548, 0.026534656524658203, 0.026744352340698243, 0.026707935333251952, 0.026403327941894532, 0.026300416946411134, 0.026346624374389647, 0.02618867111206055, 0.02610585594177246, 0.026164512634277343, 0.02642812728881836, 0.026472448348999023, 0.026371871948242188, 0.026187999725341797, 0.026234880447387695, 0.026287328720092772, 0.026270496368408204, 0.026369535446166992, 0.026382848739624022, 0.02636150360107422, 0.0263089599609375, 0.026359807968139647, 0.02627993583679199, 0.02656051254272461, 0.026461984634399412, 0.026283712387084962, 0.026238719940185548, 0.026399648666381836, 0.02634441566467285, 0.026139200210571287, 0.026138015747070312, 0.02626041603088379, 0.02612944030761719, 0.026245471954345703, 0.02594207954406738, 0.026090047836303712, 0.02610963249206543, 0.02657004737854004, 0.02661244773864746, 0.026497312545776367, 0.026596479415893555, 0.02648320007324219, 0.026408479690551757, 0.02641391944885254, 0.02647859191894531, 0.02652774429321289, 0.02650111961364746, 0.026402816772460938, 0.026402816772460938, 0.02660710334777832, 
0.026556928634643553, 0.02647039985656738, 0.026414688110351563, 0.026562400817871094, 0.026577215194702148, 0.026435840606689454, 0.026394176483154296, 0.026345439910888672, 0.026400800704956054, 0.02676710319519043, 0.02668601608276367, 0.026771583557128907, 0.026681343078613282, 0.026660863876342773, 0.026664543151855468, 0.026912736892700195, 0.026548255920410158, 0.02649087905883789, 0.02652342414855957, 0.02617366409301758, 0.026142047882080077, 0.02615158462524414, 0.02629417610168457, 0.02618377685546875, 0.026300416946411134, 0.026639904022216797, 0.026405344009399413, 0.026224224090576172, 0.026232608795166014, 0.026702175140380858, 0.026295711517333984, 0.026380287170410157, 0.026329727172851564, 0.026402847290039062, 0.02635593605041504, 0.026544031143188478, 0.02651875114440918, 0.026297216415405274, 0.026317983627319335, 0.026305376052856447, 0.026257408142089843, 0.026372095108032227, 0.026877952575683595, 0.026595327377319337, 0.026253311157226563, 0.026619199752807618, 0.026274240493774415, 0.026621952056884765, 0.026283647537231444, 0.026514047622680663, 0.026599103927612305, 0.026499391555786133, 0.026732095718383787, 0.026519775390625, 0.02652796745300293, 0.02652569580078125, 0.02656198310852051, 0.026422975540161132, 0.02628432083129883, 0.02623958396911621, 0.02629734420776367, 0.026389375686645507, 0.026477888107299806, 0.02645884895324707, 0.026408063888549806, 0.026465248107910157, 0.02647039985656738, 0.026443775177001954, 0.026468351364135743, 0.026359807968139647, 0.026537919998168947, 0.026656831741333008, 0.026513023376464842, 0.026450111389160157, 0.026514751434326172, 0.026604223251342773, 0.02654636764526367, 0.026562559127807618]",tokens/s,37.83792144246474,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = 
_cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,2227.376128,2551.119872,0.0,2155.872256,2032.413184,s,1,8.578361328125,8.578361328125,0.0,8.578361328125,8.578361328125,8.578361328125,8.578361328125,[8.578361328125],,kWh,4.871220372082234e-05,5.366284855721087e-06,1.557001245600098e-05,6.96485010325444e-05,,MB,2285.416448,2827.943936,0.0,2418.016256,2280.154112,s,10,0.9276774215698241,0.09276774215698241,0.00017955024401048356,0.09279806518554687,0.09292477111816407,0.09296712341308594,0.09300100524902344,"[0.09300947570800781, 0.09272541046142578, 0.0928094711303711, 0.09280976104736328, 0.09272265625, 0.09277798461914062, 0.09291535949707032, 0.09228800201416015, 0.09278665924072266, 0.0928326416015625]",tokens/s,2759.5799363834303,kWh,2.754002014937018e-06,3.0371625865989896e-07,1.822247264924521e-06,4.879965538521438e-06,tokens/kWh,52459386.850007236,MB,2300.915712,2911.830016,0.0,2501.902336,2389.055488,s,10,18.30338610839844,1.8303386108398434,0.009095563549333042,1.8309453735351562,1.8442125366210937,1.8443202575683595,1.844406434326172,"[1.831056396484375, 1.831028076171875, 1.82028466796875, 1.8239434814453126, 1.8308626708984375, 1.838055908203125, 1.844427978515625, 1.822085693359375, 1.81745263671875, 1.8441885986328126]",tokens/s,34.41986068965278,kWh,5.320215330131275e-05,5.867637290272799e-06,2.6927331290275196e-05,8.599712188186075e-05,tokens/kWh,732582.6565050255,,s,630,18.300550205230714,0.029048492389255103,0.0004858272422063414,0.028922767639160157,0.02944813804626465,0.02959853448867798,0.030944135189056407,"[0.02943180847167969, 0.029294719696044923, 0.02919411277770996, 0.029276031494140625, 0.029212703704833986, 0.029059167861938476, 0.028839935302734376, 0.02882691192626953, 0.028779232025146484, 0.02880905532836914, 0.029033632278442384, 0.029175840377807617, 0.029332447052001952, 0.029549888610839844, 0.029122304916381837, 0.028951040267944338, 0.028757568359375, 0.028894079208374022, 0.028909568786621095, 0.03006025505065918, 0.02959916877746582, 0.029191007614135744, 0.0290119686126709, 0.028915712356567383, 0.029038591384887694, 0.029081567764282227, 0.029542207717895508, 0.02938630485534668, 0.029187776565551757, 0.028896223068237303, 0.028833728790283203, 0.028825664520263673, 0.02881852722167969, 0.029664159774780274, 0.02893414306640625, 0.028911615371704103, 0.02873958396911621, 0.028927135467529296, 
0.028894048690795898, 0.02952729606628418, 0.028877567291259766, 0.02876006317138672, 0.028732608795166016, 0.02890220832824707, 0.028848127365112306, 0.028872703552246092, 0.028833152770996094, 0.028920448303222657, 0.029054975509643553, 0.028829439163208008, 0.028819711685180664, 0.0288973445892334, 0.028861919403076173, 0.02900009536743164, 0.029076896667480468, 0.029157440185546876, 0.029561439514160157, 0.029016063690185546, 0.028785951614379884, 0.028951263427734374, 0.029267520904541017, 0.029432256698608397, 0.028886432647705077, 0.029036224365234373, 0.030164960861206055, 0.028903360366821288, 0.028950368881225586, 0.02885696029663086, 0.028719295501708986, 0.028641151428222655, 0.028889055252075194, 0.02889129638671875, 0.02881331253051758, 0.028809215545654295, 0.028907520294189453, 0.028837440490722656, 0.0287523193359375, 0.029069311141967775, 0.028837343215942383, 0.02945484733581543, 0.031023136138916017, 0.029276159286499022, 0.029009920120239258, 0.02879283142089844, 0.029199359893798828, 0.02997478485107422, 0.029546911239624024, 0.029472511291503908, 0.029121120452880858, 0.028874752044677734, 0.028737152099609375, 0.028921567916870117, 0.029080223083496094, 0.028868608474731446, 0.02910207939147949, 0.028899328231811523, 0.029523935317993164, 0.02883731269836426, 0.0288035831451416, 0.02881283187866211, 0.028895456314086913, 0.029047136306762696, 0.02931065559387207, 0.031068479537963867, 0.028968448638916015, 0.028807680130004884, 0.028825279235839843, 0.02878665542602539, 0.02894416046142578, 0.02879302406311035, 0.02885875129699707, 0.028817407608032225, 0.028862464904785157, 0.028915712356567383, 0.02955232048034668, 0.02905891227722168, 0.02889299201965332, 0.02879759979248047, 0.02895871925354004, 0.029109439849853515, 0.02884281539916992, 0.028940288543701172, 0.0289751033782959, 0.028804927825927733, 0.028771615982055663, 0.02875254440307617, 0.02896281623840332, 0.028829984664916992, 0.028855968475341796, 0.02885830307006836, 0.028848255157470703, 0.028841663360595703, 0.028846368789672852, 0.028886112213134765, 0.028799936294555663, 0.028869888305664063, 0.02905369567871094, 0.028991487503051756, 0.028866559982299804, 0.028839935302734376, 0.028729055404663088, 0.028987680435180664, 0.028716480255126953, 0.02877907180786133, 0.028594175338745118, 0.03075071907043457, 0.02903603172302246, 0.029235712051391603, 0.029016063690185546, 0.02875961685180664, 0.028838336944580076, 0.028931520462036134, 0.028800991058349608, 0.028840543746948243, 0.028703935623168947, 0.028672704696655272, 0.028695680618286132, 0.02870368003845215, 0.029064416885375977, 0.028760927200317383, 0.028868608474731446, 0.029007871627807616, 0.028868608474731446, 0.028733440399169922, 0.028794719696044923, 0.028857631683349608, 0.028856191635131836, 0.028688703536987305, 0.028768287658691407, 0.02869862365722656, 0.028672351837158203, 0.0287869758605957, 0.029167648315429687, 0.02930796813964844, 0.02910713577270508, 0.029079551696777343, 0.029050880432128907, 0.02892367935180664, 0.028672224044799806, 0.028819456100463867, 0.028765695571899414, 0.02884783935546875, 0.028725856781005858, 0.028778688430786133, 0.028727296829223634, 0.029040639877319335, 0.0287825927734375, 0.02870681571960449, 0.028940223693847657, 0.029313760757446287, 0.029163135528564452, 0.028950431823730468, 0.02893667221069336, 0.028984447479248047, 0.02883628845214844, 0.02894304084777832, 0.028673791885375978, 0.028875040054321288, 0.028698335647583006, 0.0289355525970459, 0.02905766487121582, 0.028846080780029298, 
0.0294072322845459, 0.028639232635498047, 0.02874982452392578, 0.028773567199707032, 0.02876089668273926, 0.02877644729614258, 0.02874982452392578, 0.028831743240356447, 0.02880512046813965, 0.028854272842407228, 0.02919424057006836, 0.02954649543762207, 0.029287584304809572, 0.02902716827392578, 0.029222911834716796, 0.029340927124023437, 0.02883865547180176, 0.02870848083496094, 0.028748159408569337, 0.03018956756591797, 0.029814783096313476, 0.02876380729675293, 0.028768224716186522, 0.02866988754272461, 0.02881760025024414, 0.02887295913696289, 0.02876006317138672, 0.028720447540283203, 0.028992191314697265, 0.02874777603149414, 0.0288275203704834, 0.028780031204223632, 0.028924543380737303, 0.02879267120361328, 0.02886841583251953, 0.02874166488647461, 0.02866003227233887, 0.028643264770507812, 0.02887411117553711, 0.028751903533935547, 0.028829568862915038, 0.02882537651062012, 0.029029375076293946, 0.02882745552062988, 0.02883603286743164, 0.02883516883850098, 0.02903856086730957, 0.0289553279876709, 0.02900534439086914, 0.030042591094970702, 0.02873472023010254, 0.028759967803955077, 0.02880556869506836, 0.02932383918762207, 0.029491199493408202, 0.02877644729614258, 0.028849760055541993, 0.028862464904785157, 0.03125494384765625, 0.02920003128051758, 0.02909833526611328, 0.028989280700683594, 0.028737119674682617, 0.028869184494018554, 0.028882944107055664, 0.028853759765625, 0.028860736846923828, 0.028928192138671874, 0.028736671447753905, 0.028717920303344725, 0.02860201644897461, 0.028817760467529298, 0.028688383102416993, 0.02901126480102539, 0.02911097526550293, 0.029014015197753908, 0.028825439453125, 0.02891107177734375, 0.028796640396118164, 0.028784799575805663, 0.029544384002685546, 0.036940128326416015, 0.029493471145629883, 0.028904863357543945, 0.028780895233154295, 0.028830175399780274, 0.028880992889404298, 0.028686336517333984, 0.02892185592651367, 0.028860416412353516, 0.02879897689819336, 0.028738752365112304, 0.02867487907409668, 0.02875801658630371, 0.028951839447021486, 0.02926665687561035, 0.028874015808105467, 0.0289835205078125, 0.02876374435424805, 0.02889411163330078, 0.028792255401611327, 0.028848512649536133, 0.028782079696655274, 0.028748416900634767, 0.02877241516113281, 0.028864320755004884, 0.02865171241760254, 0.02873958396911621, 0.02876825523376465, 0.029011520385742187, 0.02883014488220215, 0.029108224868774416, 0.028819456100463867, 0.02881878471374512, 0.02864387130737305, 0.028591680526733398, 0.02871353530883789, 0.028797279357910155, 0.028694175720214845, 0.028898624420166014, 0.02875257682800293, 0.028929407119750977, 0.028835840225219726, 0.028811040878295897, 0.028881664276123046, 0.02894857597351074, 0.028895231246948243, 0.02922844886779785, 0.029243200302124024, 0.028963647842407226, 0.029077472686767577, 0.028993120193481447, 0.028977567672729493, 0.02887049674987793, 0.029382816314697267, 0.028753055572509765, 0.028930912017822264, 0.029027999877929686, 0.029206207275390625, 0.029145759582519533, 0.029572608947753907, 0.02947942352294922, 0.029347232818603516, 0.029475296020507812, 0.02932134437561035, 0.029250751495361327, 0.029616960525512694, 0.02953625679016113, 0.02938047981262207, 0.02939686393737793, 0.02956723213195801, 0.029287424087524414, 0.02934272003173828, 0.029730335235595703, 0.029501920700073243, 0.02932310485839844, 0.029247648239135744, 0.029237056732177736, 0.02942790412902832, 0.02936832046508789, 0.02973695945739746, 0.02937446403503418, 0.029667327880859375, 0.02939904022216797, 0.02949839973449707, 
0.029408224105834962, 0.029203872680664062, 0.029153888702392577, 0.029136831283569337, 0.029134016036987304, 0.029005727767944335, 0.028773344039916993, 0.028947807312011718, 0.028916128158569337, 0.029169919967651368, 0.029746400833129884, 0.029960031509399413, 0.029597759246826172, 0.029247488021850586, 0.029406368255615236, 0.029373279571533205, 0.02925676727294922, 0.029143104553222655, 0.028918655395507812, 0.0293703670501709, 0.028927072525024414, 0.028935007095336914, 0.029185760498046876, 0.029462879180908202, 0.029574432373046876, 0.03015545654296875, 0.029418624877929688, 0.029102655410766603, 0.02917616081237793, 0.029077503204345705, 0.029276159286499022, 0.02920022392272949, 0.028981407165527343, 0.02894374465942383, 0.029210592269897463, 0.02919491195678711, 0.02896895980834961, 0.02889727973937988, 0.028891136169433593, 0.028887039184570314, 0.029453472137451173, 0.0311297607421875, 0.02952262306213379, 0.029450239181518553, 0.029207647323608397, 0.029254560470581056, 0.02985536003112793, 0.029393056869506835, 0.029397216796875, 0.02918400001525879, 0.029409120559692383, 0.029444095611572265, 0.029116575241088866, 0.028990848541259766, 0.029543039321899413, 0.02898739242553711, 0.029035968780517576, 0.029106592178344725, 0.029012128829956053, 0.028770303726196288, 0.028823360443115235, 0.02880940818786621, 0.028876800537109375, 0.028815359115600587, 0.02878873634338379, 0.031192735671997072, 0.030237024307250976, 0.029025312423706054, 0.02884048080444336, 0.029639104843139648, 0.029106176376342774, 0.029112319946289062, 0.029073408126831055, 0.02879078483581543, 0.02909721565246582, 0.029363616943359375, 0.029245759963989256, 0.02936832046508789, 0.028880895614624022, 0.029038591384887694, 0.028872703552246092, 0.02886841583251953, 0.02879302406311035, 0.028862464904785157, 0.028669952392578125, 0.02893619155883789, 0.028848127365112306, 0.028884992599487305, 0.029263231277465822, 0.029319807052612303, 0.029181215286254884, 0.029143775939941406, 0.028882047653198243, 0.028768224716186522, 0.028767135620117186, 0.028757823944091796, 0.028852415084838868, 0.028818687438964843, 0.02888755226135254, 0.02869264030456543, 0.028721248626708985, 0.02874163246154785, 0.02905606460571289, 0.02893305587768555, 0.029087135314941406, 0.02910063934326172, 0.029027360916137696, 0.02880406379699707, 0.02876416015625, 0.02879897689819336, 0.02904473686218262, 0.02897100830078125, 0.029220863342285155, 0.028853504180908204, 0.028697343826293947, 0.028698400497436525, 0.028702207565307617, 0.028766944885253908, 0.02874339294433594, 0.028920095443725587, 0.028851232528686523, 0.028895519256591798, 0.028656320571899416, 0.028822656631469726, 0.029061376571655275, 0.0291146240234375, 0.02886284828186035, 0.028704767227172853, 0.028655071258544922, 0.028712703704833985, 0.02876905632019043, 0.028753440856933595, 0.028824031829833983, 0.029343263626098633, 0.02919830322265625, 0.02906982421875, 0.028788608551025392, 0.02924435234069824, 0.029014015197753908, 0.02871903991699219, 0.028768320083618164, 0.028835840225219726, 0.028844032287597656, 0.028984447479248047, 0.02877529525756836, 0.028747392654418946, 0.0287010555267334, 0.028678144454956055, 0.028785791397094727, 0.02878963279724121, 0.029009920120239258, 0.02896691131591797, 0.028913280487060548, 0.0288155517578125, 0.028824960708618164, 0.028809471130371092, 0.02891993522644043, 0.0289366397857666, 0.028841663360595703, 0.028704608917236328, 0.02871548843383789, 0.028895008087158204, 0.02865558433532715, 0.02913920021057129, 
0.029070335388183592, 0.028877824783325196, 0.028620800018310546, 0.028626943588256838, 0.02860598373413086, 0.028694271087646484, 0.02865385627746582, 0.028935840606689453, 0.028863264083862306, 0.029314367294311524, 0.029194719314575197, 0.02916988754272461, 0.029120512008666992, 0.0294021110534668, 0.029612672805786132, 0.028909759521484377, 0.02880441665649414, 0.028699520111083985, 0.02862678337097168, 0.02866320037841797, 0.028770624160766603, 0.028602815628051757, 0.028807167053222657, 0.02855936050415039, 0.028667903900146483, 0.028674047470092775, 0.028733440399169922, 0.029122560501098634, 0.02879475212097168, 0.028607999801635742, 0.029067903518676757, 0.02854707145690918, 0.02866899108886719, 0.028566463470458985, 0.02870841598510742, 0.028795263290405274, 0.02898905563354492, 0.02886899185180664, 0.028837888717651368, 0.0290119686126709, 0.02893619155883789, 0.028786687850952147, 0.02870889663696289, 0.02864944076538086, 0.02874982452392578, 0.028868608474731446, 0.028704767227172853, 0.029014015197753908, 0.02894438362121582, 0.029063167572021483, 0.02954582405090332, 0.029067935943603514, 0.029128095626831055, 0.02946928024291992, 0.029190143585205077, 0.029327360153198243, 0.029224672317504884, 0.031176992416381836, 0.03018342399597168, 0.029560831069946288, 0.029593599319458007, 0.029409183502197265, 0.029389184951782228, 0.029260704040527344, 0.02939974403381348, 0.029171424865722655, 0.029401504516601562, 0.029439456939697267, 0.02931065559387207, 0.029184864044189452, 0.029104127883911132, 0.029243200302124024, 0.029137088775634767, 0.029261823654174804, 0.029328800201416014, 0.029439712524414064, 0.02920537567138672, 0.02915123176574707, 0.029046783447265623, 0.03036774444580078, 0.03038412857055664, 0.02966102409362793, 0.0293090877532959, 0.029360128402709962, 0.029265920639038087, 0.029871103286743163, 0.02896143913269043, 0.02935433578491211, 0.029003135681152342, 0.029141471862792968, 0.029026464462280275, 0.029079551696777343, 0.02917136001586914, 0.029065568923950194, 0.0291429443359375, 0.029447904586791994, 0.02928883171081543, 0.029286399841308593, 0.029212671279907225]",tokens/s,34.425194485132565,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1170.219008,1101.98784,0.0,706.740224,681.6384,s,1,7.8936787109375,7.8936787109375,0.0,7.8936787109375,7.8936787109375,7.8936787109375,7.8936787109375,[7.8936787109375],,kWh,3.011512195416799e-05,3.3143799531649245e-06,8.991673859995442e-06,4.242117576732835e-05,,MB,1514.057728,1406.07488,0.0,996.1472,949.238272,s,10,0.3091976623535156,0.03091976623535156,0.0008538072460815612,0.03063159942626953,0.031216042327880857,0.032319124603271486,0.03320159042358398,"[0.03048031997680664, 0.030715423583984373, 0.030547775268554688, 0.03097091293334961, 0.030498783111572267, 0.03342220687866211, 0.0304355525970459, 0.03088038444519043, 0.030446239471435547, 0.03080006408691406]",tokens/s,8279.493384633257,kWh,8.967287174462902e-07,9.889336138598275e-08,5.504401131533824e-07,1.5460621919856557e-06,tokens/kWh,165581954.80558982,MB,1547.411456,1414.463488,0.0,1004.535808,949.240832,s,10,15.011969116210938,1.501196911621094,0.006065398363650671,1.4994981079101564,1.5073589721679688,1.5110018737792967,1.5139161950683593,"[1.514644775390625, 1.494724853515625, 1.4974677734375, 1.5023614501953124, 1.5009422607421874, 1.4954993896484374, 1.498053955078125, 1.5062763671875, 1.4954488525390626, 1.5065494384765625]",tokens/s,41.96651319510666,kWh,4.3876149645470506e-05,4.839156124163211e-06,1.7077490655846606e-05,6.579279642548033e-05,tokens/kWh,957551.6382155367,,s,630,15.009932506561269,0.023825289692954416,0.0004177225653914419,0.02373446369171143,0.024083247756958005,0.024272825336456298,0.02621741447448731,"[0.023875583648681642, 0.023785472869873047, 0.023803903579711915, 0.02387353515625, 0.02369491195678711, 0.02679827117919922, 0.02461676788330078, 0.024101312637329102, 0.02366464042663574, 0.02370150375366211, 0.023621248245239257, 0.023894399642944337, 0.023719648361206054, 0.023734560012817384, 0.02372403144836426, 0.02370355224609375, 0.02371788787841797, 0.023792703628540038, 0.023747520446777345, 0.02388921546936035, 0.02401081657409668, 0.023719839096069336, 0.023870176315307617, 0.024176639556884767, 0.02389958381652832, 0.02389039993286133, 0.023762144088745118, 0.024040319442749022, 0.024091808319091797, 0.024021856307983397, 0.024206911087036133, 0.02432454490661621, 0.025222208023071287, 0.026256288528442383, 0.02446886444091797, 0.02406630325317383, 0.024072608947753905, 0.023932928085327147, 0.023913951873779298, 0.023689056396484377, 0.02409059143066406, 0.02378211212158203, 0.02365644836425781, 0.024152063369750978, 0.02379743957519531, 0.02373868751525879, 0.023871488571166992, 0.023799808502197265, 0.023963647842407225, 0.023812095642089845, 0.02371379280090332, 0.024164352416992187, 0.023771135330200196, 0.023837984085083006, 0.02476652717590332, 0.023673503875732423, 0.02371583938598633, 0.026605728149414063, 0.02386934471130371, 0.02370899200439453, 0.023580831527709963, 0.02368076705932617, 
0.02359574317932129, 0.02410963249206543, 0.023945215225219727, 0.023807296752929686, 0.02367305564880371, 0.02364601516723633, 0.02360710334777832, 0.023509279251098632, 0.02356617546081543, 0.023507680892944336, 0.02363529586791992, 0.02369536018371582, 0.023642688751220702, 0.02373651123046875, 0.02371148872375488, 0.023713823318481445, 0.023730207443237304, 0.023830623626708985, 0.023889951705932617, 0.02374448013305664, 0.023680639266967774, 0.023701887130737304, 0.02392416000366211, 0.02358233642578125, 0.023670751571655272, 0.023843679428100586, 0.02373641586303711, 0.02378767967224121, 0.023639936447143555, 0.02371788787841797, 0.023665727615356444, 0.023688127517700195, 0.023787519454956055, 0.02371788787841797, 0.023735616683959963, 0.023784128189086914, 0.02375481605529785, 0.023736255645751953, 0.023595008850097656, 0.023574527740478517, 0.023472127914428712, 0.023788768768310545, 0.023886623382568358, 0.023754911422729494, 0.024133472442626952, 0.023977983474731446, 0.023860639572143554, 0.023724639892578125, 0.02370956802368164, 0.02371187210083008, 0.023508352279663088, 0.02358131217956543, 0.023476224899291992, 0.023734272003173826, 0.02356608009338379, 0.023536127090454103, 0.023495807647705078, 0.02368076705932617, 0.023894912719726564, 0.02384435272216797, 0.02379417610168457, 0.023830528259277343, 0.02377235221862793, 0.02376688003540039, 0.02365635108947754, 0.023665279388427735, 0.0235807991027832, 0.02361510467529297, 0.02383910369873047, 0.02425593566894531, 0.02398476791381836, 0.023856895446777344, 0.023685312271118163, 0.02368716812133789, 0.023613439559936524, 0.02369692802429199, 0.02364028739929199, 0.023552255630493166, 0.023619583129882812, 0.023629823684692384, 0.023611391067504883, 0.02356172752380371, 0.023648191452026367, 0.023531904220581056, 0.023652128219604492, 0.024197536468505858, 0.02422921562194824, 0.024018911361694335, 0.023732927322387694, 0.023799327850341796, 0.023560575485229492, 0.02374460792541504, 0.024065088272094727, 0.024243135452270508, 0.023995616912841796, 0.023941919326782225, 0.023732032775878906, 0.023443647384643555, 0.023610559463500977, 0.02365318489074707, 0.02352284812927246, 0.02353955268859863, 0.02364236831665039, 0.02359334373474121, 0.023638015747070314, 0.023613439559936524, 0.02376848030090332, 0.023583328247070313, 0.023606784820556642, 0.023626239776611328, 0.023631872177124022, 0.023797760009765623, 0.02370150375366211, 0.02381955146789551, 0.023882463455200197, 0.02378704071044922, 0.02376697540283203, 0.023679519653320314, 0.02369935989379883, 0.023889312744140623, 0.024936864852905274, 0.024049951553344728, 0.023783424377441405, 0.023821920394897462, 0.023746463775634767, 0.023793216705322265, 0.02379667282104492, 0.024821247100830078, 0.027457984924316406, 0.023902271270751955, 0.023799808502197265, 0.023627456665039064, 0.023628032684326172, 0.023896127700805663, 0.025265504837036133, 0.02428585624694824, 0.02452889633178711, 0.02410700798034668, 0.023952831268310548, 0.023695615768432616, 0.024369472503662108, 0.023754751205444336, 0.023934335708618165, 0.023847551345825196, 0.02373347282409668, 0.023593759536743163, 0.02353152084350586, 0.023472127914428712, 0.02352262306213379, 0.02364076805114746, 0.023582176208496095, 0.023691808700561524, 0.02371788787841797, 0.023809823989868164, 0.02357414436340332, 0.023784032821655275, 0.023729951858520507, 0.023772512435913086, 0.02370796775817871, 0.023939039230346678, 0.024135488510131836, 0.023933792114257814, 0.023821855545043947, 0.023675296783447267, 
0.023768672943115233, 0.02363974380493164, 0.02345238494873047, 0.02349260711669922, 0.023560192108154295, 0.02437068748474121, 0.023842399597167968, 0.024001440048217772, 0.02384614372253418, 0.023671552658081053, 0.023506752014160158, 0.023396543502807617, 0.02356831932067871, 0.02361897659301758, 0.02360736083984375, 0.023511648178100586, 0.023504896163940428, 0.023680063247680665, 0.023636608123779296, 0.02354412841796875, 0.023496511459350587, 0.023482559204101562, 0.02374774360656738, 0.023645023345947265, 0.02371174430847168, 0.023617536544799804, 0.0237061767578125, 0.02360495948791504, 0.023775583267211915, 0.023648319244384767, 0.023637056350708008, 0.023574943542480468, 0.02380451202392578, 0.02366873550415039, 0.023692480087280275, 0.02373468780517578, 0.02368320083618164, 0.0237673282623291, 0.02370355224609375, 0.02371379280090332, 0.02371174430847168, 0.023654399871826173, 0.02360550308227539, 0.02379305648803711, 0.02369772720336914, 0.023756832122802735, 0.02464143943786621, 0.02414601516723633, 0.023867391586303712, 0.023895584106445312, 0.023914976119995116, 0.023820192337036132, 0.023742464065551756, 0.024264799118041993, 0.023909759521484376, 0.02374457550048828, 0.023788095474243164, 0.02388991928100586, 0.023875583648681642, 0.02386124801635742, 0.02387059211730957, 0.023771520614624023, 0.02385980796813965, 0.024012704849243165, 0.024350143432617186, 0.023960128784179687, 0.023877599716186523, 0.023922367095947264, 0.023869792938232423, 0.023767040252685546, 0.023750656127929686, 0.023937152862548827, 0.023671968460083008, 0.02363670349121094, 0.023838623046875, 0.023631967544555665, 0.023621631622314454, 0.02363910484313965, 0.023679935455322265, 0.023852607727050782, 0.024056255340576174, 0.024063392639160155, 0.023734880447387696, 0.023826431274414063, 0.023832096099853515, 0.023744991302490234, 0.023688703536987304, 0.02382614326477051, 0.02414668846130371, 0.02394870376586914, 0.024070432662963867, 0.023959871292114257, 0.023733823776245118, 0.023985664367675782, 0.02358742332458496, 0.0235807991027832, 0.0236824951171875, 0.02368979263305664, 0.023694847106933595, 0.023575040817260744, 0.023563423156738282, 0.02355695915222168, 0.023541759490966797, 0.02347398376464844, 0.023549184799194336, 0.023629983901977538, 0.02360393524169922, 0.023912511825561523, 0.023789567947387694, 0.02371379280090332, 0.023635616302490236, 0.02360086441040039, 0.023497184753417968, 0.02365635108947754, 0.02356393623352051, 0.02386774444580078, 0.0235545597076416, 0.023666431427001953, 0.023689184188842773, 0.02368671989440918, 0.02364259147644043, 0.023723552703857422, 0.02362771224975586, 0.02363155174255371, 0.023594911575317384, 0.023625759124755858, 0.023550880432128905, 0.023608831405639647, 0.02367145538330078, 0.02371567916870117, 0.0241213436126709, 0.023889888763427736, 0.02385103988647461, 0.02372198486328125, 0.02385251235961914, 0.02412326431274414, 0.02436521530151367, 0.024005119323730468, 0.023801759719848634, 0.02370992088317871, 0.023705472946166994, 0.023834623336791993, 0.02374812889099121, 0.023908832550048827, 0.02371788787841797, 0.02388787269592285, 0.023580575942993166, 0.023652448654174804, 0.023783424377441405, 0.023862720489501953, 0.023730112075805665, 0.023768831253051757, 0.023799999237060547, 0.023577152252197267, 0.023646495819091798, 0.023496192932128908, 0.02372140884399414, 0.023509248733520508, 0.023558528900146484, 0.02362713623046875, 0.023570655822753906, 0.02371436882019043, 0.023601375579833984, 0.02362710380554199, 0.023748096466064454, 
0.02382291221618652, 0.023521663665771485, 0.02358502388000488, 0.023594240188598632, 0.023708160400390626, 0.023752992630004882, 0.02421228790283203, 0.02373110389709473, 0.023743520736694335, 0.023644159317016602, 0.023690208435058594, 0.02423811149597168, 0.02377724838256836, 0.023805952072143553, 0.02372403144836426, 0.023592575073242188, 0.023682687759399416, 0.023862016677856444, 0.023669952392578124, 0.02352351951599121, 0.02359359931945801, 0.023828479766845705, 0.023670783996582033, 0.02375587272644043, 0.023669599533081054, 0.02371993637084961, 0.02578598403930664, 0.02471776008605957, 0.02407596778869629, 0.023828800201416016, 0.023654048919677734, 0.023551616668701172, 0.02357052803039551, 0.023716480255126952, 0.023762943267822266, 0.023769088745117187, 0.023618560791015625, 0.02356915283203125, 0.02364646339416504, 0.023846912384033202, 0.02370560073852539, 0.023495744705200196, 0.023661504745483397, 0.023586687088012696, 0.023665792465209962, 0.023950336456298828, 0.024102912902832032, 0.02387763214111328, 0.024143871307373048, 0.024216896057128907, 0.02416640090942383, 0.02406399917602539, 0.024680448532104493, 0.02612224006652832, 0.02432156753540039, 0.023899871826171874, 0.023996320724487305, 0.023740287780761718, 0.023683103561401367, 0.023958463668823243, 0.02479516792297363, 0.02391446495056152, 0.023867391586303712, 0.0238623046875, 0.023849567413330077, 0.02377356719970703, 0.023803808212280272, 0.023920095443725586, 0.023844575881958006, 0.02383692741394043, 0.023746944427490233, 0.023660192489624022, 0.02369945526123047, 0.024009344100952148, 0.023775232315063476, 0.0237076473236084, 0.023740127563476564, 0.023822303771972655, 0.02677382469177246, 0.02384214401245117, 0.023870111465454102, 0.02390550422668457, 0.02376313591003418, 0.0237544002532959, 0.02382716751098633, 0.023754175186157227, 0.023628576278686524, 0.02375094413757324, 0.02373347282409668, 0.02420172882080078, 0.023838144302368164, 0.023824544906616212, 0.023753120422363282, 0.023866912841796876, 0.02373263931274414, 0.023652416229248047, 0.023733503341674806, 0.023741119384765624, 0.023732288360595703, 0.02361280059814453, 0.023791711807250978, 0.023620128631591797, 0.023572479248046875, 0.023590911865234376, 0.02372403144836426, 0.023631872177124022, 0.023486080169677733, 0.02368115234375, 0.02366080093383789, 0.023734272003173826, 0.02365398406982422, 0.023633663177490234, 0.02373811149597168, 0.023608896255493166, 0.023683263778686525, 0.023658943176269532, 0.023667808532714843, 0.02371046447753906, 0.02355766487121582, 0.02360793685913086, 0.023488512039184572, 0.02342911911010742, 0.023513088226318358, 0.023513088226318358, 0.02366464042663574, 0.023607231140136718, 0.023674943923950195, 0.023823871612548828, 0.023865856170654298, 0.023870719909667968, 0.02393164825439453, 0.02387763214111328, 0.02370476722717285, 0.023581504821777344, 0.023746559143066406, 0.02399555206298828, 0.023735136032104493, 0.02363382339477539, 0.02373580741882324, 0.0237259521484375, 0.023775264739990233, 0.023720640182495117, 0.02408243179321289, 0.023812095642089845, 0.0237238712310791, 0.023650463104248048, 0.023770111083984375, 0.023768064498901367, 0.023586719512939454, 0.023607391357421875, 0.023551647186279296, 0.023621984481811523, 0.023586816787719726, 0.023609312057495117, 0.023635263442993163, 0.023550687789916994, 0.02386124801635742, 0.023635967254638672, 0.023842815399169923, 0.023810047149658203, 0.02366054344177246, 0.023713567733764648, 0.0236363525390625, 0.023584224700927733, 0.023682880401611327, 
0.023648832321166994, 0.02364944076538086, 0.023679136276245117, 0.023865407943725585, 0.02371206474304199, 0.024023359298706054, 0.023961280822753905, 0.023895584106445312, 0.02495568084716797, 0.02387081527709961, 0.023888544082641603, 0.023862943649291993, 0.023724384307861328, 0.02370947265625, 0.02367740821838379, 0.023629024505615236, 0.026835487365722655, 0.02406921577453613, 0.023784608840942384, 0.023837823867797852, 0.023814783096313477, 0.023773183822631837, 0.023746559143066406, 0.023797216415405272, 0.023759391784667967, 0.02366873550415039, 0.023756799697875978, 0.023777055740356445, 0.023660768508911134, 0.023652032852172853, 0.02359878349304199, 0.023625728607177734, 0.023861888885498048, 0.02362883186340332, 0.023626720428466797, 0.02368921661376953, 0.023768831253051757, 0.024657279968261718, 0.023714399337768553, 0.023855039596557617, 0.023996768951416017, 0.023803903579711915, 0.02373436737060547, 0.02386115264892578, 0.023644159317016602, 0.024195072174072265, 0.027305376052856444, 0.024035520553588867, 0.023902624130249024, 0.024221696853637696, 0.023748607635498048, 0.023855104446411132, 0.02446710395812988, 0.02427939224243164, 0.024213504791259766, 0.02412473678588867, 0.02405036735534668, 0.02369945526123047, 0.02354582405090332, 0.023688352584838868, 0.02351568031311035, 0.02363376045227051, 0.023545696258544923, 0.023597312927246095, 0.0236200008392334, 0.023777280807495117, 0.0235762882232666, 0.023718175888061525, 0.02389788818359375, 0.023765216827392577, 0.023604736328125, 0.02372230339050293, 0.023608928680419923, 0.023822944641113283]",tokens/s,41.97220738498381,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1841.913856,2760.835072,0.0,2365.587456,2314.318336,s,1,8.89811328125,8.89811328125,0.0,8.89811328125,8.89811328125,8.89811328125,8.89811328125,[8.89811328125],,kWh,5.511781417915623e-05,6.072691714418805e-06,1.7201402650005027e-05,7.839190854358007e-05,,MB,1845.071872,3092.185088,0.0,2682.257408,2607.60832,s,10,0.5073520698547364,0.05073520698547364,0.0002747971853071152,0.05066675186157227,0.05080905418395996,0.05117609539031982,0.051469728355407716,"[0.05154313659667969, 0.05068672180175781, 0.05066864013671875, 0.05062307357788086, 0.05066486358642578, 0.050727489471435544, 0.050714176177978514, 0.05058483123779297, 0.050576416015625, 0.050562721252441406]",tokens/s,5045.805767054369,kWh,1.5419928673027206e-06,1.700195465713278e-07,1.0264642714631476e-06,2.738476685337196e-06,tokens/kWh,93482628.99980763,MB,1849.36448,3092.185088,0.0,2682.257408,2607.61088,s,10,15.497951171874998,1.5497951171875,0.005800013507732758,1.5492863159179686,1.5571719116210936,1.558186114501953,1.5589974768066406,"[1.5518857421875, 1.5592003173828124, 1.544307861328125, 1.547433837890625, 1.55082470703125, 1.556946533203125, 1.555377685546875, 1.5431873779296874, 1.5477479248046875, 
1.5410391845703124]",tokens/s,40.6505345779703,kWh,4.5299420457698424e-05,4.995927556806055e-06,2.1374414760337175e-05,7.166976277484166e-05,tokens/kWh,879031.7919416218,,s,630,15.495820976257319,0.024596541232154483,0.0003772357676909649,0.024506224632263186,0.024912055206298828,0.025060841846466064,0.02604591617584229,"[0.0248623046875, 0.024609184265136717, 0.024407039642333983, 0.024689664840698244, 0.02434867286682129, 0.024375040054321288, 0.024914176940917968, 0.024778495788574217, 0.024428287506103517, 0.02441472053527832, 0.0245166072845459, 0.024444927215576173, 0.02442848014831543, 0.02484230422973633, 0.024663488388061525, 0.024622751235961915, 0.02451968002319336, 0.024503807067871093, 0.02456822395324707, 0.02443878364562988, 0.024337696075439452, 0.02458844757080078, 0.024701503753662108, 0.02496512031555176, 0.02471731185913086, 0.02478643226623535, 0.024644031524658203, 0.02481977653503418, 0.02462905693054199, 0.024594623565673827, 0.024576095581054686, 0.02448784065246582, 0.024380512237548828, 0.02450115203857422, 0.024452512741088867, 0.024406335830688478, 0.024409664154052733, 0.02437398338317871, 0.024434688568115235, 0.028899328231811523, 0.024751903533935547, 0.024668384552001953, 0.02451251220703125, 0.02448579216003418, 0.024395488739013673, 0.02459257507324219, 0.024477888107299804, 0.024669824600219728, 0.024559999465942384, 0.024571903228759767, 0.02443059158325195, 0.024436735153198243, 0.024231935501098634, 0.024323551177978516, 0.02438748741149902, 0.024414783477783204, 0.02443187141418457, 0.02470361518859863, 0.02480496025085449, 0.024885215759277345, 0.024476287841796875, 0.024577312469482422, 0.02479587173461914, 0.02486502456665039, 0.024622848510742187, 0.024599903106689452, 0.024760288238525392, 0.024523456573486327, 0.024485727310180665, 0.02444211196899414, 0.024507295608520507, 0.024726943969726564, 0.024621664047241212, 0.024714496612548827, 0.024437503814697267, 0.024371200561523438, 0.024731552124023438, 0.02471513557434082, 0.025053632736206054, 0.024944000244140625, 0.024697248458862304, 0.024559616088867187, 0.025007360458374022, 0.02490025520324707, 0.024709215164184572, 0.02504409599304199, 0.024980352401733397, 0.024867967605590822, 0.024814464569091796, 0.02476851272583008, 0.02458847999572754, 0.024297279357910158, 0.024542335510253907, 0.024488607406616212, 0.024428768157958983, 0.024365055084228517, 0.024430400848388673, 0.024643775939941406, 0.02446067237854004, 0.024492639541625977, 0.024342559814453126, 0.024559616088867187, 0.024738943099975586, 0.024449920654296874, 0.024852479934692383, 0.02474332809448242, 0.024518335342407226, 0.02460256004333496, 0.02459132766723633, 0.024758399963378905, 0.025031904220581054, 0.02447427177429199, 0.025964351654052736, 0.02489356803894043, 0.024557632446289064, 0.024811199188232422, 0.024678720474243163, 0.024759647369384765, 0.02476304054260254, 0.025686016082763673, 0.026223936080932618, 0.02506208038330078, 0.02485856056213379, 0.024579456329345703, 0.024752960205078126, 0.02548294448852539, 0.02539091110229492, 0.0249051513671875, 0.024630016326904296, 0.02451046371459961, 0.024610271453857423, 0.02437174415588379, 0.02460032081604004, 0.024336639404296874, 0.02441935920715332, 0.024615392684936524, 0.024670719146728515, 0.025216224670410157, 0.024416608810424803, 0.024514368057250976, 0.024584096908569338, 0.02456972885131836, 0.024421215057373047, 0.02434867286682129, 0.024487871170043946, 0.02453715133666992, 0.024756128311157227, 0.024465120315551758, 0.02439583969116211, 
0.02436265563964844, 0.02441632080078125, 0.024662624359130858, 0.02457804870605469, 0.02467020797729492, 0.02438956832885742, 0.02441788864135742, 0.024449440002441408, 0.02442451286315918, 0.024383487701416014, 0.0243056640625, 0.024311807632446288, 0.0244135684967041, 0.02434726333618164, 0.02444041633605957, 0.0256944637298584, 0.024508544921875, 0.02446339225769043, 0.024384895324707032, 0.024528928756713867, 0.02459913635253906, 0.02449203109741211, 0.024600576400756836, 0.02433126449584961, 0.024240383148193358, 0.024400543212890625, 0.024379487991333007, 0.024255680084228515, 0.024367168426513673, 0.024302335739135743, 0.024309984207153322, 0.024352607727050782, 0.024479135513305664, 0.024209056854248047, 0.02431056022644043, 0.02427503967285156, 0.02490163230895996, 0.024624927520751953, 0.02436841583251953, 0.024374048233032228, 0.024444704055786134, 0.024377567291259766, 0.024309120178222655, 0.024496768951416014, 0.024758272171020508, 0.024284351348876954, 0.02440457534790039, 0.02429110336303711, 0.024285184860229493, 0.02429587173461914, 0.02434662437438965, 0.02454732894897461, 0.02422547149658203, 0.024396095275878906, 0.024968896865844727, 0.024508575439453124, 0.0244553279876709, 0.024449024200439453, 0.02494054412841797, 0.024606719970703125, 0.024541183471679686, 0.024698879241943358, 0.02434048080444336, 0.02433024024963379, 0.02426041603088379, 0.024264896392822265, 0.02429257583618164, 0.02448633575439453, 0.02448419189453125, 0.024544479370117188, 0.02453993606567383, 0.024754240036010743, 0.024485824584960937, 0.0245166072845459, 0.02443878364562988, 0.024398847579956053, 0.02438041687011719, 0.02450227165222168, 0.02446272087097168, 0.02493283271789551, 0.024481952667236326, 0.024391679763793944, 0.02451046371459961, 0.024715263366699217, 0.025038400650024415, 0.0249136962890625, 0.02470159912109375, 0.024594432830810548, 0.024765472412109375, 0.024685535430908203, 0.02471049690246582, 0.02480793571472168, 0.024559776306152345, 0.024457216262817383, 0.02481132888793945, 0.02443283271789551, 0.02476032066345215, 0.024661056518554686, 0.024521152496337892, 0.024537599563598633, 0.025026559829711914, 0.024939872741699218, 0.025141664505004883, 0.024842432022094726, 0.024715744018554687, 0.02475993537902832, 0.024768960952758788, 0.02448793601989746, 0.02448591995239258, 0.024602592468261717, 0.024406015396118166, 0.024368671417236327, 0.02485500717163086, 0.024365055084228517, 0.024606719970703125, 0.024486080169677734, 0.024803136825561522, 0.024762367248535155, 0.024790752410888673, 0.024596063613891602, 0.024519359588623047, 0.024625152587890626, 0.024763904571533202, 0.02451315116882324, 0.024635263442993164, 0.024800832748413087, 0.02470751953125, 0.02442425537109375, 0.024422111511230468, 0.02440387153625488, 0.02439583969116211, 0.024243743896484374, 0.024509408950805663, 0.02733807945251465, 0.02493712043762207, 0.02458624076843262, 0.024606719970703125, 0.02461631965637207, 0.024609407424926757, 0.02441366386413574, 0.024523199081420897, 0.02451241683959961, 0.024466880798339842, 0.02443283271789551, 0.024398399353027345, 0.024432544708251954, 0.025038944244384766, 0.02448188781738281, 0.024572959899902345, 0.02450521659851074, 0.024426496505737305, 0.024453119277954103, 0.024411552429199217, 0.024512351989746092, 0.024466304779052733, 0.024670112609863282, 0.024544607162475585, 0.024484479904174804, 0.024678272247314455, 0.024569952011108398, 0.024485599517822264, 0.024457536697387695, 0.024385536193847656, 0.024600576400756836, 0.024567808151245117, 
0.024754175186157225, 0.025006080627441408, 0.02467020797729492, 0.02446950340270996, 0.02477987289428711, 0.024415136337280274, 0.024633056640625, 0.024873247146606447, 0.024979455947875977, 0.024723295211791993, 0.024649663925170897, 0.024544767379760742, 0.024767200469970704, 0.024393375396728516, 0.024680767059326172, 0.024445087432861327, 0.024434560775756835, 0.0243240966796875, 0.02429132843017578, 0.024344287872314452, 0.02421379280090332, 0.027205631256103514, 0.024791040420532227, 0.025149440765380858, 0.02465996742248535, 0.024506080627441407, 0.024430303573608397, 0.02443280029296875, 0.02440233612060547, 0.02433827209472656, 0.02458025550842285, 0.02454528045654297, 0.024666112899780275, 0.024786943435668944, 0.02470854377746582, 0.0245948486328125, 0.024354368209838866, 0.024438495635986327, 0.024498111724853517, 0.024394527435302734, 0.02573958396911621, 0.02469856071472168, 0.02442367935180664, 0.024363935470581053, 0.024383487701416014, 0.024444063186645507, 0.02453590393066406, 0.025157632827758788, 0.02607923126220703, 0.02541923141479492, 0.02497529602050781, 0.02501420783996582, 0.024840608596801757, 0.02474825668334961, 0.025013792037963868, 0.024745664596557616, 0.02462179183959961, 0.02492326354980469, 0.024863712310791014, 0.024647680282592774, 0.02444697570800781, 0.024401023864746095, 0.024658592224121093, 0.02447808074951172, 0.025485567092895508, 0.02634419250488281, 0.024798208236694336, 0.024801279067993166, 0.024967168807983397, 0.024687616348266602, 0.02451968002319336, 0.024261695861816406, 0.024372159957885744, 0.024515743255615233, 0.024521568298339843, 0.024924415588378906, 0.024599903106689452, 0.02458255958557129, 0.02447769546508789, 0.02491187286376953, 0.0248371524810791, 0.024840991973876955, 0.024832191467285155, 0.02458995246887207, 0.02445350456237793, 0.025453632354736327, 0.02501487922668457, 0.024606271743774413, 0.024453920364379884, 0.02430156707763672, 0.02441212844848633, 0.024268831253051758, 0.024645631790161132, 0.025068927764892578, 0.02460531234741211, 0.02456559944152832, 0.024729440689086914, 0.024662336349487304, 0.02450432014465332, 0.024702560424804686, 0.02437318420410156, 0.02441788864135742, 0.024626272201538086, 0.024438560485839842, 0.024385536193847656, 0.024385599136352538, 0.025059328079223633, 0.02451375961303711, 0.024322240829467774, 0.024334880828857423, 0.024328191757202147, 0.024276992797851563, 0.024360960006713867, 0.02439900779724121, 0.02494761657714844, 0.02440934371948242, 0.025362432479858397, 0.024392383575439453, 0.024395776748657227, 0.024755840301513673, 0.02723580741882324, 0.024611743927001953, 0.024549375534057616, 0.024452991485595703, 0.024477184295654295, 0.02446940803527832, 0.024565759658813476, 0.024991743087768553, 0.02473369598388672, 0.024498176574707032, 0.0243503360748291, 0.024268800735473633, 0.02457360076904297, 0.024492992401123046, 0.024231712341308595, 0.024258560180664062, 0.02429747200012207, 0.024414239883422853, 0.024358879089355467, 0.024333856582641603, 0.024291872024536133, 0.024387327194213868, 0.024274751663208007, 0.024260480880737303, 0.02444339179992676, 0.024459264755249024, 0.024381439208984376, 0.02449571228027344, 0.024328607559204102, 0.024420352935791017, 0.0243787841796875, 0.02436128044128418, 0.024396064758300782, 0.02451433563232422, 0.02450044822692871, 0.024391551971435547, 0.02441347122192383, 0.024474464416503906, 0.02459382438659668, 0.02448240089416504, 0.024629247665405272, 0.024848255157470703, 0.02489356803894043, 0.024592384338378907, 
0.025460351943969728, 0.02457638359069824, 0.024759391784667968, 0.024941471099853514, 0.024469120025634766, 0.024506752014160155, 0.02434867286682129, 0.024518655776977538, 0.024772640228271484, 0.024583488464355468, 0.024332000732421876, 0.024445600509643554, 0.024405344009399414, 0.024647743225097656, 0.02434547233581543, 0.02431737518310547, 0.024272607803344726, 0.024560735702514647, 0.024280479431152344, 0.02431420707702637, 0.024438207626342773, 0.024343103408813477, 0.02454252815246582, 0.024891551971435548, 0.024621248245239258, 0.02428163146972656, 0.02530240058898926, 0.024339008331298827, 0.024559135437011718, 0.02461516761779785, 0.024332000732421876, 0.02425449562072754, 0.02426019287109375, 0.025659360885620118, 0.025920480728149415, 0.024506368637084962, 0.024421823501586913, 0.024555391311645507, 0.024789695739746095, 0.025417728424072264, 0.02508121681213379, 0.0249800968170166, 0.02485798454284668, 0.024807552337646484, 0.024634111404418946, 0.024524543762207033, 0.024805376052856445, 0.02481100845336914, 0.02442470359802246, 0.02420966339111328, 0.024854528427124024, 0.02456985664367676, 0.024485824584960937, 0.024328256607055666, 0.02447974395751953, 0.024337663650512695, 0.024381696701049806, 0.024271520614624023, 0.024461151123046875, 0.024449024200439453, 0.024399967193603517, 0.024219263076782228, 0.024315488815307616, 0.02439593505859375, 0.024285728454589844, 0.024180543899536132, 0.024229888916015626, 0.02440563201904297, 0.024394304275512695, 0.02430499267578125, 0.02435545539855957, 0.02440950393676758, 0.02440012741088867, 0.024314239501953126, 0.024391616821289062, 0.024418367385864257, 0.024385536193847656, 0.024219648361206055, 0.024333696365356445, 0.024636032104492188, 0.025140575408935547, 0.024619935989379883, 0.024289024353027343, 0.02484422492980957, 0.024647743225097656, 0.02445654487609863, 0.02432579231262207, 0.024638111114501954, 0.024891807556152345, 0.024613088607788085, 0.024320159912109375, 0.02431590461730957, 0.024488096237182618, 0.024440671920776365, 0.024332351684570312, 0.02438956832885742, 0.024253759384155273, 0.024382144927978515, 0.024399871826171874, 0.02441324806213379, 0.02441049575805664, 0.024294111251831056, 0.024530048370361327, 0.024386272430419922, 0.024352767944335937, 0.02430156707763672, 0.02470025634765625, 0.024470176696777344, 0.024412160873413087, 0.024491071701049805, 0.024593343734741212, 0.024592384338378907, 0.024364639282226562, 0.02433244705200195, 0.024406272888183592, 0.025145376205444336, 0.02435868835449219, 0.024332063674926758, 0.02443894386291504, 0.025069215774536132, 0.024689247131347656, 0.02447932815551758, 0.024877056121826172, 0.02452521514892578, 0.02433433532714844, 0.024395519256591797, 0.02445747184753418, 0.024809503555297853, 0.024511520385742187, 0.024476768493652344, 0.024480672836303712, 0.024449119567871092, 0.02434079933166504, 0.024408607482910155, 0.024425792694091796, 0.024373952865600585, 0.024252416610717774, 0.024285280227661132, 0.024333248138427733, 0.024412384033203127, 0.02423788833618164, 0.024262912750244142, 0.02432614326477051, 0.024582080841064453, 0.024725696563720704, 0.02456585693359375, 0.024387456893920897, 0.02453590393066406, 0.024274656295776367, 0.024423776626586916, 0.02440675163269043, 0.02445254325866699]",tokens/s,40.6561227678924,, 
4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,811.737088,554.631168,0.0,159.383552,143.673856,s,1,7.2645595703125,7.2645595703125,0.0,7.2645595703125,7.2645595703125,7.2645595703125,7.2645595703125,[7.2645595703125],,kWh,1.059855921249664e-05,1.1614199558078427e-06,3.5919473179990558e-06,1.535192648630354e-05,,MB,1332.49024,609.15712,0.0,199.22944,186.684928,s,26,0.1986882557868958,0.007641855991803682,0.00010765275712770247,0.007627088069915772,0.007689568042755127,0.007762360215187072,0.008034847855567932,"[0.007783520221710205, 0.007632544040679931, 0.007615615844726562, 0.007631135940551758, 0.007592576026916504, 0.007626304149627686, 0.007538559913635254, 0.008118623733520508, 0.007698880195617676, 0.007650271892547607, 0.007627871990203858, 0.007642591953277588, 0.007630976200103759, 0.007672863960266113, 0.007640255928039551, 0.00756166410446167, 0.0076802558898925784, 0.007628064155578613, 0.007584799766540527, 0.0075781760215759275, 0.007624320030212402, 0.0075511040687561036, 0.0075608639717102055, 0.0076089282035827635, 0.0076212158203125, 0.007586271762847901]",tokens/s,33499.71528835067,kWh,2.2710448227302365e-07,2.5039945156926532e-08,9.852234427392751e-08,3.506667717038777e-07,tokens/kWh,730037804.1412503,MB,1371.762688,611.254272,0.0,201.326592,186.687488,s,26,10.00890411376953,0.38495785052959736,0.0016175626169597206,0.3846318359375,0.38742640686035157,0.38829644012451175,0.3885595932006836,"[0.3870013427734375, 0.38459976196289064, 0.3859169616699219, 0.384282470703125, 0.3852095642089844, 0.3845240783691406, 0.38484475708007815, 0.3884447631835938, 0.3885978698730469, 0.38477554321289065, 0.3828612976074219, 0.3863381042480469, 0.38619061279296873, 0.38514959716796876, 0.38294338989257815, 0.38269284057617187, 0.38385476684570313, 0.3836585388183594, 0.38410577392578127, 0.38320263671875, 0.38466390991210936, 0.38455780029296877, 0.38785147094726563, 0.3855151672363281, 0.38340081787109376, 0.3837202758789062]",tokens/s,163.65428036687425,kWh,1.1244997103624623e-05,1.240140582539681e-06,3.929803527956919e-06,1.641494121412122e-05,tokens/kWh,3837966.8363236794,,s,1638,9.997071547031386,0.006103218282680955,0.00012193710248929396,0.0060904319286346435,0.006164614391326905,0.006209982371330262,0.006590835213661193,"[0.006099040031433105, 0.0060965118408203125, 0.006154816150665283, 0.00604095983505249, 0.00616425609588623, 0.006230591773986816, 0.006178815841674804, 0.006094624042510986, 0.00619865608215332, 0.006209407806396484, 0.006185696125030517, 0.006164735794067383, 0.006086656093597412, 0.006123519897460937, 0.006047743797302246, 0.006168896198272705, 0.006061759948730469, 0.006155551910400391, 0.0061036162376403805, 0.006166368007659912, 0.006070591926574707, 0.006203392028808594, 0.006240255832672119, 0.0061658558845520016, 0.006145855903625488, 0.006069087982177734, 0.0061931519508361815, 0.006078559875488282, 
0.006131616115570069, 0.006065248012542724, 0.006124512195587158, 0.006045536041259765, 0.006127711772918701, 0.006042719841003418, 0.006149312019348144, 0.006063839912414551, 0.006166143894195557, 0.0060538239479064945, 0.006422463893890381, 0.00618336009979248, 0.006080031871795654, 0.006107711791992187, 0.0060553278923034665, 0.006209919929504395, 0.006084799766540527, 0.00611737585067749, 0.006307648181915283, 0.006162079811096191, 0.006007328033447266, 0.006127232074737548, 0.00604963207244873, 0.0062349758148193355, 0.0060310401916503905, 0.006153952121734619, 0.006572319984436035, 0.006065855979919434, 0.006162176132202149, 0.0060544638633728024, 0.006111008167266846, 0.006068448066711426, 0.006127295970916748, 0.00605625581741333, 0.006141952037811279, 0.005943903923034668, 0.006113120079040527, 0.006031551837921143, 0.006188704013824463, 0.006022848129272461, 0.006118048191070557, 0.006088031768798828, 0.0060850238800048825, 0.006090976238250732, 0.006091872215270996, 0.006142367839813232, 0.0061420159339904785, 0.006123104095458984, 0.006067039966583252, 0.006170623779296875, 0.006061215877532959, 0.006161248207092285, 0.006045567989349365, 0.006142079830169678, 0.006053088188171386, 0.0061519680023193355, 0.006061024188995361, 0.00616483211517334, 0.006089759826660156, 0.006165311813354492, 0.006079999923706054, 0.00609657621383667, 0.006113952159881592, 0.006086175918579102, 0.006113759994506836, 0.006055647850036621, 0.006100543975830078, 0.006048799991607666, 0.006123295783996582, 0.0060529599189758304, 0.006128448009490967, 0.006039008140563965, 0.006160927772521973, 0.006036928176879883, 0.006169151782989502, 0.006067776203155518, 0.006164927959442138, 0.006039391994476318, 0.006125152111053467, 0.006033792018890381, 0.006117824077606201, 0.006059199810028076, 0.00607916784286499, 0.006053440093994141, 0.006058303833007812, 0.006107135772705078, 0.006074368000030517, 0.006103040218353272, 0.006059072017669678, 0.0061092801094055176, 0.006125664234161377, 0.0061543679237365725, 0.006027904033660889, 0.006265183925628662, 0.006013855934143066, 0.006243072032928467, 0.006037504196166992, 0.006123519897460937, 0.005929120063781738, 0.006117216110229492, 0.00605625581741333, 0.006286079883575439, 0.006222752094268799, 0.006074399948120117, 0.006158239841461181, 0.006090911865234375, 0.006168511867523194, 0.006053887844085694, 0.006148128032684326, 0.006051328182220459, 0.0061279358863830565, 0.006058303833007812, 0.006149983882904052, 0.006044672012329101, 0.006156544208526612, 0.006052608013153076, 0.006150144100189209, 0.006032512187957763, 0.006142848014831543, 0.006072480201721192, 0.0061008319854736325, 0.006104671955108642, 0.006070240020751953, 0.006175168037414551, 0.006105440139770508, 0.0061660480499267575, 0.00606774377822876, 0.006145887851715088, 0.006070720195770264, 0.006177087783813476, 0.00611084794998169, 0.006447904109954834, 0.006059616088867187, 0.006187007904052734, 0.00612556791305542, 0.0060661759376525876, 0.006135807991027832, 0.006135807991027832, 0.006184447765350342, 0.006101503849029541, 0.006133471965789795, 0.006080800056457519, 0.006170559883117676, 0.0060739521980285646, 0.006160192012786865, 0.006085279941558838, 0.006155456066131592, 0.006041408061981201, 0.006130559921264649, 0.006051360130310058, 0.006144832134246827, 0.0061001920700073245, 0.0060657281875610355, 0.006113791942596435, 0.006106624126434326, 0.006095871925354004, 0.006055935859680176, 0.006144000053405762, 0.006049536228179932, 0.006369311809539795, 0.006070655822753906, 
0.00601087999343872, 0.0060065598487854, 0.006118720054626465, 0.006013855934143066, 0.006125408172607422, 0.006049952030181885, 0.006119423866271972, 0.00602342414855957, 0.006133600234985352, 0.006073760032653809, 0.006137792110443115, 0.006163008213043213, 0.006064256191253662, 0.006081535816192627, 0.006060256004333496, 0.006124351978302002, 0.006045631885528565, 0.006096864223480224, 0.006012864112854004, 0.00609881591796875, 0.006031455993652343, 0.006112480163574218, 0.00601580810546875, 0.006131360054016113, 0.006009535789489746, 0.006142720222473145, 0.0059909758567810055, 0.006125887870788574, 0.006096928119659424, 0.006260960102081299, 0.0061212477684020996, 0.006158336162567139, 0.006149983882904052, 0.006059296131134033, 0.006097856044769287, 0.006253600120544434, 0.006115935802459717, 0.006032927989959717, 0.00612175989151001, 0.006031968116760254, 0.006104000091552734, 0.006431712150573731, 0.006182911872863769, 0.006037504196166992, 0.006121568202972412, 0.0060128321647644044, 0.006158048152923584, 0.006088128089904785, 0.006089439868927002, 0.006155935764312745, 0.006089119911193847, 0.006090816020965576, 0.006033152103424072, 0.006095104217529297, 0.00602726411819458, 0.006095967769622803, 0.006015103816986084, 0.006091839790344238, 0.006029024124145508, 0.006118879795074463, 0.0060126399993896485, 0.006124351978302002, 0.00601907205581665, 0.006031424045562744, 0.006003392219543457, 0.0061171197891235355, 0.00611846399307251, 0.006131872177124024, 0.0060178241729736326, 0.006145120143890381, 0.006039552211761475, 0.006124512195587158, 0.006114463806152343, 0.006365983963012695, 0.006104479789733887, 0.006070655822753906, 0.006082272052764893, 0.0060033597946166995, 0.006098015785217285, 0.006040319919586181, 0.006091104030609131, 0.0060804481506347655, 0.006098368167877197, 0.006043935775756836, 0.006131519794464111, 0.006074368000030517, 0.006135456085205078, 0.006025760173797607, 0.0061337599754333495, 0.006004479885101319, 0.006139232158660889, 0.006056128025054931, 0.006219871997833252, 0.00607913589477539, 0.0060704960823059085, 0.006123263835906983, 0.006074399948120117, 0.006135359764099121, 0.006064511775970459, 0.0061047358512878415, 0.00605017614364624, 0.00611952018737793, 0.006207071781158448, 0.006131328105926513, 0.006058688163757324, 0.006111519813537598, 0.006270112037658692, 0.006169407844543457, 0.006038559913635254, 0.006224959850311279, 0.006083712100982666, 0.006101312160491944, 0.00614131212234497, 0.006118239879608155, 0.006144000053405762, 0.006082560062408447, 0.00612550401687622, 0.006076191902160645, 0.0061413440704345705, 0.006163328170776367, 0.006145567893981934, 0.006041567802429199, 0.006121119976043701, 0.006046559810638428, 0.006189280033111573, 0.006110527992248535, 0.006060031890869141, 0.006350560188293457, 0.0061197118759155275, 0.006146207809448242, 0.006072127819061279, 0.006120512008666992, 0.00603542423248291, 0.006112480163574218, 0.0060516161918640135, 0.0061188478469848635, 0.006012991905212402, 0.006099455833435059, 0.006027103900909424, 0.006144159793853759, 0.006051839828491211, 0.006131519794464111, 0.006027455806732178, 0.006151423931121826, 0.006060768127441407, 0.006102784156799317, 0.006082848072052002, 0.006067615985870361, 0.006074143886566162, 0.006036416053771972, 0.006087711811065674, 0.0060342721939086914, 0.006113279819488526, 0.006131711959838867, 0.006146143913269043, 0.006092864036560059, 0.006137824058532715, 0.00604966402053833, 0.006147903919219971, 0.006020639896392822, 0.006158783912658692, 
0.006107232093811035, 0.006141503810882568, 0.006013279914855957, 0.006125792026519776, 0.00601907205581665, 0.006115327835083008, 0.0060663681030273435, 0.006076223850250244, 0.00606822395324707, 0.006178944110870361, 0.0062128958702087405, 0.006089503765106201, 0.006111040115356446, 0.006057695865631103, 0.006091360092163086, 0.006149087905883789, 0.006152927875518799, 0.006070367813110352, 0.006147583961486816, 0.0060215358734130855, 0.006089727878570556, 0.006012959957122803, 0.006162975788116455, 0.005999008178710938, 0.006176799774169922, 0.006060031890869141, 0.006074431896209717, 0.006100543975830078, 0.006034048080444336, 0.006078752040863037, 0.006112927913665771, 0.006105408191680908, 0.006057983875274659, 0.006082560062408447, 0.006042975902557373, 0.006084928035736084, 0.006023519992828369, 0.006082880020141602, 0.006014656066894531, 0.006078464031219482, 0.0061211838722229, 0.0064067840576171875, 0.006059679985046387, 0.006111231803894043, 0.0060234880447387696, 0.006188735961914062, 0.006094783782958984, 0.006196959972381592, 0.006188640117645264, 0.006115871906280518, 0.006154463768005371, 0.006104351997375489, 0.006118112087249756, 0.006060031890869141, 0.006123295783996582, 0.006041791915893555, 0.006108672142028809, 0.006025760173797607, 0.006176191806793213, 0.006078239917755127, 0.006210336208343506, 0.006049791812896729, 0.006129759788513184, 0.006180672168731689, 0.006107583999633789, 0.006137504100799561, 0.006121664047241211, 0.006114463806152343, 0.006049536228179932, 0.0061019201278686526, 0.006048799991607666, 0.006108479976654053, 0.006084256172180176, 0.006109151840209961, 0.006038847923278808, 0.0062039680480957034, 0.006009247779846192, 0.006120927810668945, 0.006030784130096435, 0.0061242241859436036, 0.006057983875274659, 0.006152224063873291, 0.006097023963928223, 0.006111231803894043, 0.006051104068756103, 0.0061058239936828615, 0.006099232196807861, 0.006090047836303711, 0.00609116792678833, 0.00605731201171875, 0.00610371208190918, 0.0060152320861816405, 0.006131487846374512, 0.006014944076538086, 0.0061562881469726565, 0.006107135772705078, 0.006137728214263916, 0.0060555520057678225, 0.0061055998802185055, 0.006088031768798828, 0.0060991358757019045, 0.006261216163635254, 0.006074368000030517, 0.006199295997619629, 0.0060412797927856445, 0.006152512073516846, 0.006027520179748535, 0.006198048114776611, 0.006112224102020264, 0.006113471984863281, 0.006002528190612793, 0.006160064220428467, 0.006080800056457519, 0.006161695957183838, 0.0060193600654602055, 0.006138495922088623, 0.005998303890228271, 0.006107232093811035, 0.006081535816192627, 0.006091392040252686, 0.006060416221618652, 0.006033408164978027, 0.006084832191467285, 0.006160416126251221, 0.006088448047637939, 0.00607587194442749, 0.006093344211578369, 0.006080512046813965, 0.006203487873077392, 0.006123136043548584, 0.0065550398826599125, 0.007563968181610107, 0.00660643196105957, 0.00670684814453125, 0.006532192230224609, 0.006109024047851562, 0.006158080101013184, 0.006080704212188721, 0.006160607814788818, 0.006051680088043213, 0.00615228796005249, 0.0060496959686279295, 0.006141952037811279, 0.00606822395324707, 0.006123519897460937, 0.006139743804931641, 0.006079872131347656, 0.006118303775787354, 0.006067264080047608, 0.006134592056274414, 0.0061151041984558105, 0.006164703845977783, 0.006030879974365235, 0.006162752151489257, 0.00599894380569458, 0.006110015869140625, 0.006032224178314209, 0.00611084794998169, 0.005988736152648926, 0.006122848033905029, 0.00601145601272583, 
0.0061175041198730466, 0.006004703998565674, 0.006093952178955078, 0.0060488319396972655, 0.0060700798034667965, 0.006082528114318848, 0.006061312198638916, 0.006098944187164307, 0.006272928237915039, 0.0061121277809143065, 0.006096255779266357, 0.006127679824829101, 0.006027135848999023, 0.00610745620727539, 0.006196703910827637, 0.007695168018341064, 0.007882944107055664, 0.006240447998046875, 0.0060495038032531735, 0.0061718721389770505, 0.006066048145294189, 0.006126495838165283, 0.006031360149383545, 0.006109183788299561, 0.006028351783752442, 0.006114431858062744, 0.006059743881225586, 0.0061133761405944825, 0.006065343856811524, 0.006230432033538819, 0.0062137279510498045, 0.006058527946472168, 0.006128767967224121, 0.006041759967803955, 0.00610537576675415, 0.006039775848388672, 0.00611081600189209, 0.0060832958221435545, 0.006143680095672607, 0.00603545618057251, 0.00658022403717041, 0.006032671928405762, 0.006138591766357422, 0.0060347838401794435, 0.006092639923095703, 0.00608355188369751, 0.006070112228393555, 0.0062911038398742675, 0.006109151840209961, 0.006142591953277588, 0.0060919361114501955, 0.006184544086456299, 0.006085792064666748, 0.006186272144317627, 0.006040256023406982, 0.0061561598777771, 0.0059155840873718266, 0.006141952037811279, 0.0060878081321716305, 0.006160639762878418, 0.006061791896820068, 0.006073247909545898, 0.006196735858917236, 0.006037600040435791, 0.006093215942382812, 0.0060351681709289555, 0.006103328227996826, 0.006064127922058105, 0.006172416210174561, 0.006048319816589355, 0.006147776126861572, 0.006097184181213379, 0.0061140480041503905, 0.00604259204864502, 0.006126688003540039, 0.006011807918548584, 0.006120512008666992, 0.0060503678321838375, 0.006291200160980224, 0.006578815937042236, 0.006028575897216797, 0.006071008205413818, 0.006045695781707764, 0.006140223979949951, 0.006049215793609619, 0.006115647792816162, 0.006043583869934082, 0.006118688106536865, 0.00605456018447876, 0.006116799831390381, 0.006067071914672852, 0.006159264087677002, 0.006011648178100586, 0.006129759788513184, 0.006016160011291504, 0.006139967918395996, 0.006031455993652343, 0.006128575801849365, 0.006036448001861572, 0.006107711791992187, 0.006310527801513672, 0.006153632164001465, 0.0061298561096191405, 0.006074272155761719, 0.006183008193969727, 0.006049791812896729, 0.00612556791305542, 0.006092735767364502, 0.006110879898071289, 0.0060665922164916996, 0.006106719970703125, 0.005994336128234863, 0.006091328144073487, 0.006004608154296875, 0.006113408088684082, 0.0060119681358337404, 0.006121535778045654, 0.006095808029174804, 0.006119359970092773, 0.00595689582824707, 0.0060953922271728515, 0.006076608180999756, 0.006051839828491211, 0.006066431999206543, 0.006032800197601319, 0.006066656112670899, 0.006033184051513672, 0.006078271865844726, 0.006048064231872558, 0.0060845761299133305, 0.006029407978057861, 0.0061049599647521975, 0.005994751930236817, 0.0060713281631469725, 0.006021696090698242, 0.00607366418838501, 0.005995391845703125, 0.006078688144683838, 0.006022336006164551, 0.006097055912017822, 0.00601251220703125, 0.006080800056457519, 0.0060424962043762204, 0.006129312038421631, 0.0060414400100708004, 0.006107327938079834, 0.006012608051300048, 0.006118783950805664, 0.006009791851043701, 0.006129216194152832, 0.006037792205810547, 0.006121984004974365, 0.006202400207519531, 0.006088352203369141, 0.006161375999450684, 0.006127615928649902, 0.006119616031646728, 0.006061888217926025, 0.0060943360328674315, 0.0060349760055541995, 
0.0060917439460754395, 0.006025216102600098, 0.006094848155975342, 0.006010240077972412, 0.006097631931304932, 0.005996448040008545, 0.006099199771881104, 0.006008575916290284, 0.006114975929260254, 0.006133408069610596, 0.0061242241859436036, 0.006075488090515137, 0.006119743824005127, 0.006034239768981934, 0.006089759826660156, 0.006053664207458496, 0.006067168235778808, 0.006039103984832764, 0.006034048080444336, 0.006091616153717041, 0.006040544033050537, 0.006154240131378174, 0.006378304004669189, 0.006119200229644775, 0.006504127979278565, 0.006744607925415039, 0.006352960109710693, 0.0060538239479064945, 0.006125823974609375, 0.006051199913024902, 0.006189663887023925, 0.00611516809463501, 0.006154047966003418, 0.006042975902557373, 0.006128352165222168, 0.006035679817199707, 0.006122623920440674, 0.006044159889221192, 0.006129983901977539, 0.006073984146118164, 0.00612995195388794, 0.006062399864196777, 0.006057663917541504, 0.006109344005584717, 0.006045536041259765, 0.0060661759376525876, 0.006044960021972657, 0.006165215969085694, 0.006052095890045166, 0.006094624042510986, 0.006016992092132569, 0.006206495761871338, 0.006026271820068359, 0.006102975845336914, 0.005992447853088379, 0.006106847763061523, 0.006617472171783447, 0.006460832118988037, 0.006097631931304932, 0.00603113603591919, 0.006086656093597412, 0.006016895771026611, 0.00612550401687622, 0.0061296639442443845, 0.006123104095458984, 0.006029920101165771, 0.006141952037811279, 0.006012928009033203, 0.006145823955535889, 0.006024576187133789, 0.006136672019958496, 0.006120448112487793, 0.006112063884735107, 0.006057695865631103, 0.006124000072479248, 0.00601308822631836, 0.006122432231903076, 0.006044479846954345, 0.006080863952636718, 0.006085696220397949, 0.006058688163757324, 0.006076416015625, 0.006051839828491211, 0.006100543975830078, 0.006027616024017334, 0.005961760044097901, 0.0060299839973449705, 0.006102911949157715, 0.006037407875061035, 0.006073599815368653, 0.006081503868103027, 0.006115327835083008, 0.0060183038711547855, 0.006093344211578369, 0.005998847961425781, 0.006080031871795654, 0.0060375361442565915, 0.006096799850463867, 0.005994495868682862, 0.006083072185516358, 0.006192575931549072, 0.0061363840103149415, 0.00606825590133667, 0.006119359970092773, 0.006064159870147705, 0.006086656093597412, 0.006088064193725586, 0.00609772777557373, 0.006080319881439209, 0.006025216102600098, 0.006096672058105468, 0.006078879833221435, 0.006091839790344238, 0.006046495914459229, 0.006059967994689942, 0.005987616062164307, 0.006079520225524902, 0.006041088104248047, 0.006076255798339844, 0.0060189437866210935, 0.006098847866058349, 0.006029024124145508, 0.006087135791778564, 0.006004288196563721, 0.0060936641693115235, 0.005977568149566651, 0.006119967937469483, 0.00601423978805542, 0.006310624122619629, 0.00707583999633789, 0.006176767826080322, 0.006633471965789795, 0.006054912090301514, 0.006121920108795166, 0.006855231761932373, 0.006351103782653808, 0.006156032085418701, 0.006082399845123291, 0.006154399871826172, 0.006103040218353272, 0.006161791801452637, 0.00602784013748169, 0.006149951934814453, 0.0060561919212341305, 0.0061337599754333495, 0.006108895778656006, 0.00611356782913208, 0.0061394238471984865, 0.0060293121337890625, 0.006131743907928467, 0.0060490560531616215, 0.0061283202171325685, 0.006041600227355957, 0.0061155838966369626, 0.006034304141998291, 0.006162911891937256, 0.006039968013763428, 0.006115424156188965, 0.006021024227142334, 0.006127615928649902, 0.00604918384552002, 
0.006104991912841797, 0.006006495952606201, 0.006144991874694824, 0.005993567943572998, 0.006117919921875, 0.00605017614364624, 0.006094111919403076, 0.006077343940734863, 0.006127840042114258, 0.006090335845947266, 0.006019008159637451, 0.006109248161315918, 0.006028319835662842, 0.006063072204589843, 0.00603545618057251, 0.006094848155975342, 0.006051839828491211, 0.006104447841644287, 0.0060013761520385745, 0.006127007961273193, 0.006008768081665039, 0.006096704006195068, 0.006044415950775146, 0.006135807991027832, 0.0059985918998718265, 0.006106400012969971, 0.0060538239479064945, 0.006113887786865235, 0.006019264221191406, 0.006100992202758789, 0.005973311901092529, 0.006082240104675293, 0.006009856224060059, 0.006169792175292969, 0.006030015945434571, 0.006091040134429931, 0.006047584056854248, 0.0060702719688415525, 0.006360127925872803, 0.006351808071136475, 0.006922239780426025, 0.0065474557876586915, 0.006060256004333496, 0.00611030387878418, 0.0060722241401672365, 0.006128064155578613, 0.00603545618057251, 0.006175072193145752, 0.006090943813323975, 0.006181888103485108, 0.005944928169250488, 0.006095263957977295, 0.0060026879310607914, 0.006111104011535645, 0.006040768146514892, 0.006095808029174804, 0.006027328014373779, 0.006108352184295654, 0.006025983810424805, 0.006120639801025391, 0.006057919979095459, 0.006089600086212158, 0.006094848155975342, 0.006051392078399658, 0.00609935998916626, 0.006049983978271485, 0.006086495876312256, 0.006043647766113281, 0.006083807945251465, 0.006048255920410156, 0.006144288063049316, 0.006043647766113281, 0.00610313606262207, 0.006030655860900879, 0.006099455833435059, 0.006022687911987305, 0.006086656093597412, 0.006023744106292725, 0.00610700798034668, 0.005988831996917725, 0.006139552116394043, 0.006024640083312988, 0.0061075201034545895, 0.006013120174407959, 0.006108352184295654, 0.006031424045562744, 0.0061178879737854, 0.006014336109161377, 0.006113408088684082, 0.006002528190612793, 0.00614902400970459, 0.006053184032440186, 0.006046400070190429, 0.0060759038925170894, 0.006111775875091553, 0.006077919960021973, 0.00602569580078125, 0.006055679798126221, 0.006019519805908203, 0.0060752639770507814, 0.0061756157875061035, 0.00609657621383667, 0.006052256107330322, 0.006184959888458252, 0.00602947187423706, 0.006162464141845703, 0.0060332159996032714, 0.006166592121124268, 0.006027200222015381, 0.006116479873657227, 0.00600710391998291, 0.00608460807800293, 0.0060104641914367676, 0.006013120174407959, 0.006080800056457519, 0.006125279903411865, 0.006102047920227051, 0.006079040050506592, 0.006085375785827637, 0.006065824031829834, 0.0061147198677062985, 0.006041215896606445, 0.006087647914886475, 0.006037792205810547, 0.006096479892730713, 0.006029439926147461, 0.006115071773529053, 0.006030911922454834, 0.006136767864227295, 0.0060395197868347164, 0.006117152214050293, 0.006036960124969483, 0.006103775978088379, 0.006067647933959961, 0.006105472087860107, 0.006037504196166992, 0.006123839855194092, 0.005994175910949707, 0.006107135772705078, 0.006025055885314942, 0.006115327835083008, 0.006077983856201172, 0.00608028793334961, 0.006082784175872802, 0.006048543930053711, 0.006063839912414551, 0.006054175853729248, 0.006102880001068115, 0.006011168003082276, 0.006069983959197998, 0.006053887844085694, 0.006078464031219482, 0.006031360149383545, 0.006168575763702393, 0.00601907205581665, 0.006086656093597412, 0.006006368160247803, 0.0060965118408203125, 0.0059911999702453615, 0.006098400115966797, 0.005984928131103516, 
0.006137343883514404, 0.006005119800567627, 0.006078815937042236, 0.006004384040832519, 0.0061042881011962894, 0.005983007907867432, 0.006080639839172363, 0.005982079982757568, 0.006089824199676514, 0.006047711849212646, 0.006165599822998047, 0.006025055885314942, 0.006098944187164307, 0.006030752182006836, 0.00609984016418457, 0.005958655834197998, 0.006043903827667237, 0.006097119808197021, 0.006044191837310791, 0.006149888038635254, 0.006027520179748535, 0.006115520000457763, 0.006038784027099609, 0.0061138558387756345, 0.006062111854553223, 0.006108352184295654, 0.0060731201171875, 0.006123424053192139, 0.006006591796875, 0.006138463973999023, 0.006179520130157471, 0.006116352081298828, 0.006008831977844238, 0.006098144054412842, 0.006034207820892334, 0.006120639801025391, 0.006045919895172119, 0.006152512073516846, 0.006084991931915283, 0.006039455890655517, 0.006078464031219482, 0.006042751789093017, 0.006078559875488282, 0.00602396821975708, 0.00618393611907959, 0.00602623987197876, 0.00614515209197998, 0.0060182719230651854, 0.006061791896820068, 0.006037439823150635, 0.006094848155975342, 0.00605292797088623, 0.006115712165832519, 0.006005152225494385, 0.006123680114746094, 0.006024479866027832, 0.00652566385269165, 0.006076416015625, 0.006078464031219482, 0.006395455837249756, 0.006068640232086182, 0.006094880104064941, 0.006053760051727295, 0.006109087944030762, 0.00605398416519165, 0.006072447776794433, 0.006041376113891601, 0.006084095954895019, 0.006025504112243652, 0.0061262397766113285, 0.005998496055603027, 0.00616864013671875, 0.006018879890441894, 0.00611356782913208, 0.006024928092956543, 0.00610700798034668, 0.005992576122283936, 0.006095935821533203, 0.005888000011444092, 0.006104703903198242, 0.00600054407119751, 0.006103519916534424, 0.006017119884490967, 0.00609225606918335, 0.006158048152923584, 0.006060544013977051, 0.006098336219787598, 0.006168384075164795, 0.006118400096893311, 0.006033408164978027, 0.006168575763702393, 0.006031072139739991, 0.0060910720825195315, 0.006027040004730225, 0.006101183891296387, 0.006027040004730225, 0.006099167823791504, 0.0060018239021301266, 0.006101408004760742, 0.00601043176651001, 0.0062473278045654295, 0.00606982421875, 0.006124991893768311, 0.0060234880447387696, 0.006132351875305176, 0.006042816162109375, 0.006097760200500488, 0.006049471855163574, 0.006047200202941894, 0.00607747220993042, 0.0060548801422119145, 0.006148767948150635, 0.006041791915893555, 0.006110976219177246, 0.006125823974609375, 0.006094848155975342, 0.006018784046173096, 0.006072383880615234, 0.006000832080841064, 0.006080480098724366, 0.0060059518814086915, 0.006084928035736084, 0.0060769920349121095, 0.006141695976257324, 0.006017280101776123, 0.0061562881469726565, 0.006019264221191406, 0.006164576053619385, 0.006026976108551026, 0.006584383964538574, 0.006116320133209228, 0.006111839771270752, 0.0060993280410766605, 0.006045695781707764, 0.006076159954071045, 0.0060145602226257325, 0.006077151775360107, 0.006045631885528565, 0.006072319984436035, 0.006017024040222168, 0.006090752124786377, 0.00593123197555542, 0.0060845441818237304, 0.006006847858428955, 0.00611737585067749, 0.005980160236358643, 0.006187007904052734, 0.006137504100799561, 0.006121664047241211, 0.006044991970062256, 0.00613647985458374, 0.006004928112030029, 0.006103263854980469, 0.006133535861968994, 0.006075456142425537, 0.006099904060363769, 0.006059391975402832, 0.006105728149414062, 0.0060555200576782224, 0.006101408004760742, 0.006056032180786133, 0.006111135959625244, 
0.006168575763702393, 0.006116479873657227, 0.006100895881652832, 0.006146848201751709, 0.006003136157989502, 0.00609830379486084, 0.0059985599517822265, 0.006097311973571777, 0.006232063770294189, 0.006129312038421631, 0.00600713586807251, 0.006137119770050049, 0.006101727962493897, 0.006150144100189209, 0.0060764479637146, 0.00602668809890747, 0.006091584205627441, 0.0060497279167175295, 0.00607209587097168, 0.006033408164978027, 0.006073791980743409, 0.006064703941345215, 0.0060661759376525876, 0.006021120071411133, 0.0060841598510742185, 0.006013376235961914, 0.006594624042510986, 0.006074592113494873, 0.00613369607925415, 0.00599564790725708, 0.006148640155792236, 0.005988480091094971, 0.006107295989990234, 0.006110176086425782, 0.006097536087036132, 0.0061521601676940915, 0.006063712120056152, 0.0061356801986694335, 0.006022304058074951, 0.006078207969665527, 0.006031199932098389, 0.006096960067749024, 0.005920767784118652, 0.006104608058929443, 0.006017183780670166, 0.006125984191894532, 0.005994912147521973, 0.0060783357620239255, 0.006049856185913086, 0.006156223773956299, 0.006415808200836181, 0.006105887889862061, 0.0060128321647644044, 0.006111231803894043, 0.006059552192687988, 0.006114848136901855, 0.006121791839599609, 0.0061008319854736325, 0.006149919986724853, 0.006056032180786133, 0.006118656158447266, 0.006083839893341064, 0.006095615863800049, 0.006047391891479492, 0.006068128108978272, 0.006119296073913574, 0.0061073598861694335, 0.005985407829284668, 0.006107071876525879, 0.0060115838050842285, 0.006093056201934815, 0.00603545618057251, 0.006104864120483398, 0.00601094388961792, 0.00610265588760376, 0.005986847877502441, 0.006113279819488526, 0.006006015777587891, 0.006081088066101074, 0.0059845118522644045, 0.006088640213012695, 0.006013279914855957, 0.006176415920257568, 0.006033535957336426, 0.0060721921920776365, 0.0060928001403808595, 0.006038943767547608, 0.00606217622756958, 0.006050303936004638, 0.00611737585067749, 0.0060160961151123045, 0.006056064128875732, 0.006023583889007568, 0.006082335948944092, 0.006008480072021484, 0.0061363840103149415, 0.0060193600654602055, 0.006063936233520507, 0.0060850238800048825, 0.0061910400390625, 0.006025152206420898, 0.006147103786468506, 0.006017951965332031, 0.006107071876525879, 0.006049088001251221, 0.005980224132537842, 0.006062079906463623, 0.006081791877746582, 0.006020127773284912, 0.006163424015045166, 0.006039648056030273, 0.006075039863586425, 0.0060657281875610355, 0.006139935970306397, 0.006093215942382812, 0.006053887844085694, 0.006105088233947754, 0.006044896125793457, 0.0060813121795654295, 0.00602342414855957, 0.006100736141204834, 0.006062079906463623, 0.006098944187164307, 0.006066463947296143, 0.00646073579788208, 0.006035871982574463, 0.006139776229858398, 0.006037631988525391, 0.006137856006622314, 0.006024320125579834, 0.006132607936859131, 0.0060702719688415525, 0.006090144157409668, 0.0060854401588439945, 0.006368095874786377, 0.006142496109008789, 0.006074848175048828, 0.00613369607925415, 0.006078464031219482, 0.006160384178161621, 0.006004735946655273, 0.00613100814819336, 0.006019711971282959, 0.006127168178558349, 0.006023263931274414, 0.00611737585067749, 0.0060215358734130855, 0.006174592018127442, 0.00605401611328125, 0.006164480209350586, 0.006073887825012207, 0.006092735767364502, 0.006156832218170166, 0.006086656093597412, 0.00615180778503418, 0.006069952011108398, 0.0061344318389892575, 0.0060759038925170894, 0.0061456642150878905, 0.006052063941955566, 0.006126048088073731, 
0.006125792026519776, 0.006155935764312745, 0.006029664039611816, 0.006123519897460937, 0.006035295963287354, 0.006119103908538819, 0.006033599853515625, 0.006162975788116455, 0.006112576007843018, 0.006101952075958252, 0.006110239982604981, 0.006050879955291748, 0.006059679985046387, 0.006052063941955566, 0.006082304000854492, 0.006062111854553223, 0.006096896171569824, 0.006043680191040039, 0.006084512233734131, 0.0060349760055541995, 0.0061066880226135255, 0.00604259204864502, 0.00608892822265625, 0.00601475191116333, 0.006148096084594727, 0.006026303768157959, 0.006179647922515869, 0.006037792205810547, 0.00613318395614624, 0.006080383777618408, 0.006130208015441894, 0.006071296215057373, 0.006104063987731933, 0.006086368083953858, 0.0060524802207946775, 0.006118624210357666, 0.006047935962677002, 0.006119135856628418, 0.006076576232910157, 0.0061270718574523925, 0.006050271987915039, 0.00609119987487793, 0.006049088001251221, 0.006125792026519776, 0.00603334379196167, 0.006131648063659668, 0.006103648185729981, 0.006127615928649902, 0.006013216018676758, 0.006116415977478027, 0.006001632213592529, 0.006108736038208008, 0.00667855978012085, 0.006311488151550293, 0.006097439765930176, 0.006041088104248047, 0.006105120182037353, 0.006038303852081299, 0.006119103908538819, 0.006055903911590576, 0.006110559940338134, 0.0060440640449523925, 0.0061504321098327635, 0.006021471977233886, 0.0061231679916381836, 0.006021408081054688, 0.006184671878814697, 0.006075647830963135, 0.006089375972747803, 0.006014688014984131, 0.006020736217498779, 0.006488416194915771, 0.006039360046386719, 0.006076288223266601, 0.006027743816375732, 0.0061047677993774415, 0.006067647933959961, 0.006099232196807861, 0.006040160179138184, 0.006558879852294922, 0.0060342721939086914, 0.006133600234985352, 0.006061503887176514, 0.0061561279296875, 0.006050687789916992, 0.006086656093597412, 0.006092512130737305, 0.006037792205810547, 0.0061171197891235355, 0.006037759780883789, 0.006086656093597412, 0.006047808170318604, 0.006115263938903809, 0.006170623779296875, 0.006129280090332031, 0.00605452823638916, 0.00613267183303833, 0.00609164810180664, 0.006318016052246094, 0.006119423866271972, 0.006426623821258545, 0.006148096084594727, 0.006838272094726563, 0.007007552146911621, 0.006165440082550049, 0.006115071773529053, 0.0061972479820251464, 0.006086656093597412, 0.006210912227630616, 0.006158368110656738, 0.0061569280624389645, 0.006038911819458008, 0.006229695796966553, 0.006038047790527344, 0.006156703948974609, 0.005992447853088379, 0.006135807991027832, 0.006017024040222168, 0.006116799831390381, 0.006009407997131348, 0.006107423782348633, 0.006047455787658691, 0.006109407901763916, 0.006102208137512207, 0.006099487781524658, 0.006108543872833252, 0.006193664073944092, 0.00622815990447998, 0.006105088233947754, 0.006107135772705078, 0.006057151794433594, 0.0062568001747131344, 0.0060340800285339355, 0.006021120071411133, 0.006011903762817383, 0.007173376083374023, 0.006127359867095947, 0.006037504196166992, 0.006119423866271972, 0.006117663860321045, 0.006205152034759522, 0.006049312114715576, 0.006117856025695801, 0.006013984203338623, 0.006106080055236817, 0.006028351783752442, 0.006089663982391357, 0.00603872013092041, 0.006137887954711914, 0.006065087795257568, 0.0066722240447998045, 0.006217728137969971, 0.006082560062408447, 0.006161695957183838, 0.006058623790740967, 0.006146143913269043, 0.0060210561752319335, 0.006141248226165771, 0.0061138558387756345, 0.006144192218780518, 0.00603334379196167, 
0.006131072044372559, 0.006087488174438477, 0.0061049599647521975, 0.006085760116577148, 0.0060588159561157226, 0.006111392021179199, 0.00608019208908081, 0.006350080013275147, 0.006062687873840332, 0.006127200126647949, 0.006054751873016358, 0.006128960132598877, 0.006023808002471924, 0.006107135772705078, 0.0060124797821044925, 0.006106783866882324, 0.006004992008209228, 0.006121471881866455, 0.005995039939880371, 0.006115488052368164, 0.006012415885925293, 0.006107295989990234, 0.006008863925933838, 0.00611568021774292, 0.006035071849822998, 0.006090240001678467, 0.00610700798034668, 0.006068511962890625, 0.006087007999420166, 0.006019264221191406, 0.0060928001403808595, 0.00603545618057251, 0.006069695949554444, 0.006040063858032227, 0.00606601619720459, 0.005933184146881104, 0.006119423866271972, 0.006051839828491211, 0.006092127799987793, 0.00599948787689209, 0.006075744152069092, 0.006013376235961914, 0.006072480201721192, 0.006005631923675537, 0.006136799812316894, 0.0060128321647644044, 0.00608787202835083, 0.006022240161895752, 0.006121471881866455, 0.005998400211334229, 0.006103104114532471, 0.006001984119415283, 0.006133376121520996, 0.0060382719039917, 0.006148128032684326, 0.006074592113494873, 0.0061040959358215335, 0.006079455852508545, 0.006061151981353759, 0.006097663879394531, 0.006060192108154297, 0.006096704006195068, 0.006105279922485352, 0.006113599777221679, 0.006051519870758057, 0.006128640174865723, 0.006062431812286377, 0.00610595178604126, 0.006110688209533691, 0.006142591953277588, 0.006022880077362061, 0.006551136016845703, 0.006021312236785889, 0.006093023777008056, 0.006067359924316406, 0.0060711359977722165, 0.006079936027526855, 0.006047679901123047, 0.006080671787261963, 0.006055935859680176, 0.006162911891937256, 0.006080512046813965, 0.006083936214447022, 0.006023839950561523, 0.006110335826873779, 0.0060424962043762204, 0.0060661759376525876, 0.006019008159637451, 0.006150207996368408, 0.005992447853088379, 0.006115327835083008, 0.006002143859863281, 0.00610316801071167, 0.006015168190002441, 0.006136256217956543, 0.006014848232269287, 0.0061131839752197265, 0.006017055988311768, 0.005997344017028809, 0.0060234560966491695, 0.006090688228607178, 0.005996416091918946, 0.0061415362358093264, 0.006029344081878662, 0.00613372802734375, 0.00612992000579834, 0.0060356159210205075, 0.006096159934997559, 0.006052576065063477, 0.006111328125, 0.006036992073059082, 0.006093215942382812, 0.006047264099121093, 0.006140384197235107, 0.006023168087005615, 0.006067327976226807, 0.006038080215454102, 0.006105760097503662, 0.006034207820892334, 0.006098879814147949, 0.006016223907470703, 0.006147136211395263, 0.006036128044128418, 0.006211584091186524, 0.0060430078506469725, 0.006195839881896973, 0.0061356801986694335, 0.0061669120788574215, 0.0061212158203125, 0.006118912220001221, 0.006123775959014892, 0.006088736057281494, 0.006150591850280762, 0.006049824237823486, 0.006102784156799317, 0.006076032161712646, 0.006104479789733887, 0.006013919830322265, 0.006126944065093994, 0.006024064064025879, 0.006124991893768311, 0.006021312236785889, 0.006147903919219971, 0.006039008140563965, 0.006132607936859131, 0.006016223907470703, 0.006140927791595459, 0.006090528011322022, 0.006122719764709472, 0.0060854082107543945, 0.006052127838134766, 0.006100480079650879, 0.006049119949340821, 0.006093696117401123, 0.006042943954467773, 0.006103519916534424, 0.006037727832794189, 0.006092959880828857, 0.006022751808166504, 0.00608460807800293, 
0.006098847866058349]",tokens/s,163.84798211096114,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: 
Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,814.20288,538.836992,0.0,136.31488,130.303488,s,1,7.63646923828125,7.63646923828125,0.0,7.63646923828125,7.63646923828125,7.63646923828125,7.63646923828125,[7.63646923828125],,kWh,1.3435531637492204e-05,1.4748280618082226e-06,3.4375027500033184e-06,1.8347862449303745e-05,,MB,1285.30432,616.431616,0.0,199.22944,174.868992,s,19,0.22155593395233153,0.011660838629070079,0.00015955394109839742,0.011625215530395508,0.011856383895874024,0.011876995277404784,0.011931392707824708,"[0.01184118366241455, 0.011625215530395508, 0.011628992080688476, 0.011605279922485352, 0.011869440078735352, 0.011753439903259277, 0.011944992065429687, 0.0115894718170166, 0.011819135665893555, 0.011613504409790039, 0.011830623626708985, 0.01151968002319336, 0.011720959663391112, 0.011528287887573242, 0.011853119850158692, 0.011445664405822753, 0.011470239639282226, 0.011429183959960938, 0.011467519760131837]",tokens/s,21953.8240896157,kWh,3.355739868725958e-07,3.700785848307786e-08,1.686687173103406e-07,5.412505626660143e-07,tokens/kWh,472978723.08720005,MB,1318.629376,629.014528,0.0,211.812352,174.871552,s,19,10.2297783203125,0.5384093852796054,0.008846364402158519,0.5383507690429687,0.5456586669921875,0.5498354980468749,0.5601568457031251,"[0.5484019775390625, 0.5382801513671875, 0.5361609497070312, 0.5383507690429687, 0.5449728393554687, 0.540945556640625, 0.5627371826171875, 0.5342178344726562, 0.5384573364257812, 0.5369503784179688, 0.544735107421875, 0.5366571044921875, 0.5440146484375, 0.540578125, 0.5384151000976563, 0.5245339965820313, 0.5232603149414062, 0.52432861328125, 0.5337803344726563]",tokens/s,117.01133323907979,kWh,1.5454832351066345e-05,1.7043991639598486e-06,5.5412773711106555e-06,2.270050888613685e-05,tokens/kWh,2775268.180814835,,s,1197,10.21945331096647,0.00853755497992188,0.0003395791310526462,0.008482144355773926,0.00879378547668457,0.008940006065368654,0.009569785537719719,"[0.008351743698120117, 0.008641504287719726, 0.008648703575134278, 0.008827936172485352, 0.00862889575958252, 0.008481087684631348, 0.008476672172546386, 0.008499199867248536, 0.008435711860656739, 0.008413536071777344, 0.008429408073425293, 0.008380224227905273, 0.008914943695068359, 0.008500896453857421, 0.009820511817932129, 0.00885529613494873, 0.00854860782623291, 0.008605695724487305, 0.008591360092163085, 0.008677375793457032, 0.008705632209777832, 0.00869007968902588, 0.008728575706481934, 0.008664928436279296, 0.008783871650695801, 0.008832287788391114, 0.00890777587890625, 0.009018752098083496, 0.008946175575256348, 0.009016736030578613, 0.00887065601348877, 0.009072480201721191, 0.008998847961425782, 0.009011039733886719, 0.008909088134765625, 0.008973567962646485, 0.00884115219116211, 0.008772352218627929, 0.008953856468200684, 0.008941184043884278, 0.00892147159576416, 0.008849696159362793, 0.008871647834777832, 
0.00879635238647461, 0.008789823532104493, 0.008787167549133302, 0.008758048057556152, 0.008841216087341308, 0.008750335693359375, 0.00880620765686035, 0.00858124828338623, 0.008464256286621093, 0.00865516757965088, 0.008473407745361329, 0.00835523223876953, 0.008329312324523925, 0.00834182357788086, 0.008378432273864747, 0.008400192260742187, 0.00829641628265381, 0.008384896278381347, 0.008337696075439454, 0.008362048149108887, 0.008446080207824707, 0.008667327880859376, 0.008597184181213378, 0.008496352195739746, 0.008522527694702148, 0.0086179838180542, 0.008649920463562012, 0.008834943771362305, 0.008747584342956543, 0.008742464065551759, 0.008710592269897462, 0.00865875244140625, 0.008683072090148927, 0.0087010555267334, 0.008703392028808593, 0.008784288406372071, 0.00868563175201416, 0.008486911773681641, 0.008390656471252441, 0.008337663650512696, 0.008400768280029296, 0.008349408149719238, 0.00829155158996582, 0.008307776451110839, 0.008353728294372559, 0.008380352020263672, 0.008374272346496582, 0.00832921600341797, 0.008308735847473145, 0.008306591987609864, 0.008280159950256348, 0.00838150405883789, 0.008360896110534668, 0.008339455604553223, 0.008298175811767578, 0.008319295883178712, 0.008278016090393067, 0.00870406436920166, 0.008525568008422852, 0.0086430082321167, 0.008736512184143067, 0.008543744087219238, 0.008460800170898437, 0.00844927978515625, 0.008415424346923828, 0.008424160003662109, 0.008367967605590821, 0.00834175968170166, 0.008359040260314941, 0.008387200355529785, 0.00849897575378418, 0.008597663879394531, 0.008456031799316406, 0.008642751693725586, 0.00880835247039795, 0.009143551826477051, 0.008717120170593262, 0.008746175765991212, 0.008604543685913086, 0.00947920036315918, 0.008662079811096192, 0.00848044776916504, 0.008448224067687988, 0.008291616439819335, 0.008635007858276367, 0.008501248359680176, 0.008377728462219238, 0.008710847854614259, 0.00848038387298584, 0.008368448257446289, 0.008415231704711914, 0.008345600128173827, 0.00841113567352295, 0.008427519798278809, 0.00858518409729004, 0.00862825584411621, 0.008603008270263671, 0.008568608283996582, 0.008453280448913573, 0.008550080299377441, 0.008373503684997558, 0.008406047821044921, 0.008364928245544434, 0.008331680297851562, 0.008452544212341309, 0.008405152320861817, 0.008373727798461914, 0.008417216300964356, 0.008415679931640625, 0.00841318416595459, 0.008347583770751952, 0.008476287841796874, 0.008346367835998535, 0.008427231788635255, 0.008549471855163575, 0.008676095962524414, 0.008985983848571777, 0.008786848068237305, 0.00868131160736084, 0.008623456001281738, 0.008630592346191407, 0.008541983604431152, 0.008610655784606933, 0.00843337631225586, 0.008566368103027343, 0.008446368217468261, 0.008410752296447754, 0.008432160377502442, 0.008559519767761231, 0.008600704193115235, 0.008654656410217286, 0.008638496398925782, 0.008666496276855468, 0.008575584411621094, 0.008642560005187988, 0.008527872085571288, 0.00845849609375, 0.008479840278625488, 0.008503007888793945, 0.008455103874206542, 0.008417280197143554, 0.008396127700805663, 0.00845686435699463, 0.008435680389404296, 0.008409119606018067, 0.008501248359680176, 0.008240063667297364, 0.008352095603942872, 0.008385824203491212, 0.008382847785949707, 0.008384351730346679, 0.00842563247680664, 0.008617440223693847, 0.00879635238647461, 0.00869820785522461, 0.00863871955871582, 0.008716032028198243, 0.008591360092163085, 0.008591327667236328, 0.008591391563415528, 0.008601216316223145, 0.008641247749328613, 0.008637696266174317, 
0.008483231544494629, 0.009158687591552734, 0.008638431549072265, 0.008691712379455567, 0.008651007652282714, 0.008705087661743164, 0.008499903678894043, 0.00840617561340332, 0.00856924819946289, 0.008452544212341309, 0.008400896072387695, 0.008272128105163574, 0.00828166389465332, 0.008316320419311523, 0.008292575836181641, 0.008284031867980957, 0.008397855758666993, 0.008395456314086914, 0.009116640090942383, 0.008802304267883301, 0.008366304397583008, 0.008353023529052735, 0.00866102409362793, 0.008354111671447754, 0.008418751716613769, 0.008431455612182618, 0.008450976371765137, 0.009041791915893554, 0.008624128341674805, 0.008648832321166993, 0.008531519889831543, 0.008536767959594727, 0.00871769618988037, 0.0084966402053833, 0.008393600463867187, 0.008393792152404786, 0.008381183624267579, 0.008431903839111328, 0.008397855758666993, 0.008446880340576172, 0.008483967781066894, 0.008474559783935546, 0.008516511917114257, 0.008639871597290039, 0.008727295875549317, 0.008738688468933106, 0.008482848167419434, 0.00869974422454834, 0.00862399959564209, 0.008589568138122559, 0.008570879936218261, 0.008605152130126954, 0.008461024284362793, 0.00844985580444336, 0.008421376228332519, 0.008427359580993653, 0.008480704307556153, 0.008524224281311036, 0.008513312339782714, 0.008587136268615722, 0.008668831825256348, 0.008944095611572266, 0.008939711570739747, 0.008703392028808593, 0.008619647979736328, 0.008659744262695312, 0.008693504333496093, 0.008595071792602538, 0.008565376281738282, 0.00851910400390625, 0.00852336025238037, 0.008559328079223633, 0.008523903846740722, 0.00851676845550537, 0.008561951637268066, 0.008660703659057618, 0.008560480117797852, 0.008482144355773926, 0.008612640380859375, 0.008558239936828613, 0.008504799842834473, 0.00853279972076416, 0.008635904312133789, 0.00873904037475586, 0.009004799842834473, 0.008876640319824219, 0.00897555160522461, 0.008870719909667969, 0.00899891185760498, 0.008728447914123536, 0.0085731201171875, 0.00851091194152832, 0.008562848091125489, 0.009769311904907227, 0.008640512466430664, 0.008937472343444825, 0.008802111625671388, 0.008812735557556153, 0.008586655616760254, 0.008526016235351562, 0.008563103675842286, 0.00851353645324707, 0.008498175621032715, 0.008440768241882324, 0.008452032089233398, 0.00846454429626465, 0.008844672203063965, 0.008437472343444824, 0.008713088035583495, 0.008151167869567872, 0.008287487983703613, 0.008450431823730469, 0.008382399559020997, 0.00835206413269043, 0.008361984252929687, 0.008355839729309082, 0.00835923194885254, 0.008383168220520019, 0.008357888221740722, 0.008550399780273438, 0.008464320182800293, 0.008536128044128418, 0.008738816261291504, 0.008773056030273438, 0.008698431968688965, 0.00868678379058838, 0.008705056190490723, 0.008681247711181641, 0.0087010555267334, 0.008614656448364257, 0.008666848182678223, 0.008901023864746093, 0.009037823677062988, 0.009186464309692383, 0.008671775817871093, 0.008593952178955078, 0.008610591888427735, 0.008557567596435547, 0.008451680183410644, 0.008490592002868653, 0.00868563175201416, 0.008477536201477051, 0.00844486427307129, 0.00843785572052002, 0.008620927810668946, 0.008509696006774902, 0.008603679656982421, 0.008477631568908692, 0.008436256408691407, 0.00840339183807373, 0.008384320259094238, 0.008409088134765624, 0.008420384407043457, 0.008416223526000976, 0.008392959594726563, 0.008605152130126954, 0.008400863647460937, 0.008345919609069824, 0.008390751838684082, 0.008461759567260743, 0.008586943626403809, 0.008760095596313477, 0.008643967628479004, 
0.008665727615356446, 0.008572192192077636, 0.00860643196105957, 0.008799936294555664, 0.008769856452941895, 0.008822784423828126, 0.008976575851440429, 0.009019264221191406, 0.00901318359375, 0.008644736289978028, 0.008639167785644532, 0.008615200042724609, 0.008618720054626465, 0.00870751953125, 0.008599712371826171, 0.0086278076171875, 0.008663392066955567, 0.008591615676879882, 0.008604000091552735, 0.008766816139221192, 0.00884995174407959, 0.008806591987609863, 0.008776639938354492, 0.008714367866516113, 0.008685407638549804, 0.008956671714782715, 0.008586624145507812, 0.008565376281738282, 0.008581119537353516, 0.008622048377990722, 0.008579423904418945, 0.010622655868530274, 0.012289536476135255, 0.01336575984954834, 0.008740863800048827, 0.00868556785583496, 0.00854537582397461, 0.008590432167053222, 0.008400704383850098, 0.008343263626098633, 0.00834166431427002, 0.008413311958312988, 0.008293472290039062, 0.008303520202636718, 0.008482815742492676, 0.00864668846130371, 0.00866921615600586, 0.008877216339111328, 0.00867136001586914, 0.008714559555053712, 0.008849760055541993, 0.008678815841674804, 0.008652576446533203, 0.008652671813964844, 0.008657055854797363, 0.00902239990234375, 0.009902112007141113, 0.009014911651611328, 0.012383808135986328, 0.01116425609588623, 0.00935324764251709, 0.008588895797729493, 0.0085316162109375, 0.008532832145690918, 0.008621120452880859, 0.008756064414978027, 0.00859062385559082, 0.00854911994934082, 0.008591584205627441, 0.008400863647460937, 0.008396575927734375, 0.00852079963684082, 0.00812224006652832, 0.008376031875610352, 0.008375712394714355, 0.00839577579498291, 0.008488960266113281, 0.00841305637359619, 0.008382880210876464, 0.00838047981262207, 0.008570528030395508, 0.00848185634613037, 0.008546943664550782, 0.00864902400970459, 0.008893888473510742, 0.008497216224670411, 0.00850175952911377, 0.008501248359680176, 0.008466015815734864, 0.008605183601379395, 0.008649632453918457, 0.0086179838180542, 0.008620160102844238, 0.008406815528869628, 0.008429439544677734, 0.008296671867370606, 0.008285504341125488, 0.008358592033386231, 0.008352928161621094, 0.008325663566589356, 0.008427455902099609, 0.00848844814300537, 0.00855123233795166, 0.008513600349426269, 0.008418463706970214, 0.008348511695861817, 0.008300512313842774, 0.008271967887878418, 0.008311967849731445, 0.008290944099426269, 0.008492287635803222, 0.008401887893676758, 0.008301759719848633, 0.008309503555297852, 0.0083221435546875, 0.00831328010559082, 0.008322591781616212, 0.008348608016967773, 0.008376319885253907, 0.0083755521774292, 0.008581472396850585, 0.008868255615234376, 0.008976575851440429, 0.008697471618652343, 0.008568127632141114, 0.00869215965270996, 0.008714688301086427, 0.008470463752746581, 0.00852336025238037, 0.008446240425109863, 0.008458016395568847, 0.008501664161682129, 0.00850153636932373, 0.008511199951171874, 0.00864412784576416, 0.008300095558166505, 0.008442303657531739, 0.008564736366271973, 0.009029631614685058, 0.01102883243560791, 0.009320096015930176, 0.008751104354858399, 0.008496255874633788, 0.008485631942749023, 0.00837235164642334, 0.008361984252929687, 0.008370431900024414, 0.008316096305847167, 0.00839737606048584, 0.008405216217041015, 0.008308768272399902, 0.008349568367004394, 0.008288127899169922, 0.008383808135986328, 0.008286496162414551, 0.008249407768249511, 0.00825500774383545, 0.008395872116088866, 0.008482815742492676, 0.008550111770629882, 0.008619296073913573, 0.008532928466796876, 0.008437760353088379, 
0.008374048233032226, 0.008335359573364258, 0.008400639533996583, 0.008503135681152343, 0.008507807731628418, 0.008316543579101563, 0.008429951667785644, 0.008340895652770996, 0.008354656219482422, 0.008375519752502442, 0.008563232421875, 0.008560704231262208, 0.008508735656738282, 0.008490688323974609, 0.00860870361328125, 0.008720576286315918, 0.00878163242340088, 0.008785920143127441, 0.008712191581726075, 0.00859545612335205, 0.008662272453308105, 0.00847539234161377, 0.008427519798278809, 0.008488639831542969, 0.008378399848937988, 0.008378656387329101, 0.008332927703857422, 0.008331071853637696, 0.008507967948913574, 0.008478240013122558, 0.00855724811553955, 0.008846879959106446, 0.008601792335510254, 0.008703807830810546, 0.008618495941162109, 0.009083552360534668, 0.0092359037399292, 0.00981164836883545, 0.008668031692504883, 0.008509183883666992, 0.008697248458862305, 0.008336000442504883, 0.008361791610717774, 0.008507328033447266, 0.008383135795593261, 0.0083373441696167, 0.008368032455444336, 0.008398816108703614, 0.008343040466308594, 0.008336159706115722, 0.008358816146850585, 0.008397631645202637, 0.008370176315307617, 0.008347040176391601, 0.008398655891418458, 0.008673855781555176, 0.008658880233764649, 0.00877344036102295, 0.008824447631835937, 0.008717280387878418, 0.008646528244018554, 0.008607647895812988, 0.008679360389709472, 0.008719712257385255, 0.00895468807220459, 0.008697855949401855, 0.00851375961303711, 0.008560416221618652, 0.008447999954223634, 0.008363903999328614, 0.008342816352844238, 0.008321887969970703, 0.00830361557006836, 0.008399456024169923, 0.008388640403747559, 0.00831116771697998, 0.008326335906982422, 0.008426303863525391, 0.008381695747375488, 0.008386303901672363, 0.008329248428344727, 0.008311776161193848, 0.008302687644958496, 0.008289631843566894, 0.008303168296813965, 0.00825705623626709, 0.008259712219238281, 0.00827836799621582, 0.008230912208557128, 0.00837769603729248, 0.008436384201049805, 0.008654848098754882, 0.00850928020477295, 0.008468640327453614, 0.00837337589263916, 0.009122015953063965, 0.008479328155517578, 0.008634271621704102, 0.008623807907104493, 0.00861622428894043, 0.008744799613952636, 0.008894335746765137, 0.00892950439453125, 0.008734175682067871, 0.008542112350463867, 0.0084136962890625, 0.008406944274902343, 0.008382464408874512, 0.008376447677612305, 0.008356032371520996, 0.00835142421722412, 0.008353631973266601, 0.008345215797424317, 0.008475168228149414, 0.008441856384277344, 0.008365280151367187, 0.008321215629577636, 0.008409855842590332, 0.008931424140930176, 0.009561471939086914, 0.00905020809173584, 0.008839455604553223, 0.008654848098754882, 0.008597503662109375, 0.008648799896240235, 0.008629599571228027, 0.008667712211608886, 0.008628543853759765, 0.008668671607971192, 0.008700096130371093, 0.008584223747253417, 0.00854092788696289, 0.008573151588439942, 0.008646944046020507, 0.008741632461547851, 0.008673664093017577, 0.008672127723693848, 0.008672991752624511, 0.008508671760559082, 0.008606464385986327, 0.008439807891845704, 0.008512767791748047, 0.008659711837768555, 0.00882688045501709, 0.008817983627319335, 0.008838175773620605, 0.008787615776062012, 0.009903743743896484, 0.008712448120117188, 0.008560768127441406, 0.008572928428649903, 0.008914943695068359, 0.008529919624328614, 0.008962047576904298, 0.00844495964050293, 0.008422368049621582, 0.008670304298400879, 0.008518560409545899, 0.008377663612365723, 0.00848147201538086, 0.00838771152496338, 0.008177663803100586, 0.008347040176391601, 
0.008327775955200196, 0.008345600128173827, 0.008441632270812988, 0.008574527740478516, 0.008743807792663575, 0.008752927780151368, 0.008566783905029298, 0.008531968116760253, 0.008385984420776368, 0.008538944244384766, 0.008429311752319335, 0.008352992057800292, 0.008382816314697266, 0.008368576049804687, 0.008425472259521484, 0.008295488357543946, 0.008364768028259277, 0.00848630428314209, 0.008636832237243652, 0.008702367782592773, 0.008658368110656739, 0.008757472038269043, 0.008595840454101562, 0.008673215866088867, 0.008728351593017578, 0.008653056144714356, 0.008560640335083008, 0.008485183715820313, 0.008449728012084962, 0.008415200233459473, 0.008511520385742188, 0.008332384109497071, 0.008327232360839844, 0.008320927619934082, 0.0083056001663208, 0.008326560020446778, 0.008595871925354003, 0.008528063774108887, 0.008348768234252929, 0.00836678409576416, 0.008548576354980469, 0.00882585620880127, 0.008896639823913574, 0.008759615898132324, 0.008567359924316406, 0.008521727561950684, 0.008479999542236329, 0.00847696018218994, 0.00844816017150879, 0.008481216430664063, 0.008447775840759278, 0.008374367713928223, 0.00838163185119629, 0.008437888145446777, 0.008602304458618164, 0.008465951919555664, 0.008583935737609863, 0.008654656410217286, 0.008767680168151855, 0.008708064079284667, 0.008560383796691894, 0.008354847908020019, 0.008645536422729493, 0.008722304344177246, 0.008636672019958496, 0.008844511985778809, 0.008872447967529297, 0.008793439865112305, 0.008647551536560058, 0.008652735710144042, 0.008515423774719238, 0.00860364818572998, 0.008437439918518067, 0.008440671920776367, 0.008502079963684081, 0.008419584274291993, 0.008587776184082031, 0.008530048370361329, 0.008646656036376953, 0.008714240074157715, 0.008784223556518554, 0.008667872428894044, 0.0085349760055542, 0.008478719711303711, 0.008452095985412598, 0.008529919624328614, 0.008400416374206543, 0.008487392425537109, 0.00863644790649414, 0.008466400146484376, 0.00841055965423584, 0.00838918399810791, 0.008544256210327148, 0.008648768424987793, 0.008654720306396484, 0.008794303894042968, 0.008707967758178711, 0.008582464218139648, 0.008601984024047852, 0.008683839797973632, 0.008546303749084473, 0.008509440422058106, 0.008472895622253418, 0.00854297637939453, 0.008577343940734863, 0.008624608039855958, 0.008833279609680176, 0.009164511680603027, 0.008829119682312012, 0.008810272216796875, 0.008816864013671875, 0.008874015808105469, 0.008656864166259765, 0.008668831825256348, 0.008638879776000977, 0.008531071662902833, 0.009024319648742675, 0.008638463973999023, 0.008617856025695802, 0.008867039680480956, 0.008643744468688964, 0.008455167770385743, 0.008653568267822266, 0.008474623680114746, 0.008098431587219238, 0.008298463821411133, 0.00830668830871582, 0.008309856414794922, 0.008399423599243163, 0.008347999572753907, 0.008345600128173827, 0.008492256164550781, 0.008698847770690918, 0.00886355209350586, 0.00868115234375, 0.008646976470947266, 0.008630528450012206, 0.008574848175048827, 0.008585087776184082, 0.008654848098754882, 0.008654848098754882, 0.008691712379455567, 0.008560928344726562, 0.008598527908325194, 0.008626912117004394, 0.008549440383911132, 0.008504256248474121, 0.008412863731384277, 0.00869222354888916, 0.008677184104919433, 0.008601696014404296, 0.0086746244430542, 0.008757823944091797, 0.008615967750549316, 0.0086179838180542, 0.008846367835998535, 0.008909791946411132, 0.008736767768859864, 0.008725791931152345, 0.008573792457580566, 0.008543135643005371, 0.008545248031616211, 
0.008626175880432128, 0.008585503578186035, 0.008568032264709473, 0.00869222354888916, 0.00858233642578125, 0.008557375907897949, 0.00868556785583496, 0.008740863800048827, 0.0087326717376709, 0.008633919715881348, 0.008663647651672364, 0.008539999961853027, 0.008493184089660644, 0.008433216094970703, 0.00846617603302002, 0.008415583610534669, 0.008468000411987305, 0.008410112380981445, 0.008465760231018067, 0.008423775672912597, 0.008477791786193848, 0.008567168235778808, 0.008507935523986817, 0.008540160179138183, 0.008665184020996093, 0.008694111824035644, 0.008742848396301269, 0.008662752151489258, 0.00860979175567627, 0.00861184024810791, 0.00859545612335205, 0.008512800216674804, 0.00854047966003418, 0.008523327827453613, 0.00849392032623291, 0.008458208084106446, 0.008530976295471192, 0.00837939167022705, 0.008472000122070313, 0.008637311935424805, 0.008562368392944336, 0.008741151809692383, 0.00873852825164795, 0.009278911590576172, 0.008534144401550293, 0.008485312461853028, 0.008449888229370118, 0.008443648338317872, 0.008360639572143554, 0.008400128364562989, 0.00877184009552002, 0.008422944068908691, 0.008372927665710449, 0.00863167953491211, 0.00834771156311035, 0.008473024368286133, 0.008996992111206055, 0.00856230354309082, 0.008746975898742676, 0.00911302375793457, 0.008578335762023926, 0.008557855606079101, 0.008548768043518066, 0.00860086441040039, 0.008647263526916504, 0.008393983840942382, 0.008323391914367676, 0.008300415992736817, 0.00831497573852539, 0.008346207618713379, 0.00835193634033203, 0.008447808265686035, 0.008552448272705078, 0.009043295860290528, 0.00868553638458252, 0.008794816017150878, 0.008605695724487305, 0.008481023788452148, 0.008419072151184082, 0.008409119606018067, 0.008296256065368652, 0.0083306884765625, 0.00830128002166748, 0.008437760353088379, 0.00840726375579834, 0.00825551986694336, 0.008234432220458984, 0.008361887931823731, 0.00797273588180542, 0.00822492790222168, 0.008237024307250977, 0.008292351722717285, 0.008272159576416015, 0.00824454402923584, 0.008333951950073241, 0.008247072219848633, 0.008294624328613281, 0.008252767562866211, 0.008343423843383789, 0.008304863929748536, 0.008280415534973145, 0.00828006362915039, 0.00829849624633789, 0.008300352096557616, 0.00827616024017334, 0.00828166389465332, 0.008241600036621093, 0.008421119689941406, 0.008278271675109864, 0.008234272003173828, 0.0083854398727417, 0.008356703758239745, 0.008253919601440429, 0.008253952026367188, 0.008226816177368163, 0.008239104270935058, 0.008257696151733399, 0.00823855972290039, 0.008229248046875, 0.008227904319763184, 0.0084301118850708, 0.008359456062316894, 0.00937657642364502, 0.008341440200805664, 0.008296575546264649, 0.008333375930786133, 0.008324095726013184, 0.008307647705078126, 0.008306015968322754, 0.008357760429382324, 0.008350496292114257, 0.00829849624633789, 0.008331263542175293, 0.008309087753295898, 0.008314528465270997, 0.008314720153808594, 0.0082957124710083, 0.008348575592041016, 0.008294367790222168, 0.008322367668151856, 0.00827462387084961, 0.008253439903259278, 0.008289728164672851, 0.008288479804992676, 0.00830089569091797, 0.008278112411499023, 0.008507295608520508, 0.008384415626525879, 0.00847702407836914, 0.008334815979003906, 0.008465951919555664, 0.008056991577148438, 0.008361887931823731, 0.008281920433044434, 0.008292511940002441, 0.008276127815246581, 0.008495103836059571, 0.008293567657470703, 0.008275808334350586, 0.008254079818725587, 0.008269791603088378, 0.008284223556518555, 0.008327679634094238, 
0.008272000312805176, 0.008316191673278808, 0.008265407562255859, 0.008479104042053223, 0.008385120391845703, 0.008273056030273437, 0.0083438720703125, 0.008240480422973633, 0.008207584381103515, 0.00823094367980957, 0.008230208396911621, 0.008229248046875, 0.008218751907348634, 0.008619487762451172, 0.00827228832244873, 0.008271807670593262, 0.00828217601776123, 0.008232640266418458, 0.008405311584472656, 0.008242591857910157, 0.008259391784667969, 0.008257663726806641, 0.008766112327575683, 0.00825331211090088, 0.00825494384765625, 0.008282431602478027, 0.008278176307678223, 0.008278271675109864, 0.008228256225585937, 0.008243712425231933, 0.008361215591430665, 0.008264479637145996, 0.008259103775024415, 0.008278271675109864, 0.008279616355895997, 0.008346272468566894, 0.008445952415466309, 0.008285375595092773, 0.008253952026367188, 0.008243328094482421, 0.008237600326538086, 0.00826159954071045, 0.008363712310791015, 0.008264863967895508, 0.008252256393432618, 0.008279775619506836, 0.008243231773376465, 0.008445823669433594, 0.008266112327575684, 0.008260640144348144, 0.008300767898559571, 0.007989088058471679, 0.008210623741149902, 0.008333663940429687, 0.008262911796569824, 0.008290719985961915, 0.008277503967285157, 0.008242015838623046, 0.008295968055725097, 0.008323200225830078, 0.008295583724975585, 0.008260448455810547, 0.008382271766662598, 0.008323455810546875, 0.008282079696655273, 0.008441535949707032, 0.008338687896728515, 0.008309663772583008, 0.008281408309936523, 0.00826358413696289, 0.00829520034790039, 0.00828006362915039, 0.008265088081359864, 0.00829417610168457, 0.008278271675109864, 0.008258144378662109, 0.008302816390991211, 0.00838633632659912, 0.008286208152770995, 0.008681471824645997, 0.008357888221740722, 0.00834716796875, 0.00832969570159912, 0.008341695785522461, 0.008313887596130371, 0.008257344245910644, 0.008237631797790527, 0.008228544235229493, 0.008230624198913574, 0.008430591583251953, 0.00832431983947754, 0.008251775741577148, 0.008472991943359374, 0.00832636833190918, 0.008391391754150391, 0.008443936347961426, 0.008258912086486816, 0.00824988842010498, 0.008482975959777832, 0.008275967597961426, 0.00828991985321045, 0.008482751846313476, 0.008269280433654784, 0.008317791938781738, 0.008277119636535645, 0.008407520294189454, 0.008270591735839843, 0.008308511734008789, 0.008341407775878907, 0.00831702423095703, 0.008294400215148925, 0.008376319885253907, 0.00825046443939209, 0.008358816146850585, 0.007969791889190675, 0.008220255851745606, 0.008381088256835938, 0.008193632125854493, 0.008214559555053711, 0.008243328094482421, 0.008443455696105957, 0.008558719635009766, 0.011220640182495117, 0.008382847785949707, 0.008724767684936524, 0.008376319885253907, 0.00826313591003418, 0.008774432182312011, 0.008315936088562011, 0.008222975730895997, 0.008503487586975098, 0.008242815971374511, 0.00824118423461914, 0.00826972770690918, 0.008250304222106934, 0.008293536186218262, 0.00824345588684082, 0.0085797758102417, 0.008678560256958007, 0.009235039710998535, 0.009101119995117188, 0.008465696334838867, 0.008379008293151856, 0.00832710361480713, 0.008388095855712891, 0.008372735977172852, 0.008338944435119629, 0.008371264457702636, 0.008447744369506837, 0.008317119598388672, 0.00826460838317871, 0.009308352470397949, 0.008323776245117187, 0.008259136199951172, 0.008243647575378418, 0.008263680458068847, 0.008268896102905274, 0.008276191711425782, 0.008300800323486329, 0.008410880088806152, 0.008331968307495117, 0.008289728164672851, 0.008304896354675293, 
0.008339648246765136, 0.008521856307983399, 0.008553631782531738, 0.008553631782531738, 0.008380096435546875, 0.00836201572418213, 0.008357855796813965, 0.008330752372741699, 0.0083023681640625, 0.00833407974243164, 0.008840895652770997, 0.00859705638885498, 0.008614527702331542, 0.008710399627685547]",tokens/s,117.12955317438553,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3846, in from_pretrained hf_quantizer.preprocess_model( File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 182, in preprocess_model return self._process_model_before_weight_loading(model, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_gptq.py"", line 76, in _process_model_before_weight_loading model = self.optimum_quantizer.convert_model(model) File ""/usr/local/lib/python3.10/dist-packages/optimum/gptq/quantizer.py"", line 218, in convert_model self.block_name_to_quantize = get_block_name_with_pattern(model) File ""/usr/local/lib/python3.10/dist-packages/optimum/gptq/utils.py"", 
line 77, in get_block_name_with_pattern raise ValueError(""Block pattern could not be match. Pass `block_name_to_quantize` argument in `quantize_model`"") ValueError: Block pattern could not be match. Pass `block_name_to_quantize` argument in `quantize_model` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,930.025472,641.59744,0.0,239.075328,225.530368,s,1,8.3208642578125,8.3208642578125,0.0,8.3208642578125,8.3208642578125,8.3208642578125,8.3208642578125,[8.3208642578125],,kWh,2.2302540895839228e-05,2.4528961789635882e-06,6.915283309996689e-06,3.16707203847995e-05,,MB,1320.726528,733.872128,0.0,316.669952,285.824512,s,10,0.20532976150512697,0.020532976150512697,0.0004285717108414186,0.020285167694091798,0.02109200038909912,0.021108992290496828,0.02112258581161499,"[0.02107948875427246, 0.020902368545532228, 0.02009059143066406, 0.02014787292480469, 0.020173343658447265, 0.020286687850952147, 0.020151552200317384, 0.020283647537231445, 0.021125984191894532, 0.021088224411010742]",tokens/s,12467.749347364232,kWh,6.220872521765983e-07,6.860488641940567e-08,3.681956274157693e-07,1.0588877660117731e-06,tokens/kWh,241763110.51756328,MB,1354.051584,775.815168,0.0,358.612992,297.747968,s,10,9.887811218261719,0.9887811218261717,0.02167799515446516,0.9833120422363282,1.0199381958007812,1.0272988708496094,1.0331874108886718,"[0.992834228515625, 0.9797584228515624, 0.9868656616210938, 0.9697926025390625, 0.9673882446289063, 0.9713040771484375, 0.9686026611328125, 0.9983032836914062, 1.0346595458984376, 1.018302490234375]",tokens/s,63.71480867640941,kWh,2.8908675988240202e-05,3.1881045338908655e-06,1.0581174243184032e-05,4.267795476531509e-05,tokens/kWh,1476171.956843651,,s,630,9.881662103652952,0.015685177942306276,0.0004858996088117791,0.015508959770202637,0.0162648099899292,0.01635374069213867,0.01699263513565064,"[0.015949695587158204, 0.015929183959960937, 0.015565312385559082, 0.015512639999389649, 0.016214303970336914, 0.016141984939575197, 0.01612406349182129, 0.015712767601013183, 0.015632384300231932, 0.01544547176361084, 0.015458847999572754, 0.015476832389831543, 0.015740832328796386, 0.015837183952331545, 0.01597868824005127, 0.015900480270385743, 0.0160501766204834, 0.015964159965515135, 0.01600070381164551, 0.01586207962036133, 0.015646047592163086, 0.015578080177307128, 0.015472224235534668, 0.015480928421020508, 0.015342944145202637, 0.015466303825378417, 0.015371135711669922, 0.015420543670654296, 0.015513824462890626, 0.01571878433227539, 0.01632598304748535, 0.016048896789550782, 0.015956128120422363, 0.015958016395568847, 0.01584332847595215, 0.01587772846221924, 0.015960160255432128, 0.01582521629333496, 0.015634431838989257, 0.015476896286010742, 0.01537939167022705, 0.015334591865539551, 0.01541500759124756, 0.015445280075073241, 0.015397600173950195, 
0.01573583984375, 0.01627849578857422, 0.016133951187133788, 0.01599897575378418, 0.01588028812408447, 0.015704159736633302, 0.01587398433685303, 0.015662783622741698, 0.0156430082321167, 0.015563039779663087, 0.01567263984680176, 0.015817119598388673, 0.01565065574645996, 0.0156079683303833, 0.015525888442993165, 0.015831328392028807, 0.0161297607421875, 0.01604230308532715, 0.01567734432220459, 0.015456352233886719, 0.015337504386901856, 0.015281472206115723, 0.015401056289672851, 0.015403936386108399, 0.015381471633911133, 0.015410143852233887, 0.015272064208984375, 0.015330975532531738, 0.01573859214782715, 0.01599721622467041, 0.01588217639923096, 0.015714303970336914, 0.01574502372741699, 0.015670783996582033, 0.015507967948913574, 0.015378047943115234, 0.015288607597351074, 0.01532528018951416, 0.015206496238708496, 0.01532316780090332, 0.015339391708374023, 0.01560985565185547, 0.015839327812194826, 0.015720288276672365, 0.015495231628417969, 0.015325183868408204, 0.015242783546447754, 0.015310463905334472, 0.015288703918457032, 0.015422240257263183, 0.015337311744689941, 0.0157260799407959, 0.01605241584777832, 0.015830944061279297, 0.015692031860351563, 0.01569372844696045, 0.015802528381347655, 0.01585923194885254, 0.01579599952697754, 0.016063360214233397, 0.016072351455688475, 0.01576908779144287, 0.015467103958129882, 0.015544320106506348, 0.01542080020904541, 0.015520383834838867, 0.015521504402160645, 0.01584598445892334, 0.015982303619384765, 0.015843584060668946, 0.015382464408874513, 0.015206175804138184, 0.015220959663391114, 0.01529526424407959, 0.015274239540100097, 0.015581567764282227, 0.01583071994781494, 0.015504063606262207, 0.015340831756591797, 0.015282912254333496, 0.01514128017425537, 0.015233920097351075, 0.01578812789916992, 0.015725695610046386, 0.015417920112609864, 0.01527513599395752, 0.01540822410583496, 0.016152544021606444, 0.015738847732543946, 0.015753055572509767, 0.01590060806274414, 0.015847328186035157, 0.01572665596008301, 0.015451423645019531, 0.015477567672729493, 0.015392352104187011, 0.015473055839538574, 0.015499263763427735, 0.015927552223205566, 0.015892224311828615, 0.01568278408050537, 0.015634943962097168, 0.015677727699279786, 0.015550463676452637, 0.01567129611968994, 0.01563212776184082, 0.015974656105041504, 0.015945376396179198, 0.015656800270080568, 0.015597087860107422, 0.015575200080871583, 0.015730624198913575, 0.015968576431274414, 0.01618374443054199, 0.016111743927001952, 0.016024831771850587, 0.01603971290588379, 0.016156959533691406, 0.01601366424560547, 0.015803872108459474, 0.015938431739807128, 0.01597644805908203, 0.01616864013671875, 0.01603206443786621, 0.015817888259887697, 0.015786848068237304, 0.01577894401550293, 0.015696767807006837, 0.015575039863586425, 0.015529760360717773, 0.015507935523986816, 0.015417152404785157, 0.015439776420593262, 0.015269439697265625, 0.015221088409423829, 0.01522492790222168, 0.015332799911499023, 0.01528656005859375, 0.015279775619506835, 0.015202976226806641, 0.015263744354248047, 0.01524131202697754, 0.015317119598388673, 0.015318464279174805, 0.01522655963897705, 0.015205056190490722, 0.01521884822845459, 0.015218815803527831, 0.015195839881896973, 0.01589776039123535, 0.015909024238586426, 0.015360095977783202, 0.015249152183532715, 0.015317888259887695, 0.015316864013671876, 0.01528451156616211, 0.015238080024719238, 0.015146240234375, 0.015236767768859863, 0.015226880073547363, 0.015339520454406739, 0.015172767639160156, 0.015291232109069824, 0.015232768058776856, 
0.015249664306640626, 0.015280159950256347, 0.015225055694580079, 0.015152704238891602, 0.015214271545410156, 0.015245823860168458, 0.015239520072937012, 0.015244192123413085, 0.015245759963989258, 0.015180095672607422, 0.015196160316467285, 0.015239423751831054, 0.01535974407196045, 0.015304320335388184, 0.015405280113220215, 0.015251392364501954, 0.015240608215332031, 0.01524732780456543, 0.015270751953125, 0.015623807907104492, 0.015324640274047851, 0.015706656455993652, 0.015360383987426758, 0.015273088455200196, 0.015301600456237793, 0.015316896438598633, 0.016030752182006835, 0.015495231628417969, 0.01599180793762207, 0.018083744049072266, 0.015622143745422363, 0.01538428783416748, 0.015332703590393066, 0.015360671997070313, 0.015268128395080566, 0.015361280441284179, 0.015485695838928223, 0.01529036808013916, 0.015402496337890625, 0.015417856216430664, 0.015231231689453124, 0.015262656211853028, 0.015256383895874023, 0.015170592308044434, 0.015291359901428223, 0.015191840171813966, 0.01517142391204834, 0.01535366439819336, 0.015226816177368164, 0.015768192291259767, 0.015575039863586425, 0.015262016296386719, 0.015312159538269043, 0.015284640312194824, 0.015532032012939453, 0.01546668815612793, 0.01538809585571289, 0.015274399757385254, 0.015318400382995605, 0.015231328010559082, 0.015228320121765136, 0.015467488288879394, 0.01541312026977539, 0.015340607643127441, 0.015367103576660155, 0.015281184196472168, 0.015322367668151855, 0.015417183876037598, 0.015439743995666505, 0.015368191719055176, 0.0153372163772583, 0.015287775993347168, 0.015147487640380859, 0.015284064292907715, 0.01524783992767334, 0.015372287750244141, 0.015341567993164062, 0.01526371192932129, 0.01540732765197754, 0.015411328315734864, 0.01527571201324463, 0.015545663833618164, 0.01541801643371582, 0.015269920349121094, 0.015197216033935547, 0.015266783714294433, 0.015415295600891114, 0.01537446403503418, 0.015298432350158691, 0.01604374313354492, 0.015552800178527832, 0.015339136123657227, 0.015339903831481934, 0.015240608215332031, 0.015314944267272949, 0.015300640106201172, 0.015352383613586425, 0.015238719940185547, 0.015316736221313477, 0.015379072189331055, 0.015261823654174805, 0.015218624114990234, 0.015388671875, 0.015263615608215332, 0.015251680374145508, 0.015393823623657226, 0.015273887634277344, 0.015509951591491699, 0.015585280418395997, 0.015249695777893067, 0.015236319541931153, 0.015227999687194824, 0.015265664100646973, 0.015650143623352052, 0.015460639953613282, 0.015405247688293457, 0.015484031677246093, 0.015410016059875489, 0.015321248054504395, 0.01554214382171631, 0.015727840423583984, 0.015700063705444335, 0.015733344078063965, 0.015657024383544924, 0.01557215976715088, 0.015518303871154784, 0.015454463958740235, 0.015370304107666016, 0.015289759635925293, 0.015280672073364258, 0.015339520454406739, 0.015361568450927735, 0.01541478443145752, 0.015414239883422852, 0.01534768009185791, 0.015386528015136718, 0.015372223854064941, 0.015274175643920898, 0.015300000190734863, 0.015349535942077637, 0.015260448455810547, 0.015211647987365722, 0.015260576248168945, 0.015316255569458008, 0.015275872230529786, 0.015286751747131348, 0.016615840911865236, 0.015372287750244141, 0.015892191886901854, 0.015289952278137207, 0.015436767578125, 0.015325984001159669, 0.015341823577880859, 0.016048095703125, 0.015333632469177245, 0.015335904121398927, 0.015481951713562012, 0.015335647583007813, 0.015356320381164551, 0.015301088333129882, 0.015191871643066407, 0.015224703788757324, 0.015322879791259765, 
0.015176223754882813, 0.01529635238647461, 0.015138815879821778, 0.015291744232177735, 0.015219327926635742, 0.015339808464050293, 0.015140255928039551, 0.01520639991760254, 0.015264351844787598, 0.015239359855651856, 0.015222399711608887, 0.015225055694580079, 0.01520803165435791, 0.015241408348083496, 0.015275839805603028, 0.015340991973876953, 0.015365056037902832, 0.015366175651550292, 0.01533779239654541, 0.015298015594482422, 0.015309087753295899, 0.015263775825500488, 0.015169407844543457, 0.015170751571655273, 0.01519820785522461, 0.01521337604522705, 0.01574502372741699, 0.015474687576293946, 0.015376031875610352, 0.015354207992553711, 0.015338784217834472, 0.015504096031188965, 0.015308159828186035, 0.01552790355682373, 0.015378591537475585, 0.015357983589172364, 0.015319519996643067, 0.015258848190307617, 0.015108736038208007, 0.015292832374572754, 0.015363424301147462, 0.015336000442504882, 0.015273823738098144, 0.015183967590332031, 0.01534761619567871, 0.015231200218200684, 0.015451935768127442, 0.01593139171600342, 0.015466496467590332, 0.015331680297851563, 0.015302304267883302, 0.015339296340942382, 0.015380288124084473, 0.015368320465087891, 0.01618515205383301, 0.015571423530578613, 0.015593215942382813, 0.01544166374206543, 0.015630847930908204, 0.015534079551696778, 0.015466239929199219, 0.01536025619506836, 0.015359007835388183, 0.015393280029296874, 0.015415679931640626, 0.015388832092285157, 0.015284159660339355, 0.01532960033416748, 0.015398464202880859, 0.015390912055969238, 0.015633919715881347, 0.015599871635437012, 0.01551200008392334, 0.015542176246643067, 0.015457792282104492, 0.015397215843200683, 0.015400768280029297, 0.01533142375946045, 0.01569215965270996, 0.01548902416229248, 0.015404095649719238, 0.015412320137023925, 0.015339232444763184, 0.015374431610107422, 0.015404704093933106, 0.015370847702026368, 0.01533027172088623, 0.015452896118164063, 0.015259743690490723, 0.015302656173706054, 0.015351391792297364, 0.015345248222351074, 0.016536544799804688, 0.017024864196777345, 0.016726015090942382, 0.017241376876831055, 0.01662575912475586, 0.0160600643157959, 0.015696479797363282, 0.015319359779357911, 0.015376447677612304, 0.01533679962158203, 0.015407103538513184, 0.015471487998962403, 0.015672160148620606, 0.015510368347167968, 0.015269856452941894, 0.015208191871643066, 0.01536963176727295, 0.01563270378112793, 0.01625974464416504, 0.016045440673828126, 0.016181631088256834, 0.016002815246582033, 0.015974271774291993, 0.01597702407836914, 0.016052288055419923, 0.01701273536682129, 0.01640652847290039, 0.01639628791809082, 0.016237791061401368, 0.016130399703979493, 0.016060640335083008, 0.016238304138183595, 0.016384639739990235, 0.016473983764648436, 0.016270559310913087, 0.016314367294311523, 0.016239391326904298, 0.01606559944152832, 0.016217376708984373, 0.016354719161987306, 0.01632784080505371, 0.01632419204711914, 0.01626460838317871, 0.016171648025512696, 0.016250240325927735, 0.016209728240966798, 0.016099615097045897, 0.016417280197143554, 0.01624239921569824, 0.01613590431213379, 0.0162106876373291, 0.01624678421020508, 0.016261119842529297, 0.01627449607849121, 0.016096031188964844, 0.016224191665649413, 0.016104799270629883, 0.016128255844116212, 0.016224063873291016, 0.016210208892822264, 0.016161312103271486, 0.016210016250610353, 0.016502527236938475, 0.016256351470947266, 0.016229183197021484, 0.01622220802307129, 0.016215391159057617, 0.01635103988647461, 0.016276351928710936, 0.0162774715423584, 0.016246400833129882, 
0.01632614326477051, 0.01635807991027832, 0.016164384841918945, 0.016267711639404298, 0.01623472023010254, 0.01620582389831543, 0.016359519958496094, 0.01642857551574707, 0.016886144638061523, 0.020797119140625, 0.016943424224853516, 0.016508928298950197, 0.016352544784545897, 0.016386783599853516, 0.01718681526184082, 0.016306495666503905, 0.01636083221435547, 0.0162573127746582, 0.016197664260864258, 0.016215551376342775, 0.01632419204711914, 0.016212543487548827, 0.016877920150756835, 0.01845359992980957, 0.016303104400634767, 0.016269216537475584, 0.016234495162963866, 0.016189567565917967, 0.016274368286132813, 0.016146976470947264, 0.01623263931274414, 0.016294111251831056, 0.016190176010131837, 0.016309728622436525, 0.01627712059020996, 0.01608732795715332, 0.01621443176269531, 0.016341087341308593, 0.016223743438720704, 0.016005535125732422, 0.016375455856323242, 0.016263776779174805, 0.016195327758789062, 0.016280832290649413, 0.016327423095703127, 0.01611555290222168, 0.016220512390136718, 0.016174911499023437, 0.01603993606567383, 0.01590505599975586, 0.015884287834167482, 0.015949536323547363, 0.016109535217285156, 0.01604528045654297, 0.015813440322875977, 0.015958335876464842, 0.016080575942993162, 0.016060575485229493, 0.016095071792602538, 0.015913120269775392, 0.01584220790863037, 0.015850048065185546, 0.015958399772644044, 0.015922592163085936, 0.015917375564575197, 0.015910719871520995, 0.016040416717529298, 0.016312320709228514, 0.016103424072265626, 0.016457887649536134, 0.016278400421142578, 0.016384992599487305, 0.016058399200439454, 0.016205728530883787, 0.016924320220947267, 0.016332319259643555, 0.01670028877258301, 0.016326656341552736, 0.01621811294555664, 0.0163450870513916, 0.016266624450683595, 0.016245088577270507, 0.016042272567749025, 0.01618124771118164, 0.016148351669311525, 0.01601958465576172, 0.01598591995239258, 0.01592963218688965, 0.01612835121154785, 0.0161527042388916, 0.01613520050048828, 0.016104223251342774, 0.016181440353393556, 0.01618534469604492, 0.016287647247314452]",tokens/s,63.75445683040588,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,7434.891264,8041.463808,0.0,7646.216192,7627.584,s,1,12.864208984375,12.864208984375,0.0,12.864208984375,12.864208984375,12.864208984375,12.864208984375,[12.864208984375],,kWh,0.00017115813892916567,1.8867194484805756e-05,5.499921066600029e-05,0.0002450245440799717,,MB,1770.37312,8687.386624,0.0,8277.458944,8199.8592,s,10,3.5576812744140627,0.3557681274414063,0.0009765429764703398,0.35559803771972653,0.3571989959716797,0.35724894256591794,0.35728889984130857,"[0.35387228393554687, 0.35567178344726563, 0.3555242919921875, 0.35504611206054687, 0.356594970703125, 0.35571722412109374, 0.35729888916015623, 0.35718789672851564, 0.3553077087402344, 0.35546011352539064]",tokens/s,719.5697991303683,kWh,1.040425594511478e-05,1.1474067470608325e-06,6.919526608413771e-06,1.8471189300589384e-05,tokens/kWh,13859421.601609133,MB,1777.283072,9001.959424,0.0,8592.031744,8476.849152,s,10,29.4489931640625,2.94489931640625,0.004694868985522543,2.9471679687499996,2.9489606689453125,2.950297595214844,2.951367136230469,"[2.9393134765625, 2.937792724609375, 2.941801513671875, 2.951634521484375, 2.947964111328125, 2.938910400390625, 2.948576904296875, 2.947670654296875, 2.946665283203125, 2.94866357421875]",tokens/s,21.392921533521495,kWh,8.607475950238492e-05,9.494233178704241e-06,5.7117385732186307e-05,0.00015268637841327548,tokens/kWh,412610.48074293963,,s,630,29.442196475982673,0.046733645199972494,0.00039584723390524517,0.04672246360778809,0.047179973983764646,0.04730684471130371,0.04758142642974854,"[0.04675174331665039, 0.04630732727050781, 0.04612227249145508, 0.04621382522583008, 0.04601222229003906, 0.04603513717651367, 0.045977375030517575, 0.046186145782470704, 0.04600214385986328, 0.04604787063598633, 0.04617942428588867, 0.046322593688964846, 0.04635238265991211, 0.04619488143920898, 0.04595267105102539, 0.047589054107666014, 0.04664163208007813, 0.04619468688964844, 0.046137344360351565, 0.04655923080444336, 
0.046532608032226565, 0.0466921272277832, 0.04645084762573242, 0.046775775909423827, 0.04659574508666992, 0.04660707092285156, 0.046514175415039063, 0.0465307502746582, 0.046610462188720704, 0.04665456008911133, 0.046824542999267575, 0.04699894332885742, 0.04659212875366211, 0.046515968322753905, 0.04667855834960938, 0.046647262573242185, 0.04655107116699219, 0.04651827239990235, 0.04657356643676758, 0.046598014831542967, 0.04691088104248047, 0.046701278686523434, 0.046870529174804686, 0.04680089569091797, 0.04681887817382813, 0.04688326263427734, 0.04726988983154297, 0.047314945220947265, 0.04676812744140625, 0.04679065704345703, 0.04725078582763672, 0.04715711975097656, 0.0470263671875, 0.04682403182983398, 0.047108097076416014, 0.047179359436035156, 0.04681513595581055, 0.04697753524780274, 0.046992542266845704, 0.046936065673828124, 0.04703881454467773, 0.047027839660644534, 0.04711075210571289, 0.04678668975830078, 0.04634348678588867, 0.04602937698364258, 0.04615167999267578, 0.04604083251953125, 0.046117118835449215, 0.04604278564453125, 0.046112991333007815, 0.04624806213378906, 0.046059070587158205, 0.04613167953491211, 0.046142784118652344, 0.046412448883056644, 0.04644454574584961, 0.04618841552734375, 0.04636275100708008, 0.046405406951904295, 0.04638947296142578, 0.046378753662109376, 0.04655539321899414, 0.046561088562011715, 0.04673286437988281, 0.04660697555541992, 0.04645251083374023, 0.04626192092895508, 0.04644716644287109, 0.04657148742675781, 0.04634627151489258, 0.04653635025024414, 0.046569278717041016, 0.04647951889038086, 0.04656911849975586, 0.04654108810424805, 0.04648390579223633, 0.04695852661132813, 0.04680505752563477, 0.04665139389038086, 0.0466732177734375, 0.04684255981445312, 0.046782302856445315, 0.04661468887329102, 0.046656929016113284, 0.04688070297241211, 0.046907806396484376, 0.04685030364990234, 0.04668620681762695, 0.04675718307495117, 0.046903553009033205, 0.04694675064086914, 0.04683161544799805, 0.046911487579345705, 0.047023582458496097, 0.04688057708740234, 0.04682825469970703, 0.04681488037109375, 0.047687553405761716, 0.04710015869140625, 0.04689712142944336, 0.04690470504760742, 0.04720729446411133, 0.04728358459472656, 0.04714966583251953, 0.04723305511474609, 0.046950878143310545, 0.046276607513427735, 0.046053375244140625, 0.046298145294189456, 0.046391902923583986, 0.04619820785522461, 0.046107486724853514, 0.04644259262084961, 0.046209022521972655, 0.04628611373901367, 0.046545631408691404, 0.04641177749633789, 0.046578784942626954, 0.04634921646118164, 0.046198719024658205, 0.04683987045288086, 0.04657670211791992, 0.04654560089111328, 0.04636608123779297, 0.046387264251708984, 0.046492481231689455, 0.04659548950195312, 0.04657187271118164, 0.04680313491821289, 0.046626880645751954, 0.0465428466796875, 0.046639102935791016, 0.04648080062866211, 0.046588512420654295, 0.04672716903686523, 0.04665139389038086, 0.0465715217590332, 0.046709983825683594, 0.04660713577270508, 0.0464832649230957, 0.04665267181396485, 0.04670969772338867, 0.047304222106933594, 0.04710652923583984, 0.04703619384765625, 0.04677257537841797, 0.046575103759765625, 0.0468375358581543, 0.046790721893310544, 0.04671132659912109, 0.04687420654296875, 0.04707145690917969, 0.04687686538696289, 0.04673936080932617, 0.046857566833496095, 0.046793472290039065, 0.04679827117919922, 0.04704108810424805, 0.04687177658081055, 0.04691775894165039, 0.04696745681762695, 0.04695040130615234, 0.04698099136352539, 0.0470748176574707, 0.047075294494628904, 0.04735657501220703, 
0.04726784133911133, 0.04704630279541016, 0.046771488189697265, 0.04653152084350586, 0.046195838928222654, 0.04627260971069336, 0.04611481475830078, 0.04648787307739258, 0.04628121566772461, 0.046383102416992186, 0.04628275299072265, 0.04667391967773438, 0.046577503204345706, 0.04658396911621094, 0.047043617248535154, 0.04635286331176758, 0.04654959869384766, 0.04636415863037109, 0.04645724868774414, 0.04655104064941406, 0.046690303802490236, 0.046698497772216796, 0.046712833404541014, 0.046827518463134765, 0.04659814453125, 0.04686643218994141, 0.046632575988769534, 0.04665996932983398, 0.04673503875732422, 0.04665731048583984, 0.046725440979003906, 0.046747871398925785, 0.04674764633178711, 0.046731487274169925, 0.046870304107666017, 0.04697644805908203, 0.04708819198608399, 0.04674345779418945, 0.04669200134277344, 0.04674105453491211, 0.04686140823364258, 0.046947456359863284, 0.046949024200439456, 0.046944255828857424, 0.0470362548828125, 0.04706067276000977, 0.047032798767089844, 0.046964542388916015, 0.04718764877319336, 0.04708607864379883, 0.047355712890625, 0.04789456176757813, 0.04701609420776367, 0.047088638305664066, 0.047378593444824216, 0.04722979354858398, 0.047388671875, 0.04740630340576172, 0.047112640380859376, 0.04718550491333008, 0.04723174285888672, 0.04712857437133789, 0.047085567474365236, 0.04717907333374023, 0.047532737731933596, 0.046886878967285155, 0.04640470504760742, 0.04616672134399414, 0.04625616073608398, 0.04635881423950195, 0.04633190536499023, 0.04627257537841797, 0.04637472152709961, 0.04632793426513672, 0.04638057708740234, 0.046549121856689454, 0.046526817321777346, 0.04638627243041992, 0.046549919128417966, 0.04652032089233398, 0.04656947326660156, 0.04644659042358398, 0.046772224426269535, 0.04675971221923828, 0.04668150329589844, 0.046911487579345705, 0.046805824279785156, 0.04672512054443359, 0.04671897506713867, 0.046628543853759766, 0.046551361083984374, 0.0467242546081543, 0.04657443237304688, 0.04653180694580078, 0.04678940963745117, 0.046632095336914064, 0.04652732849121094, 0.046618175506591794, 0.04686892700195312, 0.04678601455688477, 0.046763809204101565, 0.04661280059814453, 0.04675990295410156, 0.046938175201416014, 0.04693443298339844, 0.04671846389770508, 0.04678092956542969, 0.04696176147460938, 0.046870590209960934, 0.04685641479492188, 0.046895328521728515, 0.046858657836914064, 0.04683308792114258, 0.04675823974609375, 0.04678678512573242, 0.04701513671875, 0.04691628646850586, 0.04674979019165039, 0.046766399383544925, 0.04702105712890625, 0.04704915237426758, 0.047167713165283204, 0.04698934555053711, 0.05101363372802734, 0.046992385864257816, 0.04718899154663086, 0.047017982482910156, 0.04716857528686524, 0.0469502067565918, 0.04632777786254883, 0.046007553100585935, 0.046236129760742185, 0.04599014282226563, 0.04598175811767578, 0.04607084655761719, 0.04626512145996094, 0.04623580932617188, 0.046063617706298826, 0.04633190536499023, 0.04641177749633789, 0.04627807998657227, 0.04620550537109375, 0.04622332763671875, 0.0462380485534668, 0.04642575836181641, 0.04644457626342773, 0.04628438568115235, 0.04657603073120117, 0.046483070373535156, 0.04647155380249023, 0.04640972900390625, 0.046585662841796875, 0.04639968109130859, 0.046927871704101565, 0.04662681579589844, 0.04660224151611328, 0.046604286193847655, 0.046564510345458984, 0.046469982147216794, 0.046572574615478514, 0.04651084899902344, 0.04661455917358399, 0.046400798797607425, 0.046715328216552734, 0.0467276496887207, 0.04673235321044922, 0.04671379089355469, 
0.047067134857177735, 0.04703033447265625, 0.04680435180664062, 0.04695302581787109, 0.04717363357543945, 0.046974433898925784, 0.04689769744873047, 0.046876670837402344, 0.04688076782226563, 0.046935264587402346, 0.046741886138916014, 0.04703417587280274, 0.047019870758056644, 0.04681308746337891, 0.04661743927001953, 0.04711782455444336, 0.04725763320922852, 0.04694883346557617, 0.046865535736083985, 0.046951297760009766, 0.04711334228515625, 0.04722880172729492, 0.04698828887939453, 0.04722809600830078, 0.047352577209472654, 0.04655500793457031, 0.04614361572265625, 0.04616396713256836, 0.04604927825927734, 0.04619878387451172, 0.046186496734619144, 0.046080001831054686, 0.04636483383178711, 0.046333633422851565, 0.04637449645996094, 0.04641030502319336, 0.046432254791259765, 0.04627983856201172, 0.046377536773681644, 0.04661379241943359, 0.04652668762207031, 0.04658598327636719, 0.04649635314941406, 0.04657555389404297, 0.04688294219970703, 0.046720001220703126, 0.04659491348266601, 0.04659225463867187, 0.04657756805419922, 0.0465299186706543, 0.04648614501953125, 0.0465530891418457, 0.04659164810180664, 0.04650947189331055, 0.04654380798339844, 0.046611808776855466, 0.046817214965820315, 0.04690198516845703, 0.046794750213623046, 0.046936065673828124, 0.047282176971435545, 0.046876224517822265, 0.046956993103027346, 0.048097278594970705, 0.04684185409545898, 0.046847488403320314, 0.04699801635742187, 0.04717513656616211, 0.04698076629638672, 0.046873470306396485, 0.04692323303222656, 0.046774078369140625, 0.046754528045654296, 0.04701593780517578, 0.04731903839111328, 0.047562751770019535, 0.04712172698974609, 0.04693619155883789, 0.04728070449829101, 0.047308990478515625, 0.04722022247314453, 0.04727974319458008, 0.047282878875732424, 0.04724313735961914, 0.04750956726074219, 0.0471736946105957, 0.04748672103881836, 0.04725417709350586, 0.04667107009887695, 0.04606032180786133, 0.046238849639892575, 0.046107521057128904, 0.04626432037353516, 0.04598988723754883, 0.046266368865966793, 0.046317569732666014, 0.04634624099731445, 0.046438369750976566, 0.04638518524169922, 0.04652851104736328, 0.04637417602539062, 0.04641251373291016, 0.04661862564086914, 0.046642879486083984, 0.04675411224365234, 0.04655718231201172, 0.04668975830078125, 0.04669494247436524, 0.04700710296630859, 0.04669635009765625, 0.046785247802734374, 0.0465530891418457, 0.04639539337158203, 0.04681913757324219, 0.04664748764038086, 0.04639904022216797, 0.04653302383422851, 0.04650191879272461, 0.04664022445678711, 0.04673827362060547, 0.046702014923095704, 0.046567169189453125, 0.04680179214477539, 0.04696268844604492, 0.046923454284667966, 0.04696249771118164, 0.0470340461730957, 0.04696665573120117, 0.04688931274414063, 0.04721654510498047, 0.04742540740966797, 0.04712326431274414, 0.04698112106323242, 0.046919456481933595, 0.047067230224609374, 0.04703388977050781, 0.04668463897705078, 0.046887039184570316, 0.046835136413574216, 0.047061569213867185, 0.047092929840087894, 0.04710406494140625, 0.04705356979370117, 0.04709785461425781, 0.047034366607666016, 0.04717567825317383, 0.04700774383544922, 0.04738627243041992, 0.048173408508300784, 0.04740095901489258, 0.046900352478027346, 0.04637948989868164, 0.046260414123535154, 0.04635465621948242, 0.04627206420898437, 0.0462729606628418, 0.04611276626586914, 0.04628803253173828, 0.046330718994140624, 0.04650185775756836, 0.04651190567016601, 0.046370174407958986, 0.0464692497253418, 0.046408447265625, 0.046432254791259765, 0.04661161422729492, 0.04686934280395508, 
0.04674764633178711, 0.04640288162231445, 0.04658041763305664, 0.04658790588378906, 0.04652851104736328, 0.046301185607910154, 0.04637081527709961, 0.04646297454833984, 0.04640153503417969, 0.046637054443359374, 0.047332511901855466, 0.04661948776245117, 0.04657916641235352, 0.046527008056640624, 0.046720672607421875, 0.046696800231933594, 0.046635009765625, 0.046532608032226565, 0.04681932830810547, 0.04680704116821289, 0.0470648307800293, 0.04705257415771484, 0.04683148956298828, 0.04678102493286133, 0.04741849517822266, 0.047317344665527346, 0.04697507095336914, 0.04700950241088867, 0.04690790557861328, 0.04692745590209961, 0.046977375030517576, 0.04690563201904297, 0.046886913299560545, 0.04700569534301758, 0.04714236831665039, 0.047093505859375, 0.04699625778198242, 0.04714684677124024, 0.04704025650024414, 0.047198112487792966, 0.04712819290161133, 0.047227775573730466, 0.04735404968261719, 0.04737577438354492, 0.047178142547607424, 0.04736159896850586, 0.047143009185791014, 0.04648729705810547, 0.04593679809570313, 0.04614227294921875, 0.046196670532226564, 0.04627619171142578, 0.046227935791015626, 0.046321247100830076, 0.04644112014770508, 0.04656224060058594, 0.04671366500854492, 0.046415870666503906, 0.046380161285400394, 0.04662771224975586, 0.04628889465332031, 0.046519390106201174, 0.04634822463989258, 0.046595039367675783, 0.046483455657958986, 0.04659369659423828, 0.0465733757019043, 0.04668636703491211, 0.04647907257080078, 0.046527137756347654, 0.04644659042358398, 0.04631264114379883, 0.046498622894287106, 0.04683161544799805, 0.04676156616210937, 0.04669203186035156, 0.0466495361328125, 0.04700214385986328, 0.046747230529785154, 0.04671939086914063, 0.047019870758056644, 0.04720848083496094, 0.04716352081298828, 0.046943614959716794, 0.047325374603271485, 0.04681363296508789, 0.047034366607666016, 0.04703961563110352, 0.04683456039428711, 0.04698268890380859, 0.047159648895263674, 0.04700787353515625, 0.04679884719848633, 0.04689446258544922, 0.046905982971191404, 0.046811134338378906, 0.047075328826904295, 0.04694015884399414, 0.047298561096191405, 0.047933441162109375, 0.047202144622802734, 0.04711439895629883, 0.04751769638061523, 0.04726764678955078, 0.04721078491210937, 0.046992927551269534, 0.04710847854614258, 0.047242305755615235, 0.04745235061645508]",tokens/s,21.39786005823035,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most 
recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,4399.427584,4976.410624,0.0,4573.888512,4568.93696,s,1,10.653498046875,10.653498046875,0.0,10.653498046875,10.653498046875,10.653498046875,10.653498046875,[10.653498046875],,kWh,0.0001063242550708234,1.1721156534385567e-05,3.27141928379987e-05,0.00015075960444320766,,MB,2182.750208,5303.566336,0.0,4886.36416,4814.97344,s,10,2.0939474792480466,0.20939474792480467,0.00018643556789469997,0.20931350708007812,0.20967909698486328,0.2096967643737793,0.2097108982849121,"[0.2095928955078125, 0.20921379089355469, 0.2092220458984375, 0.2096751708984375, 0.20922808837890625, 0.20928163146972656, 0.20942431640625, 0.20924972534179687, 0.2097144317626953, 0.20934538269042968]",tokens/s,1222.5712561421628,kWh,6.184242152083483e-06,6.820154746513085e-07,4.111340094624845e-06,1.0977597721359636e-05,tokens/kWh,23320220.55261586,MB,2189.852672,5408.423936,0.0,4991.22176,4947.963904,s,10,25.337256347656247,2.5337256347656245,0.012450117298803099,2.5302037353515625,2.54606923828125,2.555306103515625,2.562695595703125,"[2.56454296875, 2.53185595703125, 2.53433349609375, 2.525358154296875, 2.5440166015625, 2.528551513671875, 2.519905517578125, 2.52308251953125, 2.539431396484375, 2.52617822265625]",tokens/s,24.864570628945643,kWh,7.406795273791552e-05,8.169663890119593e-06,4.533969946617528e-05,0.00012757731609421038,tokens/kWh,493818.1953402845,,s,630,25.334205780029304,0.040213025047665545,0.00046863920960852685,0.04015086364746094,0.04064994049072266,0.04087552795410156,0.04192168930053712,"[0.040599777221679685, 0.04045888137817383, 0.04061606216430664, 0.040546302795410154, 0.04255539321899414, 0.04041113662719727, 0.040302593231201174, 0.041043968200683595, 0.04048281478881836, 0.04069279861450195, 
0.04052268981933594, 0.04085724639892578, 0.04042329788208008, 0.0405775032043457, 0.040476673126220705, 0.04022886276245117, 0.040237056732177735, 0.04103987121582031, 0.04028598403930664, 0.04099094390869141, 0.04043571090698242, 0.040548351287841795, 0.04051932907104492, 0.04109139251708984, 0.04074198532104492, 0.040567745208740236, 0.04054617691040039, 0.040687744140625, 0.04069564819335938, 0.040615966796875, 0.04060732650756836, 0.04400751876831055, 0.04127484893798828, 0.040604736328125, 0.040794017791748044, 0.041428897857666014, 0.04163593673706055, 0.040777313232421876, 0.04059807968139648, 0.040715614318847654, 0.04045260620117187, 0.04168499374389648, 0.041154014587402345, 0.040489505767822266, 0.040787616729736326, 0.040304702758789064, 0.04034288024902344, 0.04076009750366211, 0.04087004852294922, 0.0409804801940918, 0.040667137145996096, 0.04037836837768555, 0.040427520751953126, 0.040318977355957034, 0.04017068862915039, 0.0402808952331543, 0.040325119018554685, 0.040476673126220705, 0.04015718460083008, 0.040358016967773434, 0.04045721435546875, 0.0401847038269043, 0.03995852661132813, 0.04116361618041992, 0.04019174575805664, 0.04005503845214844, 0.04032908630371094, 0.04029196929931641, 0.04059145736694336, 0.04023337554931641, 0.040400543212890626, 0.040050014495849606, 0.0398831672668457, 0.039892929077148434, 0.03981584167480469, 0.039723262786865235, 0.03981286239624023, 0.03969225692749023, 0.03976563262939453, 0.039776927947998045, 0.040233726501464846, 0.04015158462524414, 0.04030511856079102, 0.04042342376708984, 0.04016934585571289, 0.04004022216796875, 0.04036774444580078, 0.039873249053955076, 0.04004249572753906, 0.04000569534301758, 0.04003401565551758, 0.04010185623168945, 0.04013663864135742, 0.04063983917236328, 0.040596446990966796, 0.04053606414794922, 0.040404449462890624, 0.040371841430664065, 0.040237983703613284, 0.04013379287719727, 0.04020620727539063, 0.04029065704345703, 0.04003609466552734, 0.040354686737060545, 0.0403963508605957, 0.04015763092041016, 0.040353759765625, 0.039982368469238284, 0.03999452972412109, 0.040242015838623045, 0.04028112030029297, 0.040304351806640625, 0.040468223571777345, 0.04037247848510742, 0.04023100662231445, 0.04000553512573242, 0.04008700942993164, 0.040314815521240235, 0.04061017608642578, 0.04029481506347656, 0.040169567108154294, 0.04011324691772461, 0.03980944061279297, 0.040040672302246096, 0.040012863159179686, 0.039954593658447266, 0.040597312927246096, 0.04070076751708984, 0.04041247940063476, 0.04045075225830078, 0.040714241027832034, 0.040529918670654294, 0.04052406311035156, 0.04075715255737305, 0.0401629753112793, 0.040034111022949216, 0.04004217529296875, 0.04019817733764648, 0.04081024169921875, 0.040219104766845704, 0.039825889587402345, 0.03979462432861328, 0.039882625579833984, 0.040107616424560545, 0.040511070251464845, 0.040623039245605466, 0.040554080963134766, 0.04027638244628906, 0.040304222106933595, 0.040449920654296874, 0.040365760803222656, 0.04012236785888672, 0.04027068710327148, 0.040279998779296874, 0.040124095916748044, 0.04023334503173828, 0.04012236785888672, 0.04009574508666992, 0.04044972610473633, 0.04028857421875, 0.03994214248657227, 0.03984998321533203, 0.03968393707275391, 0.040372383117675784, 0.039995391845703124, 0.03972473526000977, 0.03980486297607422, 0.03990521621704102, 0.03998553466796875, 0.03986748886108398, 0.03969446563720703, 0.040014305114746095, 0.0399441909790039, 0.04000806427001953, 0.040008865356445315, 0.0399183349609375, 0.04077987289428711, 
0.0399884147644043, 0.04009884643554688, 0.040218273162841794, 0.04060992050170899, 0.04018739318847656, 0.040165790557861326, 0.04100342559814453, 0.04075040054321289, 0.04018972778320312, 0.04004719924926758, 0.040043743133544925, 0.040395553588867185, 0.040426303863525394, 0.040291519165039064, 0.04032412719726562, 0.040283935546875, 0.04006326293945312, 0.04039276885986328, 0.04000486373901367, 0.04002470397949219, 0.04005411148071289, 0.039911006927490236, 0.039990177154541014, 0.039855072021484375, 0.0398427848815918, 0.04005440139770508, 0.03983305740356445, 0.04016950225830078, 0.03983244705200195, 0.03978035354614258, 0.03969023895263672, 0.04031488037109375, 0.04041318511962891, 0.03986636734008789, 0.03981926345825195, 0.03971072006225586, 0.03974467086791992, 0.03940003204345703, 0.03980108642578125, 0.03937279891967774, 0.03958784103393555, 0.03967715072631836, 0.039887649536132816, 0.0396649284362793, 0.0397782096862793, 0.0396192626953125, 0.03967382431030273, 0.0402966079711914, 0.04087804794311523, 0.04016096115112305, 0.04009971237182617, 0.03995027160644531, 0.040003231048583984, 0.03988159942626953, 0.040030174255371094, 0.04004665756225586, 0.04047225570678711, 0.04049859237670898, 0.03996761703491211, 0.0401162223815918, 0.04119756698608398, 0.040359935760498046, 0.04017283248901367, 0.040182689666748046, 0.04009471893310547, 0.039955520629882814, 0.03996236801147461, 0.040130561828613284, 0.04047257614135742, 0.04023839950561523, 0.04016169738769531, 0.040397087097167966, 0.04057292938232422, 0.040803966522216795, 0.040744991302490235, 0.04040457534790039, 0.04089708709716797, 0.04052284622192383, 0.040589824676513675, 0.04045651245117188, 0.040441856384277344, 0.04026572799682617, 0.04022249603271484, 0.04025980758666992, 0.040703998565673825, 0.040468097686767575, 0.041576831817626954, 0.04365856170654297, 0.04060675048828125, 0.04041449737548828, 0.04034950256347656, 0.04027654266357422, 0.04049100875854492, 0.04035939025878906, 0.04032969665527344, 0.040492225646972656, 0.040455039978027345, 0.04020547103881836, 0.040147743225097655, 0.040198207855224606, 0.040369663238525394, 0.0408724479675293, 0.04069171142578125, 0.04031078338623047, 0.04021488189697266, 0.04024889755249023, 0.04030064010620117, 0.0402529296875, 0.04017407989501953, 0.040229217529296875, 0.04016479873657226, 0.04037817764282227, 0.040148704528808594, 0.04005062484741211, 0.03991865539550781, 0.03991622543334961, 0.04011430358886719, 0.04288761520385742, 0.04100755310058594, 0.04028416061401367, 0.04034966278076172, 0.04004048156738281, 0.040032257080078126, 0.040204288482666016, 0.040079360961914064, 0.04029439926147461, 0.04048076629638672, 0.04015727996826172, 0.040202144622802735, 0.03990118408203125, 0.03994195175170898, 0.03987065505981445, 0.03990323257446289, 0.039725055694580076, 0.039816608428955076, 0.03968793487548828, 0.03992793655395508, 0.03977443313598633, 0.04034550476074219, 0.04043724822998047, 0.040497631072998044, 0.04020336151123047, 0.04024617767333984, 0.03993209457397461, 0.040019775390625, 0.040048446655273434, 0.040923328399658204, 0.040289726257324215, 0.040370750427246097, 0.04038860702514648, 0.04043775939941406, 0.04036198425292969, 0.04017737579345703, 0.04044188690185547, 0.040184062957763673, 0.0400200309753418, 0.04048275375366211, 0.040103935241699216, 0.03990729522705078, 0.040113662719726564, 0.04014339065551758, 0.04016569519042969, 0.04018355178833008, 0.04019705581665039, 0.03988787078857422, 0.04006313705444336, 0.040529727935791016, 0.040091552734375, 
0.03992793655395508, 0.03983747100830078, 0.039815391540527344, 0.04023868942260742, 0.040165088653564454, 0.03982815933227539, 0.0396572151184082, 0.039606208801269534, 0.040427841186523435, 0.039747486114501955, 0.039761024475097655, 0.0397957763671875, 0.040316257476806644, 0.04066566467285156, 0.04002790451049805, 0.04000179290771484, 0.03982131195068359, 0.0400261116027832, 0.03988681411743164, 0.040056575775146486, 0.03956140899658203, 0.03996681594848633, 0.039782398223876955, 0.0396954231262207, 0.040172576904296875, 0.03963619232177734, 0.039785152435302736, 0.0397946891784668, 0.03971286392211914, 0.040251296997070314, 0.04003622436523437, 0.03994611358642578, 0.04308566284179687, 0.04035583877563476, 0.040497150421142575, 0.04030905532836914, 0.04027155303955078, 0.04031283187866211, 0.04047872161865235, 0.040042560577392576, 0.04014313507080078, 0.040275615692138673, 0.04019401550292969, 0.04003843307495117, 0.03995852661132813, 0.040064990997314455, 0.03989097595214844, 0.039741439819335936, 0.040137054443359375, 0.040144542694091796, 0.04023036956787109, 0.03999593734741211, 0.039755775451660154, 0.039567359924316405, 0.03971686553955078, 0.03962623977661133, 0.03957196807861328, 0.039657470703125, 0.03998454284667969, 0.03948604965209961, 0.03972035217285156, 0.039596481323242186, 0.03953788757324219, 0.04028684616088867, 0.04003667068481445, 0.039766014099121096, 0.04127510452270508, 0.041195232391357424, 0.03989728164672852, 0.039954814910888675, 0.03978035354614258, 0.03993100738525391, 0.03981606292724609, 0.0397344970703125, 0.04013545608520508, 0.04303462219238281, 0.04078182220458984, 0.04015014266967774, 0.03998400115966797, 0.03985612869262695, 0.039608318328857424, 0.03949772644042969, 0.0397006721496582, 0.03999497604370117, 0.039698654174804685, 0.03972915267944336, 0.039626750946044925, 0.03966124725341797, 0.039717182159423825, 0.03954687881469727, 0.03960176086425781, 0.03971932983398437, 0.039669761657714846, 0.039550880432128906, 0.03965500640869141, 0.03973689651489258, 0.03995647811889649, 0.03995369720458984, 0.03982163238525391, 0.039849952697753904, 0.03974364852905273, 0.039979137420654294, 0.03970883178710938, 0.040082817077636716, 0.039766048431396486, 0.039989376068115236, 0.039965152740478516, 0.04023295974731445, 0.040356960296630856, 0.03999862289428711, 0.040003326416015624, 0.03996672058105469, 0.040062976837158204, 0.03999948883056641, 0.03959513473510742, 0.039647552490234376, 0.039489566802978514, 0.03994169616699219, 0.039803550720214846, 0.04031110382080078, 0.0409804801940918, 0.039766368865966795, 0.039937694549560546, 0.03987865447998047, 0.03985715103149414, 0.039847198486328124, 0.03975958251953125, 0.039981056213378906, 0.040390369415283206, 0.04012265777587891, 0.03993088150024414, 0.03986288070678711, 0.039976577758789066, 0.040174369812011716, 0.040153438568115235, 0.03994179153442383, 0.03995647811889649, 0.03968239974975586, 0.03974115371704102, 0.03963488006591797, 0.04005795288085937, 0.04025027084350586, 0.04042956924438477, 0.04038835144042969, 0.04054451370239258, 0.040288257598876956, 0.0400715217590332, 0.04005567932128906, 0.040008480072021485, 0.040013824462890625, 0.04025881576538086, 0.03994291305541992, 0.041371200561523436, 0.040996353149414064, 0.040137664794921875, 0.0400076789855957, 0.039894847869873046, 0.0401446418762207, 0.04009209442138672, 0.0405794563293457, 0.04046195220947266, 0.04068163299560547, 0.04077923202514649, 0.040434272766113284, 0.0408037109375, 0.040538238525390624, 0.040430335998535155, 
0.040312000274658207, 0.04054508972167969, 0.04042342376708984, 0.04055478286743164, 0.04027568054199219, 0.04012851333618164, 0.03992486572265625, 0.039697280883789064, 0.04000966262817383, 0.039946529388427736, 0.040331295013427734, 0.04201836776733398, 0.040691902160644534, 0.040648193359375, 0.0408089599609375, 0.04090044784545899, 0.0412808952331543, 0.040462177276611326, 0.04036908721923828, 0.04008678436279297, 0.040471294403076175, 0.039946239471435545, 0.04016089630126953, 0.04012275314331055, 0.039904926300048826, 0.04011199951171875, 0.03999164962768555, 0.03986240005493164, 0.03973324966430664, 0.04003443145751953, 0.03971996688842774, 0.03995734405517578, 0.039846912384033206, 0.039954849243164066, 0.03975228881835938, 0.03996979141235352, 0.04028684616088867, 0.04028044891357422, 0.04024873733520508, 0.040441726684570314, 0.04018454360961914, 0.040172607421875, 0.040046558380126954, 0.04029334259033203, 0.04012192153930664, 0.04015315246582031, 0.040118656158447265, 0.04007731246948242, 0.04054963302612305, 0.04056553649902344, 0.04060780715942383, 0.040507297515869144, 0.04007321548461914, 0.04072390365600586, 0.040008480072021485, 0.04042515182495117, 0.04037235260009766, 0.03999996948242188, 0.0399441909790039, 0.04008755111694336, 0.039892383575439457, 0.039957088470458986, 0.040222431182861326, 0.039846176147460936, 0.040078750610351564, 0.03992399978637695, 0.039805534362792966, 0.03993571090698242, 0.03998617553710938, 0.040007774353027346, 0.04019497680664062, 0.04022886276245117, 0.03996672058105469, 0.039833343505859375, 0.03996448135375977, 0.040026912689208986, 0.04009958267211914, 0.03998060989379883, 0.039991870880126956, 0.03999065780639648, 0.040256992340087894, 0.03994630432128906, 0.040166271209716796, 0.04006006240844726, 0.04006095886230469, 0.03996080017089844, 0.03994275283813477, 0.03991551971435547, 0.040030208587646485, 0.0399788818359375, 0.04010591888427734, 0.03990156936645508, 0.039952190399169925, 0.0401099853515625, 0.04046448135375977, 0.04062019348144531, 0.04052361679077148, 0.04024079895019531, 0.0403111686706543, 0.04008310317993164, 0.04020217514038086, 0.040462177276611326, 0.040543968200683594, 0.04028908920288086, 0.04024694442749024, 0.04027033615112305, 0.040221599578857424, 0.0401673583984375, 0.04017449569702149, 0.040136798858642575, 0.04011849594116211, 0.040088512420654296, 0.04003926467895508, 0.039903358459472654, 0.039964542388916016, 0.04019302368164063, 0.03978672027587891, 0.03969731140136719]",tokens/s,24.867564646396882,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1045.434368,904.855552,0.0,509.607936,491.434496,s,1,7.69194482421875,7.69194482421875,0.0,7.69194482421875,7.69194482421875,7.69194482421875,7.69194482421875,[7.69194482421875],,kWh,2.3835842712499773e-05,2.621883335264642e-06,7.240561347994501e-06,3.369828739575891e-05,,MB,1403.293696,1039.07328,0.0,629.1456,592.24832,s,10,0.26188947296142584,0.02618894729614258,0.00021841016604451076,0.026089839935302735,0.026433218955993654,0.026552321910858153,0.026647604274749756,"[0.02629078483581543, 0.025977535247802733, 0.02640675163269043, 0.026321664810180664, 0.026105951309204102, 0.02605801582336426, 0.026671424865722656, 0.02605923271179199, 0.02592438316345215, 0.026073728561401367]",tokens/s,9775.11608638453,kWh,7.529128719823882e-07,8.303299081971835e-08,4.7044925952060254e-07,1.3063951223227092e-06,tokens/kWh,195959090.4969425,MB,1442.83648,1053.753344,0.0,643.825664,605.085696,s,10,13.617179565429685,1.3617179565429687,0.005481366197635939,1.3625023803710938,1.3678305053710937,1.368237274169922,1.3685626892089844,"[1.3677401123046875, 1.3626983642578125, 1.357629150390625, 1.366910400390625, 1.3640528564453125, 1.36864404296875, 1.362306396484375, 1.3496138916015625, 1.3598287353515626, 1.357755615234375]",tokens/s,46.2650871990701,kWh,3.91161338250991e-05,4.314099562961237e-06,1.4551146091079945e-05,5.7981379479140276e-05,tokens/kWh,1086555.728165544,,s,630,13.611552797317499,0.021605639360821435,0.0004668648119582739,0.02151136016845703,0.021915046691894532,0.022159848117828368,0.022940940837860118,"[0.02109094429016113, 0.021511455535888672, 0.02187932777404785, 0.022566911697387695, 0.0217674560546875, 0.021486303329467774, 0.021960832595825194, 0.021898944854736327, 0.021585407257080077, 0.021679103851318358, 0.02184364891052246, 0.02174799919128418, 0.021874656677246095, 0.022176736831665038, 0.021777183532714843, 0.021987680435180665, 0.021745439529418945, 0.021680000305175782, 0.02151136016845703, 0.021904191970825194, 0.02185420799255371, 0.021848031997680664, 0.0217620792388916, 0.02188047981262207, 0.02188969612121582, 0.021946016311645507, 0.02163715171813965, 0.021624448776245118, 0.021813631057739258, 0.021688671112060548, 0.021618495941162108, 0.021604192733764647, 0.021544031143188477, 0.021697439193725587, 0.02162073516845703, 0.021616128921508788, 0.021688575744628905, 0.0214182071685791, 0.02182899284362793, 0.0216231689453125, 0.02154323196411133, 0.021442655563354493, 0.021518207550048827, 0.021741151809692383, 0.021689983367919923, 0.021712703704833983, 0.021603296279907227, 0.021489664077758788, 0.021600255966186522, 0.021538047790527343, 0.021701375961303712, 0.02147942352294922, 0.021548671722412108, 0.02181711959838867, 0.022073951721191407, 0.021766143798828123, 0.021547008514404296, 0.021512447357177736, 0.021800703048706054, 0.021635072708129883, 0.021575679779052736, 0.021432319641113282, 0.02155142402648926, 0.020985824584960937, 0.02126608085632324, 0.021326112747192382, 0.021349439620971678, 0.021433343887329103, 0.02157702445983887, 0.021381376266479492, 0.021616512298583985, 0.02152422332763672, 0.021541696548461914, 0.0215285758972168, 0.021389312744140625, 0.021386560440063478, 0.021330623626708983, 0.02127667236328125, 0.022237184524536133, 0.021288543701171874, 0.02141222381591797, 0.021700607299804688, 0.021612255096435547, 0.022180320739746094, 0.021484800338745117, 0.02154092788696289, 0.021323808670043944, 0.021388927459716798, 0.021560192108154297, 0.02128895950317383, 
0.021489152908325194, 0.021328384399414063, 0.021407487869262696, 0.021448768615722657, 0.02133625602722168, 0.021345983505249022, 0.021768512725830077, 0.02263417625427246, 0.022300800323486327, 0.02219241523742676, 0.0216878719329834, 0.021429824829101562, 0.021787424087524414, 0.022111423492431642, 0.022330463409423826, 0.022029119491577147, 0.02198579216003418, 0.021839839935302734, 0.023038143157958983, 0.0217193603515625, 0.021532608032226563, 0.02146928024291992, 0.021499807357788087, 0.021374975204467773, 0.021377023696899415, 0.02136000061035156, 0.021451391220092774, 0.021331968307495116, 0.021779552459716797, 0.02144486427307129, 0.021623296737670897, 0.021735328674316406, 0.02174527931213379, 0.02175823974609375, 0.021510496139526367, 0.021932191848754883, 0.021527584075927735, 0.02155513572692871, 0.021747167587280274, 0.02151888084411621, 0.021303295135498047, 0.021259904861450196, 0.02128927993774414, 0.02174777603149414, 0.021440511703491212, 0.02123334312438965, 0.02123119926452637, 0.021523168563842773, 0.021503456115722658, 0.021375520706176758, 0.02135980796813965, 0.0212607364654541, 0.02154944038391113, 0.02187001609802246, 0.02208620834350586, 0.02146099281311035, 0.021403936386108397, 0.02144428825378418, 0.02265091133117676, 0.021829183578491212, 0.021457088470458983, 0.021323904037475586, 0.021491615295410157, 0.021366432189941408, 0.021319936752319336, 0.021295360565185547, 0.021440576553344727, 0.021329376220703126, 0.021381664276123046, 0.021313888549804687, 0.02134009552001953, 0.02156924819946289, 0.021525856018066405, 0.02162719917297363, 0.021766496658325196, 0.02160428810119629, 0.02161004829406738, 0.02142380714416504, 0.021419967651367187, 0.021224031448364256, 0.021185951232910158, 0.021390335083007812, 0.021337984085083007, 0.021323263168334963, 0.02123766326904297, 0.021480031967163086, 0.02155023956298828, 0.021434240341186524, 0.02133705520629883, 0.021336416244506835, 0.021517984390258788, 0.02123366355895996, 0.021260128021240235, 0.02216352081298828, 0.025294944763183592, 0.021600255966186522, 0.021395456314086913, 0.021624895095825197, 0.02137081527709961, 0.022188543319702148, 0.021338016510009765, 0.02125823974609375, 0.021458208084106447, 0.021457279205322265, 0.02192624092102051, 0.02145280075073242, 0.02153424072265625, 0.021799455642700194, 0.02199679946899414, 0.021572288513183595, 0.021477535247802736, 0.021671520233154298, 0.02162291145324707, 0.021366527557373047, 0.021336448669433593, 0.021249759674072267, 0.021475616455078124, 0.02164156723022461, 0.022205408096313477, 0.02240905570983887, 0.022692832946777344, 0.02202406311035156, 0.022015071868896483, 0.021758880615234375, 0.02188489532470703, 0.02186591911315918, 0.02178518486022949, 0.02182143974304199, 0.02163408088684082, 0.02158896064758301, 0.02170675277709961, 0.021880544662475587, 0.021741216659545898, 0.021858848571777344, 0.021803104400634765, 0.021778432846069336, 0.022228992462158204, 0.02138729667663574, 0.02146031951904297, 0.021534591674804687, 0.02148374366760254, 0.021456703186035157, 0.021392032623291014, 0.021530176162719728, 0.02173833656311035, 0.022009504318237304, 0.021395456314086913, 0.021782527923583983, 0.02181907272338867, 0.022567232131958007, 0.021523775100708006, 0.021570047378540038, 0.021502080917358397, 0.021477439880371093, 0.021387039184570314, 0.021504287719726563, 0.021483808517456054, 0.021626527786254884, 0.02170675277709961, 0.021712896347045898, 0.02146099281311035, 0.02137411117553711, 0.020966592788696288, 0.021473056793212892, 
0.021693504333496094, 0.021763008117675783, 0.021618688583374023, 0.021497856140136717, 0.02141596794128418, 0.021468608856201173, 0.021418527603149416, 0.021409791946411134, 0.02141798400878906, 0.02184601593017578, 0.021485567092895508, 0.0216342716217041, 0.02144745635986328, 0.021524032592773436, 0.021524768829345703, 0.021536191940307616, 0.021472000122070314, 0.021608415603637694, 0.021405696868896484, 0.022202367782592772, 0.021803007125854493, 0.02161664009094238, 0.021421087265014647, 0.021724128723144533, 0.02150761604309082, 0.02141596794128418, 0.02163961601257324, 0.022095903396606446, 0.022155359268188478, 0.021998495101928712, 0.021801631927490236, 0.02456403160095215, 0.021854015350341798, 0.02177401542663574, 0.021496320724487306, 0.021608575820922852, 0.02160201644897461, 0.021555360794067384, 0.02164121627807617, 0.021782815933227537, 0.0217903995513916, 0.02159187126159668, 0.02166192054748535, 0.021525856018066405, 0.021429088592529295, 0.0215567684173584, 0.021586208343505858, 0.021822656631469727, 0.02158675193786621, 0.021660736083984375, 0.02158687973022461, 0.021536415100097656, 0.021534400939941405, 0.021486143112182617, 0.021398784637451172, 0.021528543472290038, 0.02140403175354004, 0.021469696044921875, 0.021526527404785157, 0.021428319931030275, 0.021492671966552735, 0.02096588706970215, 0.021331104278564453, 0.021471136093139647, 0.021302207946777344, 0.021519807815551757, 0.021359167098999023, 0.021407743453979493, 0.02155036735534668, 0.021412832260131836, 0.021433343887329103, 0.022036800384521483, 0.02150592041015625, 0.021352863311767577, 0.021425439834594728, 0.021913984298706054, 0.0216048641204834, 0.021774335861206053, 0.02151628875732422, 0.02147260856628418, 0.02147599983215332, 0.021385215759277345, 0.021479040145874023, 0.021416736602783204, 0.02134556770324707, 0.021399839401245117, 0.021452064514160155, 0.02131190490722656, 0.021491743087768553, 0.02146441650390625, 0.021473695755004883, 0.021414112091064454, 0.023469791412353516, 0.02245427131652832, 0.021838464736938477, 0.021755136489868165, 0.021746431350708008, 0.0216407356262207, 0.0214486083984375, 0.02144723129272461, 0.021387264251708983, 0.021421087265014647, 0.021735647201538085, 0.021481695175170897, 0.02259008026123047, 0.021381216049194338, 0.02156540870666504, 0.02151753616333008, 0.021469535827636718, 0.02147689628601074, 0.021435136795043944, 0.021344255447387696, 0.021480768203735352, 0.021452768325805664, 0.021368831634521485, 0.021477567672729493, 0.02171139144897461, 0.021761184692382814, 0.021977951049804687, 0.02142144012451172, 0.021419967651367187, 0.021502656936645506, 0.023023616790771483, 0.029345792770385744, 0.021269792556762694, 0.02227689552307129, 0.02152851104736328, 0.0215283203125, 0.021397823333740233, 0.021491647720336914, 0.021734527587890625, 0.021383167266845703, 0.02152332878112793, 0.021564735412597656, 0.021476255416870118, 0.02152943992614746, 0.021474239349365234, 0.02162483215332031, 0.021398591995239257, 0.021625247955322266, 0.02150864028930664, 0.021712448120117186, 0.021436384201049805, 0.021485759735107423, 0.022276384353637695, 0.021671072006225586, 0.02160931205749512, 0.02164249610900879, 0.02187321662902832, 0.02203468894958496, 0.022062719345092772, 0.02227436828613281, 0.02186240005493164, 0.021874975204467774, 0.021816255569458008, 0.02164201545715332, 0.021536512374877928, 0.021506303787231444, 0.021553152084350585, 0.021551103591918946, 0.021411487579345703, 0.021467168807983397, 0.022067007064819337, 0.021486080169677735, 
0.02142633628845215, 0.02143631935119629, 0.02188038444519043, 0.021551488876342773, 0.021737567901611327, 0.0217475528717041, 0.021766208648681642, 0.021569536209106444, 0.021609600067138673, 0.021697311401367186, 0.02143459129333496, 0.02141539192199707, 0.021475744247436524, 0.021401599884033205, 0.02137660789489746, 0.021477792739868166, 0.021358591079711914, 0.021452768325805664, 0.021749792098999025, 0.02161187171936035, 0.021571807861328125, 0.021380704879760744, 0.02143052864074707, 0.02119308853149414, 0.021497568130493163, 0.021547615051269533, 0.021692256927490234, 0.021364736557006835, 0.021339231491088868, 0.02140447998046875, 0.02125212860107422, 0.021540607452392578, 0.021356512069702148, 0.02129539108276367, 0.021259647369384765, 0.021309951782226562, 0.021403039932250977, 0.021192575454711916, 0.021405920028686524, 0.02168288040161133, 0.02154876708984375, 0.021350208282470702, 0.021311967849731446, 0.02156787109375, 0.021558528900146486, 0.021324159622192383, 0.02123401641845703, 0.021315263748168944, 0.02164102363586426, 0.02151238441467285, 0.021354623794555664, 0.02125129508972168, 0.021375616073608397, 0.021366783142089844, 0.021370431900024415, 0.021451200485229492, 0.021381120681762695, 0.021511199951171876, 0.021396448135375976, 0.021372928619384765, 0.021577119827270508, 0.021306047439575194, 0.02140764808654785, 0.02145484733581543, 0.021377023696899415, 0.021303295135498047, 0.021476831436157227, 0.0213591365814209, 0.021313407897949218, 0.021336320877075196, 0.021405567169189454, 0.02143846321105957, 0.02182963180541992, 0.021407743453979493, 0.02141119956970215, 0.021633888244628908, 0.021524255752563476, 0.021556480407714844, 0.021357280731201172, 0.02149331283569336, 0.021453279495239258, 0.021405696868896484, 0.021395456314086913, 0.021348352432250976, 0.021321311950683593, 0.021217376708984374, 0.021008384704589843, 0.02129292869567871, 0.021362815856933594, 0.02204876708984375, 0.02130534362792969, 0.02130086326599121, 0.021205568313598634, 0.021640960693359374, 0.02139891242980957, 0.021490367889404297, 0.021397504806518555, 0.021362911224365233, 0.021403423309326174, 0.0215285758972168, 0.02143846321105957, 0.021307392120361326, 0.021305280685424803, 0.021235488891601564, 0.02136911964416504, 0.021292032241821288, 0.021189407348632814, 0.02123347282409668, 0.021293472290039063, 0.021217279434204102, 0.021241695404052734, 0.021290912628173828, 0.021378719329833984, 0.021525312423706054, 0.022738527297973633, 0.021458976745605467, 0.021521631240844726, 0.021668384552001953, 0.021907392501831054, 0.021690847396850586, 0.021643264770507813, 0.022140928268432617, 0.021753856658935547, 0.02146428871154785, 0.02166659164428711, 0.021385215759277345, 0.021354496002197267, 0.021493759155273438, 0.02127872085571289, 0.021506271362304687, 0.021346080780029298, 0.021397184371948243, 0.02174345588684082, 0.023611871719360352, 0.021719039916992186, 0.022004032135009767, 0.022113983154296874, 0.021517568588256836, 0.02158361625671387, 0.02192460823059082, 0.02221900749206543, 0.021786687850952148, 0.021787967681884766, 0.021807584762573242, 0.021655712127685547, 0.021634559631347656, 0.021481248855590822, 0.02175868797302246, 0.02145305633544922, 0.021368640899658203, 0.021412448883056642, 0.02136662483215332, 0.02136675262451172, 0.021499359130859375, 0.02150662422180176, 0.02146099281311035, 0.021450815200805665, 0.021956544876098633, 0.021968896865844727, 0.021843967437744142, 0.0215482234954834, 0.02165433692932129, 0.02182143974304199, 0.021717056274414063, 
0.021639104843139648, 0.02167558479309082, 0.021750207901000976, 0.022726655960083008, 0.02177222442626953, 0.021966911315917968, 0.02162067222595215, 0.02134841537475586, 0.021495967864990233, 0.021454975128173827, 0.021384607315063475, 0.021754175186157226, 0.021356544494628905, 0.02142617607116699, 0.021409791946411134, 0.02151136016845703, 0.021662784576416017, 0.021845760345458983, 0.021344512939453126, 0.02179971122741699, 0.02166268730163574, 0.02169980812072754, 0.021326400756835937, 0.021420255661010742, 0.02134940719604492, 0.02133705520629883, 0.021298816680908203, 0.021457279205322265, 0.021370880126953123, 0.02148761558532715, 0.021447744369506836, 0.021539775848388672, 0.02147327995300293, 0.021358591079711914, 0.02124777603149414, 0.021307615280151366, 0.021327871322631836, 0.021289087295532225, 0.021237312316894533, 0.02146124839782715, 0.02134022331237793, 0.021401311874389647, 0.02194870376586914, 0.021448703765869142, 0.021469247817993163, 0.02160367965698242, 0.021527135848999023, 0.021493759155273438]",tokens/s,46.284212343808214,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File 
""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,3161.960448,4423.876608,0.0,4028.628992,3944.723968,s,1,10.405025390625,10.405025390625,0.0,10.405025390625,10.405025390625,10.405025390625,10.405025390625,[10.405025390625],,kWh,9.278183984999561e-05,1.022738812495303e-05,2.9278912311997418e-05,0.00013228814028694606,,MB,3216.65024,4763.615232,0.0,4353.687552,4305.05728,s,10,1.091167808532715,0.1091167808532715,0.0004448517049072606,0.10910725021362305,0.10960431518554688,0.10980559844970703,0.10996662506103516,"[0.11000688171386719, 0.10916985321044922, 0.10916105651855469, 0.10896675109863281, 0.10890582275390626, 0.10955958557128906, 0.1091534423828125, 0.1090610580444336, 0.10901763153076172, 0.10816572570800781]",tokens/s,2346.110268266081,kWh,3.258486512037279e-06,3.593257365275346e-07,2.1669801286445e-06,5.7847923772093135e-06,tokens/kWh,44253965.10488056,MB,3216.65024,4763.615232,0.0,4353.687552,4305.05984,s,10,21.80553564453125,2.180553564453125,0.02097974010962949,2.1801929931640625,2.2088744140625,2.2116095703125,2.2137976953125,"[2.156970703125, 2.16743212890625, 2.1854130859375, 2.18996337890625, 2.2082666015625, 2.2143447265625, 2.19798046875, 2.158267822265625, 2.174972900390625, 2.151923828125]",tokens/s,28.891746126768584,kWh,6.251471981838032e-05,6.894757265456508e-06,3.406832663735535e-05,0.00010347780372119218,tokens/kWh,608826.2190966626,,s,630,21.802990219116204,0.03460792098272415,0.0006658891403288806,0.034552591323852536,0.0352255615234375,0.035505592918395994,0.03711125492095947,"[0.03509654235839844, 0.03595951843261719, 0.035210529327392576, 0.034835166931152343, 0.03445062255859375, 0.03415110397338867, 0.03393548965454102, 0.03414435195922851, 0.034076576232910154, 0.03444262313842773, 0.03470195388793945, 0.03417462539672852, 0.034293758392333985, 0.03453916931152344, 0.03415420913696289, 0.03390768051147461, 0.033990657806396485, 0.03405619049072266, 0.03407462310791016, 0.03397836685180664, 0.033908447265625, 0.03399871826171875, 0.03419587326049805, 0.03399244689941406, 0.03463139343261719, 0.03472835159301758, 0.03432815933227539, 0.0341099853515625, 0.03376128005981445, 0.0343138542175293, 0.033948257446289064, 0.03386729431152344, 0.03376582336425781, 0.033990623474121094, 0.03388809585571289, 0.03425484848022461, 0.03367731094360352, 0.03400246429443359, 0.03439459228515625, 0.03425075149536133, 0.03394889450073242, 0.033987422943115235, 0.03379363250732422, 0.034143775939941404, 0.03374982452392578, 0.03466156768798828, 0.03387065505981445, 0.03401318359375, 0.03391692733764649, 0.03429507064819336, 0.03451279830932617, 0.03441337585449219, 0.034031105041503903, 0.03390476989746094, 0.034646400451660155, 0.03609395217895508, 0.03399660873413086, 0.034113792419433596, 0.03396207809448242, 0.03436243057250977, 0.033936161041259766, 
0.03416409683227539, 0.03404249572753906, 0.033922752380371096, 0.03433776092529297, 0.03377884674072266, 0.03417379379272461, 0.03391692733764649, 0.034095104217529294, 0.03455376052856445, 0.035139041900634764, 0.034988094329833984, 0.0345337905883789, 0.0342437744140625, 0.034579071044921875, 0.03435760116577148, 0.034170879364013675, 0.03415244674682617, 0.03437363052368164, 0.034473983764648435, 0.03499990463256836, 0.034547168731689455, 0.034648448944091796, 0.03439878463745117, 0.03417497634887695, 0.034342910766601564, 0.03405984115600586, 0.03398905563354492, 0.034218273162841796, 0.034145694732666015, 0.03409337615966797, 0.03409414291381836, 0.03392006301879883, 0.03439603042602539, 0.0349793930053711, 0.034476478576660155, 0.0344637451171875, 0.034080768585205076, 0.03424383926391601, 0.03412416076660156, 0.03412211227416992, 0.03400006484985352, 0.03408364868164063, 0.034430816650390626, 0.03420585632324219, 0.03439206314086914, 0.03450470352172851, 0.03434604644775391, 0.0343438720703125, 0.034945022583007815, 0.03455535888671875, 0.0343680305480957, 0.034605216979980466, 0.03431817626953125, 0.03422329711914063, 0.03417171096801758, 0.038128673553466795, 0.03480630493164062, 0.03482444763183594, 0.03446137619018555, 0.034327041625976565, 0.03402751922607422, 0.03420707321166992, 0.034441600799560546, 0.03404828643798828, 0.034103294372558594, 0.0355650863647461, 0.035081695556640625, 0.03469327926635742, 0.03470217514038086, 0.034557823181152345, 0.03460051345825195, 0.03414883041381836, 0.034154590606689454, 0.0343818244934082, 0.034854911804199216, 0.03466796875, 0.03447251129150391, 0.034484222412109376, 0.03455590438842773, 0.034710655212402346, 0.034253150939941406, 0.03412022399902344, 0.0339351692199707, 0.03395734405517578, 0.03386220932006836, 0.0338331184387207, 0.03466864013671875, 0.03408886337280274, 0.034938880920410156, 0.03616553497314453, 0.03512723159790039, 0.035084190368652346, 0.03486896133422852, 0.03422467041015625, 0.034164894104003904, 0.03409628677368164, 0.03440915298461914, 0.034516990661621096, 0.034353153228759765, 0.03438796615600586, 0.03469107055664063, 0.034830337524414064, 0.034559745788574216, 0.034516895294189456, 0.03556524658203125, 0.03461574554443359, 0.03441622543334961, 0.0347465934753418, 0.03712432098388672, 0.035794113159179686, 0.034700286865234374, 0.03456121444702148, 0.03482502365112305, 0.034740097045898435, 0.034680606842041016, 0.03462118530273438, 0.0347039680480957, 0.03428966522216797, 0.034447647094726565, 0.03426793670654297, 0.03435212707519531, 0.03504316711425781, 0.035305023193359375, 0.034802207946777346, 0.034887680053710936, 0.03566400146484375, 0.03505539321899414, 0.03467273712158203, 0.03537715148925781, 0.03451289749145508, 0.03461648178100586, 0.03460796737670899, 0.034680831909179685, 0.0351879997253418, 0.03484659194946289, 0.03484297561645508, 0.03463164901733398, 0.03426556777954102, 0.034078784942626957, 0.03398976135253906, 0.033917823791503904, 0.03422617721557617, 0.034369537353515625, 0.034772991180419925, 0.03456361770629883, 0.03477164840698242, 0.03490505599975586, 0.034991104125976565, 0.03879670333862305, 0.03533567810058594, 0.0346591682434082, 0.03478707122802734, 0.034330944061279296, 0.0346063346862793, 0.03451980972290039, 0.03462742233276367, 0.03484832000732422, 0.034780895233154294, 0.03472473526000976, 0.03473958587646484, 0.0345258560180664, 0.03461526489257812, 0.03456185531616211, 0.034276542663574217, 0.03456467056274414, 0.03443545532226563, 0.03443308639526367, 
0.03444073486328125, 0.03492659378051758, 0.03478371047973633, 0.03444351959228516, 0.03461503982543945, 0.03451084899902344, 0.03458047866821289, 0.034566143035888675, 0.034729312896728516, 0.034703296661376955, 0.0348076171875, 0.03496217727661133, 0.034923774719238285, 0.035213375091552736, 0.03538118362426758, 0.03506473541259766, 0.034935905456542966, 0.034724544525146485, 0.03443119812011719, 0.03478112030029297, 0.03448384094238281, 0.03514147186279297, 0.03490671920776367, 0.03527635192871094, 0.035624767303466795, 0.034840766906738284, 0.0347770881652832, 0.035833854675292966, 0.034551422119140626, 0.034689407348632816, 0.034639297485351564, 0.0348326416015625, 0.03500374221801758, 0.035044097900390626, 0.035154144287109376, 0.03490611267089844, 0.03464803314208984, 0.03485084915161133, 0.03470675277709961, 0.03522364807128906, 0.034676704406738285, 0.03496409606933594, 0.03654156875610352, 0.034880542755126955, 0.03480508804321289, 0.03857664108276367, 0.039120670318603515, 0.03547772979736328, 0.035065345764160157, 0.036252159118652344, 0.03515561676025391, 0.034939231872558596, 0.035098175048828124, 0.03520131301879883, 0.034938880920410156, 0.034785438537597656, 0.03502899169921875, 0.03494297790527344, 0.03540297698974609, 0.034892097473144534, 0.034705886840820314, 0.034715648651123046, 0.03491017532348633, 0.034504127502441403, 0.03457904052734375, 0.03451084899902344, 0.034441375732421876, 0.034786975860595704, 0.03572326278686523, 0.034955455780029294, 0.03479267120361328, 0.03472006225585938, 0.03451667022705078, 0.03495987319946289, 0.03466287994384765, 0.03441030502319336, 0.0344268798828125, 0.034720863342285156, 0.03499212646484375, 0.034783905029296874, 0.03461759948730469, 0.034715648651123046, 0.03663644790649414, 0.034318431854248044, 0.03423654556274414, 0.03424822235107422, 0.034327007293701174, 0.035743743896484374, 0.0349813117980957, 0.03508486557006836, 0.03509209442138672, 0.034703742980957034, 0.03487094497680664, 0.035350879669189456, 0.03534643173217773, 0.03541315078735351, 0.03526460647583008, 0.035396350860595706, 0.035339488983154296, 0.03519302368164062, 0.035885887145996095, 0.03549776077270508, 0.035512001037597656, 0.035766654968261716, 0.03545270538330078, 0.03541798400878906, 0.03645257568359375, 0.035020030975341794, 0.035074432373046874, 0.03521120071411133, 0.03520134353637695, 0.03535865783691406, 0.035495872497558596, 0.03512761688232422, 0.035181888580322264, 0.03532025527954102, 0.03533657455444336, 0.035026016235351565, 0.035149951934814454, 0.035219680786132815, 0.03542265701293945, 0.03548291015625, 0.03532255935668945, 0.035182624816894534, 0.03524937438964844, 0.035242782592773435, 0.03516630554199219, 0.035165313720703126, 0.03500316619873047, 0.03480780792236328, 0.03474147033691406, 0.03481884765625, 0.03463782501220703, 0.034560001373291016, 0.034548927307128906, 0.03493356704711914, 0.03480985641479492, 0.03451446533203125, 0.03480403137207031, 0.03469942474365234, 0.03495462417602539, 0.035043968200683596, 0.03493289566040039, 0.03481340789794922, 0.03489782333374023, 0.03485948944091797, 0.03466377639770508, 0.034996158599853514, 0.03510140609741211, 0.03519875335693359, 0.035409374237060545, 0.03462995147705078, 0.03457660675048828, 0.03436515045166016, 0.03453366470336914, 0.03482624053955078, 0.035003681182861325, 0.03593084716796875, 0.03477119827270508, 0.03476591873168945, 0.03486991882324219, 0.03469827270507812, 0.03465315246582031, 0.03475807952880859, 0.03452985763549805, 0.03474172973632812, 0.03506230545043945, 
0.03494841766357422, 0.03519081497192383, 0.03495126342773437, 0.03480137634277344, 0.034683582305908206, 0.03494620895385742, 0.034976768493652347, 0.03466035079956055, 0.034781185150146485, 0.03496345520019531, 0.0348671989440918, 0.03478732681274414, 0.03504870223999024, 0.034992511749267576, 0.03479385757446289, 0.03472348785400391, 0.034721439361572265, 0.03470748901367188, 0.03489449691772461, 0.03505753707885742, 0.03471516799926758, 0.03458623886108399, 0.034869598388671874, 0.035068641662597655, 0.03502070236206055, 0.03495062255859375, 0.035025440216064456, 0.035039039611816404, 0.03559401702880859, 0.03707926559448242, 0.03541196823120117, 0.03491244888305664, 0.035081825256347655, 0.034916767120361326, 0.03485081481933594, 0.03526041412353516, 0.03489542388916016, 0.035051166534423826, 0.034824897766113284, 0.03441468811035156, 0.034426433563232425, 0.034592254638671875, 0.03435820770263672, 0.03441049575805664, 0.03435449600219727, 0.034409183502197266, 0.03584000015258789, 0.035125022888183595, 0.03453155136108398, 0.03490611267089844, 0.034385921478271485, 0.03449446487426758, 0.034203647613525394, 0.03486265563964844, 0.034346462249755856, 0.034222270965576174, 0.034565086364746096, 0.03439708709716797, 0.03424256134033203, 0.03418320083618164, 0.03398332977294922, 0.0339733772277832, 0.03397097778320313, 0.034213985443115234, 0.03391507339477539, 0.03389763259887695, 0.033841888427734376, 0.03421366500854492, 0.03461920166015625, 0.03466713714599609, 0.034688766479492185, 0.034313377380371095, 0.03388678359985352, 0.034439456939697265, 0.03433606338500977, 0.033769439697265625, 0.033979393005371096, 0.03383779144287109, 0.0337716178894043, 0.03385232162475586, 0.034070526123046875, 0.03451084899902344, 0.03450470352172851, 0.035192832946777344, 0.03407177734375, 0.03403807830810547, 0.033831390380859375, 0.0339046401977539, 0.03376278305053711, 0.033944095611572266, 0.03393283081054688, 0.033931743621826174, 0.034598911285400394, 0.03465420913696289, 0.03409920120239258, 0.034164798736572265, 0.03379929733276367, 0.0338460807800293, 0.03393769454956055, 0.034035423278808596, 0.03392716979980469, 0.03393920135498047, 0.033820926666259764, 0.034618751525878906, 0.03434764862060547, 0.03470950317382813, 0.034523136138916014, 0.03405414581298828, 0.03476620864868164, 0.03478169631958008, 0.03407257461547852, 0.03406028747558594, 0.03402921676635742, 0.03434121704101562, 0.03446783828735352, 0.03423436737060547, 0.03403571319580078, 0.034086910247802735, 0.03407417678833008, 0.034165184020996095, 0.03404211044311523, 0.03380607986450195, 0.034104991912841796, 0.034027870178222654, 0.03464751815795898, 0.035576446533203125, 0.03469609451293945, 0.034511871337890625, 0.03500758361816406, 0.03757689666748047, 0.035991680145263674, 0.03477155303955078, 0.03431628799438476, 0.03403945541381836, 0.03410364913940429, 0.034293758392333985, 0.03417497634887695, 0.034411937713623046, 0.03487152099609375, 0.03424643325805664, 0.03402403259277344, 0.03400908660888672, 0.03389788818359375, 0.03394211196899414, 0.03400089645385742, 0.03410124969482422, 0.03391897583007813, 0.034301246643066406, 0.034157249450683595, 0.034395904541015626, 0.035299583435058596, 0.034170879364013675, 0.03416595077514648, 0.03697318267822266, 0.03438963317871094, 0.039381568908691406, 0.03473932647705078, 0.0350599365234375, 0.035256446838378905, 0.034990623474121095, 0.03497369766235352, 0.03488668823242187, 0.03453231811523438, 0.03417472076416016, 0.03397635269165039, 0.03385776138305664, 0.03382259368896484, 
0.034319553375244144, 0.033739391326904296, 0.03385139083862305, 0.03376115036010742, 0.03409561538696289, 0.03479347229003906, 0.034361343383789066, 0.033853408813476565, 0.03376278305053711, 0.03380691146850586, 0.033767425537109375, 0.03386163330078125, 0.0340450553894043, 0.03404422378540039, 0.03406905746459961, 0.03470131301879883, 0.0353372802734375, 0.034986686706542966, 0.034277023315429686, 0.034027679443359375, 0.033849342346191406, 0.033900993347167965, 0.03395174407958984, 0.03382463836669922, 0.03386175918579101, 0.033957279205322266, 0.035156574249267575, 0.03484463882446289, 0.03477916717529297, 0.03410678482055664, 0.033841758728027346, 0.033939582824707035, 0.03377139282226563, 0.033990657806396485, 0.03378176116943359, 0.03415654373168946, 0.033890304565429685, 0.03390566253662109, 0.03387078475952148, 0.03399875259399414, 0.03388022232055664, 0.03407254409790039, 0.03374460983276367, 0.03406012725830078, 0.033717758178710935, 0.033992897033691405, 0.034468639373779295, 0.03417702484130859, 0.03381657409667969, 0.03401068878173828, 0.034042304992675784, 0.03454969787597656, 0.03435504150390625, 0.03427145767211914, 0.034184638977050784, 0.03400140762329101, 0.03411500930786133, 0.03395660781860352, 0.03419075012207031, 0.03394303894042969, 0.03406742477416992, 0.03387187194824219, 0.03408835220336914, 0.034789985656738284, 0.03487948989868164, 0.0349409294128418, 0.03435520172119141, 0.03406643295288086]",tokens/s,28.895119140476183,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File 
""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: DeciLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. 
If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch 
benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in 
load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File 
""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report 
= launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 
'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,8154.529792,11251.089408,0.0,10848.567296,10616.027648,s,1,14.68530078125,14.68530078125,0.0,14.68530078125,14.68530078125,14.68530078125,14.68530078125,[14.68530078125],,kWh,0.00021833083696667468,2.4074120734856615e-05,6.566449697599508e-05,0.0003080694546775264,,MB,3835.26912,11672.61696,0.0,11255.414784,11070.470656,s,10,3.6982595825195315,0.36982595825195314,0.0016546003656661222,0.37028662109375,0.3710934844970703,0.3717191970825195,0.3722197671508789,"[0.36617440795898437, 0.3680224609375, 0.3702292175292969, 0.37034402465820315, 0.37093032836914064, 0.37234490966796874, 0.36918389892578124, 0.37095443725585936, 0.3693240661621094, 0.3707518310546875]",tokens/s,692.2174993070487,kWh,1.082723246219145e-05,1.194031689604212e-06,7.155633296518566e-06,1.9176897448314228e-05,tokens/kWh,13349396.099654485,MB,3839.524864,11674.714112,0.0,11257.511936,11070.473216,s,10,28.627691406249998,2.8627691406249998,0.004573717331479009,2.86322998046875,2.8682132568359378,2.868764221191406,2.8692049926757814,"[2.8571474609375, 2.8563994140625, 2.856251220703125, 2.86196923828125, 2.863158203125, 2.865311767578125, 2.869315185546875, 2.8633017578125, 2.8680908203125, 2.866746337890625]",tokens/s,22.00666449347216,kWh,8.378808828239136e-05,9.240122668859486e-06,5.583166709328138e-05,0.00014885987804453218,tokens/kWh,423216.79170765704,,s,630,28.624257297515847,0.04543532904367598,0.00041357450996904496,0.0454036636352539,0.04586983299255371,0.04597235126495361,0.04726500995635987,"[0.047309024810791016, 0.045224479675292965, 0.04496588897705078, 0.04491420745849609, 0.04480819320678711, 0.04472675323486328, 0.044813472747802736, 0.045310592651367186, 0.04545558547973633, 0.04493875122070313, 0.04509900665283203, 0.04571376037597656, 0.04548828887939453, 0.04513552093505859, 0.04505753707885742, 0.0453210563659668, 0.045172737121582034, 0.045004512786865236, 0.04508700942993164, 0.04509468841552734, 0.04541241455078125, 0.04529119873046875, 0.045136417388916016, 0.045423999786376956, 0.04542307281494141, 0.04512982559204102, 0.044830398559570314, 0.04488224029541016, 0.04543075180053711, 0.04526851272583008, 0.04514409637451172, 0.045142303466796874, 0.045485343933105465, 0.04574262237548828, 0.04534255981445313, 0.045537822723388674, 0.04597555160522461, 0.04566016006469727, 0.04515430450439453, 0.045010303497314455, 0.04527286529541016, 0.04541078567504883, 0.04518320083618164, 0.045334686279296876, 0.04568678283691406, 0.045412353515625, 0.045061599731445315, 0.04527276611328125, 0.04563571166992188, 0.045650657653808595, 0.04533449554443359, 0.04523971176147461, 0.0453752326965332, 0.04546416091918945, 0.045195552825927736, 0.04550041580200195, 0.04612422561645508, 
0.045876033782958986, 0.04553286361694336, 0.045455680847167966, 0.04574399948120117, 0.04548732757568359, 0.045539520263671876, 0.047341087341308596, 0.045437694549560544, 0.04511743927001953, 0.04485286331176758, 0.04464678573608399, 0.044843006134033206, 0.044900161743164066, 0.04527238464355469, 0.04525350570678711, 0.045071582794189456, 0.04491312026977539, 0.04499283218383789, 0.045287647247314454, 0.04513075256347656, 0.04475519943237305, 0.045254302978515626, 0.04518783950805664, 0.044930400848388674, 0.045133792877197265, 0.04535583877563477, 0.04543910217285156, 0.04516236877441406, 0.04584243011474609, 0.04541439819335937, 0.045400062561035154, 0.04506412887573242, 0.04502441787719726, 0.044993438720703126, 0.0451503677368164, 0.04547107315063476, 0.04520329666137695, 0.0451671028137207, 0.04547356796264648, 0.04530771255493164, 0.045332542419433595, 0.04514252853393555, 0.045195262908935545, 0.04549763107299805, 0.04532025527954102, 0.045159072875976564, 0.0452393913269043, 0.04525353622436523, 0.04563148880004883, 0.04547135925292969, 0.04536751937866211, 0.04560070419311523, 0.04538755035400391, 0.04552339172363281, 0.04521548843383789, 0.045754398345947266, 0.045699295043945314, 0.04552425765991211, 0.04573052978515625, 0.04543657684326172, 0.04517923355102539, 0.04539958572387695, 0.045935230255126955, 0.045604705810546875, 0.045397598266601565, 0.0454285774230957, 0.045521472930908205, 0.045375038146972656, 0.04590227127075195, 0.04748892974853516, 0.04540367889404297, 0.0450683822631836, 0.044939743041992185, 0.04502511978149414, 0.04489056015014648, 0.044923648834228516, 0.044923873901367185, 0.0450621452331543, 0.044943359375, 0.044951553344726565, 0.04500889587402344, 0.04515225601196289, 0.044902400970458986, 0.04513792037963867, 0.045313312530517576, 0.045486175537109375, 0.04537142562866211, 0.045162494659423826, 0.045353569030761716, 0.045608478546142577, 0.045555809020996096, 0.045395713806152344, 0.04534540939331055, 0.04518707275390625, 0.045058048248291016, 0.045199104309082035, 0.04492108917236328, 0.045112510681152344, 0.04509980773925781, 0.04521136093139649, 0.04497235107421875, 0.045166305541992184, 0.0451927375793457, 0.045246368408203126, 0.04507036972045898, 0.04531814575195312, 0.04573062515258789, 0.045484031677246094, 0.04514963150024414, 0.045365825653076175, 0.04583628845214844, 0.04586086273193359, 0.04536832046508789, 0.045644351959228516, 0.04570505523681641, 0.04544982528686523, 0.04508262252807617, 0.04519756698608399, 0.04558822250366211, 0.04544307327270508, 0.04514166259765625, 0.045218017578125, 0.04515852737426758, 0.045707263946533204, 0.04545228958129883, 0.0454257926940918, 0.04573481750488281, 0.04588438415527344, 0.04556595230102539, 0.045484031677246094, 0.04558233642578125, 0.04554956817626953, 0.047889728546142575, 0.04538243103027344, 0.0448359375, 0.04494019317626953, 0.04494131088256836, 0.04470988845825195, 0.04497612762451172, 0.04460543823242188, 0.04507033538818359, 0.044974079132080076, 0.04526265716552735, 0.04503366470336914, 0.045000705718994144, 0.04487168121337891, 0.045262847900390625, 0.0450203857421875, 0.045514686584472656, 0.04549923324584961, 0.04558230209350586, 0.045413536071777345, 0.04552793502807617, 0.04605542373657227, 0.04551424026489258, 0.04513552093505859, 0.04508041763305664, 0.044983009338378906, 0.04535734558105469, 0.04504275131225586, 0.0449600944519043, 0.04481459045410156, 0.04547020721435547, 0.0450596809387207, 0.04504560089111328, 0.04523382568359375, 0.04556876754760742, 
0.04533782577514649, 0.04537212753295899, 0.04580470275878906, 0.045644702911376955, 0.04567036819458008, 0.045797409057617186, 0.04595663833618164, 0.04597094345092773, 0.045867774963378904, 0.04591024017333984, 0.045778942108154294, 0.045592575073242186, 0.04558412933349609, 0.04575872039794922, 0.04549017715454102, 0.04534272003173828, 0.04526489639282227, 0.045400062561035154, 0.04537753677368164, 0.04542451095581055, 0.04542272186279297, 0.045552734375, 0.045775775909423826, 0.045676513671875, 0.04546559906005859, 0.045828128814697264, 0.04596886444091797, 0.04590860748291015, 0.04684598541259766, 0.04510924911499024, 0.04513177490234375, 0.04493107223510742, 0.045271041870117185, 0.045703327178955075, 0.044805023193359376, 0.044757537841796875, 0.04507075119018555, 0.04492083358764649, 0.04490348815917969, 0.045150142669677734, 0.045292545318603515, 0.045090816497802735, 0.04508371353149414, 0.04532524871826172, 0.04574617767333984, 0.04553113555908203, 0.04520140838623047, 0.045244415283203124, 0.045676544189453126, 0.045674495697021485, 0.045623294830322264, 0.045231616973876954, 0.04550297546386719, 0.04551270294189453, 0.045184608459472655, 0.045152671813964845, 0.0450682258605957, 0.0452619514465332, 0.04510201644897461, 0.04499251174926758, 0.04537753677368164, 0.04556595230102539, 0.045430015563964844, 0.04546384048461914, 0.045653728485107424, 0.04565475082397461, 0.045583774566650394, 0.04549091339111328, 0.045485984802246096, 0.04597350311279297, 0.04565983963012695, 0.04578153610229492, 0.04602243041992188, 0.04588947296142578, 0.0456800308227539, 0.045709983825683594, 0.045699073791503904, 0.045385726928710936, 0.04540351867675781, 0.04536137771606445, 0.0452567024230957, 0.04541072082519531, 0.04539187240600586, 0.04569833755493164, 0.04564451217651367, 0.04554713439941406, 0.04571078491210938, 0.04586966323852539, 0.04591036987304688, 0.04545945739746094, 0.04558147048950195, 0.04668374252319336, 0.0451011848449707, 0.04490643310546875, 0.04488768005371094, 0.04476976013183594, 0.04498166275024414, 0.044956321716308596, 0.04470364761352539, 0.04519094467163086, 0.045128097534179686, 0.04523427200317383, 0.04538991928100586, 0.045176734924316404, 0.04477542495727539, 0.04514121627807617, 0.0458691520690918, 0.04557017517089844, 0.04498284912109375, 0.04555980682373047, 0.04592038345336914, 0.04559244918823242, 0.045203006744384766, 0.04551724624633789, 0.04554751968383789, 0.045553665161132816, 0.04527308654785156, 0.045211647033691404, 0.04511743927001953, 0.04530614471435547, 0.045163265228271486, 0.045146625518798826, 0.04553776168823242, 0.04557823944091797, 0.04544281768798828, 0.04531430435180664, 0.045186752319335936, 0.0460843505859375, 0.04573603057861328, 0.04578822326660156, 0.045884319305419925, 0.04575174331665039, 0.045711936950683596, 0.045873153686523435, 0.04586102294921875, 0.04564339065551758, 0.04547769546508789, 0.045582752227783206, 0.04554342269897461, 0.0452751350402832, 0.04532428741455078, 0.04584563064575195, 0.04556889724731445, 0.04557376098632813, 0.04528985595703125, 0.045508289337158205, 0.04571958541870117, 0.0453983039855957, 0.04581785583496094, 0.04619468688964844, 0.045932769775390625, 0.04594627380371094, 0.04585846328735352, 0.04611936187744141, 0.04737068939208984, 0.04564585494995117, 0.04515225601196289, 0.044875423431396486, 0.045088577270507815, 0.045019680023193356, 0.04529689788818359, 0.04491750335693359, 0.04486147308349609, 0.045428703308105466, 0.04528332901000977, 0.045246463775634765, 0.04517068862915039, 
0.045146110534667966, 0.04540988922119141, 0.04557660675048828, 0.04560486221313476, 0.04595097732543945, 0.04553644943237305, 0.04530464172363281, 0.04519116973876953, 0.04548198318481445, 0.04582710266113281, 0.04559062576293945, 0.04513241577148437, 0.0454207992553711, 0.045328384399414064, 0.045211647033691404, 0.04494540786743164, 0.0452935676574707, 0.045434879302978515, 0.04524851226806641, 0.045530719757080076, 0.04568105697631836, 0.04576870346069336, 0.04527228927612305, 0.04539616012573242, 0.045873760223388675, 0.04605952072143555, 0.04588521575927734, 0.04567267227172851, 0.045784576416015625, 0.045623809814453124, 0.045932544708251956, 0.0461578254699707, 0.04573798370361328, 0.045721408843994144, 0.045583808898925784, 0.04552511978149414, 0.045755008697509765, 0.045649921417236325, 0.04518064117431641, 0.04545561599731445, 0.04574211120605469, 0.045625343322753906, 0.045295265197753905, 0.04560246276855469, 0.04589433670043945, 0.045760353088378905, 0.045746463775634766, 0.04591132736206055, 0.04609468841552734, 0.046029056549072266, 0.0481929931640625, 0.04533712005615234, 0.04491782379150391, 0.044882209777832034, 0.04476134490966797, 0.04469392013549805, 0.04480409622192383, 0.045131649017333984, 0.045243934631347654, 0.0451835823059082, 0.04538531112670898, 0.04519952011108398, 0.045871360778808594, 0.045049854278564457, 0.04487097549438476, 0.04520211029052734, 0.04604927825927734, 0.04546284866333008, 0.045100894927978516, 0.04509756851196289, 0.04541462326049805, 0.04557372665405274, 0.045257152557373045, 0.045350910186767575, 0.04538729476928711, 0.04520934295654297, 0.04500316619873047, 0.044964256286621096, 0.045518753051757815, 0.04550156784057617, 0.04557503890991211, 0.04545241546630859, 0.04569174575805664, 0.04548611068725586, 0.04547174453735352, 0.04551196670532227, 0.045896415710449216, 0.045606910705566404, 0.045155616760253904, 0.045353694915771486, 0.04593868637084961, 0.045666015625, 0.04537577438354492, 0.04552864074707031, 0.04569747161865234, 0.045590465545654296, 0.045109310150146485, 0.045049854278564457, 0.045601856231689455, 0.045384449005126955, 0.04519750213623047, 0.04540364837646484, 0.045703678131103515, 0.04578684616088867, 0.04544300842285156, 0.04561955261230469, 0.04567859268188477, 0.04554956817626953, 0.045473377227783204, 0.04585718536376953, 0.04580934524536133, 0.04593900680541992, 0.04578713607788086, 0.04715724945068359, 0.04528073501586914, 0.04516304016113281, 0.0450123519897461, 0.04493747329711914, 0.04482851028442383, 0.04495004653930664, 0.04522927856445313, 0.045093151092529295, 0.04486809539794922, 0.04513792037963867, 0.04557619094848633, 0.04535049438476563, 0.044978591918945314, 0.04499577713012695, 0.04588627243041992, 0.04583200073242188, 0.0454453125, 0.045158401489257816, 0.04532223892211914, 0.045412353515625, 0.04571478271484375, 0.04534281539916992, 0.04535500717163086, 0.04529619216918945, 0.0455079345703125, 0.04522870254516602, 0.045127616882324216, 0.045045345306396485, 0.045468128204345704, 0.04521923065185547, 0.04538390350341797, 0.045450721740722656, 0.04533497619628906, 0.04508835220336914, 0.04518809509277344, 0.045578113555908205, 0.04563353729248047, 0.04540150451660156, 0.04547439956665039, 0.045699073791503904, 0.045639041900634766, 0.04554406356811523, 0.045686367034912106, 0.04603903961181641, 0.045838558197021484, 0.045803489685058596, 0.04560611343383789, 0.04590431976318359, 0.04566614532470703, 0.04567932891845703, 0.04539801788330078, 0.04595711898803711, 0.045758174896240233, 
0.04585062408447266, 0.04585100936889648, 0.04572476959228516, 0.04605830383300781, 0.046216896057128906, 0.04610899353027344, 0.045862911224365234, 0.04609843063354492, 0.046243839263916016, 0.04822880172729492, 0.04569430541992187, 0.04546844863891602, 0.04509846496582031, 0.044848670959472654, 0.04521686553955078, 0.04496169662475586, 0.044953567504882816, 0.0451297607421875, 0.045284671783447264, 0.04522844696044922, 0.04520486450195312, 0.04523715209960937, 0.04517635345458984, 0.045958751678466796, 0.04552179336547851, 0.045774848937988284, 0.04552659225463867, 0.04566409683227539, 0.04521635055541992, 0.04544102478027344, 0.04584214401245117, 0.045558048248291017, 0.045483230590820316, 0.04514057540893555, 0.045373470306396484, 0.04542889785766602, 0.04524236679077148, 0.04499647903442383, 0.0450909423828125, 0.045373184204101566, 0.04518118286132813, 0.04497817611694336, 0.04539542388916016, 0.04550048065185547, 0.045265182495117184, 0.04528966522216797, 0.04564534378051758, 0.045838817596435544, 0.04553932952880859, 0.04506825637817383, 0.04534684753417969, 0.04601641464233398, 0.04570259094238281, 0.04535516738891601, 0.045617664337158206, 0.04563705444335937, 0.04569760131835938, 0.04539123153686524, 0.04563827133178711, 0.04569926452636719, 0.045674304962158206, 0.045321823120117184, 0.045353534698486325, 0.04562876892089844, 0.04565411376953125, 0.04550044631958008, 0.04562777709960938, 0.04585062408447266, 0.045693984985351564, 0.04592947387695313, 0.045985950469970706, 0.04599971389770508]",tokens/s,22.009304676515537,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 2.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 2.12 MiB is free. Process 58850 has 14.74 GiB memory in use. Of the allocated memory 14.26 GiB is allocated by PyTorch, and 386.46 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", 
line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1284, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1079, in forward layer_outputs = decoder_layer( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 816, in forward hidden_states = self.mlp(hidden_states) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 734, in forward final_hidden_states.index_add_(0, top_x, current_hidden_states.to(hidden_states.dtype)) RuntimeError: CUDA error: invalid configuration argument CUDA kernel errors might be asynchronously reported at some other API call, so the stacktrace below might be incorrect. For debugging consider passing CUDA_LAUNCH_BLOCKING=1 Compile with `TORCH_USE_CUDA_DSA` to enable device-side assertions. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1582.342144,1596.915712,0.0,1201.668096,1189.151232,s,1,8.6505029296875,8.6505029296875,0.0,8.6505029296875,8.6505029296875,8.6505029296875,8.6505029296875,[8.6505029296875],,kWh,3.685549087500135e-05,4.0580436646186965e-06,1.1927509542004194e-05,5.284104408162424e-05,,MB,1563.049984,1794.048,0.0,1384.12032,1351.367168,s,10,0.4121473541259766,0.04121473541259766,0.0002022758176152118,0.04114734268188476,0.04153757705688477,0.041589892578124996,0.04163174499511719,"[0.04164220809936523, 0.04098281478881836, 0.041086112976074216, 0.04125983810424805, 0.04104713439941406, 0.041223487854003905, 0.04108512115478516, 0.04152595138549805, 0.04118703842163086, 0.04110764694213867]",tokens/s,6211.370701211666,kWh,1.2486447791132654e-06,1.3770349550795996e-07,8.256404990683729e-07,2.211988773689598e-06,tokens/kWh,115732956.26315133,MB,1572.442112,1835.99104,0.0,1426.06336,1407.548416,s,10,10.61557373046875,1.061557373046875,0.005552198334380235,1.0607288208007812,1.0706404907226563,1.0711919494628908,1.071633116455078,"[1.0594141845703124, 1.0570184326171874, 1.0705179443359376, 1.06061181640625, 1.0608458251953126, 1.0575704345703125, 1.071743408203125, 1.0526453857421876, 1.062847900390625, 
1.0623583984375]",tokens/s,59.34676881304852,kWh,3.098585049172095e-05,3.4171700681130072e-06,1.5428455837531704e-05,4.983147639736567e-05,tokens/kWh,1264261.1568966173,,s,630,10.611997209548951,0.016844440015157062,0.0002947726225189542,0.01678551959991455,0.017162866401672364,0.01731639394760132,0.017957598571777342,"[0.017451072692871095, 0.017297407150268555, 0.017342464447021484, 0.01744895935058594, 0.01720684814453125, 0.017137151718139648, 0.017303680419921873, 0.017027904510498047, 0.01723353576660156, 0.0169168643951416, 0.016577983856201173, 0.016558048248291015, 0.016688800811767577, 0.016571327209472655, 0.016566272735595702, 0.01682009506225586, 0.016686399459838866, 0.016845632553100585, 0.016680959701538087, 0.016692256927490233, 0.016493024826049803, 0.016673023223876954, 0.016505088806152344, 0.01654560089111328, 0.016789695739746095, 0.016633440017700195, 0.016837024688720705, 0.016939008712768554, 0.016987327575683595, 0.016929599761962892, 0.016754079818725585, 0.016622047424316406, 0.016738431930541992, 0.0167587833404541, 0.016670719146728515, 0.016709632873535156, 0.016766592025756834, 0.016744384765625, 0.016711679458618164, 0.016721887588500975, 0.01676131248474121, 0.016676000595092774, 0.016565088272094727, 0.016502784729003905, 0.016496639251708984, 0.01643712043762207, 0.017168512344360353, 0.016590368270874022, 0.017137247085571287, 0.016565120697021485, 0.016664287567138673, 0.016777503967285157, 0.016833536148071288, 0.016686080932617187, 0.016695295333862305, 0.016498239517211914, 0.016601280212402345, 0.016535423278808595, 0.01654207992553711, 0.0186079044342041, 0.01702729606628418, 0.01662156867980957, 0.016537599563598633, 0.016503583908081054, 0.016575647354125977, 0.016554847717285156, 0.016551008224487306, 0.01651395225524902, 0.016324607849121094, 0.016479583740234376, 0.016461471557617187, 0.01647702407836914, 0.01651113510131836, 0.016631296157836914, 0.01672809600830078, 0.01696201515197754, 0.01697996711730957, 0.016798944473266603, 0.01676144027709961, 0.0167524471282959, 0.016743904113769532, 0.01680476760864258, 0.016920576095581053, 0.01657151985168457, 0.01662656021118164, 0.016687103271484375, 0.016846847534179688, 0.016939008712768554, 0.01695884895324707, 0.01686355209350586, 0.016666431427001954, 0.016719808578491212, 0.016720447540283203, 0.01716223907470703, 0.016729663848876954, 0.01656262397766113, 0.016525312423706053, 0.016654144287109374, 0.016635391235351564, 0.016847551345825194, 0.01699737548828125, 0.016933696746826172, 0.016909631729125976, 0.016720703125, 0.016834592819213866, 0.016748575210571288, 0.017229824066162108, 0.016768896102905273, 0.016633663177490234, 0.016742719650268554, 0.016685056686401366, 0.016654016494750977, 0.016572351455688476, 0.01679782485961914, 0.016810239791870116, 0.016915840148925783, 0.016792192459106445, 0.017161920547485353, 0.016748287200927733, 0.016734207153320312, 0.016648767471313476, 0.016643104553222657, 0.016561119079589844, 0.017476640701293945, 0.017296255111694334, 0.01794451141357422, 0.017136831283569336, 0.01717945671081543, 0.017006591796875, 0.017069536209106444, 0.016922399520874022, 0.016787967681884765, 0.016718080520629883, 0.016755807876586915, 0.01679657554626465, 0.016773120880126953, 0.016802976608276367, 0.01673097610473633, 0.016815935134887695, 0.01702112007141113, 0.016847936630249024, 0.01666099166870117, 0.016709407806396483, 0.016681631088256835, 0.016644096374511717, 0.018257568359375, 0.019118431091308594, 0.01719478416442871, 0.017072351455688476, 
0.017295360565185547, 0.017141632080078125, 0.016875232696533203, 0.01685696029663086, 0.01693503952026367, 0.01691484832763672, 0.016858848571777343, 0.016818464279174803, 0.01671500778198242, 0.016831167221069337, 0.01713145637512207, 0.018671743392944334, 0.01787017631530762, 0.017562271118164063, 0.017112640380859374, 0.016931264877319337, 0.01722127914428711, 0.016879968643188477, 0.01690345573425293, 0.016857376098632814, 0.016716224670410156, 0.01686028861999512, 0.016845695495605467, 0.016742399215698242, 0.016725215911865234, 0.016838464736938476, 0.016762943267822267, 0.016718751907348634, 0.01678335952758789, 0.016811391830444337, 0.016959104537963867, 0.016870399475097657, 0.016831935882568358, 0.016806463241577148, 0.016734207153320312, 0.016711679458618164, 0.016660255432128908, 0.017047775268554687, 0.016889312744140624, 0.01686787223815918, 0.017353471755981446, 0.017210752487182616, 0.01717737579345703, 0.016963136672973632, 0.016852800369262694, 0.016831104278564452, 0.01692803192138672, 0.01665865516662598, 0.01672857666015625, 0.016793344497680666, 0.01693712043762207, 0.016992223739624022, 0.016904319763183594, 0.016969280242919924, 0.017099199295043947, 0.017080320358276366, 0.016892927169799805, 0.016773183822631835, 0.016613471984863282, 0.016587615966796875, 0.016647647857666016, 0.016623584747314454, 0.016553728103637696, 0.016537919998168945, 0.016626176834106447, 0.016721920013427736, 0.016703487396240235, 0.017117183685302736, 0.016946367263793945, 0.016955839157104493, 0.016781696319580076, 0.016842111587524414, 0.01670003128051758, 0.016695295333862305, 0.01661270332336426, 0.016765600204467775, 0.01696710395812988, 0.016869951248168945, 0.016885759353637696, 0.016709632873535156, 0.0167106876373291, 0.016724960327148437, 0.01666662406921387, 0.01658399963378906, 0.016628416061401367, 0.01665433692932129, 0.01660518455505371, 0.017032224655151366, 0.0168253116607666, 0.0169671688079834, 0.016806047439575197, 0.016836959838867186, 0.01687321662902832, 0.0172359676361084, 0.016883935928344727, 0.01679145622253418, 0.01678335952758789, 0.01681011199951172, 0.016683008193969725, 0.01677516746520996, 0.016932159423828124, 0.016926496505737305, 0.016823200225830077, 0.01686387252807617, 0.016883712768554687, 0.01677052879333496, 0.016775711059570313, 0.016723968505859374, 0.01682431983947754, 0.01679974365234375, 0.016901920318603516, 0.01700864028930664, 0.01696995162963867, 0.01699827194213867, 0.017452928543090822, 0.01718092727661133, 0.016957439422607423, 0.016793279647827147, 0.016757055282592772, 0.016690303802490234, 0.01674025535583496, 0.016690143585205076, 0.016697343826293946, 0.016676864624023437, 0.017032480239868163, 0.017193695068359376, 0.01687049674987793, 0.016724063873291017, 0.016806720733642578, 0.01682431983947754, 0.01680998420715332, 0.017360895156860352, 0.017012224197387696, 0.016830976486206056, 0.016864831924438477, 0.01696988868713379, 0.01690140724182129, 0.016843679428100587, 0.017008447647094728, 0.01709699249267578, 0.017082368850708008, 0.0168407039642334, 0.016713727951049806, 0.016695295333862305, 0.016752639770507814, 0.01679897689819336, 0.0168721923828125, 0.0169881591796875, 0.01683865547180176, 0.016904191970825197, 0.016815488815307617, 0.016633823394775392, 0.016732831954956055, 0.01656831932067871, 0.01681203269958496, 0.016695295333862305, 0.01658060836791992, 0.01659596824645996, 0.016638528823852538, 0.01656262397766113, 0.016623615264892578, 0.016632863998413086, 0.016630752563476563, 0.017035039901733398, 
0.01665660858154297, 0.01661337661743164, 0.01661337661743164, 0.01660518455505371, 0.016677984237670897, 0.016710559844970704, 0.016738304138183592, 0.01687084770202637, 0.01699897575378418, 0.016915935516357422, 0.016595487594604493, 0.017386816024780274, 0.01676767921447754, 0.01663711929321289, 0.01654457664489746, 0.01667647933959961, 0.017084096908569334, 0.017472192764282225, 0.01714995193481445, 0.016780799865722656, 0.016638463973999023, 0.016817663192749025, 0.01654550361633301, 0.016712480545043946, 0.01670252799987793, 0.016652416229248047, 0.016610111236572266, 0.01677926445007324, 0.016563552856445313, 0.01661404800415039, 0.01765171241760254, 0.016590848922729492, 0.016531455993652345, 0.016547840118408205, 0.016600128173828124, 0.0165283203125, 0.016530847549438475, 0.01652387237548828, 0.01656831932067871, 0.016573728561401366, 0.016622304916381836, 0.01652236747741699, 0.01661427116394043, 0.016621183395385743, 0.016637632369995117, 0.016906335830688478, 0.017150367736816406, 0.017219776153564452, 0.01697750473022461, 0.016859136581420898, 0.01674870491027832, 0.01677337646484375, 0.016680608749389647, 0.016644447326660157, 0.016742399215698242, 0.01666662406921387, 0.017180543899536133, 0.017081663131713866, 0.016775999069213867, 0.016821504592895508, 0.016838464736938476, 0.01674336051940918, 0.01681407928466797, 0.01729030418395996, 0.01687443161010742, 0.017922048568725587, 0.017962944030761718, 0.017193023681640624, 0.01722572708129883, 0.01738137626647949, 0.016969728469848632, 0.016934911727905275, 0.018257568359375, 0.017338720321655274, 0.018175199508666993, 0.01744156837463379, 0.017542463302612304, 0.017328832626342775, 0.017270784378051757, 0.01712656021118164, 0.017015199661254882, 0.016795904159545898, 0.016736064910888672, 0.017590175628662108, 0.01686895942687988, 0.01683340835571289, 0.01686275291442871, 0.01694748878479004, 0.016906015396118163, 0.0169005126953125, 0.01682156753540039, 0.017078975677490234, 0.017127424240112304, 0.01694476890563965, 0.016777599334716797, 0.01681782341003418, 0.0167541446685791, 0.01681702423095703, 0.01677926445007324, 0.016979135513305665, 0.016814943313598632, 0.01694921684265137, 0.017285120010375975, 0.017466367721557616, 0.017293664932250978, 0.017140384674072265, 0.016879615783691407, 0.01703321647644043, 0.017006591796875, 0.017008319854736328, 0.016900415420532226, 0.016743743896484375, 0.017015487670898437, 0.016602975845336914, 0.01669264030456543, 0.016739072799682616, 0.01672985649108887, 0.016734464645385742, 0.016562175750732423, 0.01662156867980957, 0.016611328125, 0.016620800018310546, 0.016610048294067384, 0.016682815551757813, 0.016639999389648438, 0.016533695220947265, 0.016504512786865235, 0.016662847518920897, 0.01666489601135254, 0.01659516716003418, 0.016832735061645506, 0.016553567886352538, 0.01653398323059082, 0.016618623733520507, 0.016832832336425782, 0.016665151596069336, 0.01658470344543457, 0.016664575576782227, 0.01684480094909668, 0.016701440811157226, 0.01678303909301758, 0.01663167953491211, 0.016662975311279297, 0.016668479919433595, 0.0165762882232666, 0.01668943977355957, 0.016667999267578126, 0.01648259162902832, 0.01657244873046875, 0.016601343154907227, 0.01662518310546875, 0.016603839874267577, 0.016564224243164064, 0.01657651138305664, 0.016479711532592773, 0.0165949764251709, 0.016429567337036134, 0.0165150089263916, 0.016563743591308595, 0.016546239852905275, 0.01658684730529785, 0.016486400604248046, 0.016728063583374024, 0.01696476745605469, 0.016734880447387697, 
0.01664224052429199, 0.016639999389648438, 0.016734207153320312, 0.01668035125732422, 0.01697443199157715, 0.017004480361938478, 0.01674553680419922, 0.016666976928710938, 0.016740447998046876, 0.016773183822631835, 0.01672652816772461, 0.016781312942504883, 0.016699392318725585, 0.016760223388671874, 0.016706144332885742, 0.01675468826293945, 0.016752479553222656, 0.016826528549194336, 0.01679088020324707, 0.016910560607910158, 0.016717920303344725, 0.016873823165893555, 0.016855039596557618, 0.01680499267578125, 0.01682111930847168, 0.017536735534667967, 0.017137567520141603, 0.01711680030822754, 0.01692310333251953, 0.01679132843017578, 0.01684707260131836, 0.016873151779174804, 0.016877887725830078, 0.016928543090820314, 0.016996383666992188, 0.01693244743347168, 0.016800352096557617, 0.016756032943725584, 0.016765056610107423, 0.01681056022644043, 0.016893951416015626, 0.017053152084350588, 0.01720944023132324, 0.017316287994384765, 0.017229663848876954, 0.017062047958374023, 0.016963584899902344, 0.016957439422607423, 0.01697737693786621, 0.016949792861938477, 0.017065568923950194, 0.01710483169555664, 0.016903999328613282, 0.01678767967224121, 0.01681657600402832, 0.016982015609741212, 0.0170700798034668, 0.016920576095581053, 0.016910335540771485, 0.016903392791748045, 0.01680668830871582, 0.01684480094909668, 0.017215360641479494, 0.01670297622680664, 0.01661734390258789, 0.016546560287475587, 0.016672479629516603, 0.016572160720825194, 0.016679328918457033, 0.016694528579711914, 0.016683839797973634, 0.0167445125579834, 0.016752639770507814, 0.016517120361328123, 0.017156095504760743, 0.01731328010559082, 0.01665279960632324, 0.016615423202514648, 0.016730112075805666, 0.016581823348999023, 0.016572799682617188, 0.017783231735229492, 0.0166146240234375, 0.01653619194030762, 0.016703647613525392, 0.016515071868896485, 0.01661248016357422, 0.016515104293823243, 0.016530271530151366, 0.01681135940551758, 0.016821920394897463, 0.016870399475097657, 0.01670512008666992, 0.016639968872070313, 0.01674630355834961, 0.016577152252197264, 0.016578367233276367, 0.016578752517700194, 0.016664447784423827, 0.017078399658203125, 0.016770368576049806, 0.016636608123779296, 0.016602239608764648, 0.016633823394775392, 0.016505760192871095, 0.01659235191345215, 0.016730655670166017, 0.016494272232055664, 0.016560447692871093, 0.016543359756469728, 0.01651545524597168, 0.016695295333862305, 0.016926528930664063, 0.016590816497802734, 0.01664841651916504, 0.016757984161376954, 0.01670822334289551, 0.016474271774291994, 0.0167891845703125, 0.017135263442993164, 0.016675071716308595, 0.01675817680358887, 0.016991231918334963, 0.01683046340942383, 0.017059648513793945, 0.01690812873840332, 0.017029087066650392, 0.016902336120605467, 0.016955583572387696, 0.016957439422607423, 0.017147903442382813, 0.016701440811157226, 0.01692188835144043, 0.016795520782470704, 0.01686409568786621, 0.017022335052490234, 0.01731648063659668, 0.0171615047454834, 0.01712611198425293, 0.017074176788330078, 0.017657632827758788, 0.016963808059692383, 0.016977920532226562, 0.016920576095581053, 0.016924671173095703, 0.016932384490966797, 0.01713404846191406, 0.017041120529174805, 0.017170719146728516, 0.017114591598510743, 0.017281120300292968, 0.017107168197631837]",tokens/s,59.36677022804998,, 
4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1583.456256,1596.915712,0.0,1201.668096,1189.151232,s,1,8.8428955078125,8.8428955078125,0.0,8.8428955078125,8.8428955078125,8.8428955078125,8.8428955078125,[8.8428955078125],,kWh,3.590696299166514e-05,3.950185491363577e-06,1.1906676192000726e-05,5.176382467502944e-05,,MB,1568.436224,1794.048,0.0,1384.12032,1351.367168,s,10,0.4130821113586426,0.04130821113586426,0.00022286802072661455,0.0412576961517334,0.041507010269165034,0.04170185794830322,0.041857736091613765,"[0.041896705627441404, 0.04109718322753906, 0.04121372985839844, 0.04126678466796875, 0.04124860763549805, 0.04128707122802734, 0.041103679656982424, 0.04146371078491211, 0.04115500640869141, 0.0413496322631836]",tokens/s,6197.315084838856,kWh,1.2452644686701731e-06,1.3733006921265444e-07,8.294391978297931e-07,2.2120337357126205e-06,tokens/kWh,115730603.86329415,MB,1573.920768,1835.99104,0.0,1426.06336,1407.548416,s,10,10.498130493164062,1.0498130493164062,0.006266048482070716,1.0488837890625,1.0573603149414061,1.0606197937011719,1.0632273767089844,"[1.056635986328125, 1.0469267578125, 1.04680224609375, 1.0506383056640625, 1.044119140625, 1.0502557373046875, 1.0475118408203126, 1.040291748046875, 1.0510694580078126, 1.0638792724609376]",tokens/s,60.01068479861527,kWh,3.0614132719246836e-05,3.376261120950881e-06,1.530290704136997e-05,4.929330088156768e-05,tokens/kWh,1278064.1359637105,,s,630,10.494630895614618,0.01665814427875337,0.00033934837523612196,0.016566128730773927,0.016881436538696292,0.0172128849029541,0.018070274982452398,"[0.017091936111450195, 0.016477983474731447, 0.016464927673339843, 0.017317983627319337, 0.01666227149963379, 0.016607168197631837, 0.01690451240539551, 0.016965375900268555, 0.01659507179260254, 0.016795520782470704, 0.016619136810302734, 0.01656412887573242, 0.016537471771240233, 0.016523359298706054, 0.016473920822143554, 0.01665273666381836, 0.016604799270629883, 0.0167325439453125, 0.01671833610534668, 0.016717664718627928, 0.01679315185546875, 0.016848735809326172, 0.01670729637145996, 0.016689952850341798, 0.016637184143066405, 0.016526079177856444, 0.016634016036987304, 0.0166910400390625, 0.016626943588256837, 0.016745088577270507, 0.016730239868164062, 0.01675823974609375, 0.0175164794921875, 0.02031407928466797, 0.016833152770996094, 0.017135200500488282, 0.016765344619750978, 0.016703487396240235, 0.016674943923950195, 0.01668492889404297, 0.016623231887817384, 0.016543231964111327, 0.016556768417358397, 0.016605344772338868, 0.016571584701538085, 0.016638784408569335, 0.016842975616455078, 0.017002368927001955, 0.016908191680908204, 0.016742080688476563, 0.016611648559570313, 0.016763904571533202, 0.01654854393005371, 0.016836095809936523, 0.01661836814880371, 0.01681952095031738, 0.01661372756958008, 0.016633216857910155, 0.01659587287902832, 0.01660313606262207, 0.01661452865600586, 
0.01654431915283203, 0.01653094482421875, 0.016677728652954103, 0.016698400497436525, 0.016706335067749024, 0.01661894416809082, 0.016435327529907225, 0.01647385597229004, 0.016509408950805663, 0.016431583404541015, 0.016676095962524413, 0.016424959182739257, 0.016535167694091798, 0.01664080047607422, 0.016694623947143553, 0.016685600280761718, 0.016835071563720702, 0.016699392318725585, 0.016648000717163085, 0.01658060836791992, 0.01659903907775879, 0.016561216354370117, 0.01668611145019531, 0.016582784652709962, 0.01675388717651367, 0.016906816482543944, 0.016691328048706055, 0.016530912399291994, 0.01665065574645996, 0.016477695465087892, 0.016539840698242186, 0.016596895217895508, 0.01665043258666992, 0.016730112075805666, 0.01648371124267578, 0.016382816314697266, 0.016558080673217773, 0.016523263931274415, 0.0165515193939209, 0.016525728225708008, 0.016504608154296874, 0.01641878318786621, 0.01638015937805176, 0.01647830390930176, 0.01655388832092285, 0.016680959701538087, 0.016590848922729492, 0.016530527114868163, 0.016690080642700195, 0.01649228858947754, 0.01688118362426758, 0.016558847427368163, 0.0170383358001709, 0.016556480407714843, 0.01659721565246582, 0.01640790367126465, 0.01655833625793457, 0.017318592071533204, 0.01661136054992676, 0.01659699249267578, 0.01671196746826172, 0.01649839973449707, 0.01676268768310547, 0.016574655532836914, 0.0167587833404541, 0.016698400497436525, 0.01652214431762695, 0.01661756706237793, 0.016695327758789062, 0.016475839614868162, 0.016708608627319335, 0.01663692855834961, 0.016599231719970704, 0.016619327545166016, 0.01657206344604492, 0.0166376953125, 0.017625696182250978, 0.016693248748779296, 0.0167093448638916, 0.016582944869995116, 0.016862720489501954, 0.016586528778076173, 0.01645654487609863, 0.016459648132324218, 0.016478208541870116, 0.016449535369873047, 0.01668908882141113, 0.0165, 0.016587392807006836, 0.016574111938476563, 0.016785184860229493, 0.01666223907470703, 0.01662054443359375, 0.016556032180786134, 0.016719871520996094, 0.016582656860351562, 0.016476320266723632, 0.016555871963500977, 0.016547840118408205, 0.016521215438842773, 0.016586143493652342, 0.0167491512298584, 0.016582399368286132, 0.0165295352935791, 0.016610687255859374, 0.016523935317993163, 0.018104415893554687, 0.01661747169494629, 0.01643123245239258, 0.016514080047607422, 0.01648441505432129, 0.016457984924316407, 0.0164234561920166, 0.016478208541870116, 0.016489791870117187, 0.01632736015319824, 0.016474367141723633, 0.01647385597229004, 0.017044479370117188, 0.016540735244750977, 0.01654368019104004, 0.016496639251708984, 0.016483840942382814, 0.0164564151763916, 0.01658412742614746, 0.0165479679107666, 0.01646636772155762, 0.016494367599487306, 0.01680588722229004, 0.01665238380432129, 0.016529312133789064, 0.016668832778930665, 0.016694496154785157, 0.016510719299316405, 0.01649158477783203, 0.016447231292724608, 0.016648384094238283, 0.016777088165283203, 0.017125280380249023, 0.017897247314453125, 0.016877887725830078, 0.016652288436889647, 0.01661337661743164, 0.01656345558166504, 0.016556800842285155, 0.01660927963256836, 0.016559648513793945, 0.016801279067993165, 0.0179866886138916, 0.016797536849975585, 0.017172479629516603, 0.0168222713470459, 0.016748544692993163, 0.01660927963256836, 0.016494783401489257, 0.016852800369262694, 0.016554048538208008, 0.017024959564208984, 0.01648182487487793, 0.016572895050048827, 0.01644361686706543, 0.016498207092285156, 0.016445695877075197, 0.01670070457458496, 0.016646879196166992, 
0.016883712768554687, 0.016496448516845702, 0.016517311096191405, 0.016487808227539064, 0.016517536163330078, 0.016473600387573242, 0.016496864318847657, 0.016605695724487304, 0.01653286361694336, 0.016490144729614256, 0.01662393569946289, 0.01665705680847168, 0.016703487396240235, 0.016768768310546876, 0.01654537582397461, 0.016490144729614256, 0.01661849594116211, 0.01662774467468262, 0.01667068862915039, 0.016600896835327148, 0.016529024124145506, 0.01638243293762207, 0.016512287139892577, 0.016646528244018556, 0.016551488876342772, 0.016448095321655275, 0.01669647979736328, 0.016658912658691405, 0.01663363265991211, 0.01656425666809082, 0.016624191284179687, 0.016760831832885743, 0.016678848266601563, 0.016529632568359376, 0.016622623443603515, 0.016550336837768555, 0.016636287689208985, 0.01677654457092285, 0.01663657569885254, 0.016647647857666016, 0.017009279251098634, 0.01671721649169922, 0.016531103134155272, 0.016533504486083983, 0.016513919830322264, 0.016727455139160158, 0.016565984725952148, 0.016540512084960938, 0.016497695922851562, 0.0165644474029541, 0.016497407913208008, 0.016523008346557618, 0.016498943328857422, 0.016519264221191408, 0.016525087356567384, 0.0165295352935791, 0.016733407974243164, 0.016671232223510742, 0.01678531265258789, 0.01650636863708496, 0.016445728302001954, 0.016415327072143555, 0.016467967987060548, 0.01642291259765625, 0.016472063064575194, 0.01648748779296875, 0.01635568046569824, 0.01649135971069336, 0.016602880477905275, 0.016811168670654297, 0.016786272048950196, 0.016473472595214842, 0.016496992111206053, 0.016427295684814453, 0.016483583450317384, 0.01646227264404297, 0.01642527961730957, 0.01653932762145996, 0.016414751052856447, 0.016462112426757814, 0.01643519973754883, 0.016469215393066405, 0.016443487167358398, 0.016548288345336913, 0.01655628776550293, 0.016645824432373047, 0.01662393569946289, 0.01670908737182617, 0.0165053768157959, 0.016819839477539063, 0.016869184494018554, 0.016781280517578125, 0.016736799240112305, 0.016488191604614257, 0.016576927185058595, 0.016425312042236326, 0.016528032302856446, 0.016493408203125, 0.016719104766845704, 0.01693503952026367, 0.016679328918457033, 0.016483871459960938, 0.01656028747558594, 0.016478912353515625, 0.01673539161682129, 0.01669206428527832, 0.016670719146728515, 0.016525312423706053, 0.016612991333007813, 0.01650931167602539, 0.016554208755493165, 0.016469280242919923, 0.016458240509033203, 0.016526975631713868, 0.016889984130859376, 0.016519071578979493, 0.01675254440307617, 0.016612831115722655, 0.016677888870239257, 0.016588544845581053, 0.016454879760742187, 0.016422048568725586, 0.016567583084106444, 0.016529983520507812, 0.016545024871826172, 0.01655593681335449, 0.01648691177368164, 0.016471616744995116, 0.01655084800720215, 0.016504640579223632, 0.016524896621704102, 0.016412992477416993, 0.016513151168823244, 0.016455039978027344, 0.016406431198120117, 0.01655471992492676, 0.01663145637512207, 0.016683040618896486, 0.016557695388793946, 0.01654035186767578, 0.016527328491210937, 0.016754720687866213, 0.016684255599975585, 0.016522016525268555, 0.016726015090942382, 0.016720096588134767, 0.01678927993774414, 0.016739904403686525, 0.017084863662719725, 0.017033376693725587, 0.01822697639465332, 0.018415679931640627, 0.017523136138916016, 0.017217536926269532, 0.016865280151367186, 0.016986112594604492, 0.01660927963256836, 0.016615423202514648, 0.01656831932067871, 0.016570367813110352, 0.016635168075561525, 0.016456703186035156, 0.016532735824584963, 
0.016640480041503907, 0.016758687973022462, 0.016892000198364256, 0.0168175048828125, 0.016799999237060548, 0.016800159454345702, 0.01676313591003418, 0.01666227149963379, 0.016699392318725585, 0.016564224243164064, 0.016740352630615234, 0.016547359466552735, 0.016535072326660155, 0.016573375701904296, 0.016566272735595702, 0.01648454475402832, 0.01657360076904297, 0.016485023498535156, 0.016541919708251952, 0.01646335983276367, 0.016529695510864258, 0.016518367767333984, 0.01638275146484375, 0.016678720474243163, 0.016558080673217773, 0.016748191833496094, 0.016793535232543944, 0.016501344680786133, 0.016424959182739257, 0.01639833641052246, 0.01640457534790039, 0.016539072036743162, 0.016466400146484376, 0.01652889633178711, 0.01662822341918945, 0.016666656494140626, 0.016626976013183595, 0.0167425594329834, 0.01664463996887207, 0.01664521598815918, 0.016526144027709962, 0.016506975173950195, 0.016456800460815428, 0.016733152389526367, 0.016617408752441408, 0.016581920623779296, 0.016483327865600587, 0.016498559951782225, 0.016408063888549804, 0.01655228805541992, 0.016513023376464844, 0.016495967864990236, 0.016802175521850586, 0.01655193519592285, 0.01660745620727539, 0.01645955276489258, 0.016465919494628906, 0.01660927963256836, 0.016547168731689453, 0.016519392013549804, 0.01663337516784668, 0.01652934455871582, 0.016509920120239257, 0.01645484733581543, 0.016497472763061523, 0.016451583862304688, 0.016582271575927735, 0.0165230712890625, 0.016457664489746095, 0.016456319808959962, 0.016445119857788085, 0.01638387107849121, 0.016455423355102538, 0.01646771240234375, 0.01648736000061035, 0.01650499153137207, 0.016417856216430663, 0.016458528518676758, 0.016433120727539063, 0.016455135345458984, 0.016427583694458008, 0.016424959182739257, 0.016465280532836916, 0.016411264419555663, 0.01653913688659668, 0.01661939239501953, 0.0166015682220459, 0.016494752883911133, 0.01639423942565918, 0.0164270076751709, 0.01652118492126465, 0.01658064079284668, 0.016453472137451172, 0.016449695587158204, 0.016459840774536133, 0.01642255973815918, 0.01646620750427246, 0.016565568923950197, 0.016538303375244142, 0.0164454402923584, 0.016476160049438478, 0.016455455780029295, 0.016521087646484377, 0.01648089599609375, 0.016430816650390624, 0.01643734359741211, 0.016543968200683594, 0.01641212844848633, 0.016506399154663086, 0.016597312927246095, 0.016727935791015624, 0.01656268882751465, 0.016418560028076172, 0.0164552001953125, 0.017152448654174805, 0.016714784622192384, 0.01652124786376953, 0.016458688735961916, 0.016565311431884767, 0.01648736000061035, 0.01645523262023926, 0.016470464706420898, 0.016441343307495117, 0.016541696548461913, 0.016490848541259765, 0.016506528854370116, 0.016561983108520507, 0.016670143127441406, 0.016548608779907225, 0.017452959060668946, 0.019414623260498046, 0.01674678421020508, 0.01699567985534668, 0.016751487731933593, 0.016631999969482423, 0.01649203109741211, 0.01672137641906738, 0.016611743927001953, 0.01671340751647949, 0.016554752349853517, 0.016625631332397462, 0.016477407455444334, 0.016512096405029295, 0.01646972846984863, 0.016660480499267577, 0.016531455993652345, 0.016476160049438478, 0.016463872909545898, 0.016461599349975587, 0.016363616943359374, 0.01657644844055176, 0.01645996856689453, 0.016438592910766603, 0.016446144104003906, 0.016442752838134764, 0.01646860885620117, 0.01660518455505371, 0.01653555107116699, 0.016453632354736326, 0.016568191528320314, 0.017970304489135742, 0.01645804786682129, 0.016741056442260743, 0.016471647262573243, 
0.01645199966430664, 0.016377504348754884, 0.016375167846679688, 0.016376800537109375, 0.01666975975036621, 0.018975679397583007, 0.016713727951049806, 0.017374656677246095, 0.016542272567749025, 0.01646518325805664, 0.016378591537475586, 0.01664723205566406, 0.01645254325866699, 0.0164270076751709, 0.016817663192749025, 0.016674911499023438, 0.016589664459228517, 0.01655948829650879, 0.016436960220336912, 0.01651299285888672, 0.016752960205078125, 0.016716415405273438, 0.016699392318725585, 0.016584928512573243, 0.01650787162780762, 0.016466720581054688, 0.01647830390930176, 0.01650886344909668, 0.016529632568359376, 0.016445215225219727, 0.01644476890563965, 0.016406784057617186, 0.016482208251953127, 0.016546239852905275, 0.016580671310424806, 0.016555807113647462, 0.016865503311157225, 0.01696998405456543, 0.017135360717773437, 0.01738479995727539, 0.01736969566345215, 0.017295295715332032, 0.01719718360900879, 0.01723391914367676, 0.017320032119750976, 0.017207199096679688, 0.01739366340637207, 0.017253503799438477, 0.017158687591552733, 0.017285472869873048, 0.01745305633544922, 0.017662208557128908, 0.017401599884033205, 0.01873823928833008, 0.016734336853027342, 0.01657734489440918, 0.01651456069946289, 0.016600576400756836, 0.016484640121459962, 0.017732320785522462, 0.01759052848815918, 0.017706111907958986, 0.016631935119628908, 0.01669990348815918, 0.016725887298583986, 0.016681087493896483, 0.016531455993652345, 0.01656012725830078, 0.01653539276123047, 0.016534879684448243, 0.01654662322998047, 0.01658060836791992, 0.016697343826293946, 0.01696713638305664, 0.016904735565185548, 0.01698406410217285, 0.01694633674621582]",tokens/s,60.03069629283077,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", 
line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 16.12 MiB is free. Process 21916 has 14.72 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 187.67 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,6768.418816,7762.542592,0.0,7367.294976,7351.94368,s,1,12.4044169921875,12.4044169921875,0.0,12.4044169921875,12.4044169921875,12.4044169921875,12.4044169921875,[12.4044169921875],,kWh,0.00015872177171666712,1.7501032018332932e-05,5.042504033999848e-05,0.00022664784407499852,,MB,1417.826304,8404.271104,0.0,7994.343424,7863.794176,s,10,2.982906372070313,0.29829063720703125,0.00042369488986336985,0.29844981384277347,0.2986946716308594,0.298739990234375,0.2987762451171875,"[0.29835421752929686, 0.298471435546875, 0.29781222534179685, 0.2973498840332031, 0.2985828857421875, 0.2979638671875, 0.2987853088378906, 0.2984737548828125, 0.29868460083007814, 0.2984281921386719]",tokens/s,858.2233837340357,kWh,8.727604025857624e-06,9.621517366482535e-07,5.779473577823583e-06,1.546922934032946e-05,tokens/kWh,16548982.135301886,MB,1438.72,8676.900864,0.0,8266.973184,8120.408064,s,10,24.584218749999998,2.458421875,0.0029808773235690616,2.459900634765625,2.4606286865234375,2.4616724975585935,2.462507546386719,"[2.452980712890625, 2.454088134765625, 2.455698974609375, 2.45816455078125, 2.460396728515625, 2.45989404296875, 2.4599072265625, 2.460341064453125, 2.460031005859375, 2.46271630859375]",tokens/s,25.626195666681497,kWh,7.195771981330942e-05,7.937285538448352e-06,4.791437493277619e-05,0.00012780938028453392,tokens/kWh,492921.56694404664,,s,630,24.580845401763934,0.03901721492343479,0.000344952758432959,0.03898783874511719,0.0394716007232666,0.03956559257507324,0.03973260978698731,"[0.03861248016357422, 0.03829539108276367, 0.03821443176269531, 0.03829779052734375, 0.03813715362548828, 0.03831273651123047, 0.03855257415771484, 0.03852585601806641, 0.038358047485351564, 0.03833673477172852, 0.03850726318359375, 0.03876169586181641, 0.038717952728271485, 0.03879759979248047, 0.03879935836791992, 0.03861094284057617, 0.03860070419311523, 0.03877068710327149, 0.03867407989501953, 0.03870896148681641, 0.038890113830566404, 0.038834175109863284, 0.038780033111572264, 0.038701313018798825, 0.038986366271972654, 0.0389918098449707, 0.03897932815551758, 0.03889593505859375, 0.0387665901184082, 0.0386448974609375, 0.038695743560791016, 0.03877020645141602, 0.03877289581298828, 0.038809951782226564, 0.038801185607910155, 0.0389073600769043, 0.03910294342041016, 0.03914115142822266, 0.03913356781005859, 0.039319679260253905, 0.03939328002929687, 0.039329792022705076, 0.0392674560546875, 0.039203712463378906, 0.0392086067199707, 0.039346527099609375, 0.03924790573120117, 0.03914070510864258, 0.03931523132324219, 0.03948531341552734, 0.0394024658203125, 0.039255840301513675, 0.03922723388671875, 0.03942643356323242, 
0.03933171081542969, 0.03926796722412109, 0.039246337890625, 0.0392171516418457, 0.039272449493408204, 0.03922323226928711, 0.03929884719848633, 0.03952041625976563, 0.03955513763427734, 0.038827072143554686, 0.03851753616333008, 0.03855091094970703, 0.038583072662353515, 0.03852492904663086, 0.03839299011230469, 0.038617759704589846, 0.038627231597900394, 0.038451297760009766, 0.03856150436401367, 0.03861516952514649, 0.03841878509521485, 0.0384532470703125, 0.03838908767700195, 0.03837020874023438, 0.038631168365478516, 0.03880550384521484, 0.03878870391845703, 0.03872604751586914, 0.038714752197265626, 0.03894745635986328, 0.0390041618347168, 0.03880361557006836, 0.03894255828857422, 0.03891404724121094, 0.0388403205871582, 0.0389119987487793, 0.038801406860351564, 0.03885055923461914, 0.03902582550048828, 0.039082817077636715, 0.03902211380004883, 0.03895347213745117, 0.038891166687011716, 0.03887926483154297, 0.03924720001220703, 0.03915673446655273, 0.038903423309326175, 0.03881961441040039, 0.038906143188476565, 0.039137569427490235, 0.03899987030029297, 0.039051456451416014, 0.039292160034179686, 0.03930393600463867, 0.03919257736206055, 0.03913667297363281, 0.03911945724487305, 0.039049217224121094, 0.03936870574951172, 0.03941904067993164, 0.03942627334594727, 0.03941798400878906, 0.039391742706298825, 0.039346176147460936, 0.03925196838378906, 0.03952435302734375, 0.03949318313598633, 0.039307712554931644, 0.03919462585449219, 0.03927859115600586, 0.039299072265625, 0.03924905776977539, 0.038626625061035154, 0.038763198852539066, 0.03858163070678711, 0.03854985427856445, 0.03858233642578125, 0.038663585662841796, 0.03869779205322266, 0.03858169555664062, 0.038343231201171876, 0.03857779312133789, 0.038680320739746095, 0.03871603012084961, 0.03865190505981445, 0.03864371109008789, 0.03881132888793945, 0.03872998428344727, 0.03859462356567383, 0.0385986557006836, 0.03855155181884766, 0.03850649642944336, 0.03859443283081055, 0.038905982971191404, 0.038817790985107424, 0.038778881072998046, 0.038978912353515624, 0.038981441497802735, 0.0388043212890625, 0.038760448455810545, 0.03880755233764648, 0.03896713638305664, 0.038965408325195315, 0.03894668960571289, 0.03908607864379883, 0.03936249542236328, 0.03899571228027344, 0.03882438278198242, 0.038895584106445315, 0.03898076629638672, 0.03914012908935547, 0.03916400146484375, 0.039182464599609376, 0.0390695686340332, 0.0390203857421875, 0.03896950531005859, 0.03929292678833008, 0.03948086547851563, 0.03934163284301758, 0.03939420700073242, 0.03938508987426758, 0.039346176147460936, 0.039354366302490236, 0.039376449584960935, 0.039239425659179684, 0.039158462524414066, 0.03902668762207031, 0.039139102935791016, 0.03956694412231445, 0.03963881683349609, 0.039430335998535154, 0.0395015983581543, 0.03937750244140625, 0.03935043334960937, 0.03955904006958008, 0.03853311920166016, 0.03847577667236328, 0.03842863845825195, 0.03855791854858399, 0.03859151840209961, 0.03849929428100586, 0.03844281768798828, 0.03857619094848633, 0.03845113754272461, 0.038703102111816406, 0.038694911956787106, 0.03857161712646484, 0.0385577278137207, 0.038689216613769534, 0.038739902496337894, 0.03869900894165039, 0.03885465621948242, 0.03862720108032226, 0.038645313262939456, 0.03885113525390625, 0.03873532867431641, 0.038713886260986326, 0.03894796752929688, 0.03891811370849609, 0.03890473556518555, 0.03889152145385742, 0.03890991973876953, 0.03883625411987305, 0.039034881591796876, 0.03887513732910156, 0.03882393646240234, 0.03878092956542969, 
0.03893657684326172, 0.039204864501953124, 0.039041023254394534, 0.039051136016845706, 0.03925619125366211, 0.03938051223754883, 0.039223041534423825, 0.03907043075561523, 0.03951001739501953, 0.03932950210571289, 0.03937516784667969, 0.039411582946777345, 0.039241825103759766, 0.03940156936645508, 0.039444385528564455, 0.039241825103759766, 0.03925187301635742, 0.03925763320922852, 0.0390865592956543, 0.03913318252563477, 0.03952230453491211, 0.039354366302490236, 0.03937257766723633, 0.039276737213134766, 0.03941584014892578, 0.039534591674804685, 0.03957356643676758, 0.03943008041381836, 0.039632190704345704, 0.0396479377746582, 0.03960543823242187, 0.039279361724853516, 0.038668449401855466, 0.038520862579345706, 0.03839740753173828, 0.03834726333618164, 0.038634719848632815, 0.0387305908203125, 0.03861004638671875, 0.038561729431152346, 0.03854415893554688, 0.038735904693603516, 0.0391550407409668, 0.03881817626953125, 0.03862563323974609, 0.03878092956542969, 0.039002113342285157, 0.03884236907958984, 0.03881564712524414, 0.03890528106689453, 0.03897529602050781, 0.03894768142700195, 0.03896121597290039, 0.03897235107421875, 0.03884134292602539, 0.03871705627441406, 0.038954910278320314, 0.03894102478027344, 0.038885505676269534, 0.038852222442626955, 0.038873374938964846, 0.0389571533203125, 0.03891167831420898, 0.039134719848632815, 0.039058238983154296, 0.03891814422607422, 0.039099681854248045, 0.03903948974609375, 0.03905110549926758, 0.039325759887695315, 0.03927219009399414, 0.03918700790405273, 0.03935612869262695, 0.03938742446899414, 0.03935356903076172, 0.03924614334106445, 0.0391602897644043, 0.03917168045043945, 0.03911721420288086, 0.03921100616455078, 0.03916595077514649, 0.0393994255065918, 0.0394439697265625, 0.03930777740478516, 0.03929235076904297, 0.03925459289550781, 0.03939123153686523, 0.039462913513183595, 0.039376895904541014, 0.03956531143188476, 0.039702529907226565, 0.03959807968139648, 0.039669761657714846, 0.03957715225219727, 0.03886896133422851, 0.03856921768188477, 0.03834969711303711, 0.03845951843261719, 0.03861830520629883, 0.03855817413330078, 0.03854275131225586, 0.03844112014770508, 0.038855457305908205, 0.03879267120361328, 0.0388265266418457, 0.03873791885375977, 0.03864950561523438, 0.03856118392944336, 0.03865103912353516, 0.03884009552001953, 0.03874816131591797, 0.03859008026123047, 0.038805343627929687, 0.03889539337158203, 0.038808319091796876, 0.03893779373168945, 0.03915449523925781, 0.03909222412109375, 0.038950912475585936, 0.03888742446899414, 0.0389172477722168, 0.03879328155517578, 0.038770561218261716, 0.03885782241821289, 0.03895280075073242, 0.0388485107421875, 0.03905507278442383, 0.03899820709228516, 0.03895036697387695, 0.03907551956176758, 0.039140289306640624, 0.03908198547363281, 0.039139102935791016, 0.039233470916748045, 0.03922358322143555, 0.0392171516418457, 0.03928799819946289, 0.03935702514648438, 0.039321823120117186, 0.03929087829589844, 0.03918745422363281, 0.039330814361572264, 0.039327743530273435, 0.039317089080810545, 0.03942031860351562, 0.03946905517578125, 0.03938889694213867, 0.03929116821289062, 0.03955712127685547, 0.03947510528564453, 0.039497825622558595, 0.03927449417114258, 0.03978035354614258, 0.039702529907226565, 0.03962166213989258, 0.03956582260131836, 0.03964156723022461, 0.0387740478515625, 0.03865468978881836, 0.03848515319824219, 0.0384903678894043, 0.03848259353637695, 0.03833139038085937, 0.03831084823608399, 0.03867238235473633, 0.038963359832763673, 0.03888470458984375, 
0.03877529525756836, 0.03855974578857422, 0.03846259307861328, 0.03853612899780273, 0.03865593719482422, 0.03863532638549805, 0.03879539108276367, 0.03892025756835937, 0.038899711608886715, 0.03879935836791992, 0.03883827209472656, 0.03911884689331055, 0.03889583969116211, 0.038849857330322264, 0.03883055877685547, 0.039013504028320316, 0.03883712005615234, 0.038983680725097655, 0.03895296096801758, 0.038829345703125, 0.038793952941894534, 0.03890176010131836, 0.03916799926757813, 0.0391044807434082, 0.03905235290527344, 0.03948992156982422, 0.039219806671142575, 0.03920444869995117, 0.03913532638549805, 0.03897993469238281, 0.039061473846435546, 0.039256065368652344, 0.03911065673828125, 0.03908403015136719, 0.03925571060180664, 0.03928940963745117, 0.03920054244995117, 0.03917737579345703, 0.03952931213378906, 0.039411712646484375, 0.03940505599975586, 0.03939718246459961, 0.03936735916137695, 0.03938304138183594, 0.039396991729736326, 0.039857887268066404, 0.03961718368530273, 0.03955913543701172, 0.039511199951171874, 0.03949606323242188, 0.039352832794189455, 0.03987251281738281, 0.039744895935058595, 0.03879305648803711, 0.038383838653564456, 0.03863929748535156, 0.038570240020751954, 0.03851446533203125, 0.03897366333007812, 0.038870849609375, 0.038674625396728515, 0.03870851135253906, 0.038645633697509764, 0.03857084655761719, 0.038662143707275394, 0.038746208190917966, 0.03861222457885742, 0.038547199249267576, 0.0387694091796875, 0.03891238403320312, 0.03880527877807617, 0.03866419219970703, 0.03882572937011719, 0.03885696029663086, 0.03892351913452148, 0.03892844772338867, 0.03887142562866211, 0.038959423065185544, 0.03874745559692383, 0.03873452758789062, 0.03881369781494141, 0.039008255004882815, 0.03901030349731445, 0.03892428970336914, 0.03927859115600586, 0.039142494201660154, 0.03909856033325195, 0.03904608154296875, 0.03909609603881836, 0.03922681427001953, 0.03950678253173828, 0.039323360443115234, 0.03938508987426758, 0.039090175628662106, 0.03908198547363281, 0.03912499237060547, 0.03924991989135742, 0.03923164749145508, 0.039182079315185546, 0.03915164947509766, 0.03938515090942383, 0.039362560272216796, 0.039479297637939455, 0.039446529388427735, 0.039384735107421874, 0.039368030548095706, 0.03925484848022461, 0.03967830276489258, 0.039626590728759764, 0.03948339080810547, 0.03938620758056641, 0.03933638381958008, 0.03926883316040039, 0.03943971252441406, 0.03948716735839844, 0.03966432189941406, 0.038912448883056644, 0.03842047882080078, 0.03872742462158203, 0.03859072113037109, 0.038569854736328124, 0.03854355239868164, 0.03854713439941406, 0.03867212677001953, 0.03849615859985352, 0.038480480194091796, 0.03863740921020508, 0.03857424163818359, 0.038563838958740236, 0.03868057632446289, 0.03870230484008789, 0.038683425903320315, 0.038965248107910154, 0.038860321044921875, 0.03895548629760742, 0.038983680725097655, 0.03871088027954102, 0.03858451080322266, 0.038713569641113284, 0.03868880081176758, 0.03864476776123047, 0.03899488067626953, 0.03907788848876953, 0.038948863983154294, 0.03892838287353516, 0.03907788848876953, 0.03898931121826172, 0.03939583969116211, 0.039403518676757815, 0.03932364654541016, 0.03915776062011719, 0.039203937530517576, 0.03899417495727539, 0.038847137451171875, 0.038978816986083985, 0.03901708984375, 0.039118785858154294, 0.039233409881591796, 0.03934828948974609, 0.03916825485229492, 0.03950732803344727, 0.0393570556640625, 0.03930316925048828, 0.039634944915771485, 0.03928473663330078, 0.039137279510498044, 0.03946080017089844, 
0.039446590423583984, 0.03945209503173828, 0.03939395141601563, 0.039411617279052735, 0.03942399978637695, 0.039577598571777346, 0.039585792541503906, 0.03958169555664062, 0.03951599884033203, 0.03954643249511719, 0.03947148895263672, 0.039472606658935545, 0.038924415588378905, 0.03868239974975586, 0.03857827377319336, 0.038459007263183596, 0.03850243377685547, 0.03862972640991211, 0.03869510269165039, 0.038467391967773434, 0.038682174682617185, 0.03880799865722656, 0.038703102111816406, 0.0388403205871582, 0.03902668762207031, 0.0388485107421875, 0.03883404922485351, 0.038856510162353516, 0.03890950393676758, 0.038855422973632814, 0.03883747100830078, 0.038722335815429686, 0.03912086486816406, 0.03922332763671875, 0.0388935661315918, 0.038982719421386716, 0.0388474235534668, 0.038808895111083985, 0.03883078384399414, 0.038787071228027346, 0.03878297424316406, 0.038973438262939454, 0.03902054214477539, 0.03886899185180664, 0.039419902801513675, 0.039411712646484375, 0.03919257736206055, 0.03927014541625977, 0.03912054443359375, 0.03913584136962891, 0.0389911994934082, 0.03899868774414063, 0.03917391967773438, 0.03915711975097656, 0.03905593490600586, 0.03911231994628906, 0.03919529724121094, 0.03926220703125, 0.03926806259155274, 0.039200958251953126, 0.03953468704223633, 0.0394048957824707, 0.039430816650390624, 0.039419902801513675, 0.039282047271728515, 0.0397973747253418, 0.03960319900512695, 0.03955542373657227, 0.03957827377319336, 0.03954035186767578, 0.03997119903564453, 0.039751201629638674, 0.03964483261108399, 0.03944736099243164, 0.039436286926269534]",tokens/s,25.629712473387563,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File 
""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,8219.242496,11243.814912,0.0,10848.567296,10616.027648,s,1,14.3545263671875,14.3545263671875,0.0,14.3545263671875,14.3545263671875,14.3545263671875,14.3545263671875,[14.3545263671875],,kWh,0.0002154615354791531,2.3759469206828034e-05,6.835061023599542e-05,0.00030757161492197654,,MB,3966.40256,11665.342464,0.0,11255.414784,11070.470656,s,10,3.729839172363282,0.3729839172363281,0.0009508679458567862,0.3731391906738281,0.3740673522949219,0.3741419219970703,0.37420157775878904,"[0.37110870361328124, 0.37287899780273437, 0.37146514892578125, 0.37303924560546875, 0.3732391357421875, 0.37294482421875, 0.3742164916992187, 0.37360546875, 0.37329037475585936, 0.37405078125]",tokens/s,686.3566716143276,kWh,1.0907724441358089e-05,1.2028850010034015e-06,7.236518134889388e-06,1.9347127577250877e-05,tokens/kWh,13231938.38350531,MB,3970.727936,11667.439616,0.0,11257.511936,11070.473216,s,10,28.761244873046877,2.876124487304687,0.005087828976131349,2.875763671875,2.882142236328125,2.8829097412109377,2.8835237451171873,"[2.86629638671875, 2.871433349609375, 2.873718994140625, 2.872728271484375, 2.87521826171875, 2.87630908203125, 2.879038330078125, 2.880853271484375, 2.8819716796875, 2.88367724609375]",tokens/s,21.90447606773774,kWh,8.428156574822338e-05,9.29480960613096e-06,5.6135504784910435e-05,0.00014971188013926482,tokens/kWh,420808.2881692235,,s,630,28.75854958343503,0.04564849140227787,0.0004311421835084933,0.0456529598236084,0.04601856193542481,0.04610665893554687,0.04772453594207764,"[0.04760335922241211, 0.045615966796875, 0.04535631942749024, 0.044825313568115234, 0.04515020751953125, 0.044977664947509766, 0.04483955383300781, 0.045158206939697264, 0.04504336166381836, 0.04497411346435547, 0.04489379119873047, 0.04549302291870117, 0.045245983123779296, 0.04515862274169922, 0.04527471923828125, 0.04521769714355469, 0.04527763366699219, 0.04541062545776367, 0.04560076904296875, 0.04552703857421875, 0.04550630569458008, 0.04552524948120117, 0.04588339233398438, 0.04562041473388672, 0.045222721099853515, 0.045174785614013675, 0.04522761535644531, 0.04529916763305664, 0.045203487396240236, 0.04520166397094726, 0.04525923156738281, 0.045241985321044925, 0.04537936019897461, 0.04537942504882812, 0.045644737243652346, 0.045486080169677735, 0.04538127899169922, 0.04561955261230469, 0.04570851135253906, 0.04561548614501953, 0.045870849609375, 0.0460681266784668, 0.04603110504150391, 0.04592844772338867, 0.045848320007324216, 0.04579734420776367, 0.04549871826171875, 0.045389759063720704, 0.045774848937988284, 0.04569462585449219, 0.0454447021484375, 0.045333248138427734, 0.045505569458007815, 0.04549731063842773, 0.045658111572265625, 0.04544921493530273, 0.04547923278808594, 0.0457383041381836, 0.045658462524414065, 0.04564585494995117, 0.04575020980834961, 0.0458770866394043, 0.04589385604858399, 0.04733542251586914, 0.04528742218017578, 0.045034656524658205, 0.044890975952148436, 0.0447498893737793, 0.04465964889526367, 0.04481568145751953, 0.04517055892944336, 0.045396800994873046, 0.045139968872070314, 0.04511948776245117, 0.045401313781738284, 0.04517977523803711, 0.04521084976196289, 0.04517561721801758, 0.045480865478515625, 0.04585980987548828, 0.045565185546875, 0.045166912078857424, 0.04543532943725586, 0.04574031829833984, 0.04568239974975586, 0.045330432891845705, 0.04571043014526367, 0.04566438293457031, 0.04535507202148437, 0.045283649444580076, 0.045344287872314454, 0.04534912109375, 0.04536380767822266, 0.04549407958984375, 
0.04563516616821289, 0.045507198333740236, 0.04547379302978516, 0.04568841552734375, 0.04569708633422852, 0.04581568145751953, 0.04599350357055664, 0.04608710479736328, 0.04589507293701172, 0.04575718307495117, 0.04588937759399414, 0.046018558502197264, 0.04591772842407227, 0.04580969619750976, 0.04578323364257812, 0.045875167846679686, 0.045506847381591796, 0.045461505889892576, 0.045641281127929687, 0.045736480712890625, 0.04570102310180664, 0.04552294540405273, 0.0454769287109375, 0.04580819320678711, 0.045717281341552736, 0.04570377731323242, 0.045827167510986325, 0.045964351654052736, 0.04587628936767578, 0.04589648056030273, 0.045975006103515625, 0.04611747360229492, 0.047839168548583985, 0.04606982421875, 0.045096958160400394, 0.04497200012207031, 0.04487734222412109, 0.04507644653320313, 0.04497615814208984, 0.04492134475708008, 0.04503731155395508, 0.045309505462646484, 0.045294559478759766, 0.04520489501953125, 0.045459712982177734, 0.04526496124267578, 0.045225311279296875, 0.04523686218261719, 0.045578559875488284, 0.04570697784423828, 0.04534067153930664, 0.045256671905517576, 0.04546073532104492, 0.04593494415283203, 0.045695423126220706, 0.04553318405151367, 0.045553665161132816, 0.04539542388916016, 0.04547174453735352, 0.04521014404296875, 0.04517817687988281, 0.045480640411376956, 0.0454205436706543, 0.04539587020874023, 0.045486175537109375, 0.04557561492919922, 0.045752864837646484, 0.04561103820800781, 0.04580707168579102, 0.04586918258666992, 0.04581008148193359, 0.04590387344360351, 0.045676673889160156, 0.046161441802978515, 0.045807968139648436, 0.04590591812133789, 0.04604313659667969, 0.04586905670166016, 0.04562739181518555, 0.04578713607788086, 0.04570924758911133, 0.045588542938232425, 0.04560809707641601, 0.045837150573730466, 0.04568681716918945, 0.04555977630615234, 0.04554912185668945, 0.04594473648071289, 0.04610710525512695, 0.04574816131591797, 0.04585075378417969, 0.04595507049560547, 0.045794654846191406, 0.04628473663330078, 0.046069793701171875, 0.0483836784362793, 0.04588102340698242, 0.045115230560302734, 0.044816287994384765, 0.044901313781738283, 0.04505388641357422, 0.045227710723876956, 0.045107391357421874, 0.04504800033569336, 0.045421695709228514, 0.04532912063598633, 0.045236385345458985, 0.04526694488525391, 0.045502079010009765, 0.04525094223022461, 0.04525590515136719, 0.04548303985595703, 0.04568857574462891, 0.045692928314208986, 0.04550841522216797, 0.04584262466430664, 0.0462224006652832, 0.045939777374267576, 0.045524417877197264, 0.04530598449707031, 0.0452426872253418, 0.045338558197021483, 0.04514575958251953, 0.04518953704833984, 0.04542464065551758, 0.04552908706665039, 0.045246463775634765, 0.04531814575195312, 0.04545232009887695, 0.045378528594970706, 0.04560076904296875, 0.04572323226928711, 0.045609375, 0.04568438339233399, 0.045480480194091795, 0.04571855926513672, 0.046007072448730466, 0.04592025756835937, 0.04605132675170898, 0.045948928833007815, 0.04579942321777344, 0.045471839904785157, 0.04552080154418945, 0.04568819046020508, 0.04560550308227539, 0.04549148941040039, 0.04568534469604492, 0.0455865592956543, 0.0455551986694336, 0.04589328002929687, 0.04581568145751953, 0.045616096496582034, 0.04569497680664063, 0.04605747222900391, 0.046104576110839846, 0.046036991119384765, 0.04572918319702148, 0.04608060836791992, 0.047745471954345704, 0.04548198318481445, 0.04513382339477539, 0.044799999237060545, 0.04491775894165039, 0.04507340621948242, 0.04506131362915039, 0.045090911865234375, 0.045160255432128905, 
0.04551897430419922, 0.045443550109863284, 0.04543315124511719, 0.04522137451171875, 0.045400577545166014, 0.04558643341064453, 0.045461505889892576, 0.045418014526367186, 0.04550403213500977, 0.04581372833251953, 0.0455463981628418, 0.04578656005859375, 0.04580825424194336, 0.045742080688476565, 0.04555084609985351, 0.04518169784545899, 0.04542611312866211, 0.04529926300048828, 0.04504886245727539, 0.045305824279785155, 0.04558233642578125, 0.04563558578491211, 0.04558812713623047, 0.04554991912841797, 0.04550239944458008, 0.045776958465576174, 0.04573583984375, 0.04575446319580078, 0.045963264465332034, 0.04575641632080078, 0.045445121765136716, 0.04604108810424805, 0.0458158073425293, 0.045721408843994144, 0.04583033752441406, 0.045995105743408204, 0.045846912384033205, 0.04574019241333008, 0.04579366302490234, 0.04557574462890625, 0.045792766571044925, 0.04588035202026367, 0.04589052963256836, 0.04584339141845703, 0.04555980682373047, 0.0456888313293457, 0.04586918258666992, 0.04571942520141602, 0.045666305541992185, 0.04597711944580078, 0.04592601776123047, 0.04611568069458008, 0.04613119888305664, 0.046290111541748044, 0.04823040008544922, 0.04608787155151367, 0.04502764892578125, 0.04483686447143555, 0.04501251220703125, 0.04497884750366211, 0.0447916145324707, 0.04482252883911133, 0.04498636627197266, 0.04543203353881836, 0.045555648803710935, 0.04541641616821289, 0.0454071044921875, 0.04536524963378906, 0.04528057479858399, 0.04525897598266602, 0.04538748931884766, 0.04570608139038086, 0.04566006469726563, 0.0453851203918457, 0.04580822372436524, 0.04593407821655274, 0.045928958892822266, 0.04550361633300781, 0.045347297668457034, 0.045528961181640626, 0.04536348724365234, 0.04529507064819336, 0.04534348678588867, 0.045373470306396484, 0.04544211196899414, 0.04528838348388672, 0.04566425704956055, 0.04571955108642578, 0.04566233444213867, 0.04564476776123047, 0.04593961715698242, 0.045846080780029295, 0.04582233428955078, 0.04570880126953125, 0.04592697525024414, 0.045849727630615233, 0.045992832183837894, 0.04610611343383789, 0.04597148895263672, 0.04557257461547851, 0.04592156982421875, 0.04586979293823242, 0.0456328010559082, 0.045607646942138674, 0.04578227233886719, 0.04577497482299805, 0.045652286529541015, 0.04588521575927734, 0.04578937530517578, 0.045809761047363284, 0.045961471557617185, 0.045907936096191405, 0.04603692626953125, 0.04594195175170898, 0.04586761474609375, 0.04618172836303711, 0.04620387268066406, 0.04806150436401367, 0.045575103759765624, 0.04521152114868164, 0.04496192169189453, 0.04497532653808594, 0.04480694580078125, 0.044918495178222655, 0.04500905609130859, 0.04532396697998047, 0.04518713760375977, 0.04530601501464844, 0.045811168670654295, 0.04565068817138672, 0.04555286407470703, 0.04525955200195313, 0.04559462356567383, 0.04587724685668945, 0.045617057800292966, 0.04535919952392578, 0.04550204849243164, 0.0461082878112793, 0.04602140808105469, 0.04570025634765625, 0.04557043075561523, 0.04552479934692383, 0.045369598388671876, 0.04519180679321289, 0.04520460891723633, 0.045546142578125, 0.045559585571289064, 0.0454268798828125, 0.04563766479492187, 0.04558774566650391, 0.04547369766235351, 0.04564051055908203, 0.04596464157104492, 0.04585539245605469, 0.04554060745239258, 0.045742561340332034, 0.046026878356933594, 0.045913761138916015, 0.04588784027099609, 0.04623807907104492, 0.045960544586181644, 0.0459554557800293, 0.04597884750366211, 0.0460849609375, 0.04577212905883789, 0.04560863876342774, 0.0455055046081543, 0.04562739181518555, 
0.04568617630004883, 0.045647808074951175, 0.045892257690429684, 0.046007999420166014, 0.04592876815795898, 0.0458271369934082, 0.04597856140136719, 0.045882785797119144, 0.04601712036132812, 0.04672716903686523, 0.046170112609863284, 0.04615068817138672, 0.04795024108886719, 0.0455392951965332, 0.04531817626953125, 0.04518707275390625, 0.045013057708740235, 0.04490598297119141, 0.045264640808105466, 0.04542329788208008, 0.0454389762878418, 0.04513177490234375, 0.04543078231811523, 0.04558956909179687, 0.045523582458496095, 0.045313793182373045, 0.04563183975219726, 0.045850112915039064, 0.045772705078125, 0.04546438217163086, 0.04550796890258789, 0.04568332672119141, 0.04575436782836914, 0.04568848037719726, 0.04580124664306641, 0.04567097473144531, 0.0457154541015625, 0.045561569213867184, 0.04549660873413086, 0.045328254699707034, 0.045492351531982424, 0.045696575164794924, 0.04549004745483398, 0.04570966339111328, 0.04568652725219727, 0.04573769760131836, 0.04578531265258789, 0.0457564811706543, 0.04587158584594726, 0.046020606994628906, 0.04590172958374023, 0.04573964691162109, 0.04609641647338867, 0.04605305480957031, 0.04588399887084961, 0.04551286315917969, 0.04581990432739258, 0.04595225524902344, 0.04578937530517578, 0.04565049743652344, 0.04605132675170898, 0.04588339233398438, 0.045553665161132816, 0.045701087951660155, 0.046018592834472655, 0.04592844772338867, 0.04563353729248047, 0.04583388900756836, 0.0460926399230957, 0.046129150390625, 0.04593664169311523, 0.04595916748046875, 0.046086143493652344, 0.045929759979248044, 0.04623977661132812, 0.04767327880859375, 0.04568502426147461, 0.04534889602661133, 0.04512697601318359, 0.04500547027587891, 0.04538982391357422, 0.04541439819335937, 0.04526489639282227, 0.045412353515625, 0.045618335723876954, 0.045484897613525394, 0.045238273620605465, 0.045592575073242186, 0.045502464294433595, 0.04568473434448242, 0.04558393478393555, 0.04560140609741211, 0.045967166900634765, 0.04570425415039062, 0.04545836639404297, 0.045932544708251956, 0.045835777282714846, 0.04581631851196289, 0.04565343856811523, 0.04571807861328125, 0.04562636947631836, 0.04571043014526367, 0.045617057800292966, 0.045879295349121094, 0.04570316696166992, 0.045682689666748044, 0.045676544189453126, 0.04567244720458984, 0.04562684631347656, 0.045529407501220705, 0.0456317138671875, 0.04590739059448242, 0.04571503829956055, 0.045783870697021486, 0.04570326232910156, 0.045793342590332034, 0.045752513885498045, 0.045755233764648434, 0.045977760314941406, 0.045880126953125, 0.045809120178222654, 0.045860542297363284, 0.0460184326171875, 0.045954017639160155, 0.04563497543334961, 0.04581846237182617, 0.045795169830322266, 0.045969566345214846, 0.04601241683959961, 0.04604108810424805, 0.0458620491027832, 0.045820766448974606, 0.045840385437011716, 0.046080001831054686, 0.04590703964233398, 0.04588864135742188, 0.046058624267578126, 0.04602521514892578, 0.04845353698730469, 0.04565804672241211, 0.04511385726928711, 0.04543932723999024, 0.04522390365600586, 0.0451297607421875, 0.04523209762573242, 0.045426239013671876, 0.04569955062866211, 0.04552291107177735, 0.04518300628662109, 0.04547113418579102, 0.045492351531982424, 0.04529404830932617, 0.04549856185913086, 0.04547564697265625, 0.04580556869506836, 0.045690879821777344, 0.04538163375854492, 0.04548710250854492, 0.0457237434387207, 0.04587971115112305, 0.045697536468505856, 0.04582918548583984, 0.045760929107666014, 0.04557619094848633, 0.045518367767333985, 0.045456382751464845, 0.045817119598388675, 
0.04554620742797852, 0.04563148880004883, 0.045803680419921874, 0.045803329467773435, 0.04577196884155273, 0.04571017456054687, 0.045795326232910154, 0.04589878463745117, 0.04565248107910156, 0.045697502136230465, 0.04589478302001953, 0.04597849655151367, 0.04588544082641602, 0.04610867309570312, 0.04597555160522461, 0.04595507049560547, 0.045954334259033204, 0.04607577514648437, 0.04598806381225586, 0.04584307098388672, 0.045873153686523435, 0.04605952072143555, 0.045725696563720705, 0.04570521545410156, 0.04614963150024414, 0.04597455978393555, 0.0457287368774414, 0.04589068984985351, 0.0462611198425293, 0.045939777374267576, 0.04581049728393555, 0.0462583999633789, 0.046153343200683594, 0.045943073272705075]",tokens/s,21.906528984440868,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in <listcomp> self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn =
DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated 
process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) 
File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,4967.366656,7338.917888,0.0,6943.670272,6539.1744,s,1,11.2944716796875,11.2944716796875,0.0,11.2944716796875,11.2944716796875,11.2944716796875,11.2944716796875,[11.2944716796875],,kWh,0.00012866641170834327,1.4180290562249682e-05,4.057586579400696e-05,0.0001834225680645999,,MB,5049.356288,7634.61632,0.0,7224.68864,6917.39904,s,10,1.9967895507812499,0.199678955078125,0.0007747060688834615,0.19981849670410157,0.20028613281250002,0.20068047332763672,0.20099594573974608,"[0.19812553405761718, 0.19894613647460938, 0.19987551879882812, 0.19898886108398436, 0.19967721557617188, 0.199761474609375, 0.200023193359375, 0.20019850158691407, 0.20011830139160156, 0.20107481384277343]",tokens/s,1282.05799103786,kWh,5.856164737500043e-06,6.455097372657867e-07,3.867208649319942e-06,1.036888312408577e-05,tokens/kWh,24689255.046702214,MB,5053.059072,7636.713472,0.0,7226.785792,6917.4016,s,10,19.06018884277344,1.9060188842773438,0.010296499028534297,1.9040180053710938,1.9154139160156252,1.922641162109375,1.928422958984375,"[1.929868408203125, 1.9029803466796875, 1.89453857421875, 1.9039404296875, 1.906664306640625, 1.913807861328125, 1.892323974609375, 1.8991402587890625, 1.9040955810546876, 1.9128291015625]",tokens/s,33.05318773055394,kWh,5.549881273124834e-05,6.121207348230365e-06,3.697640735888019e-05,9.859642743835888e-05,tokens/kWh,638968.3849284167,,s,630,19.05779961586,0.03025047558073014,0.00047673951892125124,0.03013876724243164,0.03063316173553467,0.03107121591567993,0.03180391098022461,"[0.03129859161376953, 0.030673088073730467, 0.031146303176879882, 0.03107683181762695, 0.03062326431274414, 0.030982624053955077, 0.030506240844726563, 0.030615903854370116, 0.030566816329956056, 0.030770944595336913, 0.031137439727783205, 0.030509599685668944, 0.030431295394897463, 0.030519296646118164, 0.03037798309326172, 0.03042918395996094, 0.030146080017089842, 0.03040239906311035, 0.03081484794616699, 0.03061759948730469, 0.030752288818359376, 0.030439456939697265, 0.030501312255859375, 0.030496768951416016, 0.030338495254516602, 0.03172614479064941, 0.03254886245727539, 0.031055231094360352, 0.030732927322387697, 0.0305798397064209, 0.03080691146850586, 0.0314202880859375, 0.030592575073242187, 0.030406879425048827, 0.03052783966064453, 0.030463199615478515, 0.030421791076660157, 0.030592063903808593, 0.030530496597290038, 0.030561344146728515, 0.0306426887512207, 0.03071996879577637, 0.031687135696411135, 0.030390272140502928, 0.030363071441650392, 0.03035100746154785, 0.030586944580078126, 0.030501535415649414, 0.030496959686279298, 0.030238399505615233, 0.03043177604675293, 0.0306296329498291, 0.030708959579467773, 0.030554815292358397, 0.03055628776550293, 0.030447616577148437, 0.03038822364807129, 0.030095359802246095, 0.03027519989013672, 0.03030393600463867, 0.030103872299194336, 0.030042112350463866, 0.029989248275756837, 0.0314582405090332, 0.030508384704589844, 0.03055174446105957, 0.03043414306640625, 0.03021536064147949, 0.03033580780029297, 0.030102943420410155, 0.03015331268310547, 0.030193727493286134, 0.030291904449462892, 0.031477407455444334, 0.03134499168395996, 0.03028700828552246, 0.03023548889160156, 0.030345216751098632, 0.03040460777282715, 0.03014067268371582, 0.030095104217529298, 0.029926431655883788, 0.03021308708190918, 0.030006464004516602, 0.030372575759887697, 0.032732513427734374, 0.030339359283447265, 0.030513023376464842, 0.030187328338623046, 0.030294815063476564, 0.030454912185668946, 0.030013952255249023, 
0.02992086410522461, 0.029970495223999024, 0.029944543838500978, 0.02987238311767578, 0.030137216567993164, 0.02994166374206543, 0.029778911590576173, 0.02973695945739746, 0.02992313575744629, 0.030015296936035156, 0.030003583908081055, 0.029841535568237303, 0.029795679092407226, 0.029800832748413084, 0.02985795211791992, 0.029811872482299804, 0.02978895950317383, 0.02984457588195801, 0.02999385643005371, 0.03011996841430664, 0.030484575271606446, 0.030021440505981444, 0.030058080673217774, 0.029975103378295898, 0.030066688537597655, 0.030068288803100585, 0.030136159896850586, 0.029958751678466795, 0.029927167892456055, 0.029927200317382812, 0.029923807144165038, 0.03038003158569336, 0.02999465560913086, 0.030079328536987304, 0.03134668731689453, 0.03058073616027832, 0.03002787208557129, 0.029870336532592773, 0.029870880126953124, 0.02986892890930176, 0.02982659149169922, 0.030126560211181642, 0.03019164848327637, 0.030375904083251953, 0.03038934326171875, 0.0300677433013916, 0.030160512924194336, 0.029890815734863282, 0.030056671142578126, 0.02994767951965332, 0.029908992767333983, 0.030062591552734375, 0.030203903198242187, 0.030042335510253905, 0.030232351303100587, 0.029921152114868163, 0.02990662384033203, 0.03006892776489258, 0.030767168045043945, 0.02984940719604492, 0.0299237117767334, 0.030117664337158203, 0.03027292823791504, 0.030085952758789062, 0.030187231063842773, 0.03385782241821289, 0.030023679733276368, 0.02983443260192871, 0.02987273597717285, 0.029943552017211914, 0.030114336013793944, 0.030416831970214844, 0.02996227264404297, 0.029786079406738282, 0.029722335815429688, 0.02996169662475586, 0.02999193572998047, 0.029967327117919922, 0.029780832290649414, 0.02987606430053711, 0.029664640426635743, 0.02976028823852539, 0.02977177619934082, 0.02975334358215332, 0.029771520614624025, 0.030120288848876953, 0.029984031677246094, 0.0298604793548584, 0.02973695945739746, 0.02977382469177246, 0.02976972770690918, 0.030029247283935547, 0.02978054428100586, 0.02981622314453125, 0.02970844841003418, 0.029781919479370117, 0.029979167938232423, 0.03127779197692871, 0.030313440322875976, 0.030029951095581056, 0.029896575927734374, 0.02984934425354004, 0.029720191955566407, 0.02984204864501953, 0.029945375442504883, 0.030240671157836914, 0.030122592926025392, 0.029882144927978516, 0.029956287384033203, 0.029824384689331053, 0.02993404769897461, 0.029838911056518556, 0.030179935455322264, 0.02980611228942871, 0.02988902473449707, 0.029867040634155274, 0.02986899185180664, 0.029949535369873048, 0.03181609535217285, 0.03151615905761719, 0.030721920013427734, 0.030300256729125976, 0.030118431091308594, 0.030154624938964845, 0.029976703643798827, 0.029916608810424804, 0.030039871215820312, 0.030098175048828123, 0.029851648330688478, 0.02980463981628418, 0.029986976623535156, 0.03002956771850586, 0.03019161605834961, 0.029995008468627928, 0.02992153549194336, 0.02994268798828125, 0.030077791213989256, 0.030488576889038086, 0.03014041519165039, 0.030037311553955077, 0.030073055267333983, 0.030243295669555664, 0.03009292793273926, 0.02993814468383789, 0.030149951934814453, 0.031111135482788085, 0.03076995277404785, 0.03083353614807129, 0.030483327865600585, 0.03050931167602539, 0.030560096740722655, 0.03033497619628906, 0.030422304153442385, 0.03006057548522949, 0.030173120498657228, 0.030089984893798827, 0.03158835220336914, 0.030263296127319338, 0.030294015884399415, 0.03035116767883301, 0.030283615112304686, 0.0302127685546875, 0.030044063568115235, 0.029954271316528322, 
0.029872127532958984, 0.030061792373657227, 0.030417695999145507, 0.030330368041992187, 0.030175552368164063, 0.030232736587524414, 0.02997865676879883, 0.03003331184387207, 0.030079296112060547, 0.029929344177246093, 0.030191776275634765, 0.0303819522857666, 0.030412767410278322, 0.03064463996887207, 0.030516416549682616, 0.03091244888305664, 0.03106435203552246, 0.030654144287109376, 0.030440128326416016, 0.03063216018676758, 0.03038003158569336, 0.030208000183105467, 0.030146495819091797, 0.030076992034912108, 0.029937664031982423, 0.02993561553955078, 0.02998624038696289, 0.030069311141967772, 0.029997055053710937, 0.03018454360961914, 0.030229408264160155, 0.030243839263916016, 0.030075904846191406, 0.02996156883239746, 0.03003664016723633, 0.029925376892089843, 0.029923295974731444, 0.030095392227172852, 0.030107648849487304, 0.03008892822265625, 0.030141984939575196, 0.030161376953125, 0.030243104934692382, 0.03025222396850586, 0.03034547233581543, 0.03033964729309082, 0.030948736190795897, 0.030835264205932616, 0.030393951416015624, 0.030439327239990235, 0.030407136917114257, 0.03040880012512207, 0.030279680252075194, 0.03032268714904785, 0.03037295913696289, 0.030321535110473634, 0.03030633544921875, 0.03055014419555664, 0.030279552459716797, 0.031188671112060546, 0.030441791534423827, 0.030107135772705077, 0.030621856689453126, 0.03038857650756836, 0.030642175674438478, 0.030537248611450196, 0.031631839752197265, 0.03040460777282715, 0.03137091255187988, 0.03005251121520996, 0.02997228813171387, 0.030007680892944335, 0.03055615997314453, 0.03082057571411133, 0.0305611515045166, 0.030538623809814452, 0.03056630325317383, 0.030550079345703127, 0.030353471755981444, 0.03060940742492676, 0.030556095123291015, 0.030343231201171876, 0.030060415267944337, 0.030159231185913085, 0.030228416442871095, 0.029913087844848633, 0.03017046356201172, 0.030073312759399413, 0.03034316825866699, 0.030430816650390626, 0.03053401565551758, 0.030408544540405272, 0.03039174461364746, 0.030246912002563478, 0.030294015884399415, 0.029952127456665038, 0.029970655441284178, 0.029894880294799805, 0.029890016555786134, 0.029890399932861328, 0.029889408111572265, 0.0299683837890625, 0.030014816284179686, 0.030849472045898437, 0.03034339141845703, 0.03051683235168457, 0.030314048767089843, 0.03025200080871582, 0.030181343078613282, 0.030070688247680662, 0.03010291290283203, 0.0309736328125, 0.030395263671875, 0.03004800033569336, 0.030218559265136717, 0.030455135345458986, 0.03018169593811035, 0.030140127182006836, 0.03191849517822266, 0.03042460823059082, 0.030165119171142576, 0.03045609664916992, 0.03101750373840332, 0.030507232666015623, 0.03056025505065918, 0.03038585662841797, 0.03057695960998535, 0.030148479461669923, 0.030130271911621095, 0.03000476837158203, 0.029847488403320313, 0.029998975753784178, 0.030148479461669923, 0.029969215393066406, 0.02988822364807129, 0.03003343963623047, 0.02985241508483887, 0.030066015243530274, 0.029794975280761717, 0.02985958480834961, 0.029802240371704102, 0.029825056076049804, 0.029839839935302735, 0.02975446319580078, 0.02974812889099121, 0.0297524471282959, 0.029846399307250977, 0.029747200012207032, 0.029717824935913087, 0.02983919906616211, 0.029868896484375, 0.02977996826171875, 0.030078975677490235, 0.030448896408081055, 0.030288576126098633, 0.029879871368408202, 0.02991360092163086, 0.02977177619934082, 0.029781536102294923, 0.030060800552368164, 0.02974332809448242, 0.02977961540222168, 0.02971683120727539, 0.029886463165283202, 0.02974211120605469, 
0.03030729675292969, 0.029982336044311525, 0.02989913558959961, 0.029829248428344727, 0.029757312774658203, 0.02993152046203613, 0.030043743133544923, 0.029858207702636717, 0.030466047286987305, 0.030365695953369142, 0.030257152557373046, 0.030093311309814453, 0.030107648849487304, 0.03097599983215332, 0.03033497619628906, 0.03039232063293457, 0.030115840911865234, 0.02995199966430664, 0.030086847305297853, 0.029913408279418945, 0.030811840057373047, 0.030533632278442382, 0.030552831649780274, 0.030233983993530274, 0.030122623443603516, 0.030205215454101562, 0.030100191116333008, 0.03040412712097168, 0.029970912933349608, 0.03018067169189453, 0.029921663284301757, 0.030062816619873048, 0.030090335845947266, 0.03045084762573242, 0.030275423049926756, 0.030496351242065428, 0.030402816772460938, 0.03033103942871094, 0.03009903907775879, 0.030301952362060548, 0.030081695556640625, 0.030116064071655273, 0.030000064849853517, 0.029823423385620117, 0.029866399765014647, 0.029917184829711913, 0.029921279907226563, 0.03038617515563965, 0.030154367446899415, 0.030320608139038085, 0.029987232208251953, 0.030007295608520508, 0.02989206314086914, 0.029903392791748046, 0.03001753616333008, 0.029808000564575197, 0.02987071990966797, 0.0299204158782959, 0.02983612823486328, 0.02981068801879883, 0.03013222312927246, 0.03017932891845703, 0.03006163215637207, 0.03017375946044922, 0.03005404853820801, 0.029890335083007813, 0.02989148712158203, 0.029914655685424806, 0.02988287925720215, 0.029867040634155274, 0.030338016510009766, 0.029924543380737304, 0.03156870460510254, 0.03017728042602539, 0.030584640502929687, 0.03043756866455078, 0.03020595169067383, 0.030066688537597655, 0.03054182434082031, 0.029912128448486328, 0.030067007064819337, 0.0298338565826416, 0.030023679733276368, 0.030578655242919923, 0.030278591156005858, 0.030046207427978516, 0.029929471969604493, 0.0317071361541748, 0.0305248966217041, 0.029912704467773436, 0.03014672088623047, 0.02995916748046875, 0.03166921615600586, 0.029852447509765626, 0.02991321563720703, 0.0305511360168457, 0.030367679595947265, 0.030507871627807617, 0.030484672546386718, 0.031165855407714844, 0.030007711410522463, 0.0299718074798584, 0.029878944396972657, 0.03018137550354004, 0.03014860725402832, 0.029990976333618163, 0.02982700729370117, 0.029738624572753905, 0.03020841598510742, 0.03008665657043457, 0.030452192306518553, 0.03043328094482422, 0.03030624008178711, 0.03030431938171387, 0.030345216751098632, 0.030031871795654298, 0.029886463165283202, 0.02999513626098633, 0.02978332710266113, 0.030052959442138674, 0.029742143630981446, 0.029961151123046877, 0.030121984481811522, 0.030188831329345703, 0.030193471908569337, 0.030485408782958984, 0.030726144790649414, 0.03041663932800293, 0.029877504348754882, 0.02998284721374512, 0.029931711196899413, 0.030446271896362304, 0.029997055053710937, 0.030027584075927736, 0.03006892776489258, 0.030292064666748046, 0.03055779266357422, 0.030587200164794923, 0.030398176193237304, 0.0304597110748291, 0.030101984024047852, 0.03023052787780762, 0.029999359130859375, 0.029894399642944335, 0.030021631240844726, 0.02993152046203613, 0.03174355125427246, 0.030837568283081054, 0.030349311828613282, 0.030137407302856446, 0.030045120239257813, 0.029968671798706055, 0.030154464721679687, 0.030324735641479493, 0.029997055053710937, 0.03034716796875, 0.030263391494750977, 0.03030793571472168, 0.030294240951538084, 0.03177408027648926, 0.03032761573791504, 0.030089151382446288, 0.03006591987609863, 0.030472959518432617, 
0.029880096435546875, 0.030283199310302735, 0.029752159118652345, 0.02980454444885254, 0.029884416580200194, 0.03009328079223633, 0.029855775833129882, 0.030046207427978516, 0.029988000869750977, 0.030093631744384765, 0.029975072860717773, 0.030078592300415038, 0.030038335800170898, 0.029921344757080078, 0.03014566421508789, 0.0304169921875, 0.030487039566040038, 0.030277824401855467, 0.030306400299072264, 0.030508287429809572, 0.030558687210083008, 0.033962272644042966, 0.030544992446899413, 0.030407583236694336, 0.03018288040161133, 0.03015292739868164, 0.030736576080322264, 0.030195775985717772, 0.030109888076782228, 0.029957952499389647, 0.029958208084106444, 0.030044160842895507, 0.02999087905883789, 0.030283456802368165, 0.0304268798828125, 0.03010982322692871, 0.02992313575744629, 0.029978624343872072, 0.029932191848754883, 0.02995167922973633, 0.030418495178222656, 0.02991961669921875, 0.03160623931884766, 0.033751617431640624, 0.03011961555480957]",tokens/s,33.057331522979766,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,11136.79872,12227.3792,0.0,11848.9088,11814.752256,s,1,16.58408203125,16.58408203125,0.0,16.58408203125,16.58408203125,16.58408203125,16.58408203125,[16.58408203125],,kWh,0.00027038696720004126,2.9818376568283732e-05,8.865979315003658e-05,0.0003888651369183616,,MB,2071.98208,14033.027072,0.0,13625.196544,13297.870848,s,10,22.904132568359376,2.2904132568359374,0.0012868454753217756,2.2905863037109375,2.2922088623046877,2.292209411621094,2.292209851074219,"[2.292208740234375, 2.2922099609375, 
2.29136376953125, 2.291201171875, 2.29030224609375, 2.288611328125, 2.28920458984375, 2.290870361328125, 2.288856201171875, 2.28930419921875]",tokens/s,111.77022279099448,kWh,6.670982737708072e-05,7.357757209983291e-06,4.435417437220202e-05,0.00011842175895926602,tokens/kWh,2161764.8838340365,MB,2079.465472,14335.01696,0.0,13927.186432,13689.870848,s,10,1355.52684375,135.55268437499998,0.057629119548384934,135.5256875,135.6300109375,135.65389609375,135.67300421875,"[135.67778125, 135.624703125, 135.585671875, 135.569046875, 135.532953125, 135.497828125, 135.507578125, 135.518421875, 135.51459375, 135.498265625]",tokens/s,0.4647639424514348,kWh,0.003951603366722094,0.0004358923515490394,0.0026285220194825986,0.0070160177377537325,tokens/kWh,8979.452782878832,,s,630,1355.5206533203132,2.151620084635417,0.0010979530220220387,2.1514691162109374,2.153191381835937,2.1536974487304685,2.15430625,"[2.15374072265625, 2.152707763671875, 2.152853271484375, 2.152304443359375, 2.15255126953125, 2.15317822265625, 2.15230126953125, 2.15374462890625, 2.152818603515625, 2.153167724609375, 2.15337890625, 2.1532265625, 2.153262939453125, 2.153539794921875, 2.153477294921875, 2.152826904296875, 2.15331640625, 2.153724609375, 2.15341259765625, 2.153799560546875, 2.153301513671875, 2.15342529296875, 2.152824951171875, 2.1533818359375, 2.1532939453125, 2.153029052734375, 2.153637939453125, 2.15375732421875, 2.153396240234375, 2.154084228515625, 2.15372607421875, 2.154080322265625, 2.153596923828125, 2.15418798828125, 2.153638671875, 2.153900146484375, 2.15345556640625, 2.1538623046875, 2.1537548828125, 2.153672607421875, 2.15450830078125, 2.153498779296875, 2.15490625, 2.1540546875, 2.154407958984375, 2.154281982421875, 2.153531005859375, 2.1537177734375, 2.153956787109375, 2.15450927734375, 2.153934814453125, 2.15406591796875, 2.15435888671875, 2.1535478515625, 2.154036376953125, 2.1539912109375, 2.1544140625, 2.1534453125, 2.153170166015625, 2.154316162109375, 2.154031494140625, 2.1533955078125, 2.15377197265625, 2.153185302734375, 2.15213427734375, 2.152380126953125, 2.152184326171875, 2.1523232421875, 2.152303955078125, 2.151925537109375, 2.152924072265625, 2.15286083984375, 2.152084228515625, 2.1526240234375, 2.151804931640625, 2.15241943359375, 2.1530869140625, 2.152595458984375, 2.15252392578125, 2.152662841796875, 2.152728759765625, 2.152658935546875, 2.15320361328125, 2.1518603515625, 2.152887451171875, 2.152725341796875, 2.15288427734375, 2.15361767578125, 2.1521552734375, 2.152619873046875, 2.153019287109375, 2.15226171875, 2.1537685546875, 2.152908935546875, 2.152489990234375, 2.152644775390625, 2.15259326171875, 2.152498046875, 2.152840576171875, 2.152521728515625, 2.152732666015625, 2.153362060546875, 2.153743896484375, 2.152993408203125, 2.152506591796875, 2.153020263671875, 2.154104736328125, 2.152980712890625, 2.152964111328125, 2.152489990234375, 2.15295458984375, 2.152462646484375, 2.153101318359375, 2.15288818359375, 2.152739990234375, 2.153175537109375, 2.152466796875, 2.153329833984375, 2.152919921875, 2.152263671875, 2.15319091796875, 2.15310595703125, 2.153195556640625, 2.152091552734375, 2.15292919921875, 2.153471923828125, 2.152052734375, 2.15093359375, 2.151443603515625, 2.151208740234375, 2.151665771484375, 2.15092431640625, 2.151182373046875, 2.151501953125, 2.151745361328125, 2.151505859375, 2.151245849609375, 2.15233251953125, 2.151381103515625, 2.1523515625, 2.1514228515625, 2.151843017578125, 2.15215576171875, 2.15196484375, 2.151256103515625, 2.15206494140625, 2.1517880859375, 
2.1520634765625, 2.1513359375, 2.152428466796875, 2.15237939453125, 2.152940673828125, 2.151484375, 2.15225537109375, 2.1527265625, 2.152265625, 2.153132080078125, 2.15225244140625, 2.152393798828125, 2.152676513671875, 2.152208984375, 2.152499267578125, 2.15231884765625, 2.151991455078125, 2.152485107421875, 2.152724365234375, 2.152110107421875, 2.1523330078125, 2.15228466796875, 2.15233544921875, 2.15267919921875, 2.152990966796875, 2.1522646484375, 2.15244921875, 2.152156982421875, 2.153568359375, 2.15230419921875, 2.152249755859375, 2.151930908203125, 2.15242138671875, 2.152317138671875, 2.153120361328125, 2.152474609375, 2.152212646484375, 2.152177001953125, 2.152445556640625, 2.152672119140625, 2.1532548828125, 2.15179052734375, 2.150914794921875, 2.1508173828125, 2.151113037109375, 2.15136474609375, 2.151604248046875, 2.151395263671875, 2.15161767578125, 2.15109716796875, 2.151607421875, 2.150910888671875, 2.15164306640625, 2.151921630859375, 2.15119775390625, 2.151859130859375, 2.1525478515625, 2.1510517578125, 2.15236376953125, 2.151751220703125, 2.151856689453125, 2.15140966796875, 2.1511865234375, 2.153145751953125, 2.151104736328125, 2.151987548828125, 2.15233251953125, 2.1515537109375, 2.150688720703125, 2.150803466796875, 2.15117822265625, 2.152405029296875, 2.152108154296875, 2.151814697265625, 2.1515673828125, 2.15140625, 2.15215478515625, 2.151510009765625, 2.15209521484375, 2.152355712890625, 2.152592041015625, 2.152163330078125, 2.15205859375, 2.15209375, 2.15196484375, 2.152599853515625, 2.151763427734375, 2.151579345703125, 2.152060791015625, 2.15278662109375, 2.1517724609375, 2.153413818359375, 2.151795654296875, 2.152584716796875, 2.152, 2.15275537109375, 2.152795654296875, 2.151948486328125, 2.152388671875, 2.15271533203125, 2.151862060546875, 2.1522646484375, 2.152122314453125, 2.152468505859375, 2.152443359375, 2.15089306640625, 2.150482177734375, 2.15049267578125, 2.15028125, 2.150667236328125, 2.151060546875, 2.15079052734375, 2.15144873046875, 2.150859130859375, 2.1505966796875, 2.15177001953125, 2.15014404296875, 2.151184326171875, 2.1510185546875, 2.1508076171875, 2.151145751953125, 2.15081103515625, 2.1512626953125, 2.150821044921875, 2.1516171875, 2.150609130859375, 2.15122900390625, 2.15109326171875, 2.1516748046875, 2.15079931640625, 2.151235595703125, 2.150504150390625, 2.151947998046875, 2.15126904296875, 2.15168408203125, 2.15206103515625, 2.151129150390625, 2.15129248046875, 2.15198046875, 2.151365478515625, 2.151548583984375, 2.15148291015625, 2.1516533203125, 2.1513544921875, 2.151612548828125, 2.150863525390625, 2.15126123046875, 2.15179052734375, 2.1515458984375, 2.151701904296875, 2.151739990234375, 2.15112109375, 2.15173876953125, 2.151557861328125, 2.1519248046875, 2.1510234375, 2.15153662109375, 2.151571533203125, 2.15143212890625, 2.152637939453125, 2.151794677734375, 2.1523251953125, 2.15185400390625, 2.151250244140625, 2.1512763671875, 2.151165771484375, 2.15204638671875, 2.15148388671875, 2.150702392578125, 2.14983154296875, 2.15018896484375, 2.149712158203125, 2.150517578125, 2.149858154296875, 2.150455322265625, 2.14996484375, 2.149432373046875, 2.150497802734375, 2.149911865234375, 2.150378662109375, 2.14996923828125, 2.150060302734375, 2.14969140625, 2.150126708984375, 2.150521728515625, 2.15058837890625, 2.149822265625, 2.149945556640625, 2.150328369140625, 2.15119677734375, 2.149961669921875, 2.15074609375, 2.15079931640625, 2.15058642578125, 2.150466796875, 2.15084619140625, 2.150916748046875, 2.15092041015625, 2.151034912109375, 
2.150868896484375, 2.151144775390625, 2.151258544921875, 2.15111474609375, 2.15045556640625, 2.150823974609375, 2.150533203125, 2.1515546875, 2.1513095703125, 2.151034912109375, 2.15058837890625, 2.15149560546875, 2.151301025390625, 2.150927978515625, 2.151443115234375, 2.15052294921875, 2.15045654296875, 2.15022265625, 2.152161376953125, 2.1504658203125, 2.15101416015625, 2.15065185546875, 2.151403076171875, 2.1523974609375, 2.151395263671875, 2.150956787109375, 2.152105224609375, 2.150964111328125, 2.152171630859375, 2.151329833984375, 2.151540771484375, 2.151630615234375, 2.14989013671875, 2.14916650390625, 2.149771728515625, 2.14993310546875, 2.149523681640625, 2.15041357421875, 2.149125, 2.150731689453125, 2.15006982421875, 2.150005126953125, 2.149689453125, 2.15046728515625, 2.150459228515625, 2.150856689453125, 2.150032958984375, 2.151127197265625, 2.150333251953125, 2.150701171875, 2.14982568359375, 2.150210205078125, 2.15022412109375, 2.1509560546875, 2.15025439453125, 2.151168701171875, 2.15073583984375, 2.15011328125, 2.15008203125, 2.1519951171875, 2.150269775390625, 2.1510556640625, 2.150285400390625, 2.151147216796875, 2.150312255859375, 2.151153564453125, 2.151604248046875, 2.151036865234375, 2.15113330078125, 2.151172119140625, 2.15152978515625, 2.15193408203125, 2.151918212890625, 2.150645751953125, 2.15139111328125, 2.1517958984375, 2.1515087890625, 2.151620849609375, 2.151088134765625, 2.151255126953125, 2.150869873046875, 2.152009765625, 2.151751708984375, 2.151941650390625, 2.15211962890625, 2.152193115234375, 2.15191748046875, 2.151174072265625, 2.151095458984375, 2.15147412109375, 2.1513125, 2.1514921875, 2.152072998046875, 2.152081787109375, 2.151716796875, 2.15088427734375, 2.149719482421875, 2.150300048828125, 2.150454833984375, 2.15006640625, 2.151052734375, 2.149905029296875, 2.15017822265625, 2.1502412109375, 2.15087451171875, 2.15023193359375, 2.15002783203125, 2.150787109375, 2.150412353515625, 2.150989501953125, 2.150674560546875, 2.15045947265625, 2.15073974609375, 2.150287109375, 2.150994140625, 2.151113037109375, 2.15100830078125, 2.15099365234375, 2.150598876953125, 2.151919677734375, 2.151110595703125, 2.150681884765625, 2.15150634765625, 2.15130908203125, 2.1512744140625, 2.152123046875, 2.151403564453125, 2.15124609375, 2.15190087890625, 2.15092626953125, 2.151630126953125, 2.15122412109375, 2.151163818359375, 2.150731201171875, 2.151129150390625, 2.151237548828125, 2.15116259765625, 2.151464111328125, 2.150979248046875, 2.1516767578125, 2.15100830078125, 2.151354248046875, 2.151202880859375, 2.15159716796875, 2.152220703125, 2.151686279296875, 2.1517109375, 2.151391845703125, 2.151088134765625, 2.151636962890625, 2.15196875, 2.151333984375, 2.151794677734375, 2.151206787109375, 2.151443359375, 2.151432373046875, 2.151592529296875, 2.151311279296875, 2.1513984375, 2.150262939453125, 2.1506396484375, 2.149322021484375, 2.15015283203125, 2.150506103515625, 2.149909912109375, 2.15073681640625, 2.150552734375, 2.150275146484375, 2.15019189453125, 2.150582275390625, 2.1506826171875, 2.15127978515625, 2.150322998046875, 2.150883544921875, 2.150467529296875, 2.151138916015625, 2.15074658203125, 2.150599853515625, 2.149941650390625, 2.151123291015625, 2.151288818359375, 2.151434326171875, 2.1514072265625, 2.15140380859375, 2.15039794921875, 2.1509931640625, 2.150472412109375, 2.15136669921875, 2.150916015625, 2.151755859375, 2.15052685546875, 2.150698974609375, 2.151047119140625, 2.150866943359375, 2.15036474609375, 2.150756591796875, 2.1507783203125, 
2.1514296875, 2.151457275390625, 2.15137451171875, 2.1515224609375, 2.151106201171875, 2.151357421875, 2.15172509765625, 2.150919189453125, 2.151572509765625, 2.151522216796875, 2.152494140625, 2.15128125, 2.1516044921875, 2.151712646484375, 2.151427978515625, 2.151321044921875, 2.151582275390625, 2.1514423828125, 2.15172412109375, 2.15140625, 2.152030517578125, 2.1506416015625, 2.1513359375, 2.151771484375, 2.150775390625, 2.149087158203125, 2.149661865234375, 2.149651123046875, 2.14935888671875, 2.149644287109375, 2.1498623046875, 2.15006103515625, 2.14955615234375, 2.150403076171875, 2.1497666015625, 2.149818359375, 2.14995556640625, 2.150998291015625, 2.150135986328125, 2.150268310546875, 2.150007080078125, 2.149869873046875, 2.151012451171875, 2.150572021484375, 2.149473876953125, 2.150074462890625, 2.15087353515625, 2.15081884765625, 2.149972900390625, 2.1505576171875, 2.150240234375, 2.15158984375, 2.150319580078125, 2.150472412109375, 2.1510146484375, 2.1510009765625, 2.151279541015625, 2.15128857421875, 2.150300048828125, 2.15108203125, 2.150709228515625, 2.15055322265625, 2.151600341796875, 2.1515654296875, 2.151026611328125, 2.151, 2.151370849609375, 2.151364501953125, 2.1511015625, 2.15116259765625, 2.1517353515625, 2.1517578125, 2.1517373046875, 2.15187451171875, 2.15133349609375, 2.151178466796875, 2.151184326171875, 2.1522880859375, 2.152058349609375, 2.151327880859375, 2.1508779296875, 2.151151611328125, 2.151152587890625, 2.15120166015625, 2.151810791015625, 2.15129736328125, 2.15137255859375]",tokens/s,0.464766064948425,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File 
""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,3553.804288,4495.179776,0.0,4116.709376,3980.386816,s,1,10.698505859375,10.698505859375,0.0,10.698505859375,10.698505859375,10.698505859375,10.698505859375,[10.698505859375],,kWh,9.446468462082861e-05,1.0412533916205323e-05,3.08761358120066e-05,0.00013575335434904052,,MB,3569.58208,4826.529792,0.0,4418.699264,4245.764608,s,10,6.540985412597656,0.6540985412597655,0.0014622330901031934,0.6539962463378907,0.6554122009277343,0.6563269500732422,0.6570587493896484,"[0.65141357421875, 0.6533139038085938, 0.6548538208007812, 0.6540926513671875, 0.652806884765625, 0.6543641967773437, 0.65724169921875, 0.6552089233398437, 0.6538998413085938, 0.6537899169921875]",tokens/s,391.37833805125916,kWh,1.9062787973176872e-05,2.1022905774729127e-06,1.263961080612535e-05,3.380468935677513e-05,tokens/kWh,7572913.843347965,MB,3576.631296,4837.015552,0.0,4429.185024,4245.767168,s,10,385.14350781249993,38.514350781249995,0.015261880598067006,38.517564453125004,38.528111328125,38.5308974609375,38.5331263671875,"[38.48181640625, 38.49543359375, 38.50627734375, 38.51169140625, 38.51723046875, 38.5178984375, 38.52492578125, 38.52705859375, 38.53368359375, 38.5274921875]",tokens/s,1.6357539130757046,kWh,0.0011235635662026559,0.0001239374439319332,0.0007470703580724743,0.0019945713682070635,tokens/kWh,31585.73365897216,,s,630,385.13932885742145,0.6113322680276537,0.0005062537275485813,0.6113420104980469,0.6119678344726562,0.6121722381591798,0.6125492669677735,"[0.6109921264648438, 0.6100316162109375, 0.611051513671875, 0.6100374145507812, 0.6104026489257812, 0.61068017578125, 0.6101295776367187, 0.6103638305664062, 0.61080810546875, 0.6105416870117187, 0.6103138427734375, 0.6109783325195313, 0.6102640991210937, 0.61072216796875, 0.610486572265625, 0.6104078979492188, 0.6116115112304688, 0.6101176147460937, 0.6113382568359375, 0.6105477294921875, 0.6103302001953125, 0.6110244140625, 0.6103285522460937, 0.6104708251953125, 0.61085693359375, 0.6103161010742187, 0.6109573364257812, 0.6105985107421875, 0.610902587890625, 0.6105989379882812, 0.6109214477539062, 0.61062451171875, 0.610783203125, 0.6112945556640625, 0.6108182983398438, 0.6106873779296875, 0.6106516723632812, 0.6108145141601562, 0.6111149291992187, 0.6111642456054688, 0.6110392456054687, 0.6110167236328125, 0.6107685546875, 0.6111071166992188, 0.6108098754882813, 0.6105782470703125, 0.611965087890625, 0.6104343872070312, 0.6109550170898438, 0.6109105224609375, 0.6109763793945312, 0.6111295776367187, 
0.611034912109375, 0.6109859619140625, 0.6112991943359375, 0.6115060424804688, 0.6106196899414063, 0.611409423828125, 0.6109086303710938, 0.6111492309570312, 0.6111985473632813, 0.611876953125, 0.6106612548828125, 0.6112717895507812, 0.6104872436523437, 0.610522705078125, 0.610563720703125, 0.6107140502929688, 0.6106519775390625, 0.6105010375976563, 0.6112579956054688, 0.6113301391601562, 0.6104927368164063, 0.6113546142578125, 0.6107176513671875, 0.6111146240234375, 0.6111682739257812, 0.6106771240234375, 0.61136279296875, 0.610326416015625, 0.6114895629882813, 0.6102262573242188, 0.6108101196289063, 0.6111925048828125, 0.6111926879882813, 0.6110531005859375, 0.610546630859375, 0.6112174072265625, 0.6107294921875, 0.6116746826171875, 0.6105430297851563, 0.6113797607421875, 0.6106009521484375, 0.611178466796875, 0.6106419067382812, 0.6110119018554687, 0.6121970825195312, 0.6102078247070313, 0.6113463745117188, 0.611293212890625, 0.6107523193359375, 0.611599609375, 0.6104749755859376, 0.611045166015625, 0.610963623046875, 0.610951171875, 0.61168212890625, 0.6109185791015626, 0.6111758422851562, 0.6113695068359375, 0.6109900512695312, 0.6115401611328125, 0.610949951171875, 0.6108753662109375, 0.6111744384765625, 0.6108746948242187, 0.6114636840820312, 0.611250244140625, 0.610959228515625, 0.6115820922851563, 0.61092041015625, 0.6112987060546875, 0.6110767822265625, 0.611411865234375, 0.6119174194335938, 0.610774658203125, 0.611037353515625, 0.610566162109375, 0.611219482421875, 0.6104780883789063, 0.6116825561523438, 0.6103765869140625, 0.6105445556640625, 0.6110844116210937, 0.6112127075195313, 0.6108016357421875, 0.6114279174804688, 0.610241455078125, 0.6113211669921875, 0.6111033935546875, 0.6113541870117187, 0.611105224609375, 0.6106083374023438, 0.611092529296875, 0.611721435546875, 0.6106875610351562, 0.6114488525390624, 0.6113054809570313, 0.6107791137695312, 0.6112071533203125, 0.6111641845703125, 0.6112214965820313, 0.6112781372070313, 0.6113916015625, 0.610779296875, 0.6114451904296875, 0.6112849731445312, 0.6113416748046875, 0.6112028198242188, 0.6110396728515625, 0.6113751220703125, 0.6110286254882813, 0.6112623291015625, 0.6121746826171875, 0.610402587890625, 0.6124005737304687, 0.6108922119140625, 0.611768310546875, 0.6109204711914062, 0.6111437377929687, 0.6113484497070313, 0.6110239868164062, 0.6111959838867187, 0.6112222900390625, 0.6117939453125, 0.611330078125, 0.611583984375, 0.6110422973632812, 0.6113079223632812, 0.6110150756835937, 0.6115774536132812, 0.6112814331054688, 0.61088525390625, 0.610998779296875, 0.6116351928710938, 0.6121427001953125, 0.6112935791015625, 0.611651611328125, 0.6115852661132812, 0.61158984375, 0.6109527587890625, 0.610888427734375, 0.6115921630859374, 0.6106536254882813, 0.6108473510742187, 0.6118167114257812, 0.6111113891601563, 0.6113118896484375, 0.6108585205078125, 0.6109044189453126, 0.6109605712890624, 0.6112481689453125, 0.6114147338867187, 0.610752685546875, 0.6111846313476562, 0.6110863647460938, 0.6110222778320312, 0.6114710083007813, 0.6109985961914063, 0.6114678344726563, 0.6113565673828125, 0.6111417846679688, 0.6116593627929687, 0.6111826782226563, 0.6116703491210937, 0.6109490966796876, 0.6115655517578125, 0.610693115234375, 0.6110773315429687, 0.6111281127929687, 0.6118500366210937, 0.6110473022460937, 0.6114962768554687, 0.6114857177734375, 0.6112329711914063, 0.61167822265625, 0.6107901611328125, 0.6116265869140625, 0.6112479248046875, 0.61144873046875, 0.61187109375, 0.6102879638671875, 0.6122516479492187, 0.6111025390625, 
0.6116984252929687, 0.6117682495117187, 0.610472412109375, 0.6118174438476562, 0.6105640869140625, 0.611926025390625, 0.611567138671875, 0.6107017822265625, 0.611706298828125, 0.6110521240234374, 0.6117572021484375, 0.612184814453125, 0.6106842041015625, 0.6116370849609375, 0.6110784301757812, 0.6120189208984375, 0.611293212890625, 0.6113382568359375, 0.6117147216796875, 0.6109002075195312, 0.6115035400390625, 0.610646728515625, 0.61136279296875, 0.6111211547851563, 0.6111058959960938, 0.6116148071289063, 0.6106111450195313, 0.611332275390625, 0.6105990600585938, 0.6115392456054688, 0.6115084838867187, 0.6101905517578124, 0.6114943237304687, 0.6112234497070312, 0.611407470703125, 0.6116044921875, 0.61050146484375, 0.6120794067382812, 0.6108378295898438, 0.6114108276367187, 0.611237548828125, 0.6108038330078125, 0.611182861328125, 0.612284423828125, 0.6106126098632813, 0.612206298828125, 0.6105542602539062, 0.6119552612304687, 0.6112861328125, 0.6112113037109375, 0.6112544555664062, 0.6114638061523437, 0.611952392578125, 0.6113582763671875, 0.610810546875, 0.6126079711914062, 0.6107545776367187, 0.6121326904296875, 0.61099365234375, 0.61170556640625, 0.6126713256835937, 0.6104024658203125, 0.6125906982421875, 0.6105870971679688, 0.6122266845703125, 0.6111973876953125, 0.6116929321289063, 0.610832275390625, 0.6116475219726563, 0.6121077880859375, 0.6109210205078125, 0.6118068237304688, 0.6112506103515625, 0.6116597900390625, 0.6113929443359375, 0.61121142578125, 0.6117564697265625, 0.6117857666015625, 0.6112423095703124, 0.6110398559570313, 0.6121041870117188, 0.6117105712890625, 0.6112811279296875, 0.611064208984375, 0.6111959838867187, 0.6108968505859375, 0.6108692626953125, 0.611430419921875, 0.6108098754882813, 0.6111761474609375, 0.6110431518554688, 0.6113449096679687, 0.6107310180664063, 0.6115316772460937, 0.6114692993164063, 0.6104757080078125, 0.6113712158203125, 0.6112704467773438, 0.6113427124023437, 0.6115717163085937, 0.6105310668945313, 0.61163330078125, 0.6108427734375, 0.6118502197265625, 0.6115921630859374, 0.6103900146484375, 0.6117611694335937, 0.6107698974609375, 0.611978271484375, 0.6117611083984374, 0.6107955322265625, 0.6119133911132812, 0.6106209716796875, 0.6123458862304687, 0.611243896484375, 0.611052734375, 0.6119125366210938, 0.6110679931640625, 0.6112162475585937, 0.6125850830078124, 0.6105450439453125, 0.6122443237304688, 0.6114215698242188, 0.6118607788085938, 0.6113755493164063, 0.6115061645507812, 0.611826904296875, 0.6110618286132813, 0.6113306274414062, 0.6119398803710937, 0.6121388549804687, 0.611496826171875, 0.6111968383789063, 0.61158154296875, 0.6117154541015625, 0.611382568359375, 0.611432861328125, 0.6111724243164063, 0.6114041748046875, 0.61209521484375, 0.6118342895507812, 0.6113487548828125, 0.612005859375, 0.6110945434570313, 0.6110287475585937, 0.6114531860351563, 0.6117652587890625, 0.610704345703125, 0.6109407348632813, 0.6114449462890625, 0.6114954223632812, 0.6116541748046875, 0.611900390625, 0.6107484741210938, 0.611822021484375, 0.6110479125976562, 0.611915771484375, 0.61170068359375, 0.6108094482421875, 0.6110839233398437, 0.6109970703125, 0.611715087890625, 0.6115327758789062, 0.61136279296875, 0.6114140014648437, 0.611293212890625, 0.61115185546875, 0.6119301147460937, 0.6108995361328124, 0.6124560546875, 0.6110175170898438, 0.6112965087890625, 0.6114058837890625, 0.6116432495117188, 0.6114476318359375, 0.6111863403320312, 0.6121980590820313, 0.6117874145507812, 0.6110812377929687, 0.611695068359375, 0.6110883178710937, 
0.61243603515625, 0.6119996337890625, 0.61095556640625, 0.6116661987304688, 0.610967529296875, 0.6117539672851563, 0.61153271484375, 0.61158203125, 0.6116188354492188, 0.6116188354492188, 0.6107908935546875, 0.6125614624023438, 0.611567626953125, 0.6112808837890625, 0.6116290283203125, 0.6113873901367187, 0.612421630859375, 0.611565185546875, 0.6117789306640625, 0.6108671875, 0.61222216796875, 0.6117747192382812, 0.6116050415039063, 0.6115977172851562, 0.6112446899414062, 0.6119505615234375, 0.6115143432617187, 0.6112133178710938, 0.6109389038085937, 0.6114498291015625, 0.6111221923828125, 0.6121287841796875, 0.6100809326171875, 0.6120098266601562, 0.6109921264648438, 0.6113211059570313, 0.6113121948242187, 0.6112925415039062, 0.6113984375, 0.611454345703125, 0.6114136352539062, 0.6119331665039063, 0.6110739135742187, 0.6118888549804687, 0.6115637817382813, 0.6115280151367187, 0.6106920166015625, 0.6120549926757812, 0.611082275390625, 0.61172119140625, 0.6113543090820313, 0.611282958984375, 0.61156591796875, 0.61148291015625, 0.6115827026367188, 0.61085888671875, 0.6118744506835937, 0.6116233520507812, 0.6119481201171875, 0.61106005859375, 0.61166796875, 0.6118728637695312, 0.610848388671875, 0.612122802734375, 0.6112965087890625, 0.6121966552734375, 0.6116377563476563, 0.611567626953125, 0.6121145629882813, 0.61161181640625, 0.6117364501953125, 0.6111968383789063, 0.6119666748046875, 0.6112833862304687, 0.6117703857421875, 0.6125808715820312, 0.6109168701171875, 0.6113150024414062, 0.6118038330078125, 0.6116127319335938, 0.6124400024414063, 0.6106331176757812, 0.611764892578125, 0.6115382690429687, 0.6115887451171875, 0.6122815551757812, 0.6113696899414063, 0.61158203125, 0.6125175170898437, 0.6118273315429688, 0.6111178588867188, 0.6110796508789063, 0.6114712524414062, 0.6117977905273437, 0.611784912109375, 0.61105322265625, 0.6115532836914063, 0.6113218383789063, 0.6120200805664062, 0.6107853393554687, 0.6117777709960938, 0.6113821411132813, 0.611683837890625, 0.6118856201171875, 0.61125341796875, 0.6108681640625, 0.611375, 0.6117590942382812, 0.6111150512695313, 0.6115455322265625, 0.6114103393554687, 0.6111492919921875, 0.6117642211914063, 0.6117012939453125, 0.6114487915039063, 0.6117152099609375, 0.6112135620117187, 0.6121029663085937, 0.6118154296875, 0.61160498046875, 0.6118670654296875, 0.6117969970703125, 0.6106843872070312, 0.6123680419921875, 0.6115070190429688, 0.6117271728515625, 0.611373046875, 0.61243408203125, 0.6114918212890625, 0.6121692504882813, 0.61119921875, 0.6118868408203125, 0.6116414794921875, 0.611754150390625, 0.6111735229492188, 0.6117667236328125, 0.6120140991210937, 0.6122023315429688, 0.6115181884765625, 0.6117030639648438, 0.6116904907226562, 0.6116195678710937, 0.611751953125, 0.6125194091796875, 0.6119388427734375, 0.6119915771484375, 0.6117601318359375, 0.612121826171875, 0.6117793579101563, 0.6114078979492188, 0.6126141357421875, 0.6114260864257812, 0.6122276000976562, 0.6115369873046875, 0.6108263549804688, 0.6116002807617188, 0.6113702392578125, 0.6110420532226563, 0.6116039428710938, 0.6109025268554688, 0.611217041015625, 0.6117154541015625, 0.6109921264648438, 0.6114871215820312, 0.6117135620117188, 0.6114755249023438, 0.6108710327148438, 0.611488037109375, 0.6119318237304687, 0.6111788330078125, 0.611694580078125, 0.611493896484375, 0.6110637817382812, 0.6114118041992187, 0.6115015869140625, 0.6112830810546875, 0.6119649047851563, 0.6117708740234375, 0.6107279663085937, 0.6119935913085938, 0.6113423461914063, 0.6116249389648437, 
0.611358642578125, 0.6114295654296875, 0.6116730346679687, 0.6117742309570312, 0.6115117797851563, 0.6119463500976563, 0.611017578125, 0.6122537231445313, 0.611446044921875, 0.6121294555664063, 0.6116015625, 0.6116072998046875, 0.6116763305664062, 0.6117437744140625, 0.6113546142578125, 0.6115429077148438, 0.6107661743164062, 0.6120408935546875, 0.6112135620117187, 0.6119387817382812, 0.6119666137695312, 0.6120185546875, 0.610864990234375, 0.61204833984375, 0.6110808715820313, 0.6115655517578125, 0.6121328735351562, 0.6121287841796875, 0.6120202026367187, 0.6107361450195312, 0.61201611328125, 0.6118806762695312, 0.6115453491210937]",tokens/s,1.6357716618268952,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,5181.218816,5584.584704,0.0,5182.062592,5181.108736,s,1,11.2432333984375,11.2432333984375,0.0,11.2432333984375,11.2432333984375,11.2432333984375,11.2432333984375,[11.2432333984375],,kWh,0.00011862922332915862,1.3078100650580155e-05,3.545030613799627e-05,0.00016715763011773503,,MB,5161.34912,5739.773952,0.0,5322.571776,5283.621376,s,10,2.4362078399658205,0.24362078399658205,0.0005670315966989556,0.24341770935058593,0.2442363235473633,0.24454350814819337,0.2447892558288574,"[0.24283779907226563, 0.2434153289794922, 0.24306562805175783, 0.2433887939453125, 0.2432181396484375, 0.2434200897216797, 0.24485069274902344, 0.24388758850097655, 0.24416806030273439, 0.24395571899414062]",tokens/s,1050.8134642715527,kWh,7.147958736788396e-06,7.882838604552668e-07,4.720254453707321e-06,1.2656497050950985e-05,tokens/kWh,20226765.665841535,MB,5167.075328,5756.551168,0.0,5339.348992,5283.623936,s,10,25.294171874999996,2.5294171875,0.017786721108151902,2.5263804931640625,2.5501684082031253,2.556255712890625,2.561125556640625,"[2.548815673828125, 2.562343017578125, 2.534892822265625, 2.536100830078125, 2.54606982421875, 2.5178681640625, 2.51202490234375, 2.511920166015625, 2.51397998046875, 2.510156494140625]",tokens/s,24.90692334634893,kWh,7.393513094404378e-05,8.155025752346191e-06,4.896795516349232e-05,0.00013105811185988228,tokens/kWh,480702.78982315096,,s,630,25.29130438613893,0.040144927597045896,0.0005855910361652461,0.04008828735351562,0.04062418556213379,0.04084815483093262,0.04267652240753175,"[0.04278681564331055, 0.04063568115234375, 0.04132502365112305, 0.04061196899414062, 0.04055392074584961, 0.04031558227539062, 0.04022886276245117, 0.04012441635131836, 0.04034051132202148, 0.04041007995605469, 0.04034969711303711, 0.04020940780639649, 0.04050022506713867, 0.04049270248413086, 0.04049545669555664, 0.04026163101196289, 0.040187904357910156, 0.040323070526123043, 0.040321025848388675, 0.04032921600341797, 0.04015923309326172, 0.04029990386962891, 0.040790462493896486, 0.04044518280029297, 0.040162239074707035, 0.04023849487304688, 0.040390625, 0.040148704528808594, 0.040354721069335936, 0.04026748657226562, 
0.040141086578369144, 0.04031283187866211, 0.040097793579101565, 0.040075233459472656, 0.040142303466796876, 0.04036806488037109, 0.04043747329711914, 0.04048988723754883, 0.040542209625244144, 0.04042531204223633, 0.040306846618652345, 0.04022259140014649, 0.040382591247558594, 0.04030668640136719, 0.04035692977905273, 0.040597503662109374, 0.04104288101196289, 0.040750911712646484, 0.04038675308227539, 0.04023871994018555, 0.0401923828125, 0.040204288482666016, 0.04024524688720703, 0.04007731246948242, 0.04048896026611328, 0.04081999969482422, 0.040911457061767575, 0.040591487884521486, 0.04040639877319336, 0.04032745742797852, 0.041597278594970706, 0.04039475250244141, 0.040576126098632814, 0.0409354248046875, 0.040736766815185545, 0.04084265518188476, 0.040593856811523436, 0.040814559936523435, 0.04044384002685547, 0.040276222229003907, 0.040508926391601564, 0.04020275115966797, 0.0422740478515625, 0.043031230926513675, 0.0404890251159668, 0.040302593231201174, 0.04053401565551758, 0.04084838485717773, 0.04210496139526367, 0.04035417556762695, 0.04042598342895508, 0.04046768188476563, 0.0406036491394043, 0.042406494140625, 0.04065283203125, 0.04067670440673828, 0.04036038589477539, 0.04040524673461914, 0.040554622650146484, 0.041305313110351564, 0.04051638412475586, 0.04045571136474609, 0.04053782272338867, 0.04018048095703125, 0.04077347183227539, 0.040363616943359375, 0.040428096771240235, 0.04029827117919922, 0.040394977569580076, 0.040521728515625, 0.040468318939208985, 0.040457759857177734, 0.04016191864013672, 0.04066064071655273, 0.04042089462280273, 0.04052812957763672, 0.04038492965698242, 0.04034883117675781, 0.04046720123291016, 0.04045779037475586, 0.04041388702392578, 0.04064780807495117, 0.04046681594848633, 0.04084787368774414, 0.04431241607666016, 0.0403969612121582, 0.04029788970947266, 0.040342113494873044, 0.0405032958984375, 0.04023468780517578, 0.040021759033203125, 0.04040723037719727, 0.04044019317626953, 0.040183265686035155, 0.040436511993408204, 0.040109886169433596, 0.04071449661254883, 0.03983776092529297, 0.04000531387329102, 0.039629215240478514, 0.03957388687133789, 0.03978160095214844, 0.040630527496337894, 0.040317150115966795, 0.04045651245117188, 0.04056441497802735, 0.03993222427368164, 0.0398397102355957, 0.03998518371582031, 0.03974720001220703, 0.03997516632080078, 0.04015526580810547, 0.04057062530517578, 0.0403950080871582, 0.03995852661132813, 0.040371936798095705, 0.04099663925170898, 0.040628734588623046, 0.04045119857788086, 0.04032396697998047, 0.04057907104492187, 0.040613887786865234, 0.04050739288330078, 0.040681343078613284, 0.04051366424560547, 0.04034064102172852, 0.04025225448608399, 0.04026572799682617, 0.040481983184814455, 0.04034550476074219, 0.040395198822021486, 0.04040752029418945, 0.04027801513671875, 0.04017548751831055, 0.04003830337524414, 0.03982950210571289, 0.039798336029052736, 0.040247806549072264, 0.04038671875, 0.04016643142700195, 0.04025203323364258, 0.040237407684326175, 0.04007635116577148, 0.03999430465698242, 0.039874561309814455, 0.03982950210571289, 0.04026153564453125, 0.040212001800537106, 0.04026220703125, 0.040051807403564454, 0.04011708831787109, 0.04008252716064453, 0.04042822265625, 0.040478622436523434, 0.03999372863769531, 0.040662944793701174, 0.04029654312133789, 0.04022809600830078, 0.040135040283203124, 0.04081391906738281, 0.040029022216796876, 0.040170913696289064, 0.040055393218994144, 0.04022886276245117, 0.040243198394775394, 0.040185855865478515, 0.03984572982788086, 0.03999935913085938, 
0.040312576293945315, 0.04024374389648438, 0.04001587295532227, 0.03997465515136719, 0.039825313568115236, 0.039887168884277346, 0.03998108673095703, 0.04007452774047852, 0.04062076950073242, 0.040081409454345705, 0.04001305770874024, 0.040051105499267575, 0.04034793472290039, 0.0404152946472168, 0.040927230834960936, 0.04099225616455078, 0.04019795227050781, 0.04001987075805664, 0.040043296813964846, 0.04000316619873047, 0.04008182525634765, 0.04033718490600586, 0.040369792938232424, 0.04001753616333008, 0.040274913787841794, 0.04006911849975586, 0.03990079879760742, 0.040632705688476566, 0.04022784042358398, 0.040001823425292966, 0.041111328125, 0.041339839935302734, 0.04016537475585937, 0.040308734893798825, 0.04013983917236328, 0.04030355072021485, 0.04012358474731445, 0.039782913208007815, 0.040317184448242185, 0.04028361511230469, 0.03998575973510742, 0.040013824462890625, 0.0406036491394043, 0.040615455627441406, 0.04034783935546875, 0.040429855346679686, 0.0405503044128418, 0.04019209671020508, 0.04016918563842774, 0.04002230453491211, 0.04005683135986328, 0.040908798217773434, 0.0403394546508789, 0.04014284896850586, 0.0407624626159668, 0.04027484893798828, 0.04033484649658203, 0.04161539077758789, 0.04014937591552734, 0.04002006530761719, 0.04039884948730469, 0.040151039123535154, 0.040118270874023435, 0.04014284896850586, 0.04098233413696289, 0.040425537109375, 0.0405904655456543, 0.0403056640625, 0.040545631408691406, 0.04037289428710938, 0.04028195190429688, 0.040476287841796875, 0.0412388801574707, 0.04025363159179687, 0.04041113662719727, 0.04014080047607422, 0.04015635299682617, 0.04027065658569336, 0.04021820831298828, 0.04016896057128906, 0.040420257568359375, 0.040261119842529294, 0.040476318359375, 0.04023910522460938, 0.040534591674804686, 0.041220382690429686, 0.04046614456176758, 0.040288543701171874, 0.040220577239990236, 0.04073382568359375, 0.0405145263671875, 0.04073497772216797, 0.0402163200378418, 0.04029561614990235, 0.04024531173706054, 0.04059622573852539, 0.040005630493164065, 0.03991551971435547, 0.04012851333618164, 0.04024524688720703, 0.03983769607543945, 0.039874561309814455, 0.03995647811889649, 0.04008892822265625, 0.039967391967773436, 0.04355440139770508, 0.04055081558227539, 0.03996672058105469, 0.03999532699584961, 0.039753536224365234, 0.03992601776123047, 0.039979007720947264, 0.040096832275390626, 0.0396317138671875, 0.04030003356933594, 0.04386671829223633, 0.03987046432495117, 0.041020225524902344, 0.04005478286743164, 0.03957964706420898, 0.03951615905761719, 0.03961161422729492, 0.0400043830871582, 0.0400524787902832, 0.03998336029052734, 0.04034764862060547, 0.03994214248657227, 0.0397762565612793, 0.039669761657714846, 0.03943219375610352, 0.03954483032226563, 0.03978364944458008, 0.040057632446289064, 0.03970048141479492, 0.0406236801147461, 0.04017401504516602, 0.039898143768310544, 0.03958473587036133, 0.039867904663085936, 0.03963033676147461, 0.04181273651123047, 0.04025312042236328, 0.040454208374023436, 0.04025759887695313, 0.039860671997070315, 0.039841793060302735, 0.039995391845703124, 0.040083297729492186, 0.040036033630371094, 0.039868831634521484, 0.04027151870727539, 0.03972751998901367, 0.03958169555664062, 0.03967180633544922, 0.03983564758300781, 0.039798782348632815, 0.03971686553955078, 0.03983564758300781, 0.040097793579101565, 0.04076876831054688, 0.039971073150634764, 0.03976243209838867, 0.039626049041748046, 0.039627456665039064, 0.03970835113525391, 0.04011788940429688, 0.03951004791259766, 0.04003033447265625, 
0.04045059204101562, 0.040286209106445314, 0.03996867370605469, 0.03977001571655273, 0.03982476806640625, 0.039529281616210936, 0.03958988952636719, 0.0404967041015625, 0.04008806228637695, 0.04007708740234375, 0.03993743896484375, 0.03962511825561523, 0.040847774505615234, 0.03986841583251953, 0.03961779022216797, 0.0397215690612793, 0.03960153579711914, 0.03945347213745117, 0.040049663543701174, 0.040026718139648435, 0.039801246643066404, 0.04010764694213867, 0.039843807220458986, 0.03971219253540039, 0.03962774276733398, 0.03978755187988281, 0.039939041137695315, 0.039995391845703124, 0.040352928161621095, 0.03994300842285156, 0.04055859375, 0.039831550598144534, 0.039569408416748046, 0.039462913513183595, 0.039300609588623046, 0.04045846557617187, 0.040046207427978514, 0.03985036849975586, 0.03955449676513672, 0.03970544052124023, 0.04011372756958008, 0.03989481735229492, 0.03953705596923828, 0.03925008010864258, 0.03957097625732422, 0.03989561462402344, 0.039556671142578125, 0.03989139175415039, 0.03976988983154297, 0.03965769577026367, 0.039731201171875, 0.039495681762695314, 0.03980287933349609, 0.04027391815185547, 0.03979990386962891, 0.03958262252807617, 0.03992076873779297, 0.04365811157226562, 0.039294975280761715, 0.03930931091308594, 0.04010569763183594, 0.03982140731811523, 0.03991980743408203, 0.039997440338134765, 0.0397589111328125, 0.03951264190673828, 0.04007564926147461, 0.039577598571777346, 0.03965273666381836, 0.039759521484375, 0.039847934722900394, 0.03973168182373047, 0.0395494384765625, 0.03994214248657227, 0.03983484649658203, 0.04016566467285156, 0.039444480895996094, 0.03940758514404297, 0.03975785446166992, 0.04033065414428711, 0.03967622375488281, 0.04005526351928711, 0.040034111022949216, 0.03964924621582031, 0.039956222534179686, 0.039729183197021484, 0.0392993278503418, 0.04008345413208008, 0.040715648651123044, 0.039991710662841795, 0.039779998779296874, 0.039677921295166015, 0.03928329467773437, 0.03973686218261719, 0.039276447296142575, 0.039233470916748045, 0.03944291305541992, 0.03989910507202148, 0.040394943237304685, 0.04014668655395508, 0.03983180618286133, 0.040079360961914064, 0.03988889694213867, 0.0400711669921875, 0.040707935333251954, 0.03979280090332031, 0.04052377700805664, 0.04013852691650391, 0.040333534240722654, 0.04024838256835937, 0.04007417678833008, 0.03959388732910156, 0.03974563217163086, 0.03939728164672852, 0.03922748947143555, 0.039167713165283204, 0.03956700897216797, 0.04003084945678711, 0.04037971115112305, 0.04088396835327148, 0.039461822509765626, 0.03957555389404297, 0.039532543182373044, 0.03954012680053711, 0.03951267242431641, 0.03978380966186523, 0.03986236953735352, 0.04015977478027344, 0.039669761657714846, 0.04038787078857422, 0.039884769439697265, 0.03974835205078125, 0.039553024291992187, 0.039855422973632815, 0.03986502456665039, 0.04039680099487305, 0.0401080322265625, 0.039913471221923826, 0.04051388931274414, 0.03989907073974609, 0.040027679443359374, 0.03974211120605469, 0.03958771133422852, 0.04044198226928711, 0.039143009185791014, 0.0390926399230957, 0.03906524658203125, 0.03981142425537109, 0.040471775054931644, 0.03975027084350586, 0.039787967681884764, 0.039860065460205076, 0.039366943359375, 0.039422561645507816, 0.03941497421264648, 0.03914387130737305, 0.03964748764038086, 0.039909568786621094, 0.039819198608398436, 0.03984998321533203, 0.03957727813720703, 0.03926252746582031, 0.03929884719848633, 0.03968022537231446, 0.04427775955200195, 0.04055244827270508, 0.040295841217041016, 
0.03981558227539062, 0.039317249298095706, 0.03950636672973633, 0.039400447845458986, 0.039498752593994144, 0.039957630157470704, 0.04195622253417969, 0.040030208587646485, 0.04016742324829101, 0.03986764907836914, 0.03975244903564453, 0.04007321548461914, 0.040066398620605466, 0.03951046371459961, 0.04075337600708008, 0.04113817596435547, 0.0401690559387207, 0.03999337768554687, 0.03981145477294922, 0.03959807968139648, 0.03972892761230469, 0.03963833618164062, 0.03945913696289063, 0.03983529663085938, 0.040088512420654296, 0.04006089782714844, 0.040000896453857425, 0.03964108657836914, 0.03956345748901367, 0.03929340744018555, 0.039521984100341793, 0.03928915023803711, 0.04053606414794922, 0.03992575836181641, 0.04081545639038086, 0.039702144622802735, 0.039809249877929685, 0.03976208114624023, 0.03976396942138672, 0.03995033645629883, 0.039616512298583983, 0.039400894165039065, 0.03958819198608399, 0.04015897750854492, 0.03979232025146484, 0.03993679809570312, 0.03969615936279297, 0.03974371337890625, 0.03995238494873047, 0.03980287933349609, 0.03967350387573242, 0.0405302734375, 0.0402966079711914, 0.039769119262695315, 0.039876705169677736, 0.04000950241088867, 0.04023187255859375, 0.039556480407714846, 0.03974310302734375, 0.039895103454589846, 0.039715648651123044, 0.04028201675415039, 0.04000380706787109, 0.040041599273681644, 0.03962515258789062, 0.03981356811523438, 0.039809024810791016, 0.03986227035522461, 0.039792640686035156, 0.03998921585083008, 0.040819904327392575, 0.04031983947753906, 0.03956505584716797, 0.0396229133605957, 0.040271232604980466, 0.03957980728149414, 0.04064713668823242, 0.0397844467163086, 0.039880702972412106, 0.03996780776977539, 0.039707359313964845, 0.03946723175048828, 0.03917004776000976, 0.0393616943359375, 0.03979910278320312, 0.039798656463623044, 0.03957417678833008, 0.03974956893920899, 0.039548992156982425, 0.0393359375, 0.03914137649536133, 0.03924991989135742, 0.03942755126953125, 0.03970716857910156, 0.039851486206054686, 0.04049359893798828, 0.040022014617919925]",tokens/s,24.90974725468395,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,874.1888,655.294464,0.0,260.046848,258.555392,s,1,7.527779296875,7.527779296875,0.0,7.527779296875,7.527779296875,7.527779296875,7.527779296875,[7.527779296875],,kWh,1.5538597287504293e-05,1.7067869787336107e-06,4.514170277997165e-06,2.175955454423507e-05,,MB,1325.068288,751.763456,0.0,341.835776,317.950464,s,18,0.19514825820922851,0.010841569900512694,0.00033796102924892106,0.010710544109344482,0.011126953697204591,0.011620157051086424,0.011872703132629394,"[0.01093945598602295, 0.010771424293518066, 0.011935839653015137, 0.010841279983520509, 0.010702048301696777, 0.010719039916992187, 0.010690400123596192, 0.011564448356628418, 0.010691391944885253, 0.010722208023071288, 0.010649632453918458, 0.010690239906311036, 0.010890432357788086, 
0.010601568222045898, 0.010757984161376953, 0.010633760452270508, 0.010651840209960937, 0.010695263862609864]",tokens/s,23612.81644163857,kWh,3.115932021611639e-07,3.4363449575720296e-08,1.607406716969019e-07,5.066973234337861e-07,tokens/kWh,505232587.8991019,MB,1338.540032,779.026432,0.0,369.098752,317.953024,s,18,9.933052001953124,0.5518362223307292,0.003266523007112156,0.5515629272460938,0.5566544311523438,0.5573135284423828,0.5585013287353516,"[0.5587982788085938, 0.5515595703125, 0.5522174682617188, 0.5484296875, 0.5528018798828125, 0.5511383056640625, 0.553289794921875, 0.5540931396484375, 0.5515662841796874, 0.5504417724609375, 0.54669091796875, 0.5496655883789062, 0.5470558471679687, 0.5498707885742188, 0.5484144287109375, 0.5534824829101562, 0.5564842529296875, 0.557051513671875]",tokens/s,114.16430718142045,kWh,1.5852757085558006e-05,1.7481872435484924e-06,5.955272993040292e-06,2.355621732214679e-05,tokens/kWh,2674453.1661613365,,s,1134,9.924552287101738,0.008751809776985668,0.0003252445187168874,0.008687439918518067,0.008831110382080079,0.009003175783157348,0.010744747161865239,"[0.008410528182983398, 0.008704959869384766, 0.00870809555053711, 0.008684736251831054, 0.008617055892944337, 0.008650176048278809, 0.008689951896667481, 0.008640512466430664, 0.008675104141235351, 0.00862611198425293, 0.008635711669921876, 0.008633024215698242, 0.008689727783203125, 0.008628447532653809, 0.008964096069335938, 0.008736767768859864, 0.009592831611633301, 0.009575551986694335, 0.009444255828857422, 0.009559840202331542, 0.011028672218322754, 0.00880025577545166, 0.008738816261291504, 0.008927231788635253, 0.008736576080322266, 0.008711520195007325, 0.008969056129455566, 0.008691072463989258, 0.00872208023071289, 0.00866198444366455, 0.008670880317687988, 0.008667488098144532, 0.008658944129943847, 0.008759296417236329, 0.008751040458679199, 0.008730688095092774, 0.008720383644104004, 0.00875276756286621, 0.008653183937072755, 0.008721823692321778, 0.008669792175292968, 0.008647999763488769, 0.008652928352355958, 0.008636992454528808, 0.008693663597106934, 0.008814208030700684, 0.009508831977844238, 0.011827199935913087, 0.008761119842529298, 0.008714976310729981, 0.008761216163635253, 0.008652704238891602, 0.008713855743408203, 0.008681599617004394, 0.008720864295959472, 0.008720383644104004, 0.008732319831848145, 0.008982848167419434, 0.008707584381103516, 0.008663328170776367, 0.008683775901794434, 0.00870809555053711, 0.008721792221069335, 0.008603039741516114, 0.008776000022888183, 0.008753439903259278, 0.008742719650268556, 0.008751296043395996, 0.008822943687438965, 0.008744799613952636, 0.008659999847412109, 0.008684543609619141, 0.008664352416992187, 0.008722111701965332, 0.008749600410461425, 0.008677151679992676, 0.00866096019744873, 0.008661151885986328, 0.008780351638793945, 0.008807999610900879, 0.008706496238708497, 0.008681471824645997, 0.00897439956665039, 0.008707136154174804, 0.008711039543151855, 0.0087193603515625, 0.008627200126647949, 0.0086746244430542, 0.00867807960510254, 0.008650752067565918, 0.008802304267883301, 0.008678624153137207, 0.008741472244262695, 0.008755295753479005, 0.008710240364074707, 0.00868070411682129, 0.008758015632629394, 0.008734463691711426, 0.008681728363037109, 0.008689663887023925, 0.008668224334716796, 0.008659296035766602, 0.009030240058898926, 0.008706048011779785, 0.008669183731079102, 0.00881868839263916, 0.008722432136535644, 0.008806400299072266, 0.009395456314086914, 0.009048447608947754, 0.008855936050415038, 0.00883683204650879, 
0.0087042875289917, 0.008748224258422851, 0.00862451171875, 0.008656543731689453, 0.008732895851135253, 0.009234047889709473, 0.008680383682250976, 0.008744959831237792, 0.008643744468688964, 0.008688159942626953, 0.008644895553588867, 0.008675104141235351, 0.00866329574584961, 0.008769536018371582, 0.008550463676452636, 0.0087259521484375, 0.008737536430358886, 0.00868729591369629, 0.008735199928283692, 0.008725631713867187, 0.00874790382385254, 0.008679424285888672, 0.008654656410217286, 0.008818880081176757, 0.00866006374359131, 0.008743840217590332, 0.008634367942810058, 0.008631744384765626, 0.008622655868530274, 0.008643903732299806, 0.008633024215698242, 0.00865875244140625, 0.008769248008728027, 0.008633983612060548, 0.008735584259033204, 0.008654848098754882, 0.008644288063049316, 0.008679327964782714, 0.008689760208129883, 0.00866540813446045, 0.008644576072692872, 0.008615967750549316, 0.008599552154541015, 0.008660991668701172, 0.008806400299072266, 0.008935423851013183, 0.00873846435546875, 0.008763744354248046, 0.008689663887023925, 0.008898048400878907, 0.011575776100158692, 0.009235712051391601, 0.008720319747924804, 0.008779871940612792, 0.008719103813171387, 0.00871833610534668, 0.008669183731079102, 0.008771391868591308, 0.008683327674865723, 0.008687999725341797, 0.00871628761291504, 0.008673279762268067, 0.00864787197113037, 0.008708928108215332, 0.008816160202026368, 0.008691455841064453, 0.008749792098999024, 0.008720383644104004, 0.008758463859558106, 0.008842047691345214, 0.00873862361907959, 0.008675519943237304, 0.00870809555053711, 0.008660191535949708, 0.008661791801452637, 0.008638463973999023, 0.008660991668701172, 0.008565664291381836, 0.008704000473022461, 0.008828927993774414, 0.008704000473022461, 0.008669183731079102, 0.008689663887023925, 0.00862822437286377, 0.008651935577392578, 0.008610655784606933, 0.00861184024810791, 0.008667136192321777, 0.008665087699890137, 0.008624032020568847, 0.009020768165588378, 0.008626912117004394, 0.008637855529785157, 0.008649344444274903, 0.00866703987121582, 0.008617343902587891, 0.008707839965820313, 0.008645600318908691, 0.00859340763092041, 0.008667136192321777, 0.0086179838180542, 0.008751104354858399, 0.008948927879333496, 0.008657183647155762, 0.008600064277648926, 0.008654208183288574, 0.008611680030822754, 0.008667807579040527, 0.009181344032287597, 0.008695808410644532, 0.008624128341674805, 0.008713536262512207, 0.008602304458618164, 0.008790016174316406, 0.008629792213439942, 0.008622079849243165, 0.00868556785583496, 0.008618080139160156, 0.00862451171875, 0.008685407638549804, 0.008612000465393066, 0.008639616012573242, 0.009003487586975098, 0.008678848266601563, 0.008655360221862793, 0.008827360153198242, 0.008787967681884766, 0.008769472122192383, 0.008738880157470702, 0.00872652816772461, 0.008728063583374024, 0.008671008110046386, 0.008665823936462402, 0.008615712165832519, 0.008705792427062988, 0.008679840087890625, 0.008978495597839356, 0.008747008323669434, 0.008638463973999023, 0.008663040161132812, 0.008397439956665038, 0.008652223587036132, 0.008642175674438476, 0.008737728118896485, 0.008840895652770997, 0.008790335655212402, 0.008671232223510742, 0.008790016174316406, 0.008761343955993652, 0.008689663887023925, 0.008712191581726075, 0.008720383644104004, 0.008705471992492676, 0.008665663719177246, 0.008683135986328125, 0.00864089584350586, 0.008667136192321777, 0.00870736026763916, 0.008677568435668945, 0.008574624061584473, 0.008674176216125488, 0.00862822437286377, 0.008619039535522462, 
0.008809408187866212, 0.008736800193786622, 0.01102569580078125, 0.010760640144348145, 0.008789567947387696, 0.008778464317321778, 0.008930399894714355, 0.008815520286560059, 0.008742752075195313, 0.008671392440795898, 0.008683520317077637, 0.008675328254699707, 0.008665087699890137, 0.008744959831237792, 0.008674752235412598, 0.008605631828308105, 0.008612480163574219, 0.008664896011352539, 0.008831168174743652, 0.008818271636962891, 0.008817055702209472, 0.008714271545410155, 0.008603615760803223, 0.008769184112548829, 0.00867363166809082, 0.008631584167480468, 0.00862822437286377, 0.00868230438232422, 0.008705183982849122, 0.008835935592651367, 0.008695743560791015, 0.008617024421691894, 0.008717344284057618, 0.00865398406982422, 0.008665184020996093, 0.008648736000061035, 0.008692064285278321, 0.008620287895202637, 0.008664511680603027, 0.008626751899719239, 0.008533856391906739, 0.008638015747070312, 0.008674912452697754, 0.008698240280151367, 0.008710783958435058, 0.008754688262939453, 0.008650495529174804, 0.008651424407958984, 0.008674400329589844, 0.008764191627502442, 0.00882096004486084, 0.008935327529907227, 0.008781696319580078, 0.008741087913513183, 0.008830975532531739, 0.008703488349914551, 0.008729087829589843, 0.009273344039916993, 0.008756832122802734, 0.008765503883361817, 0.008792415618896484, 0.008676863670349122, 0.008695679664611817, 0.008716927528381348, 0.008768511772155761, 0.008690624237060547, 0.008673343658447265, 0.008630240440368652, 0.00865283203125, 0.008632320404052735, 0.008695808410644532, 0.008648192405700684, 0.008686016082763672, 0.008575039863586426, 0.008695167541503906, 0.008757887840270996, 0.008674304008483886, 0.008653504371643066, 0.008685888290405274, 0.008690752029418946, 0.008649503707885742, 0.008673439979553223, 0.008726304054260253, 0.008624064445495605, 0.008642848014831544, 0.008670623779296876, 0.008614208221435547, 0.008548640251159667, 0.008630271911621093, 0.008667136192321777, 0.008632320404052735, 0.008833024024963379, 0.00970751953125, 0.009828351974487304, 0.008673279762268067, 0.008683520317077637, 0.008853504180908203, 0.008689663887023925, 0.008625247955322265, 0.008612768173217773, 0.008864895820617676, 0.008700192451477051, 0.00872822380065918, 0.00843558406829834, 0.00876966381072998, 0.008711615562438964, 0.008778112411499023, 0.00866323184967041, 0.008644319534301758, 0.008671520233154296, 0.008712191581726075, 0.00875046443939209, 0.008737279891967773, 0.008740991592407226, 0.008642560005187988, 0.008883584022521972, 0.008657535552978516, 0.009713120460510254, 0.01159222412109375, 0.008740863800048827, 0.008828031539916991, 0.008681471824645997, 0.008671839714050293, 0.008648799896240235, 0.008616127967834473, 0.0086179838180542, 0.00862992000579834, 0.008675040245056153, 0.008642880439758301, 0.00864083194732666, 0.00872447967529297, 0.008638463973999023, 0.0086179838180542, 0.008792032241821289, 0.008695263862609864, 0.008682047843933106, 0.008812543869018554, 0.008713727951049804, 0.008790528297424317, 0.008861696243286133, 0.008673279762268067, 0.00872447967529297, 0.00869705581665039, 0.008729375839233399, 0.008880127906799316, 0.008801440238952636, 0.008843520164489745, 0.008741472244262695, 0.00870576000213623, 0.008701408386230469, 0.008698687553405762, 0.008750176429748536, 0.008725055694580078, 0.008718272209167481, 0.008749471664428712, 0.00867251205444336, 0.00867199993133545, 0.008785920143127441, 0.008736800193786622, 0.008709407806396485, 0.008743616104125976, 0.008771583557128907, 0.008642560005187988, 
0.008769536018371582, 0.008673279762268067, 0.008681023597717285, 0.010270719528198241, 0.009801280021667481, 0.00894159984588623, 0.009328767776489258, 0.009259296417236327, 0.009842687606811524, 0.00870809555053711, 0.008674719810485839, 0.008651359558105469, 0.00873193645477295, 0.008659680366516114, 0.008605600357055664, 0.008627296447753906, 0.008645631790161134, 0.008630271911621093, 0.008709247589111329, 0.008729472160339355, 0.008665087699890137, 0.008634048461914063, 0.00877952003479004, 0.008706591606140136, 0.009287712097167968, 0.009011199951171875, 0.00874015998840332, 0.00876204776763916, 0.008787967681884766, 0.008761119842529298, 0.00899443244934082, 0.008749664306640625, 0.008787487983703613, 0.008800736427307129, 0.008746272087097167, 0.008686304092407227, 0.008697855949401855, 0.008656895637512207, 0.008615936279296875, 0.00868556785583496, 0.008632320404052735, 0.008607487678527832, 0.00863644790649414, 0.008652159690856934, 0.008606559753417968, 0.008754816055297852, 0.008777536392211915, 0.008606271743774414, 0.008578463554382324, 0.008637056350708008, 0.008666080474853516, 0.008706336021423339, 0.008714847564697266, 0.00879967975616455, 0.008630975723266602, 0.008677375793457032, 0.008650752067565918, 0.008664959907531739, 0.008693920135498048, 0.00864019203186035, 0.008632608413696289, 0.008606752395629884, 0.008647647857666016, 0.008699263572692871, 0.008661631584167481, 0.008683520317077637, 0.00841932773590088, 0.008814304351806641, 0.008677663803100586, 0.010979328155517578, 0.010343808174133301, 0.008735072135925294, 0.008696096420288086, 0.008650752067565918, 0.008642560005187988, 0.00874015998840332, 0.008747072219848632, 0.008690303802490234, 0.008742912292480469, 0.00868556785583496, 0.008699040412902832, 0.008700063705444336, 0.008761055946350097, 0.008682463645935059, 0.008622079849243165, 0.008750176429748536, 0.008753151893615722, 0.008840096473693848, 0.008642560005187988, 0.008627936363220215, 0.00863599967956543, 0.008628928184509277, 0.00868556785583496, 0.008728096008300781, 0.008712672233581542, 0.008621184349060059, 0.00864083194732666, 0.008614463806152344, 0.008633440017700195, 0.00868006420135498, 0.008664480209350586, 0.008571776390075683, 0.008707327842712402, 0.008563296318054199, 0.008601632118225098, 0.00867136001586914, 0.008671232223510742, 0.00865449619293213, 0.008650912284851073, 0.00875334358215332, 0.008632320404052735, 0.008783871650695801, 0.00859545612335205, 0.008641632080078124, 0.008704416275024414, 0.008650655746459962, 0.008643168449401856, 0.00862822437286377, 0.008912896156311035, 0.008679424285888672, 0.00876527976989746, 0.008773823738098145, 0.008632287979125977, 0.008690752029418946, 0.008723391532897948, 0.008695808410644532, 0.008736127853393555, 0.008713919639587403, 0.008749695777893066, 0.008411007881164551, 0.008696031570434571, 0.0087575044631958, 0.008655263900756835, 0.008660927772521972, 0.008734527587890626, 0.008867391586303711, 0.00866374397277832, 0.008768959999084473, 0.008673855781555176, 0.008654848098754882, 0.008667136192321777, 0.008756640434265137, 0.008655232429504394, 0.00863366413116455, 0.008663968086242676, 0.008623456001281738, 0.008589983940124512, 0.00869375991821289, 0.008652799606323243, 0.008664287567138673, 0.008610400199890136, 0.008710335731506348, 0.00860159969329834, 0.008632320404052735, 0.008757247924804687, 0.008589311599731446, 0.00861184024810791, 0.008615263938903809, 0.008665568351745605, 0.008620223999023437, 0.008681471824645997, 0.008613887786865235, 0.008584544181823731, 
0.00857699203491211, 0.008616640090942382, 0.008652416229248047, 0.00883743953704834, 0.008807488441467285, 0.008690688133239746, 0.008635456085205079, 0.008638912200927735, 0.008657407760620118, 0.008679264068603516, 0.008670432090759277, 0.00861075210571289, 0.008654303550720215, 0.008634911537170411, 0.008777376174926757, 0.008652735710144042, 0.008659135818481446, 0.008630496025085449, 0.008656895637512207, 0.008615936279296875, 0.008615936279296875, 0.008851455688476563, 0.008662079811096192, 0.009878463745117187, 0.01113049602508545, 0.008735103607177734, 0.008691712379455567, 0.008763392448425293, 0.0088570556640625, 0.008449728012084962, 0.008680031776428223, 0.008659104347229003, 0.008761343955993652, 0.008624128341674805, 0.008693216323852539, 0.00864691162109375, 0.008702239990234374, 0.008636320114135742, 0.008605792045593261, 0.00861184024810791, 0.008644543647766113, 0.008675392150878907, 0.008627679824829102, 0.008607423782348633, 0.008632224082946777, 0.00862713623046875, 0.008622079849243165, 0.008675328254699707, 0.008704000473022461, 0.008681471824645997, 0.008693120002746583, 0.008691935539245606, 0.008747424125671387, 0.008642080307006837, 0.008626655578613281, 0.00862169647216797, 0.008589471817016602, 0.008646880149841309, 0.008656160354614258, 0.008849696159362793, 0.008720831871032715, 0.008780960083007813, 0.008737536430358886, 0.00868553638458252, 0.008715935707092285, 0.008636223793029784, 0.00862070369720459, 0.008605695724487305, 0.008773632049560547, 0.008623871803283691, 0.008808064460754394, 0.008734527587890626, 0.008600383758544923, 0.00858726406097412, 0.00858460807800293, 0.008611455917358398, 0.008698847770690918, 0.008583168029785156, 0.008640512466430664, 0.00860700798034668, 0.008727264404296876, 0.008644607543945313, 0.008704000473022461, 0.00865884780883789, 0.00860153579711914, 0.008702112197875976, 0.008603520393371582, 0.008685695648193359, 0.00920576000213623, 0.008722432136535644, 0.008672896385192872, 0.00862451171875, 0.008382752418518066, 0.008638463973999023, 0.008623295783996583, 0.008736831665039062, 0.008596192359924317, 0.008610943794250488, 0.008597439765930175, 0.008637408256530761, 0.00869375991821289, 0.008719807624816894, 0.0086943359375, 0.008623935699462891, 0.008638655662536621, 0.008589311599731446, 0.0086179838180542, 0.008730624198913574, 0.008774687767028808, 0.008655839920043946, 0.008588895797729493, 0.008612256050109863, 0.008707712173461914, 0.008665375709533692, 0.008648863792419434, 0.008664640426635742, 0.008595840454101562, 0.008744192123413087, 0.00865328025817871, 0.008691360473632812, 0.008677727699279785, 0.008642848014831544, 0.008642560005187988, 0.008652192115783691, 0.00862224006652832, 0.008630208015441895, 0.00870246410369873, 0.008730624198913574, 0.008658944129943847, 0.008677375793457032, 0.008634367942810058, 0.008652799606323243, 0.008644607543945313, 0.0086364164352417, 0.008589311599731446, 0.00860364818572998, 0.008574624061584473, 0.008721920013427734, 0.008711008071899414, 0.009003007888793945, 0.011773632049560548, 0.009081151962280274, 0.008738816261291504, 0.00871833610534668, 0.008773632049560547, 0.008656895637512207, 0.00870809555053711, 0.008646464347839356, 0.008622271537780762, 0.0086113920211792, 0.008751551628112792, 0.008674719810485839, 0.008615839958190917, 0.008643263816833497, 0.008642560005187988, 0.008503199577331543, 0.0086757755279541, 0.008736736297607422, 0.008742400169372559, 0.008670975685119629, 0.008659744262695312, 0.008597215652465821, 0.00859945583343506, 
0.008679807662963867, 0.008656895637512207, 0.00862822437286377, 0.00859340763092041, 0.008617376327514649, 0.008704607963562011, 0.008646559715270997, 0.00862217617034912, 0.008809696197509766, 0.008631072044372558, 0.008705408096313476, 0.008639039993286133, 0.00879526424407959, 0.008641471862792969, 0.008615936279296875, 0.008599552154541015, 0.008652000427246094, 0.008846112251281738, 0.008671232223510742, 0.008662176132202149, 0.008737567901611329, 0.008667200088500977, 0.008601471900939942, 0.008627679824829102, 0.008735391616821289, 0.008703807830810546, 0.008707839965820313, 0.008609439849853516, 0.00860649585723877, 0.00862003231048584, 0.0087326717376709, 0.008719455718994141, 0.008618047714233398, 0.00859222412109375, 0.008697855949401855, 0.008984576225280762, 0.00892518424987793, 0.008756511688232422, 0.008690400123596191, 0.008691712379455567, 0.008650752067565918, 0.008696895599365234, 0.00866812801361084, 0.008687583923339844, 0.008624128341674805, 0.008594911575317382, 0.008610336303710938, 0.008704000473022461, 0.008646656036376953, 0.008687616348266602, 0.008796159744262694, 0.00862822437286377, 0.00862822437286377, 0.008663040161132812, 0.008687616348266602, 0.008454815864562988, 0.008647999763488769, 0.008633024215698242, 0.008710080146789551, 0.008990176200866699, 0.008708703994750976, 0.008658944129943847, 0.008650752067565918, 0.008667136192321777, 0.008613887786865235, 0.008677056312561035, 0.008811840057373047, 0.008704319953918457, 0.008647359848022462, 0.008684736251831054, 0.00860857582092285, 0.008630271911621093, 0.008691712379455567, 0.008626175880432128, 0.008646656036376953, 0.008621408462524413, 0.008704671859741211, 0.008607744216918945, 0.008650752067565918, 0.008568832397460938, 0.008617247581481934, 0.008638976097106933, 0.008628447532653809, 0.008625568389892578, 0.008790623664855958, 0.008703743934631348, 0.008652031898498535, 0.008619008064270019, 0.00862822437286377, 0.008605695724487305, 0.00880844783782959, 0.00873408031463623, 0.009247360229492187, 0.01175551986694336, 0.00871833610534668, 0.008676639556884766, 0.008720607757568359, 0.008816448211669922, 0.008676032066345214, 0.008666303634643555, 0.008677375793457032, 0.008616415977478028, 0.008640864372253417, 0.008650752067565918, 0.008699904441833496, 0.008617856025695802, 0.008619296073913573, 0.008586079597473144, 0.00859340763092041, 0.008665087699890137, 0.008644415855407714, 0.008577216148376464, 0.00867024040222168, 0.008614879608154297, 0.00861184024810791, 0.008673215866088867, 0.008640576362609863, 0.008671232223510742, 0.008470080375671386, 0.008636704444885254, 0.008687968254089356, 0.008746944427490234, 0.008859744071960449, 0.008699904441833496, 0.008654656410217286, 0.008679167747497558, 0.008657343864440918, 0.008650239944458007, 0.008769472122192383, 0.008663616180419921, 0.008671232223510742, 0.008671232223510742, 0.008871392250061035, 0.008736448287963867, 0.008815456390380859, 0.008710080146789551, 0.008691776275634765, 0.0087010555267334, 0.008713088035583495, 0.008734463691711426, 0.008802559852600098, 0.008632320404052735, 0.00863593578338623, 0.008663519859313965, 0.008679424285888672, 0.008749055862426757, 0.008761343955993652, 0.008701184272766112, 0.00866585636138916, 0.008910847663879394, 0.00894976043701172, 0.008689663887023925, 0.008654560089111328, 0.008634592056274414, 0.008624159812927246, 0.008678879737854004, 0.008708383560180664, 0.008648991584777832, 0.00863599967956543, 0.008706463813781738, 0.008632320404052735, 0.008589311599731446, 
0.008699904441833496, 0.008708064079284667, 0.008642592430114747, 0.008656895637512207, 0.008671232223510742, 0.008714271545410155, 0.008695743560791015, 0.008687647819519043, 0.008675328254699707, 0.0086364164352417, 0.008642560005187988, 0.008734720230102539, 0.008701312065124511, 0.008671808242797851, 0.008814111709594726, 0.008612159729003907, 0.008685215950012207, 0.00868000030517578, 0.008767168045043945, 0.008486911773681641, 0.008734720230102539, 0.008697855949401855, 0.008671232223510742, 0.008712191581726075, 0.008683520317077637, 0.008691712379455567, 0.008740863800048827, 0.008697855949401855, 0.008609248161315917, 0.008624575614929198, 0.008648799896240235, 0.00867091178894043, 0.008681247711181641, 0.00871887969970703, 0.008678976058959962, 0.00868396759033203, 0.008660991668701172, 0.008683520317077637, 0.008677375793457032, 0.008599552154541015, 0.008650015830993653, 0.00872713565826416, 0.008636544227600098, 0.008687616348266602, 0.008894463539123536, 0.008759103775024413, 0.011014335632324219, 0.010712479591369629, 0.00892579174041748, 0.00880947208404541, 0.008778752326965332, 0.00872447967529297, 0.00881385612487793, 0.008746815681457519, 0.00880732822418213, 0.008701151847839355, 0.008760095596313477, 0.008646656036376953, 0.008860735893249512, 0.008731583595275878, 0.008686911582946778, 0.008716992378234863, 0.008671232223510742, 0.008773632049560547, 0.0086909761428833, 0.008729056358337402, 0.008681728363037109, 0.00860159969329834, 0.00861184024810791, 0.008695808410644532, 0.008656703948974609, 0.0086179838180542, 0.008674783706665039, 0.00883785629272461, 0.008736063957214355, 0.008782336235046387, 0.008732864379882813, 0.00872985553741455, 0.008659711837768555, 0.00864031982421875, 0.008696000099182128, 0.008947263717651368, 0.008737279891967773, 0.008682911872863769, 0.00872447967529297, 0.008794719696044923, 0.00875887966156006, 0.008790431976318359, 0.008749055862426757, 0.008740511894226075, 0.008720735549926757, 0.008761343955993652, 0.008790016174316406, 0.008675040245056153, 0.008740192413330078, 0.00864352035522461, 0.008796159744262694, 0.00902143955230713, 0.008756704330444336, 0.008933759689331055, 0.008790176391601562, 0.00872652816772461, 0.008760319709777833, 0.008749343872070313, 0.008766176223754883, 0.008802304267883301, 0.008734720230102539, 0.008728575706481934, 0.008767264366149902, 0.008699295997619629, 0.008694592475891114, 0.008704000473022461, 0.008828927993774414, 0.008826272010803222, 0.00871894359588623, 0.00871014404296875, 0.008706111907958985, 0.008672991752624511, 0.008695903778076173, 0.008667263984680176, 0.008650431632995606, 0.008624064445495605, 0.008670944213867188, 0.008757951736450196, 0.008764639854431152, 0.00867199993133545, 0.008796159744262694, 0.008857600212097168, 0.008812543869018554, 0.009191424369812011, 0.00911081600189209, 0.008905376434326172, 0.008755264282226562, 0.008748736381530762, 0.00868937587738037, 0.008684127807617188, 0.008732192039489746, 0.009120223999023437, 0.009965567588806153, 0.010010623931884765, 0.009639936447143555, 0.008843263626098634, 0.008804351806640624, 0.008798208236694336, 0.008799296379089355, 0.00840176010131836, 0.00872652816772461, 0.008724448204040527, 0.008697376251220704, 0.009023008346557617, 0.008861727714538574, 0.00891744041442871, 0.009011712074279785, 0.008828191757202149, 0.009063136100769044, 0.009390080451965332, 0.009346816062927246, 0.009027839660644531, 0.00900879955291748, 0.008747008323669434, 0.009939295768737793, 0.011491328239440919, 0.008796159744262694, 
0.008741888046264648, 0.00872755241394043, 0.00872447967529297, 0.008707615852355957, 0.00872492790222168, 0.0089334077835083, 0.008904576301574707, 0.008796256065368652, 0.008849120140075684, 0.008710463523864747, 0.008712191581726075, 0.008753151893615722, 0.008689663887023925, 0.008654848098754882, 0.00866220760345459, 0.00862825584411621, 0.008644960403442382, 0.00869215965270996, 0.008695136070251465, 0.008651424407958984, 0.008711872100830078, 0.008675583839416504, 0.008720447540283203, 0.008744864463806153, 0.008689760208129883, 0.008732288360595704, 0.008655232429504394, 0.008621760368347168, 0.008691455841064453, 0.008790047645568847, 0.008641056060791016, 0.008828927993774414, 0.008756511688232422, 0.008960448265075683, 0.008759455680847168, 0.008785375595092773, 0.008706720352172851, 0.008685407638549804, 0.008671392440795898, 0.008758848190307617, 0.008673376083374023, 0.008640864372253417, 0.008658623695373536, 0.008722399711608887, 0.008636575698852539]",tokens/s,114.26208127028372,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,4293.541888,4878.958592,0.0,4483.710976,4465.672704,s,1,10.53728125,10.53728125,0.0,10.53728125,10.53728125,10.53728125,10.53728125,[10.53728125],,kWh,0.00010265646206668559,1.1316277597974052e-05,3.244585928999927e-05,0.00014641859895465892,,MB,2154.92608,5302.583296,0.0,4892.655616,4837.669376,s,10,1.806138107299805,0.18061381072998048,0.0003293052221499723,0.18058362579345705,0.18102312774658205,0.18117773971557619,0.1813014292907715,"[0.1805165710449219, 0.18033718872070312, 0.18065068054199218, 0.18019810485839843, 0.18073670959472657, 0.18069020080566406, 0.18098876953125, 0.18032659912109375, 0.18133235168457032, 0.18036093139648438]",tokens/s,1417.3888417797832,kWh,5.292656321875218e-06,5.83684918681336e-07,3.519010751714318e-06,9.395351992270872e-06,tokens/kWh,27247515.60245955,MB,2154.92608,5470.355456,0.0,5060.427776,5014.227968,s,10,15.812712036132812,1.5812712036132812,0.002739175781028118,1.5802742309570315,1.58424609375,1.5859874267578125,1.5873804931640627,"[1.577431884765625, 1.5804415283203126, 1.5797474365234374, 1.579446044921875, 1.5827811279296875, 1.58010693359375, 1.579886962890625, 1.583859130859375, 1.5812822265625, 1.587728759765625]",tokens/s,39.841362984440586,kWh,4.623907491687399e-05,5.0998846059126645e-06,3.0630627679086255e-05,8.19695872018729e-05,tokens/kWh,768577.739019778,,s,630,15.809436853408794,0.02509434421176002,0.0002683312272408559,0.025091216087341307,0.02539562587738037,0.02544892520904541,0.02562079782485962,"[0.02498044776916504, 0.024815391540527344, 0.024776927947998045, 0.024805376052856445, 0.024788991928100586, 0.024788991928100586, 0.02476348876953125, 0.024691295623779298, 0.02475449562072754, 0.02478489685058594, 0.024793088912963866, 0.024864767074584963, 0.024766464233398438, 0.0247459831237793, 0.024763839721679688, 0.024744384765625, 
0.02489971160888672, 0.024831455230712892, 0.02490787124633789, 0.024795583724975586, 0.02489139175415039, 0.02500320053100586, 0.024898368835449217, 0.02482585525512695, 0.024792160034179687, 0.024869695663452148, 0.024821792602539063, 0.02477987289428711, 0.024884191513061524, 0.025010175704956054, 0.02507980728149414, 0.025064672470092773, 0.024938623428344728, 0.024996511459350584, 0.0251146240234375, 0.02512076759338379, 0.02505881690979004, 0.02513699150085449, 0.025188735961914063, 0.025165727615356445, 0.025176000595092774, 0.025184703826904298, 0.02513491249084473, 0.025129152297973634, 0.025194496154785157, 0.02534809684753418, 0.025310367584228517, 0.025208736419677736, 0.025102848052978514, 0.025165695190429688, 0.02533228874206543, 0.025389055252075195, 0.02526348876953125, 0.02523744010925293, 0.025434656143188475, 0.025280672073364256, 0.025202400207519533, 0.02540777587890625, 0.02535798454284668, 0.02527471923828125, 0.025361984252929688, 0.025620927810668947, 0.025276416778564452, 0.02510304069519043, 0.024698879241943358, 0.024653823852539062, 0.024667999267578126, 0.024707231521606445, 0.02466307258605957, 0.024621536254882812, 0.02473958396911621, 0.024779520034790038, 0.024803327560424804, 0.024825151443481446, 0.0247459831237793, 0.024807231903076172, 0.024838336944580076, 0.024850751876831053, 0.02490310478210449, 0.024869823455810548, 0.02487868881225586, 0.02479052734375, 0.024837024688720705, 0.02485043144226074, 0.024985599517822265, 0.02507776069641113, 0.025175615310668944, 0.025071231842041016, 0.02494041633605957, 0.024984159469604493, 0.02505353546142578, 0.025010175704956054, 0.025037055969238282, 0.025005151748657226, 0.02500204849243164, 0.02509020805358887, 0.025090496063232423, 0.02507776069641113, 0.025009727478027342, 0.0251396484375, 0.025190528869628907, 0.025227136611938476, 0.025161279678344726, 0.02514784049987793, 0.025153568267822266, 0.025163743972778322, 0.0252620792388916, 0.025384735107421875, 0.025440256118774415, 0.025311103820800783, 0.025268575668334962, 0.025337984085083008, 0.025268096923828125, 0.025223167419433593, 0.02549660873413086, 0.025320415496826173, 0.025372671127319335, 0.02544451141357422, 0.025354080200195313, 0.025376640319824218, 0.025380992889404298, 0.02560166358947754, 0.025423776626586913, 0.025455072402954103, 0.025442304611206053, 0.02549964714050293, 0.025324640274047853, 0.024912799835205078, 0.02489263916015625, 0.024666080474853514, 0.02470569610595703, 0.024719520568847655, 0.024807424545288087, 0.024729183197021484, 0.024676767349243164, 0.024769792556762694, 0.024742687225341797, 0.02475004768371582, 0.02478656005859375, 0.024909568786621095, 0.02491823959350586, 0.02493020820617676, 0.024833728790283203, 0.02486172866821289, 0.024846111297607422, 0.024788991928100586, 0.024795007705688477, 0.024967296600341797, 0.02500998306274414, 0.024968992233276366, 0.025135583877563476, 0.0250467529296875, 0.025036863327026367, 0.02502876853942871, 0.025028032302856447, 0.025047615051269533, 0.024993791580200195, 0.02495078468322754, 0.024993791580200195, 0.025069568634033205, 0.0251267204284668, 0.025097728729248047, 0.02506585693359375, 0.025198911666870116, 0.025208831787109375, 0.025210880279541017, 0.0252620792388916, 0.025167871475219726, 0.025531999588012694, 0.025235904693603515, 0.025192415237426758, 0.02517318344116211, 0.025172128677368164, 0.025209503173828127, 0.025118719100952147, 0.025312543869018555, 0.025449184417724608, 0.02535424041748047, 0.02528665542602539, 0.025351743698120117, 
0.025340351104736328, 0.025362432479858397, 0.025332735061645507, 0.025278783798217772, 0.025374656677246095, 0.025445119857788086, 0.02533718490600586, 0.025295488357543944, 0.025346080780029298, 0.025178176879882812, 0.024930303573608398, 0.024844287872314453, 0.024825983047485352, 0.02469811248779297, 0.024636032104492188, 0.024805280685424806, 0.02510771179199219, 0.02479705619812012, 0.024687583923339845, 0.02479248046875, 0.024785503387451172, 0.025139104843139647, 0.024799327850341796, 0.02468659210205078, 0.0247459831237793, 0.024796703338623046, 0.024844608306884765, 0.024849952697753905, 0.024760480880737304, 0.02491628837585449, 0.024985759735107423, 0.02489331245422363, 0.024848512649536133, 0.02494588851928711, 0.024965919494628907, 0.024977407455444335, 0.02499939155578613, 0.025013856887817383, 0.02504390335083008, 0.025028831481933595, 0.024956703186035156, 0.02507776069641113, 0.025109535217285157, 0.025099231719970704, 0.025052480697631836, 0.025213632583618164, 0.02529484748840332, 0.025169919967651368, 0.025100288391113282, 0.025126911163330077, 0.025157632827758788, 0.02509404754638672, 0.025058496475219728, 0.025397184371948243, 0.025332799911499025, 0.02526812744140625, 0.025391040802001955, 0.025362495422363282, 0.0253308162689209, 0.025318271636962892, 0.025302751541137695, 0.02533945655822754, 0.025350879669189454, 0.025266176223754884, 0.025251840591430662, 0.02533782386779785, 0.025269952774047852, 0.025203039169311523, 0.025335264205932618, 0.02539369583129883, 0.025415679931640626, 0.02547203254699707, 0.025045759201049806, 0.024967424392700194, 0.02495871925354004, 0.024827903747558593, 0.024743871688842775, 0.02476608085632324, 0.027119232177734376, 0.0246812801361084, 0.024653728485107423, 0.02470307159423828, 0.02476144027709961, 0.024716127395629884, 0.02484230422973633, 0.02488319969177246, 0.024936447143554686, 0.02490572738647461, 0.02484841537475586, 0.02486182403564453, 0.02511296081542969, 0.025000415802001952, 0.024921184539794923, 0.024808351516723632, 0.024840320587158203, 0.02491200065612793, 0.02560111999511719, 0.024847007751464843, 0.02503232002258301, 0.025024223327636718, 0.02499456024169922, 0.02503593635559082, 0.025055999755859374, 0.02509414482116699, 0.02509574317932129, 0.025039295196533203, 0.025104255676269532, 0.025061023712158202, 0.02504934310913086, 0.02506729507446289, 0.025154016494750978, 0.025237472534179687, 0.025378816604614256, 0.0254486083984375, 0.025304927825927734, 0.025290752410888673, 0.02534809684753418, 0.025197568893432616, 0.025177087783813477, 0.025290048599243165, 0.025232063293457032, 0.025195968627929687, 0.025215551376342772, 0.0252126407623291, 0.025223455429077148, 0.025286848068237305, 0.025458303451538086, 0.025409215927124022, 0.025389440536499025, 0.025348127365112303, 0.025344095230102538, 0.025358335494995117, 0.025341920852661133, 0.025315359115600587, 0.025436159133911132, 0.025061952590942384, 0.02478060722351074, 0.02474015998840332, 0.024678047180175782, 0.024641759872436525, 0.024778816223144533, 0.024788351058959962, 0.024793855667114256, 0.02484000015258789, 0.024817663192749022, 0.024774560928344725, 0.02493552017211914, 0.02495795249938965, 0.024987648010253907, 0.02488934326171875, 0.024784383773803712, 0.02478665542602539, 0.024848480224609375, 0.024844991683959962, 0.024883359909057618, 0.024905759811401366, 0.024912864685058593, 0.02499875259399414, 0.024932512283325194, 0.024907455444335938, 0.024893760681152344, 0.025005184173583984, 0.02493881607055664, 0.025050880432128907, 
0.02512348747253418, 0.025192287445068358, 0.02564534378051758, 0.025096063613891603, 0.025159679412841796, 0.02513100814819336, 0.025185823440551758, 0.025112607955932616, 0.02508956718444824, 0.02507254409790039, 0.025163743972778322, 0.025226848602294922, 0.02521513557434082, 0.02520297622680664, 0.025233407974243165, 0.02526380729675293, 0.02518252754211426, 0.025157632827758788, 0.025358335494995117, 0.025350208282470702, 0.025362464904785158, 0.025397151947021485, 0.02528678321838379, 0.02532748794555664, 0.025310783386230468, 0.025259647369384765, 0.025212959289550783, 0.02533184051513672, 0.025376800537109376, 0.025334272384643555, 0.02541360092163086, 0.025335968017578123, 0.025283967971801758, 0.025291391372680664, 0.024946720123291015, 0.02489792060852051, 0.024862720489501954, 0.024766464233398438, 0.024737247467041014, 0.02477519989013672, 0.024879104614257814, 0.0247172794342041, 0.024676383972167967, 0.024774656295776368, 0.024780799865722656, 0.024877056121826172, 0.024766464233398438, 0.02482585525512695, 0.024840415954589842, 0.024856351852416993, 0.0248702392578125, 0.02487772750854492, 0.02487295913696289, 0.02487071990966797, 0.024885440826416017, 0.02486832046508789, 0.024857120513916017, 0.02484646415710449, 0.024843231201171875, 0.024884128570556642, 0.025004032135009766, 0.024990751266479493, 0.02498771286010742, 0.025121248245239258, 0.02515011215209961, 0.02512067222595215, 0.025097536087036132, 0.02515551948547363, 0.025117311477661133, 0.025124128341674806, 0.0251759033203125, 0.025057600021362304, 0.02514579200744629, 0.025157760620117188, 0.025217023849487305, 0.025286367416381836, 0.025272607803344727, 0.025296287536621095, 0.02518671989440918, 0.025202880859375, 0.025280511856079102, 0.02527027130126953, 0.025218784332275392, 0.02524188804626465, 0.025322656631469726, 0.02536944007873535, 0.025332927703857422, 0.025400032043457033, 0.02541904067993164, 0.025489919662475585, 0.025479488372802735, 0.02539107131958008, 0.02543577575683594, 0.02540550422668457, 0.02530544090270996, 0.02535024070739746, 0.025327232360839842, 0.025350143432617187, 0.025231359481811523, 0.024922111511230468, 0.0248668155670166, 0.024771903991699217, 0.024724288940429686, 0.024729280471801757, 0.024785087585449218, 0.024769599914550782, 0.02481155204772949, 0.02489641571044922, 0.02478214454650879, 0.024769216537475585, 0.024805599212646485, 0.024812383651733397, 0.025000768661499022, 0.024940671920776366, 0.02498150444030762, 0.02495689582824707, 0.02501024055480957, 0.02505289649963379, 0.025000192642211913, 0.025026464462280275, 0.02510857582092285, 0.025062688827514647, 0.02500681686401367, 0.025026559829711914, 0.025040319442749023, 0.025036800384521486, 0.025024799346923827, 0.02495471954345703, 0.024924192428588867, 0.025049503326416016, 0.025115999221801757, 0.025189023971557617, 0.02507776069641113, 0.02513920021057129, 0.025124448776245117, 0.02516009521484375, 0.025161727905273438, 0.02518534469604492, 0.02521340751647949, 0.025237823486328127, 0.025189727783203126, 0.025966400146484374, 0.025248607635498046, 0.02521481513977051, 0.025506175994873048, 0.025456575393676757, 0.025458112716674804, 0.026091808319091796, 0.02537436866760254, 0.025410175323486328, 0.02536857604980469, 0.025267200469970705, 0.025291648864746094, 0.02543014335632324, 0.025317375183105468, 0.025357887268066405, 0.02545631980895996, 0.025430784225463868, 0.02547711944580078, 0.02544371223449707, 0.025239519119262695, 0.02485424041748047, 0.024719680786132812, 0.024645631790161132, 
0.024723167419433593, 0.02477846336364746, 0.024811168670654297, 0.024689567565917968, 0.02469068717956543, 0.024816640853881834, 0.024837343215942383, 0.02473347282409668, 0.024751935958862305, 0.024774848937988283, 0.02487049674987793, 0.024833696365356445, 0.02489334487915039, 0.024916831970214843, 0.024980607986450194, 0.025033311843872072, 0.024895776748657228, 0.024885568618774414, 0.025027551651000977, 0.025084287643432616, 0.024983903884887696, 0.02485862350463867, 0.02491561508178711, 0.024953407287597658, 0.024999616622924804, 0.025059423446655273, 0.025083904266357423, 0.02510995292663574, 0.02510207939147949, 0.025187007904052733, 0.025214336395263673, 0.025197311401367186, 0.02514262390136719, 0.025148063659667968, 0.025215200424194336, 0.02516713523864746, 0.02511510467529297, 0.025304224014282225, 0.025383808135986327, 0.02526323127746582, 0.025262975692749025, 0.025444351196289062, 0.025427967071533202, 0.025316736221313477, 0.025113216400146486, 0.025069568634033205, 0.025174016952514647, 0.0252392635345459, 0.025213247299194337, 0.02536240005493164, 0.025448448181152345, 0.0255098876953125, 0.02527846336364746, 0.02535628890991211, 0.025374624252319337, 0.025417823791503907, 0.02554265594482422, 0.025620479583740235, 0.025617664337158202, 0.02533635139465332, 0.025091936111450195, 0.024928255081176756, 0.024829952239990235, 0.024666112899780275, 0.024731647491455077, 0.02483404731750488, 0.024821760177612305, 0.024909631729125976, 0.024788671493530274, 0.02480499267578125, 0.024864736557006835, 0.024998815536499023, 0.024868864059448242, 0.024860671997070313, 0.024877056121826172, 0.024925567626953124, 0.024805952072143554, 0.024891456604003905, 0.024870912551879884, 0.02506547164916992, 0.025038463592529297, 0.0250184326171875, 0.026261823654174805, 0.025176319122314453, 0.024999679565429686, 0.02505523109436035, 0.02509404754638672, 0.02510652732849121, 0.02508083152770996, 0.025178943634033203, 0.025163967132568358, 0.025081279754638672, 0.02509040069580078, 0.02530112075805664, 0.025276287078857422, 0.025278688430786133, 0.025372480392456053, 0.02558598327636719, 0.0252126407623291, 0.025338016510009765, 0.025261344909667968, 0.025240224838256838, 0.025306528091430663, 0.02534160041809082, 0.0252523193359375, 0.025303327560424804, 0.025395456314086913, 0.02547929573059082, 0.025489280700683594, 0.02540348815917969, 0.025413536071777345, 0.025413631439208984, 0.02534137535095215, 0.027045984268188477, 0.02526223945617676, 0.025290111541748046, 0.025348672866821288, 0.025389440536499025, 0.025466815948486328, 0.025411264419555664, 0.025413536071777345, 0.02561686325073242]",tokens/s,39.84961677266576,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,2226.5856,2551.119872,0.0,2155.872256,2032.413184,s,1,8.91062890625,8.91062890625,0.0,8.91062890625,8.91062890625,8.91062890625,8.91062890625,[8.91062890625],,kWh,4.962559017501083e-05,5.466919062269274e-06,1.5655012524005973e-05,7.074752176128608e-05,,MB,2275.807232,2827.943936,0.0,2418.016256,2279.563776,s,10,0.7760554885864258,0.07760554885864257,0.0001963902127203726,0.07763959884643555,0.07780231704711914,0.07785509605407714,0.07789731925964355,"[0.07790787506103515, 0.07766345977783203, 0.07763129425048829, 0.07724739074707031, 0.07762963104248047, 0.07771437072753906, 0.07764790344238282, 0.07727324676513672, 0.07754972839355469, 0.07779058837890625]",tokens/s,3298.733193244989,kWh,2.319681841898155e-06,2.558206105176512e-07,1.5470757526507914e-06,4.122578205066597e-06,tokens/kWh,62097063.358404994,MB,2283.020288,2911.830016,0.0,2501.902336,2389.801984,s,10,13.973065063476563,1.3973065063476562,0.003427724356547006,1.3969668579101562,1.401062353515625,1.4017873413085937,1.4023673315429688,"[1.3914072265625, 1.393419189453125, 1.4009012451171876, 1.4004678955078125, 1.3964017333984375, 1.4000360107421874, 1.4025123291015624, 1.397531982421875, 1.395591064453125, 1.39479638671875]",tokens/s,45.08674346952859,kWh,4.133535275435144e-05,4.558946191796699e-06,2.321011071974948e-05,6.910440966589762e-05,tokens/kWh,911663.9633359014,,s,630,13.965461080551151,0.022167398540557378,0.0003694476016180388,0.022058432579040527,0.022498105430603026,0.0227490740776062,0.023932267227172854,"[0.023076864242553712, 0.022781951904296875, 0.022462080001831055, 0.02277334403991699, 0.022332191467285156, 0.02209791946411133, 0.02203878402709961, 0.02194985580444336, 0.02207494354248047, 0.022087551116943358, 0.022014400482177735, 0.021895647048950195, 0.02198534393310547, 0.021904767990112303, 0.021988927841186525, 0.02204569625854492, 0.022129951477050783, 0.021947200775146485, 0.02204252815246582, 0.02200927925109863, 0.022014720916748047, 0.022103231430053712, 0.022211231231689454, 0.022060384750366212, 0.02210083198547363, 0.02201523208618164, 0.021993215560913087, 0.02196940803527832, 0.022058528900146486, 0.021956544876098633, 0.021994304656982423, 0.021987327575683592, 0.02206924819946289, 0.0220425910949707, 0.021934112548828124, 0.021845632553100586, 0.021931840896606446, 0.021942047119140624, 0.02202908706665039, 0.02190336036682129, 0.021975231170654298, 0.021987136840820314, 0.022183488845825196, 0.022038976669311525, 0.022018335342407228, 0.021949344635009766, 0.02195350456237793, 0.021901151657104493, 0.021915935516357423, 0.022158784866333006, 0.022018335342407228, 0.021983232498168945, 0.022038528442382813, 0.022002719879150392, 0.02203887939453125, 0.021948480606079103, 0.02204115104675293, 0.021997568130493163, 0.022026239395141603, 0.02224127960205078, 0.022478080749511718, 0.02231283187866211, 0.022141824722290038, 0.02288470458984375, 0.02227596855163574, 0.021926015853881837, 0.02231091117858887, 0.02256915283203125, 0.02285139274597168, 0.02217478370666504, 0.02203539276123047, 0.02205900764465332, 0.022040576934814454, 0.022079488754272462, 0.022097824096679687, 0.022019327163696287, 0.02197385597229004, 0.021850175857543945, 0.02184137535095215, 0.02201238441467285, 0.0219238395690918, 0.021987327575683592, 0.022067359924316406, 0.02194326400756836, 0.021988128662109373, 0.02206697654724121, 0.021876928329467773, 0.021928064346313475, 0.021786624908447266, 0.022064128875732423, 0.02192620849609375, 
0.022321855545043946, 0.022001056671142577, 0.022114912033081056, 0.022044767379760744, 0.02196988868713379, 0.021836736679077148, 0.021927040100097658, 0.022078271865844726, 0.021932031631469725, 0.02250739288330078, 0.02191993522644043, 0.02191360092163086, 0.021997568130493163, 0.022021184921264647, 0.022504383087158204, 0.02207321548461914, 0.022139007568359376, 0.022046720504760742, 0.021980352401733398, 0.021924671173095704, 0.022099967956542968, 0.022540288925170897, 0.02205900764465332, 0.02215116882324219, 0.022005760192871093, 0.02224287986755371, 0.022040159225463866, 0.02203094482421875, 0.022059263229370116, 0.02209382438659668, 0.02262835121154785, 0.022120447158813478, 0.022644960403442382, 0.02197465515136719, 0.02222051239013672, 0.023654272079467773, 0.02295238494873047, 0.02258403205871582, 0.022224288940429687, 0.022034208297729493, 0.02196124839782715, 0.022360063552856444, 0.021954559326171876, 0.022156864166259765, 0.022030784606933595, 0.02205411148071289, 0.021967647552490234, 0.02216783905029297, 0.0220664005279541, 0.022045183181762695, 0.021948415756225585, 0.022234495162963868, 0.022346336364746092, 0.02232249641418457, 0.02249388885498047, 0.022347904205322264, 0.022161312103271484, 0.022116352081298828, 0.02206515121459961, 0.022016000747680665, 0.02199888038635254, 0.02188889694213867, 0.022007680892944335, 0.02412771224975586, 0.02273356819152832, 0.022997247695922853, 0.02225724792480469, 0.02236636734008789, 0.022124223709106446, 0.02208799934387207, 0.022157312393188477, 0.022332544326782226, 0.022283136367797853, 0.022347776412963868, 0.0221265926361084, 0.022153215408325197, 0.021944320678710938, 0.021883039474487304, 0.021913631439208985, 0.02191084861755371, 0.021921632766723632, 0.021990047454833985, 0.021825536727905274, 0.021946367263793946, 0.021869823455810546, 0.02185260772705078, 0.02191801643371582, 0.022051872253417967, 0.022172639846801758, 0.022215808868408203, 0.022065088272094725, 0.022088640213012694, 0.021989248275756837, 0.02205299186706543, 0.021968511581420897, 0.02196928024291992, 0.021993471145629884, 0.02417804718017578, 0.02208358383178711, 0.022005247116088866, 0.02204489517211914, 0.022111743927001954, 0.021918176651000976, 0.021943775177001953, 0.022390975952148437, 0.021981855392456055, 0.022094079971313477, 0.02195568084716797, 0.022182559967041014, 0.02194380760192871, 0.022054527282714845, 0.021939071655273437, 0.022103391647338867, 0.022031007766723634, 0.022128704071044922, 0.02193574333190918, 0.022020416259765627, 0.022142175674438477, 0.022338592529296875, 0.021980831146240234, 0.02211235237121582, 0.02201100730895996, 0.022350912094116212, 0.022103904724121094, 0.022470624923706054, 0.022804479598999023, 0.02271014404296875, 0.022601856231689452, 0.022779136657714843, 0.022369024276733398, 0.022619487762451172, 0.02252249526977539, 0.022383903503417967, 0.02248518371582031, 0.022391008377075194, 0.022552768707275392, 0.022706335067749023, 0.022419424057006837, 0.022610111236572264, 0.022515775680541993, 0.022437664031982423, 0.022419456481933595, 0.022573280334472656, 0.02242729568481445, 0.02231865692138672, 0.02207619285583496, 0.022034208297729493, 0.02191798400878906, 0.021995231628417967, 0.021958656311035156, 0.022355167388916016, 0.021938528060913086, 0.021930431365966795, 0.021944320678710938, 0.02204787254333496, 0.021924448013305665, 0.022050399780273438, 0.022076095581054687, 0.022220800399780274, 0.02210201644897461, 0.02207744026184082, 0.022260543823242187, 0.02230271911621094, 0.022021440505981444, 
0.0223604793548584, 0.02198761558532715, 0.021987327575683592, 0.021985471725463866, 0.022071039199829102, 0.022255680084228516, 0.02205833625793457, 0.021862464904785155, 0.02201251220703125, 0.021968896865844727, 0.02198255920410156, 0.021947168350219728, 0.022284095764160156, 0.0225382080078125, 0.022401119232177736, 0.022239231109619142, 0.02201350402832031, 0.022170207977294923, 0.022099775314331056, 0.02205900764465332, 0.022077472686767578, 0.02207334327697754, 0.022001440048217774, 0.022210784912109375, 0.022273279190063475, 0.022250240325927734, 0.02222857666015625, 0.02249337577819824, 0.02245039939880371, 0.0223187198638916, 0.022178272247314453, 0.022619808197021484, 0.02210348892211914, 0.02215609550476074, 0.02203209686279297, 0.022167871475219727, 0.022601408004760744, 0.022272287368774416, 0.021986976623535156, 0.021876575469970704, 0.022053184509277343, 0.02216979217529297, 0.022035551071166993, 0.02210223960876465, 0.022043359756469726, 0.022048736572265627, 0.02287001609802246, 0.022160383224487306, 0.02226278305053711, 0.022130687713623046, 0.022108160018920898, 0.022405120849609376, 0.02198255920410156, 0.022069919586181642, 0.022037599563598635, 0.021908416748046874, 0.022042623519897463, 0.021949888229370117, 0.02200160026550293, 0.0219899845123291, 0.02188902473449707, 0.022337535858154296, 0.02394726371765137, 0.02225798416137695, 0.022137760162353515, 0.022360671997070314, 0.02217945671081543, 0.02194268798828125, 0.022311071395874023, 0.022761760711669923, 0.02250927925109863, 0.022216928482055663, 0.022374176025390626, 0.02207334327697754, 0.022024192810058595, 0.02192131233215332, 0.022084064483642578, 0.02202128028869629, 0.02196771240234375, 0.02197292709350586, 0.022078720092773438, 0.022012256622314454, 0.021959007263183592, 0.022141056060791017, 0.02246575927734375, 0.021951263427734374, 0.02191974449157715, 0.021935359954833984, 0.021962944030761718, 0.021916223526000977, 0.021915615081787108, 0.02180713653564453, 0.021908863067626953, 0.021871231079101563, 0.021957727432250978, 0.021881439208984374, 0.02192620849609375, 0.02197817611694336, 0.02195289611816406, 0.022093408584594725, 0.02212063980102539, 0.022006784439086914, 0.022107872009277343, 0.022038591384887694, 0.02203545570373535, 0.021890239715576174, 0.022210559844970702, 0.022089536666870118, 0.022116352081298828, 0.02188083267211914, 0.02200281524658203, 0.022016960144042967, 0.022255584716796874, 0.022257631301879882, 0.022166847229003906, 0.022090431213378905, 0.02208358383178711, 0.02231920051574707, 0.02430905532836914, 0.024731679916381834, 0.02253036880493164, 0.022423807144165038, 0.022595584869384764, 0.022255231857299804, 0.022352031707763672, 0.022041759490966796, 0.02221955108642578, 0.022156959533691407, 0.02235856056213379, 0.022319007873535156, 0.022085216522216795, 0.02194063949584961, 0.022017023086547852, 0.02206822395324707, 0.02320707130432129, 0.022229856491088867, 0.021935487747192384, 0.021979711532592774, 0.022056224822998047, 0.022162336349487305, 0.021974336624145507, 0.021987775802612304, 0.02203251266479492, 0.022191328048706056, 0.02200009536743164, 0.022118175506591797, 0.022039072036743164, 0.022273696899414063, 0.024794784545898438, 0.023702207565307616, 0.022517152786254883, 0.022436447143554687, 0.022218751907348632, 0.022437887191772463, 0.022779903411865234, 0.0227061767578125, 0.02225987243652344, 0.022128480911254883, 0.022124160766601564, 0.0224168643951416, 0.022127519607543944, 0.022001792907714843, 0.02198111915588379, 0.022044607162475586, 
0.022141952514648438, 0.02210508728027344, 0.022106111526489256, 0.02201203155517578, 0.022194047927856446, 0.022001056671142577, 0.022003456115722655, 0.02214944076538086, 0.02231929588317871, 0.02212236785888672, 0.022206432342529298, 0.022051584243774413, 0.022036224365234374, 0.022177087783813478, 0.02204947280883789, 0.022120447158813478, 0.021929983139038087, 0.021910783767700195, 0.02198182487487793, 0.022524032592773437, 0.02245193672180176, 0.022397184371948244, 0.02230944061279297, 0.022351104736328124, 0.02200422477722168, 0.022423807144165038, 0.021956544876098633, 0.02200172805786133, 0.021990432739257812, 0.021977247238159178, 0.021926687240600585, 0.0219648323059082, 0.021982431411743164, 0.021958751678466795, 0.022031040191650392, 0.021985279083251954, 0.021917055130004883, 0.021979328155517577, 0.022284736633300783, 0.022190080642700196, 0.021958080291748047, 0.02214134407043457, 0.022075551986694336, 0.022196224212646484, 0.021958656311035156, 0.02197532844543457, 0.02202569580078125, 0.022007167816162108, 0.021904064178466798, 0.021869855880737303, 0.021924863815307616, 0.02262403106689453, 0.022104127883911133, 0.021917600631713868, 0.021915807723999023, 0.021906879425048827, 0.022114400863647462, 0.022683744430541993, 0.022058719635009764, 0.02223126411437988, 0.022227392196655274, 0.02208790397644043, 0.022089151382446288, 0.021911903381347655, 0.02202217674255371, 0.021866464614868165, 0.02205695915222168, 0.021924896240234373, 0.022071487426757814, 0.021996320724487303, 0.02216534423828125, 0.022149280548095705, 0.022410751342773438, 0.022561279296875, 0.022461599349975585, 0.022462400436401367, 0.02216771125793457, 0.022424480438232423, 0.022781791687011718, 0.022447328567504882, 0.022201120376586916, 0.02196227264404297, 0.02206972885131836, 0.025210527420043944, 0.022159744262695312, 0.022419551849365234, 0.022324735641479493, 0.022164255142211913, 0.02201206398010254, 0.021869855880737303, 0.021983808517456054, 0.022061119079589842, 0.021943647384643553, 0.022096479415893554, 0.021934080123901366, 0.021917119979858398, 0.021834304809570312, 0.022052223205566407, 0.02219481658935547, 0.021959903717041016, 0.021852960586547853, 0.022006944656372072, 0.021807584762573242, 0.021899648666381836, 0.02192495918273926, 0.022174335479736327, 0.022024288177490234, 0.022269792556762695, 0.021930335998535156, 0.0219703369140625, 0.02190787124633789, 0.022032575607299806, 0.022806528091430665, 0.022413536071777342, 0.02264041519165039, 0.022101408004760743, 0.022088287353515625, 0.022037696838378907, 0.021979904174804686, 0.023217504501342773, 0.022014656066894532, 0.022179872512817382, 0.02192793655395508, 0.022034431457519533, 0.0218789119720459, 0.0220153923034668, 0.021893600463867187, 0.021966848373413086, 0.021946367263793946, 0.022076831817626954, 0.022206079483032225, 0.02211529541015625, 0.02185215950012207, 0.021915807723999023, 0.02233123207092285, 0.02316854476928711, 0.023895551681518554, 0.022577152252197266, 0.02200457572937012, 0.022134496688842775, 0.02201363182067871, 0.022012639999389648, 0.02192736053466797, 0.022239007949829102, 0.021920543670654297, 0.02205900764465332, 0.02225152015686035, 0.02229043197631836, 0.022446975708007813, 0.022427648544311524, 0.021944063186645508, 0.021932512283325194, 0.021808639526367187, 0.02204198455810547, 0.022004383087158203, 0.021999872207641602, 0.02307276725769043, 0.021949472427368163, 0.02189206314086914, 0.021853248596191407, 0.021896127700805665, 0.023895423889160158, 0.022857599258422852, 0.02315532875061035, 
0.022497407913208006, 0.022330528259277345, 0.021953311920166016, 0.02192758369445801, 0.02192630386352539, 0.022042720794677735, 0.02189116859436035, 0.02206470489501953, 0.022061279296875, 0.021962047576904297, 0.023093631744384766, 0.022143327713012695, 0.021991615295410157, 0.02185523223876953, 0.0219136962890625, 0.021977312088012697, 0.021974687576293946, 0.02244630432128906, 0.022175775527954102, 0.022392831802368163, 0.022143583297729492, 0.022332576751708983, 0.022016864776611328, 0.022056896209716795, 0.021954559326171876, 0.02190342330932617, 0.02183305549621582, 0.021956415176391603, 0.021887840270996092, 0.02191916847229004, 0.021913663864135742, 0.021825023651123047, 0.021888191223144532, 0.021903167724609374, 0.022138879776000975, 0.02208153533935547, 0.022013471603393554, 0.02222934341430664, 0.02203455924987793, 0.021978271484375, 0.021999488830566405, 0.02194492721557617, 0.0222271671295166, 0.022149280548095705, 0.02195033645629883, 0.021987455368041992, 0.021941375732421876]",tokens/s,45.11129252133056,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1169.5104,1101.98784,0.0,706.740224,681.6384,s,1,7.8803232421875,7.8803232421875,0.0,7.8803232421875,7.8803232421875,7.8803232421875,7.8803232421875,[7.8803232421875],,kWh,2.994021517082122e-05,3.2955293587886216e-06,9.018618325994099e-06,4.225436285560394e-05,,MB,1473.560576,1406.07488,0.0,996.1472,949.238272,s,10,0.2701465301513672,0.027014653015136718,0.0002590781925695431,0.026916496276855467,0.02731109790802002,0.02742468538284302,0.027515555362701418,"[0.027538272857666017, 0.027266656875610352, 0.0267741756439209, 0.027285856246948244, 0.02666486358642578, 0.026836351394653322, 0.02688630485534668, 0.02694668769836426, 0.027082208633422852, 0.026865152359008788]",tokens/s,9476.338632095676,kWh,7.87035152987432e-07,8.679267208216295e-08,4.824255132237186e-07,1.3562533382933135e-06,tokens/kWh,188755295.76365587,MB,1494.245376,1414.463488,0.0,1004.535808,949.240832,s,10,12.78926037597656,1.2789260375976563,0.004933773595789401,1.2782561645507813,1.2860602294921875,1.2865773559570313,1.2869910571289063,"[1.287094482421875, 1.2779775390625, 1.2859453125, 1.2803258056640625, 1.2744139404296875, 1.27096923828125, 1.2768492431640626, 1.28282861328125, 1.2785347900390625, 1.2743214111328125]",tokens/s,49.26008083965484,kWh,3.754832102909648e-05,4.1411248843222496e-06,1.5031698009176037e-05,5.6721143922594757e-05,tokens/kWh,1110696.9225792373,,s,630,12.787142049789429,0.020297050872681632,0.00033143789891812,0.020219488143920897,0.020541273307800294,0.02072889757156372,0.021887381591796873,"[0.020536287307739258, 0.020398143768310548, 0.02035308837890625, 0.020231744766235352, 0.020243776321411132, 0.020232736587524416, 0.020072927474975587, 0.020193439483642578, 0.020277088165283202, 0.02020351982116699, 0.02022809600830078, 0.02029148864746094, 0.020152416229248047, 0.020344352722167967, 0.020193952560424805, 0.020031295776367187, 0.020373504638671876, 0.020368831634521484, 0.020482624053955078, 0.020609024047851563, 0.02038102340698242, 0.020664831161499024, 0.022929567337036133, 0.020983808517456053, 0.02082611274719238, 0.020406272888183592, 0.020279296875, 0.020290559768676757, 0.020488576889038088, 0.0201997127532959, 0.020320608139038087, 0.02035043144226074, 0.020193824768066405, 0.020362335205078123, 0.020255647659301757, 0.020701183319091796, 0.020346879959106445, 0.020231775283813477, 0.02055619239807129, 0.02037721633911133, 0.020295904159545897, 0.020572256088256836, 0.02027676773071289, 0.02042524719238281, 0.020340448379516603, 0.02029801559448242, 0.020967424392700194, 0.022359968185424805, 0.020377695083618166, 0.020338623046875, 0.020423904418945312, 0.020304672241210936, 0.02027503967285156, 0.020164255142211915, 0.02020924758911133, 0.020259071350097656, 0.02015715217590332, 0.020161632537841798, 0.020675487518310547, 0.020349056243896484, 0.020332416534423827, 0.020082687377929686, 0.02027030372619629, 0.020314815521240235, 0.020259872436523437, 0.020146303176879883, 0.020446144104003906, 0.02020310401916504, 0.020187456130981444, 0.020291200637817385, 0.02019500732421875, 0.02021651268005371, 0.020248575210571287, 0.020183040618896485, 0.02009059143066406, 0.02006390380859375, 0.020212352752685545, 0.020471519470214843, 0.0204169921875, 0.02052076721191406, 0.02086016082763672, 0.020358144760131838, 0.020449024200439453, 0.020305919647216796, 0.020273151397705077, 0.02020966339111328, 0.020241472244262697, 0.020204479217529298, 0.020332256317138673, 0.020277088165283202, 0.02077231979370117, 
0.02034787178039551, 0.020099071502685546, 0.02011955261230469, 0.020090463638305665, 0.020115615844726563, 0.020101343154907227, 0.020148256301879882, 0.020245920181274413, 0.020203327178955077, 0.02021251106262207, 0.020332096099853515, 0.020263359069824217, 0.02033193588256836, 0.020107872009277345, 0.020090879440307616, 0.02008780860900879, 0.020118528366088868, 0.020154144287109373, 0.020258848190307616, 0.02051705551147461, 0.02022604751586914, 0.020312063217163084, 0.020172224044799805, 0.020146751403808595, 0.020231264114379883, 0.020179872512817384, 0.020313791275024414, 0.02095110321044922, 0.02037785530090332, 0.020238336563110353, 0.020243616104125978, 0.02030473518371582, 0.020606719970703125, 0.020461248397827148, 0.02031804847717285, 0.020342784881591795, 0.02071743965148926, 0.020582176208496093, 0.020520864486694337, 0.020335039138793944, 0.020254720687866212, 0.020152288436889647, 0.02016422462463379, 0.020121088027954103, 0.020138912200927735, 0.02016694450378418, 0.021693727493286134, 0.023953760147094726, 0.02070742416381836, 0.020313215255737305, 0.02029862403869629, 0.020166656494140626, 0.020137311935424805, 0.02012835121154785, 0.020041791915893555, 0.020041696548461913, 0.02001513671875, 0.02012553596496582, 0.02018694305419922, 0.019988832473754884, 0.01999667167663574, 0.02028489685058594, 0.02004368019104004, 0.02008127975463867, 0.01997177505493164, 0.020008575439453124, 0.020226816177368163, 0.02003721618652344, 0.02002569580078125, 0.020109312057495117, 0.02015203285217285, 0.02030342483520508, 0.020167392730712892, 0.02013167953491211, 0.020144287109375, 0.02017430305480957, 0.020273408889770507, 0.02027343940734863, 0.02169241523742676, 0.02009449577331543, 0.020060319900512696, 0.02017100715637207, 0.02012985610961914, 0.0204532470703125, 0.020096607208251953, 0.02045580863952637, 0.02041993522644043, 0.020534080505371095, 0.022040576934814454, 0.021884927749633788, 0.020585472106933594, 0.020264192581176756, 0.020108415603637696, 0.020173440933227538, 0.02021785545349121, 0.020731903076171874, 0.02103455924987793, 0.020854272842407227, 0.0206210880279541, 0.020460416793823242, 0.020256000518798827, 0.020345600128173828, 0.02017817687988281, 0.019983104705810547, 0.02003763198852539, 0.020129791259765627, 0.020162559509277343, 0.02007027244567871, 0.02005219268798828, 0.020506528854370116, 0.02044108772277832, 0.02072985649108887, 0.0202478084564209, 0.020196096420288086, 0.02037555122375488, 0.020158464431762696, 0.02035513687133789, 0.020459455490112306, 0.020457023620605468, 0.020291584014892578, 0.02044905662536621, 0.020607295989990233, 0.020414464950561522, 0.020338848114013673, 0.020510143280029296, 0.020599552154541016, 0.020445184707641603, 0.02029363250732422, 0.020238431930541992, 0.02022390365600586, 0.02060438346862793, 0.020640287399291992, 0.020385440826416017, 0.020267360687255858, 0.020099071502685546, 0.02048521614074707, 0.02062019157409668, 0.020312320709228514, 0.02045302391052246, 0.020192607879638672, 0.020062591552734373, 0.020275583267211916, 0.02004991912841797, 0.020846176147460937, 0.02024393653869629, 0.020247488021850585, 0.020282527923583985, 0.02016099166870117, 0.020140415191650392, 0.020120895385742188, 0.020588319778442384, 0.020652416229248047, 0.02015216064453125, 0.020126399993896486, 0.020099071502685546, 0.020414016723632813, 0.020089088439941408, 0.020086336135864257, 0.020177536010742188, 0.020320255279541014, 0.02030182456970215, 0.020199424743652345, 0.020346879959106445, 0.020165792465209963, 
0.02033750343322754, 0.020207199096679687, 0.02032067108154297, 0.020219295501708985, 0.02022825622558594, 0.020525503158569335, 0.020254720687866212, 0.020207616806030275, 0.02008678436279297, 0.020115455627441405, 0.020148000717163085, 0.019962015151977538, 0.020093055725097658, 0.020264896392822265, 0.020526144027709962, 0.02036761665344238, 0.020107872009277345, 0.020520832061767576, 0.020217567443847655, 0.020670656204223634, 0.020363584518432617, 0.02020265579223633, 0.020601152420043945, 0.020189855575561525, 0.02016828727722168, 0.02026851272583008, 0.020228000640869142, 0.020165536880493166, 0.020420608520507814, 0.020316160202026368, 0.02022400093078613, 0.02020351982116699, 0.0202608642578125, 0.020324352264404297, 0.020225727081298828, 0.020258207321166993, 0.020112287521362304, 0.020129600524902345, 0.020076736450195313, 0.020057952880859375, 0.020307680130004883, 0.020178720474243163, 0.020081087112426756, 0.02007267189025879, 0.02006630325317383, 0.020291648864746093, 0.020270015716552733, 0.020164960861206054, 0.020084672927856446, 0.019987167358398436, 0.02006447982788086, 0.020113183975219728, 0.02021990394592285, 0.020145503997802735, 0.020286111831665038, 0.020076831817626952, 0.020248287200927733, 0.020182912826538085, 0.020172927856445314, 0.02026713562011719, 0.020234079360961915, 0.02019001579284668, 0.02013507270812988, 0.020095840454101562, 0.02027519989013672, 0.020404224395751954, 0.02021347236633301, 0.02012598419189453, 0.020072160720825197, 0.020044063568115233, 0.02006220817565918, 0.020074495315551756, 0.020266176223754883, 0.02008291244506836, 0.020119935989379882, 0.020111263275146483, 0.020388160705566406, 0.020172800064086914, 0.02006387138366699, 0.020089216232299804, 0.020215232849121093, 0.0200949764251709, 0.02010086441040039, 0.020111583709716798, 0.02004591941833496, 0.020212223052978515, 0.020150272369384766, 0.020116800308227538, 0.020177600860595703, 0.02013545608520508, 0.020197248458862303, 0.020139711380004883, 0.020226112365722658, 0.020063072204589843, 0.020068351745605468, 0.02004991912841797, 0.020099071502685546, 0.02003094482421875, 0.02010998344421387, 0.020062080383300783, 0.02012575912475586, 0.020151615142822266, 0.02007072067260742, 0.02011372756958008, 0.020178112030029297, 0.02041324806213379, 0.020496383666992187, 0.020442367553710938, 0.02026927947998047, 0.02026451110839844, 0.020360160827636718, 0.02029737663269043, 0.020291936874389647, 0.020206975936889648, 0.020287359237670898, 0.020189952850341798, 0.020125696182250977, 0.02016041564941406, 0.020164735794067384, 0.020106592178344727, 0.02013657569885254, 0.020135936737060548, 0.02015145683288574, 0.020189247131347655, 0.02145964813232422, 0.020557151794433594, 0.020238624572753907, 0.020468128204345702, 0.020353023529052734, 0.020131839752197265, 0.02019046401977539, 0.020155136108398437, 0.020155679702758788, 0.020162879943847658, 0.020066015243530272, 0.02018387222290039, 0.020022687911987306, 0.02003955268859863, 0.020009151458740236, 0.02012179183959961, 0.020081888198852538, 0.020204128265380858, 0.020212127685546876, 0.020841503143310548, 0.02094179153442383, 0.020672191619873048, 0.02053356742858887, 0.020341951370239256, 0.020169408798217773, 0.020629568099975584, 0.020369472503662108, 0.020136224746704103, 0.020309728622436525, 0.020264959335327147, 0.020286975860595705, 0.020226560592651367, 0.020237600326538086, 0.02017967987060547, 0.020174400329589844, 0.02009542465209961, 0.02032614326477051, 0.020232448577880858, 0.02002943992614746, 0.02006559944152832, 
0.020134143829345703, 0.020112831115722655, 0.02002841567993164, 0.020113311767578124, 0.02011532783508301, 0.020146400451660156, 0.020094911575317384, 0.020130943298339844, 0.02021062469482422, 0.020297439575195312, 0.02020089530944824, 0.020228960037231444, 0.02020547294616699, 0.0202589111328125, 0.020287488937377928, 0.020264671325683593, 0.02032217597961426, 0.020343040466308592, 0.020292800903320314, 0.020138975143432616, 0.02027123260498047, 0.02036467170715332, 0.020186784744262696, 0.020093088150024415, 0.020076959609985352, 0.021663551330566407, 0.020729631423950196, 0.02037507247924805, 0.02034550476074219, 0.020173023223876953, 0.02011071968078613, 0.020136159896850588, 0.020074176788330077, 0.020048608779907228, 0.02008998489379883, 0.020138463973999023, 0.02006671905517578, 0.02002739143371582, 0.020172800064086914, 0.020178943634033202, 0.020281183242797853, 0.020312223434448242, 0.02027724838256836, 0.02026700782775879, 0.02023756790161133, 0.020208383560180666, 0.020412416458129884, 0.020518463134765626, 0.020332992553710936, 0.02020351982116699, 0.020202783584594725, 0.020396415710449218, 0.020629215240478515, 0.02039583969116211, 0.020456256866455077, 0.020520191192626953, 0.020398847579956053, 0.02189516830444336, 0.021045120239257812, 0.021192832946777342, 0.02072800064086914, 0.020289344787597655, 0.020159584045410156, 0.020315135955810547, 0.020084543228149412, 0.020142175674438476, 0.020104448318481447, 0.02013670349121094, 0.020256767272949217, 0.02026412773132324, 0.02021049690246582, 0.02007040023803711, 0.020365407943725586, 0.020072351455688475, 0.020178592681884766, 0.020453535079956054, 0.020179136276245117, 0.020171903610229493, 0.020130367279052735, 0.020160831451416016, 0.020740352630615234, 0.02069273567199707, 0.020630624771118163, 0.0206713924407959, 0.020539104461669924, 0.020254720687866212, 0.020227519989013672, 0.020447551727294924, 0.020270784378051757, 0.020208192825317384, 0.02019708824157715, 0.020093215942382812, 0.020069408416748046, 0.020165599822998048, 0.02020147132873535, 0.02011955261230469, 0.02004604721069336, 0.020053632736206056, 0.02001862335205078, 0.020177631378173827, 0.02021171188354492, 0.020127391815185545, 0.02009123229980469, 0.020167776107788086, 0.02039695930480957, 0.020174848556518556, 0.020144384384155275, 0.020188928604125977, 0.020213567733764648, 0.020182975769042967, 0.020115711212158202, 0.02231500816345215, 0.021888383865356444, 0.02017286491394043, 0.02031827163696289, 0.020119359970092773, 0.02000761604309082, 0.02010851287841797, 0.02010601615905762, 0.02010316848754883, 0.020260351181030273, 0.020464128494262695, 0.02051411247253418, 0.02022879981994629, 0.020133535385131837, 0.020136287689208984, 0.020142080307006836, 0.020146175384521483, 0.020193248748779296, 0.020129919052124023, 0.02015376091003418, 0.020219680786132812, 0.020128416061401366, 0.020221824645996093, 0.020226240158081055, 0.020120864868164064, 0.020331520080566406, 0.02014156723022461, 0.020133344650268555, 0.020318431854248045, 0.020539615631103517, 0.020437280654907228, 0.0208240966796875, 0.020520959854125977, 0.02082611274719238, 0.02038374328613281, 0.02039971160888672, 0.020343135833740235, 0.02015884780883789, 0.020219200134277342, 0.020131711959838868, 0.020112192153930664, 0.020262271881103515, 0.020804224014282228, 0.020303871154785155, 0.020286624908447265, 0.020218719482421876, 0.020238336563110353, 0.020298751831054687, 0.020274175643920898, 0.020162559509277343, 0.02023129653930664, 0.02026380729675293, 0.02019327926635742, 
0.02007619285583496, 0.020160863876342774, 0.020131839752197265, 0.02011907196044922, 0.020170272827148436, 0.020071359634399415, 0.02009235191345215, 0.019951583862304688, 0.019995136260986326, 0.020176416397094728, 0.020002975463867187, 0.019990528106689453, 0.02008720016479492, 0.02004547119140625, 0.020197120666503907, 0.020457632064819337, 0.020302528381347655, 0.02017817687988281, 0.020056575775146485, 0.020215808868408205, 0.02021785545349121, 0.020223039627075196, 0.020109535217285156, 0.020251199722290038, 0.0202446403503418, 0.020183040618896485, 0.020120960235595703, 0.020142816543579103, 0.02039504051208496, 0.020392671585083007, 0.020256927490234375, 0.020234176635742188, 0.020149951934814454, 0.020121952056884766, 0.020616863250732424, 0.020461952209472656, 0.020563968658447264, 0.020348031997680663, 0.020419456481933593, 0.02033459281921387, 0.020314111709594726, 0.02024448013305664, 0.020125696182250977, 0.020161951065063476, 0.020261472702026367, 0.02021990394592285, 0.02036639976501465]",tokens/s,49.26824129637119,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1844.375552,2760.835072,0.0,2365.587456,2314.318336,s,1,8.9506689453125,8.9506689453125,0.0,8.9506689453125,8.9506689453125,8.9506689453125,8.9506689453125,[8.9506689453125],,kWh,5.476549709167102e-05,6.033410756598026e-06,1.7303069397997528e-05,7.810197724626658e-05,,MB,1901.416448,3092.185088,0.0,2682.257408,2607.60832,s,10,0.4811466560363769,0.04811466560363769,0.0001687004770132298,0.04807537651062012,0.04819355430603028,0.04839336986541748,0.04855322231292725,"[0.04859318542480469, 0.048093952178955075, 0.048094879150390624, 0.047963359832763675, 0.048149150848388673, 0.047989856719970705, 0.048056800842285155, 0.048127326965332035, 0.04802908706665039, 0.04804905700683594]",tokens/s,5320.623073823155,kWh,1.4723514423158077e-06,1.6237270339468906e-07,9.705856452562412e-07,2.605309790966738e-06,tokens/kWh,98260867.4360401,MB,1901.416448,3092.185088,0.0,2682.257408,2607.61088,s,10,13.3271533203125,1.33271533203125,0.007290476577824268,1.3307625732421875,1.3393217407226563,1.344705975341797,1.3490133630371095,"[1.338125244140625, 1.3302781982421874, 1.3379046630859375, 1.3288638916015625, 1.3312469482421876, 1.333383544921875, 1.3261055908203125, 1.3270718994140625, 1.3240831298828124, 1.3500902099609375]",tokens/s,47.27191057671629,kWh,3.910087619434966e-05,4.312441178818341e-06,1.927501388454316e-05,6.268833125771117e-05,tokens/kWh,1004971.7185963615,,s,630,13.32509829330445,0.021150949671911817,0.0003596681577085467,0.021049039840698242,0.021470634269714357,0.021678290939331053,0.022570490989685058,"[0.021683935165405274, 0.021206464767456055, 0.02124880027770996, 0.021204704284667968, 0.021231903076171874, 0.02125833511352539, 0.02138313674926758, 0.021178367614746094, 0.021525983810424806, 0.021155712127685546, 0.02108624076843262, 0.021266847610473632, 
0.021016799926757812, 0.020956672668457032, 0.020959999084472655, 0.021022464752197264, 0.022714368820190428, 0.022293983459472658, 0.021531167984008788, 0.02164249610900879, 0.021764768600463866, 0.02130544090270996, 0.02149580764770508, 0.021295103073120117, 0.021142911911010743, 0.02130803108215332, 0.02105548858642578, 0.02101862335205078, 0.0209562873840332, 0.020994239807128907, 0.020945024490356446, 0.020905792236328127, 0.02089241600036621, 0.020883455276489257, 0.0209322566986084, 0.020883295059204103, 0.02093721580505371, 0.021604352951049805, 0.02146713638305664, 0.021315231323242187, 0.021356800079345702, 0.021243999481201172, 0.02118592071533203, 0.02099468803405762, 0.02139321517944336, 0.020942304611206056, 0.020894432067871095, 0.021184095382690428, 0.020996511459350584, 0.02100614356994629, 0.021072063446044922, 0.021020671844482423, 0.0210098876953125, 0.02096774482727051, 0.021186784744262697, 0.021255647659301758, 0.021690847396850586, 0.021300544738769533, 0.021616512298583985, 0.02122550392150879, 0.02123673629760742, 0.021303136825561522, 0.021175680160522462, 0.021351423263549805, 0.021388288497924804, 0.021223424911499023, 0.021172224044799806, 0.021262048721313476, 0.021409568786621095, 0.021019136428833008, 0.021110048294067384, 0.021056127548217774, 0.02092451286315918, 0.020985343933105468, 0.021078367233276368, 0.02091801643371582, 0.02082601547241211, 0.020976127624511717, 0.020908031463623047, 0.0209256649017334, 0.02082896041870117, 0.02099363136291504, 0.02108457565307617, 0.020977216720581053, 0.020840448379516603, 0.020984224319458008, 0.021071903228759764, 0.021108448028564455, 0.02097952079772949, 0.021221855163574218, 0.021131103515625, 0.02119910430908203, 0.02099600028991699, 0.02105958366394043, 0.021161888122558595, 0.02102012825012207, 0.020988767623901367, 0.021112607955932616, 0.02101862335205078, 0.02091619110107422, 0.02096131134033203, 0.021352224349975586, 0.025118080139160157, 0.021371360778808593, 0.02103036880493164, 0.02111123275756836, 0.021114751815795897, 0.021002847671508788, 0.02104934310913086, 0.02125619125366211, 0.020975616455078124, 0.020948991775512696, 0.021059423446655273, 0.02112335968017578, 0.020950912475585937, 0.021027040481567384, 0.020948768615722656, 0.020968448638916014, 0.020888576507568358, 0.020824064254760744, 0.020909952163696288, 0.02105523109436035, 0.020967008590698243, 0.02085126495361328, 0.020930976867675782, 0.021014368057250977, 0.021209087371826172, 0.021170175552368165, 0.021370880126953123, 0.02274620819091797, 0.021441408157348633, 0.021121055603027343, 0.021016576766967773, 0.021237695693969726, 0.02104751968383789, 0.02087715148925781, 0.020996095657348633, 0.021440511703491212, 0.021059488296508787, 0.020946399688720703, 0.02107980728149414, 0.021041759490966795, 0.020995967864990233, 0.02090025520324707, 0.02108006477355957, 0.02111267280578613, 0.02102697563171387, 0.02101558494567871, 0.022539520263671876, 0.021525472640991212, 0.02142393684387207, 0.021211360931396483, 0.021266847610473632, 0.02128108787536621, 0.021073087692260743, 0.02092051124572754, 0.02096143913269043, 0.020989984512329102, 0.020974016189575194, 0.02087321662902832, 0.02230067253112793, 0.021985279083251954, 0.0210882568359375, 0.02115123176574707, 0.021488224029541016, 0.021500864028930665, 0.021269472122192382, 0.02106777572631836, 0.022112255096435548, 0.0212541446685791, 0.022235136032104492, 0.02112828826904297, 0.020974496841430663, 0.020942848205566408, 0.02084966468811035, 0.021081087112426757, 
0.021159936904907226, 0.0208855037689209, 0.021048992156982423, 0.020982112884521485, 0.020999807357788086, 0.020928415298461914, 0.021186304092407227, 0.021109344482421875, 0.021237728118896484, 0.0213209285736084, 0.02124076843261719, 0.021123071670532227, 0.021068000793457033, 0.021809535980224608, 0.022754783630371093, 0.021852640151977538, 0.021133279800415038, 0.021121248245239258, 0.02104662322998047, 0.0211976318359375, 0.021431711196899413, 0.021158079147338867, 0.021104543685913087, 0.021090303421020508, 0.021021087646484374, 0.02124928092956543, 0.0209334716796875, 0.02102272033691406, 0.021276351928710937, 0.021279327392578123, 0.021108448028564455, 0.021145599365234375, 0.021149919509887694, 0.020999488830566407, 0.020941280364990236, 0.021099967956542967, 0.02105606460571289, 0.020965375900268556, 0.020805631637573242, 0.02100223922729492, 0.020930559158325195, 0.020930240631103516, 0.020810047149658204, 0.020987903594970703, 0.021016576766967773, 0.020985855102539062, 0.020908031463623047, 0.021006336212158205, 0.021071840286254882, 0.02105731201171875, 0.02088310432434082, 0.0211212158203125, 0.020952735900878906, 0.020994335174560546, 0.02083683204650879, 0.020922367095947265, 0.020987520217895506, 0.02103334426879883, 0.02103910446166992, 0.02100249671936035, 0.020944639205932616, 0.020940000534057618, 0.021008352279663085, 0.020939584732055663, 0.021089696884155275, 0.02110678482055664, 0.020943359375, 0.021019872665405274, 0.02127952003479004, 0.021228799819946288, 0.02094095993041992, 0.021054048538208008, 0.021009727478027342, 0.021037759780883788, 0.020764671325683593, 0.0211190071105957, 0.021360639572143555, 0.020975616455078124, 0.020975616455078124, 0.021026336669921875, 0.02103548812866211, 0.02083951950073242, 0.02108233642578125, 0.021283519744873046, 0.021049087524414062, 0.020885759353637696, 0.02105548858642578, 0.020998144149780275, 0.020957183837890626, 0.020792415618896484, 0.023914400100708007, 0.02098259162902832, 0.020998016357421875, 0.021020992279052735, 0.02107187271118164, 0.0208855037689209, 0.021028287887573244, 0.02091641616821289, 0.021022815704345704, 0.02091651153564453, 0.02103910446166992, 0.021156959533691407, 0.021140256881713868, 0.021112384796142577, 0.021151872634887697, 0.021078720092773437, 0.02103500747680664, 0.021144800186157227, 0.021092256546020507, 0.020900224685668944, 0.0210762882232666, 0.02106883239746094, 0.02106051254272461, 0.020960384368896485, 0.02107846450805664, 0.021174720764160156, 0.02105936050415039, 0.020888063430786134, 0.02099171257019043, 0.02108415985107422, 0.02188697624206543, 0.022574527740478516, 0.022542911529541014, 0.021349632263183593, 0.02104707145690918, 0.02090902328491211, 0.020998144149780275, 0.020989023208618163, 0.020960159301757812, 0.020807680130004884, 0.020809728622436522, 0.021016576766967773, 0.021040447235107423, 0.020818111419677734, 0.02088150405883789, 0.020994367599487303, 0.021131359100341796, 0.020991167068481444, 0.020939680099487306, 0.02143436813354492, 0.02119081687927246, 0.021217119216918944, 0.021139007568359375, 0.021168575286865235, 0.020936704635620116, 0.02105753517150879, 0.020964704513549804, 0.021026880264282226, 0.02079190444946289, 0.021009759902954103, 0.02097171211242676, 0.02103343963623047, 0.02230271911621094, 0.021005727767944335, 0.021027423858642577, 0.02127667236328125, 0.021020671844482423, 0.02115932846069336, 0.022270559310913086, 0.022675455093383787, 0.02198259162902832, 0.021295040130615235, 0.021258464813232424, 0.021251680374145508, 
0.02118124771118164, 0.021495872497558594, 0.02204038429260254, 0.0214704647064209, 0.021316768646240235, 0.021540319442749024, 0.02116640090942383, 0.02102681541442871, 0.020985952377319338, 0.021159839630126954, 0.020981760025024415, 0.020914176940917968, 0.02108006477355957, 0.02107961654663086, 0.02098953628540039, 0.02095939254760742, 0.020949440002441408, 0.021072128295898437, 0.020946943283081054, 0.020975168228149415, 0.020976064682006836, 0.021000192642211913, 0.020903200149536134, 0.020820703506469727, 0.02086499214172363, 0.021019775390625, 0.020962207794189454, 0.02091632080078125, 0.020940704345703123, 0.020975008010864257, 0.02093116760253906, 0.020998016357421875, 0.020899967193603517, 0.02111894416809082, 0.021081920623779296, 0.021043424606323243, 0.020899839401245117, 0.021040576934814453, 0.021463552474975587, 0.021179712295532227, 0.021365440368652344, 0.02119171142578125, 0.02103183937072754, 0.02104252815246582, 0.021159648895263672, 0.021104991912841795, 0.021232288360595705, 0.02104934310913086, 0.021086208343505858, 0.020973567962646485, 0.02104473686218262, 0.02106825637817383, 0.02119398307800293, 0.02102992057800293, 0.021130752563476563, 0.02101683235168457, 0.020985855102539062, 0.02086502456665039, 0.020941951751708984, 0.020902687072753907, 0.020918560028076173, 0.020952896118164064, 0.020899839401245117, 0.02091823959350586, 0.021702688217163087, 0.02095871925354004, 0.02096588706970215, 0.020874528884887694, 0.02094304084777832, 0.020895679473876952, 0.020959840774536134, 0.021014528274536134, 0.02099945640563965, 0.02125270462036133, 0.020947071075439454, 0.0208855037689209, 0.020951040267944337, 0.02108598327636719, 0.021186784744262697, 0.020963327407836914, 0.02090291213989258, 0.02091667175292969, 0.020874048233032228, 0.02094259262084961, 0.021046848297119142, 0.021121471405029298, 0.020953088760375976, 0.02104934310913086, 0.02104662322998047, 0.020925088882446288, 0.020985408782958983, 0.020895200729370116, 0.021035455703735353, 0.021194528579711915, 0.02128086471557617, 0.021559968948364257, 0.02105548858642578, 0.021051008224487303, 0.020961664199829103, 0.020847904205322267, 0.020933055877685548, 0.021200767517089845, 0.021102592468261717, 0.021024032592773436, 0.020973695755004882, 0.02101433563232422, 0.020919071197509766, 0.02093427276611328, 0.020928895950317383, 0.021088512420654296, 0.020919776916503905, 0.020966976165771485, 0.02092076873779297, 0.020875551223754882, 0.02098588752746582, 0.021211360931396483, 0.020856576919555662, 0.020971359252929686, 0.02104867172241211, 0.0209866886138916, 0.020864639282226562, 0.020969247817993163, 0.02100079917907715, 0.020973567962646485, 0.020926464080810548, 0.020969472885131835, 0.02097942352294922, 0.021012767791748047, 0.020981760025024415, 0.020950239181518556, 0.021551904678344728, 0.02137628746032715, 0.021301984786987305, 0.02116329574584961, 0.020929248809814453, 0.020996095657348633, 0.02112719917297363, 0.021382911682128906, 0.021155424118041992, 0.021484159469604493, 0.021294527053833008, 0.021353023529052734, 0.02128486442565918, 0.02123980712890625, 0.02094486427307129, 0.020942527770996092, 0.02100259208679199, 0.021112224578857423, 0.02189516830444336, 0.02100079917907715, 0.021018112182617187, 0.021066240310668945, 0.021000192642211913, 0.020983007431030272, 0.02093484878540039, 0.020987743377685546, 0.021060352325439454, 0.021024768829345702, 0.020938751220703124, 0.020932607650756836, 0.02087731170654297, 0.020888864517211916, 0.021097152709960938, 0.02095088005065918, 
0.021437984466552734, 0.021333471298217773, 0.021268768310546873, 0.021111520767211914, 0.021052480697631836, 0.020923328399658204, 0.020934431076049805, 0.020934879302978517, 0.021182464599609374, 0.02102079963684082, 0.021214176177978515, 0.0210482234954834, 0.02145894432067871, 0.0209017276763916, 0.021030399322509767, 0.02097587203979492, 0.020945215225219728, 0.020842592239379884, 0.020908031463623047, 0.02091606330871582, 0.020882911682128906, 0.021318336486816407, 0.020987903594970703, 0.021063135147094725, 0.02122127914428711, 0.020899520874023438, 0.02084550476074219, 0.020959232330322267, 0.02101161575317383, 0.020874080657958986, 0.0209039363861084, 0.02090188789367676, 0.020880544662475586, 0.020898624420166014, 0.02087062454223633, 0.02078982353210449, 0.02091366386413574, 0.020868608474731445, 0.020956159591674805, 0.020944896697998046, 0.02108415985107422, 0.020944000244140625, 0.02110348892211914, 0.02087731170654297, 0.02109644889831543, 0.020872800827026368, 0.021029279708862304, 0.020868928909301757, 0.02077510452270508, 0.020840448379516603, 0.021372928619384765, 0.020832256317138673, 0.020841760635375975, 0.020842752456665038, 0.020924896240234376, 0.021010112762451173, 0.020829919815063477, 0.020877920150756835, 0.020948991775512696, 0.02104729652404785, 0.021415519714355468, 0.021307775497436524, 0.021671392440795897, 0.021772480010986327, 0.021323680877685547, 0.021288415908813477, 0.021453344345092773, 0.02129817581176758, 0.02123673629760742, 0.02155731201171875, 0.021603872299194336, 0.021340576171875, 0.021356512069702148, 0.02140310478210449, 0.021496416091918946, 0.021446624755859376, 0.02147545623779297, 0.02146236801147461, 0.02148796844482422, 0.02154719924926758, 0.021433984756469727, 0.021477760314941405, 0.021921760559082033, 0.022394912719726563, 0.021501184463500977, 0.021463808059692384, 0.021348352432250976, 0.02125619125366211, 0.021331039428710938, 0.021472160339355468, 0.021202943801879884, 0.021177759170532228, 0.021168479919433592, 0.02114995193481445, 0.021137407302856445, 0.021170080184936522, 0.02106787109375, 0.021196800231933592, 0.021366527557373047, 0.02121855926513672, 0.02107084846496582, 0.021282623291015625, 0.021136959075927733, 0.02116057586669922, 0.021227487564086912, 0.02138889694213867, 0.021249759674072267, 0.021232351303100586, 0.021336063385009766, 0.02115932846069336, 0.02119536018371582, 0.021168127059936523, 0.021202943801879884, 0.02256060791015625, 0.022495391845703126, 0.02163705635070801, 0.021458272933959962, 0.02161123275756836, 0.021817344665527344, 0.02160220718383789, 0.02152569580078125, 0.021584064483642577, 0.021575775146484375, 0.021580703735351564, 0.021397344589233397, 0.021231487274169923]",tokens/s,47.27920095843199,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,823.488512,554.631168,0.0,159.383552,143.673856,s,1,7.305361328125,7.305361328125,0.0,7.305361328125,7.305361328125,7.305361328125,7.305361328125,[7.305361328125],,kWh,1.1056477220840102e-05,1.2058617879394831e-06,3.5866695360028955e-06,1.584900854478248e-05,,MB,1305.423872,609.15712,0.0,199.22944,186.684928,s,32,0.19960160017013548,0.006237550005316735,0.00015914221107278168,0.006206959962844849,0.0062986785411834716,0.006448363280296326,0.0068627054214477545,"[0.0063146882057189945, 0.00616428804397583, 0.006129983901977539, 0.006231584072113037, 0.006142784118652344, 0.006127295970916748, 0.006237760066986084, 0.006172063827514648, 0.0061476478576660155, 0.006155168056488037, 0.006201024055480957, 0.006183008193969727, 0.006218016147613525, 0.006250720024108886, 0.0062919678688049315, 0.006138720035552978, 0.006232992172241211, 0.0062548799514770504, 0.006299424171447754, 0.006134943962097168, 0.006259615898132324, 0.006975456237792969, 0.0062202239036560055, 0.006215839862823486, 0.006187359809875489, 0.0062410240173339845, 0.006119743824005127, 0.006167327880859375, 0.006160384178161621, 0.006611743927001953, 0.006207007884979248, 0.006206912040710449]",tokens/s,41041.75514132823,kWh,1.8462630307294917e-07,2.0361089943147197e-08,7.79196280113859e-08,2.829070210274823e-07,tokens/kWh,904890939.327842,MB,1319.186432,613.351424,0.0,203.423744,186.687488,s,32,9.894926513671873,0.30921645355224614,0.0016198589206653123,0.30923019409179686,0.31221292419433594,0.3126740478515625,0.3129811117553711,"[0.31301141357421874, 0.30927874755859375, 0.3124779968261719, 0.30763729858398436, 0.30926971435546874, 0.31235183715820314, 0.30713351440429687, 0.30975051879882814, 0.3097658996582031, 0.3073489990234375, 0.30855889892578126, 0.309537109375, 0.30816668701171873, 0.30962884521484374, 0.30777706909179686, 0.30975933837890623, 0.30842816162109377, 0.3079856872558594, 0.3084017333984375, 0.30803717041015627, 0.3129136657714844, 0.3104873046875, 0.3094598693847656, 0.30852020263671875, 0.31096270751953126, 0.3075557250976563, 0.3083272705078125, 0.3069648742675781, 0.30923321533203124, 0.3092739868164063, 0.3092271728515625, 0.30769387817382815]",tokens/s,203.7407753573997,kWh,9.012871368541888e-06,9.939593985697612e-07,3.2511715769261455e-06,1.3258002344037798e-05,tokens/kWh,4751847.100730939,,s,2016,9.880397473812101,0.00490099081042267,0.0001024788382667862,0.004880608081817627,0.004958703994750976,0.005008639931678772,0.005323640060424802,"[0.004883200168609619, 0.004953279972076416, 0.004961343765258789, 0.00510265588760376, 0.00504527997970581, 0.005024991989135742, 0.005040639877319336, 0.005111743927001953, 0.004993343830108642, 0.004972288131713867, 0.004968575954437256, 0.00499721622467041, 0.004990047931671142, 0.004978367805480957, 0.004932544231414795, 0.004988480091094971, 0.004948416233062744, 0.004912576198577881, 0.004884384155273438, 0.005165152072906494, 0.004973120212554931, 0.004969664096832275, 0.005055295944213867, 0.004997119903564453, 0.004910943984985351, 0.004894911766052246, 0.004877664089202881, 0.0050466561317443846, 0.004924767971038818, 0.004904032230377197, 0.004883296012878418, 0.004971072196960449, 0.004937344074249268, 0.004936480045318604, 0.004922880172729492, 0.0049177598953247074, 0.005053952217102051, 0.0049706239700317385, 0.004906752109527588, 0.004887167930603027, 0.004962495803833008, 0.0049498238563537595, 0.004886879920959473, 0.004884223937988281, 0.004931488037109375, 
0.004898111820220947, 0.004861824035644531, 0.004846496105194092, 0.004960159778594971, 0.004958208084106445, 0.005238783836364746, 0.005243040084838867, 0.004990848064422607, 0.0049417920112609865, 0.004863999843597412, 0.004848991870880127, 0.0049424958229064946, 0.0049459199905395506, 0.004896895885467529, 0.004869440078735352, 0.004977215766906738, 0.0049642882347106934, 0.0049194879531860355, 0.004890719890594483, 0.004903808116912842, 0.004904191970825195, 0.004895487785339356, 0.004949567794799804, 0.004917695999145508, 0.0049040641784667965, 0.004940671920776367, 0.004983871936798096, 0.004920000076293945, 0.004914624214172363, 0.004891039848327637, 0.004942240238189698, 0.004978687763214112, 0.004894720077514648, 0.004890175819396972, 0.004880832195281982, 0.005065824031829834, 0.004891551971435547, 0.004884479999542236, 0.004843520164489746, 0.004949632167816162, 0.004906623840332031, 0.0048455362319946285, 0.004846367835998535, 0.004835328102111816, 0.004916639804840088, 0.004878943920135498, 0.004853824138641358, 0.0048949761390686035, 0.004928671836853028, 0.004901408195495605, 0.00486195182800293, 0.004845471858978272, 0.004843616008758545, 0.004915071964263916, 0.004877952098846436, 0.004860415935516358, 0.004866047859191895, 0.004917247772216797, 0.004910079956054687, 0.004870240211486816, 0.0049081602096557615, 0.004859200000762939, 0.004915103912353515, 0.0048707199096679685, 0.004839647769927979, 0.004871967792510986, 0.005203487873077392, 0.0048865280151367185, 0.004857952117919922, 0.004831168174743652, 0.004940224170684814, 0.004941760063171386, 0.004881792068481446, 0.004872928142547608, 0.0049552001953125, 0.0049222722053527835, 0.004892799854278564, 0.004867231845855713, 0.004868031978607178, 0.004895423889160156, 0.0049348797798156735, 0.004910975933074951, 0.004876607894897461, 0.004896448135375976, 0.004853919982910156, 0.004913055896759033, 0.004892288208007812, 0.004886847972869873, 0.0049090561866760255, 0.004867455959320068, 0.004942207813262939, 0.004982175827026367, 0.004860383987426758, 0.004872576236724853, 0.004941504001617431, 0.00490118408203125, 0.004875936031341553, 0.004864672183990479, 0.0048717761039733885, 0.00492080020904541, 0.004888351917266846, 0.004871007919311524, 0.004860127925872803, 0.005211071968078613, 0.004926271915435791, 0.00490499210357666, 0.004885568141937256, 0.0049304962158203125, 0.004935679912567138, 0.004888576030731201, 0.0048676800727844236, 0.0048726401329040525, 0.004927072048187256, 0.004910880088806152, 0.0048707518577575685, 0.004882431983947754, 0.004912191867828369, 0.004903168201446533, 0.004967103958129883, 0.0048858561515808105, 0.004934304237365723, 0.004931583881378174, 0.004881951808929443, 0.004868256092071533, 0.005226560115814209, 0.005945824146270752, 0.006082335948944092, 0.005511168003082275, 0.00488150405883789, 0.004871071815490723, 0.004909311771392822, 0.004931551933288574, 0.004892127990722656, 0.004880671977996826, 0.004857183933258057, 0.004930079936981201, 0.00495798397064209, 0.004874623775482177, 0.004872223854064941, 0.004882336139678955, 0.004916736125946045, 0.0048932480812072755, 0.0048782720565795894, 0.004861760139465332, 0.004765920162200928, 0.0048412480354309085, 0.0049192957878112795, 0.0048558077812194825, 0.004869376182556152, 0.004844287872314453, 0.004917503833770752, 0.004863743782043457, 0.004833439826965332, 0.0048495039939880375, 0.004838880062103272, 0.00491267204284668, 0.004883456230163574, 0.004833280086517334, 0.00506060791015625, 0.004902112007141113, 
0.004879136085510254, 0.004874239921569825, 0.004851712226867676, 0.004847519874572754, 0.004908927917480469, 0.004890560150146484, 0.004851488113403321, 0.004839104175567627, 0.004903744220733643, 0.004857855796813965, 0.004911104202270508, 0.004834496021270752, 0.004823872089385986, 0.005014912128448486, 0.00487494421005249, 0.004868031978607178, 0.004837376117706299, 0.004888576030731201, 0.004882080078125, 0.004862592220306396, 0.004840799808502197, 0.004901247978210449, 0.004959743976593017, 0.004881087779998779, 0.004873600006103516, 0.004884928226470947, 0.004888031959533691, 0.0049054079055786135, 0.004860000133514404, 0.004845664024353027, 0.004873695850372314, 0.004938176155090332, 0.004861760139465332, 0.004821248054504394, 0.0048167362213134765, 0.0048288640975952144, 0.004917695999145508, 0.004881663799285889, 0.004850399971008301, 0.004851583957672119, 0.004890175819396972, 0.004860095977783203, 0.0048520960807800296, 0.004837376117706299, 0.004843039989471436, 0.005059135913848877, 0.004891808032989502, 0.004814464092254639, 0.004858240127563477, 0.004861536026000976, 0.00485478401184082, 0.004921504020690918, 0.004882527828216553, 0.004857600212097168, 0.004843520164489746, 0.0061512961387634275, 0.00503219223022461, 0.004885119915008545, 0.004923391819000244, 0.0049268798828125, 0.004880767822265625, 0.004849696159362793, 0.004892864227294922, 0.004947968006134033, 0.004892672061920166, 0.004910975933074951, 0.004860032081604004, 0.004892447948455811, 0.004903295993804932, 0.0048576960563659665, 0.004838687896728516, 0.004842207908630371, 0.004919583797454834, 0.0048657598495483395, 0.004875775814056397, 0.004919807910919189, 0.004837471961975098, 0.004863903999328613, 0.004888576030731201, 0.00482316780090332, 0.004945792198181152, 0.00491315221786499, 0.0048631677627563475, 0.004851967811584473, 0.004837855815887451, 0.004873792171478272, 0.0048932161331176754, 0.004856063842773438, 0.0048427519798278805, 0.00483187198638916, 0.00486406421661377, 0.004929312229156494, 0.004874112129211426, 0.004829343795776367, 0.004843135833740234, 0.004933760166168213, 0.004884736061096191, 0.004864287853240967, 0.00486137580871582, 0.004870431900024414, 0.004917151927947998, 0.004870240211486816, 0.004876287937164306, 0.004875840187072754, 0.005017216205596924, 0.004891456127166748, 0.004901887893676758, 0.004870175838470459, 0.0048752322196960445, 0.004884479999542236, 0.0048148479461669925, 0.005328896045684814, 0.004988704204559326, 0.005273888111114502, 0.005115839958190918, 0.005059936046600342, 0.005655200004577637, 0.005015744209289551, 0.004933055877685547, 0.004913536071777344, 0.004977695941925049, 0.004991968154907226, 0.0049194879531860355, 0.0048781437873840336, 0.005010623931884766, 0.004971327781677246, 0.004911359786987305, 0.004886271953582763, 0.0048717761039733885, 0.004921343803405762, 0.004926208019256592, 0.004969279766082763, 0.004868192195892334, 0.0049712638854980465, 0.004918911933898926, 0.004891007900238037, 0.004857855796813965, 0.004882207870483398, 0.004919616222381592, 0.004894464015960693, 0.004894271850585937, 0.004905568122863769, 0.004884479999542236, 0.004952064037322998, 0.005062655925750732, 0.004935679912567138, 0.005176896095275879, 0.005029888153076172, 0.004959968090057373, 0.004863999843597412, 0.005, 0.00495411205291748, 0.00489462423324585, 0.004916863918304443, 0.004899199962615967, 0.004947968006134033, 0.004895071983337402, 0.004883168220520019, 0.004910016059875488, 0.004876192092895508, 0.004947904109954834, 0.004887904167175293, 
0.004862239837646484, 0.0048603200912475586, 0.004923520088195801, 0.004940127849578858, 0.004881887912750244, 0.004870399951934814, 0.004902847766876221, 0.004925439834594727, 0.004854015827178955, 0.004835072040557861, 0.0048364481925964355, 0.004737984180450439, 0.004821951866149902, 0.004923264026641846, 0.0048512320518493655, 0.004825568199157715, 0.004839424133300781, 0.004864128112792969, 0.004878047943115234, 0.004845248222351074, 0.004861440181732178, 0.004836160182952881, 0.004940095901489258, 0.004875360012054443, 0.004846335887908936, 0.004847616195678711, 0.004839360237121582, 0.004929535865783692, 0.004847392082214356, 0.004865503787994384, 0.00483241605758667, 0.004876192092895508, 0.004869472026824951, 0.004845983982086181, 0.004835328102111816, 0.004869503974914551, 0.004919936180114746, 0.004884479999542236, 0.004843520164489746, 0.004881663799285889, 0.004865824222564697, 0.00489299201965332, 0.004852384090423584, 0.004857855796813965, 0.004833280086517334, 0.0048865280151367185, 0.004871935844421387, 0.004829728126525879, 0.004831103801727295, 0.00483135986328125, 0.005004928112030029, 0.0048807039260864256, 0.004950079917907715, 0.004824927806854248, 0.004894432067871094, 0.004841983795166016, 0.004877888202667237, 0.004829311847686767, 0.0048373441696166995, 0.0050013761520385745, 0.004875391960144043, 0.004845280170440674, 0.004909599781036377, 0.004960927963256836, 0.004890431880950927, 0.004851712226867676, 0.004851327896118164, 0.00483676815032959, 0.004903232097625733, 0.00488105583190918, 0.004820991992950439, 0.0048455681800842285, 0.0048717761039733885, 0.004911520004272461, 0.004815519809722901, 0.004883488178253174, 0.004831615924835205, 0.004847904205322265, 0.004843391895294189, 0.004901120185852051, 0.004851903915405274, 0.004851712226867676, 0.005111743927001953, 0.004943935871124267, 0.004988927841186524, 0.004886496067047119, 0.004847392082214356, 0.004857151985168457, 0.00489683198928833, 0.004862847805023193, 0.004873792171478272, 0.0048685441017150876, 0.004941823959350586, 0.0048800320625305175, 0.004900288105010986, 0.004872608184814453, 0.004926080226898193, 0.004963200092315673, 0.004887551784515381, 0.004882080078125, 0.00488640022277832, 0.004976384162902832, 0.00491593599319458, 0.005014688014984131, 0.005024608135223389, 0.004943168163299561, 0.004906847953796387, 0.004892767906188965, 0.004877056121826172, 0.004952352046966552, 0.004918176174163819, 0.004864831924438476, 0.005365471839904785, 0.004956319808959961, 0.004939583778381348, 0.0048657598495483395, 0.004858431816101075, 0.004861824035644531, 0.004935840129852295, 0.004958208084106445, 0.0048715839385986325, 0.0049014720916748045, 0.004972671985626221, 0.004915071964263916, 0.0048496642112731934, 0.004859903812408447, 0.0048510079383850095, 0.004921247959136963, 0.004917503833770752, 0.004951615810394287, 0.004860896110534668, 0.004931007862091065, 0.004903488159179688, 0.004839424133300781, 0.004857855796813965, 0.0048865280151367185, 0.0048798398971557615, 0.004788640022277832, 0.004847263813018799, 0.004858143806457519, 0.004847263813018799, 0.004863903999328613, 0.00493171215057373, 0.004871744155883789, 0.0049543361663818355, 0.004852255821228027, 0.0049316477775573735, 0.004877664089202881, 0.004926208019256592, 0.004853248119354248, 0.004993375778198242, 0.005007359981536865, 0.0049062399864196774, 0.004866335868835449, 0.0048501439094543455, 0.004900864124298096, 0.0048940801620483395, 0.004853983879089355, 0.004951712131500244, 0.004877056121826172, 0.004894720077514648, 
0.0049032001495361325, 0.004869311809539795, 0.00490550422668457, 0.00504633617401123, 0.004877888202667237, 0.004853280067443848, 0.004852575778961182, 0.004889984130859375, 0.004987520217895508, 0.004894720077514648, 0.004829440116882324, 0.004862944126129151, 0.004903711795806885, 0.004877823829650879, 0.004870656013488769, 0.004843520164489746, 0.004863999843597412, 0.004981056213378906, 0.004894144058227539, 0.005847296237945557, 0.00494217586517334, 0.004884191989898682, 0.005015488147735596, 0.004859903812408447, 0.0049359359741210935, 0.004887680053710938, 0.004956255912780762, 0.004872735977172852, 0.004878335952758789, 0.004919616222381592, 0.0049064640998840335, 0.004862175941467285, 0.004859903812408447, 0.0049192957878112795, 0.004898848056793213, 0.004835296154022217, 0.004838655948638916, 0.00485811185836792, 0.00492742395401001, 0.004800415992736816, 0.004882751941680908, 0.004905216217041016, 0.0048716158866882325, 0.004848192214965821, 0.004932928085327149, 0.004880799770355225, 0.0048373122215270995, 0.004887936115264893, 0.004909183979034424, 0.004864543914794922, 0.004861504077911377, 0.0049303040504455565, 0.004863999843597412, 0.004929664134979248, 0.004898943901062012, 0.0048477439880371094, 0.0048403840065002444, 0.00488489580154419, 0.0048932480812072755, 0.004839136123657227, 0.004851391792297363, 0.004845888137817383, 0.0049296321868896485, 0.004873824119567871, 0.004837696075439453, 0.0048455681800842285, 0.004841760158538818, 0.0048923840522766115, 0.004866047859191895, 0.0048492798805236815, 0.004920896053314209, 0.004911712169647217, 0.00486569595336914, 0.004840000152587891, 0.004831456184387207, 0.004849440097808838, 0.004918496131896973, 0.004879039764404297, 0.004833375930786133, 0.0048455681800842285, 0.004880383968353271, 0.004886847972869873, 0.0048821120262146, 0.004824639797210694, 0.004837823867797852, 0.0048886399269104, 0.004863647937774658, 0.004833407878875732, 0.004843679904937745, 0.004838560104370117, 0.005009791851043701, 0.004860383987426758, 0.004843743801116943, 0.004847392082214356, 0.004920671939849854, 0.004934304237365723, 0.004850016117095947, 0.004845215797424317, 0.004836832046508789, 0.004911647796630859, 0.004876192092895508, 0.004858272075653076, 0.0047964158058166504, 0.004858208179473877, 0.004851583957672119, 0.0049047360420227054, 0.0048757119178771974, 0.004849599838256836, 0.004850304126739502, 0.004844831943511963, 0.004942560195922851, 0.004896768093109131, 0.004876448154449463, 0.0048678722381591795, 0.004933695793151855, 0.004925280094146728, 0.004866208076477051, 0.004853759765625, 0.004842527866363525, 0.004905951976776123, 0.004859519958496094, 0.005039840221405029, 0.004881343841552734, 0.004910816192626953, 0.004910624027252198, 0.00485430383682251, 0.004868031978607178, 0.004902912139892578, 0.004894720077514648, 0.004856927871704102, 0.004866208076477051, 0.004909311771392822, 0.004901375770568848, 0.00484991979598999, 0.004837279796600342, 0.004864992141723633, 0.004879231929779053, 0.00494543981552124, 0.00487388801574707, 0.004833087921142578, 0.004838272094726563, 0.004923232078552246, 0.004960639953613281, 0.004964255809783936, 0.0048967041969299316, 0.005382527828216552, 0.004910912036895752, 0.004888095855712891, 0.004909567832946778, 0.004910943984985351, 0.004904767990112305, 0.004840928077697754, 0.004884672164916992, 0.004839136123657227, 0.004881216049194336, 0.004868095874786377, 0.004845759868621826, 0.0048455362319946285, 0.004865888118743896, 0.0049246401786804195, 0.004883552074432373, 
0.004830912113189697, 0.004839295864105224, 0.004933760166168213, 0.004857632160186767, 0.004804831981658935, 0.004863999843597412, 0.004837183952331543, 0.004855552196502686, 0.0048559679985046384, 0.004896224021911621, 0.00488102388381958, 0.0048334717750549315, 0.004838687896728516, 0.004917984008789063, 0.0048939199447631835, 0.004868607997894287, 0.0048393278121948245, 0.004868415832519532, 0.004931968212127685, 0.0048575358390808105, 0.004818848133087158, 0.004894815921783448, 0.0049049282073974606, 0.004971744060516358, 0.004850111961364746, 0.004860288143157959, 0.004871520042419433, 0.0049522237777709964, 0.004876736164093017, 0.004900928020477295, 0.00486521577835083, 0.0049714879989624025, 0.004908895969390869, 0.004853759765625, 0.0049472317695617675, 0.004848351955413818, 0.004928864002227783, 0.004869984149932861, 0.004850304126739502, 0.0048798398971557615, 0.005143231868743896, 0.0051528000831604, 0.004874239921569825, 0.004866047859191895, 0.004956160068511963, 0.004890624046325683, 0.004853472232818603, 0.004867360115051269, 0.004860288143157959, 0.005061247825622559, 0.004906400203704834, 0.004980607986450196, 0.004880576133728027, 0.004968992233276367, 0.004908576011657715, 0.004904895782470703, 0.0048830718994140624, 0.004941535949707031, 0.004955359935760498, 0.0048670401573181156, 0.004892672061920166, 0.004886144161224365, 0.005253536224365234, 0.004880159854888916, 0.004912864208221435, 0.00491158390045166, 0.004953919887542724, 0.004863999843597412, 0.004882016181945801, 0.004866464138031006, 0.004954368114471436, 0.00488640022277832, 0.004908927917480469, 0.004863679885864258, 0.004850080013275146, 0.004875199794769287, 0.004948639869689941, 0.004905280113220215, 0.00488159990310669, 0.0048661761283874515, 0.004900928020477295, 0.0049690880775451664, 0.004864128112792969, 0.004876160144805909, 0.0048314881324768065, 0.004927199840545654, 0.0048865599632263185, 0.004857503890991211, 0.004882368087768554, 0.00488489580154419, 0.0048865280151367185, 0.004955808162689209, 0.004842144012451172, 0.004839104175567627, 0.004907008171081543, 0.004870016098022461, 0.004831424236297607, 0.004848959922790527, 0.004829823970794678, 0.004917407989501953, 0.004920447826385498, 0.004898528099060059, 0.004834303855895996, 0.004892672061920166, 0.004888576030731201, 0.004888800144195557, 0.0048678722381591795, 0.0048410558700561525, 0.00493609619140625, 0.004890880107879639, 0.004846464157104493, 0.004957056045532227, 0.004919136047363281, 0.00488259220123291, 0.0048496642112731934, 0.004846911907196045, 0.004860608100891113, 0.004931839942932129, 0.004871712207794189, 0.004841695785522461, 0.004847904205322265, 0.004886240005493164, 0.004916607856750488, 0.004842175960540772, 0.004875199794769287, 0.0048585600852966305, 0.004925471782684326, 0.004946208000183106, 0.004877855777740478, 0.004931200027465821, 0.004750239849090576, 0.004900767803192139, 0.004933248043060303, 0.004868127822875976, 0.004897215843200684, 0.00499619197845459, 0.004928512096405029, 0.004887551784515381, 0.004848671913146973, 0.004904831886291504, 0.004836832046508789, 0.0049526081085205075, 0.0048558077812194825, 0.00491315221786499, 0.004868095874786377, 0.0049090561866760255, 0.004888576030731201, 0.0048492798805236815, 0.004854144096374512, 0.0048510398864746096, 0.004917632102966309, 0.004882719993591309, 0.004857855796813965, 0.004857855796813965, 0.004899007797241211, 0.004889855861663819, 0.00485433578491211, 0.004863264083862305, 0.0048544640541076664, 0.004913375854492187, 0.0048858880996704105, 
0.005234784126281739, 0.004850016117095947, 0.00491315221786499, 0.004904640197753906, 0.005220928192138672, 0.004859519958496094, 0.004954239845275879, 0.004959360122680664, 0.0048213438987731935, 0.004849535942077637, 0.0048559679985046384, 0.004933119773864746, 0.004856927871704102, 0.004837279796600342, 0.0048282241821289065, 0.0048975682258605956, 0.0048846077919006345, 0.004953855991363525, 0.004864287853240967, 0.004865056037902832, 0.004922239780426025, 0.004953184127807617, 0.004851744174957275, 0.005379360198974609, 0.004961984157562256, 0.004875552177429199, 0.00489734411239624, 0.004896927833557129, 0.004964000225067139, 0.004909408092498779, 0.004892672061920166, 0.004976640224456787, 0.004764639854431153, 0.004867551803588867, 0.004960896015167237, 0.00489247989654541, 0.004849535942077637, 0.0048353919982910155, 0.004857151985168457, 0.004875040054321289, 0.004892416000366211, 0.004841792106628418, 0.004847424030303955, 0.004904511928558349, 0.004862720012664795, 0.004859776020050049, 0.004911104202270508, 0.004875487804412841, 0.004926368236541748, 0.004859007835388183, 0.0048504958152771, 0.004827072143554687, 0.004890624046325683, 0.00485094404220581, 0.00486240005493164, 0.004823200225830078, 0.004953375816345215, 0.004897664070129395, 0.004866367816925049, 0.004832575798034668, 0.004877920150756836, 0.0048726720809936525, 0.0048807039260864256, 0.004939775943756103, 0.004833280086517334, 0.004842495918273926, 0.004893695831298828, 0.004867712020874024, 0.004860415935516358, 0.004896639823913574, 0.004890624046325683, 0.004986879825592041, 0.004860064029693603, 0.004841311931610108, 0.004846784114837646, 0.004903359889984131, 0.004857215881347656, 0.004819968223571777, 0.004903039932250977, 0.004892543792724609, 0.004931583881378174, 0.004899871826171875, 0.004827328205108642, 0.004868959903717041, 0.004986815929412842, 0.00486195182800293, 0.004947968006134033, 0.0048603200912475586, 0.004911871910095214, 0.004930079936981201, 0.004872511863708496, 0.0049006080627441405, 0.0048494720458984375, 0.004915872097015381, 0.00487388801574707, 0.004855487823486328, 0.005958879947662353, 0.004990719795227051, 0.005257215976715088, 0.004878335952758789, 0.004888576030731201, 0.004878592014312744, 0.004943615913391113, 0.004931583881378174, 0.004902463912963867, 0.004860032081604004, 0.004934048175811768, 0.004864096164703369, 0.004861760139465332, 0.004868063926696777, 0.004982816219329834, 0.004952064037322998, 0.0048865280151367185, 0.0048558077812194825, 0.004849599838256836, 0.004927231788635254, 0.004894815921783448, 0.004892864227294922, 0.004866079807281494, 0.004853759765625, 0.004974495887756347, 0.004917056083679199, 0.004867775917053223, 0.004866655826568604, 0.0049357438087463375, 0.004879744052886963, 0.00485433578491211, 0.004857855796813965, 0.004859903812408447, 0.004921343803405762, 0.005033984184265137, 0.0049502401351928715, 0.004873792171478272, 0.004922624111175537, 0.00487283182144165, 0.004828800201416015, 0.0048295679092407225, 0.0048429441452026364, 0.0049155521392822265, 0.0048559679985046384, 0.004830783843994141, 0.004849535942077637, 0.0048867201805114745, 0.00485865592956543, 0.00488640022277832, 0.00483955192565918, 0.004830399990081787, 0.004911871910095214, 0.0048642239570617675, 0.00484335994720459, 0.004831232070922851, 0.004984640121459961, 0.00487443208694458, 0.004859903812408447, 0.0048520641326904295, 0.004849599838256836, 0.004912000179290771, 0.004868959903717041, 0.004833439826965332, 0.004857247829437256, 0.0048685441017150876, 
0.004825088024139404, 0.004890624046325683, 0.004898816108703613, 0.004843103885650635, 0.004821407794952393, 0.004828927993774414, 0.0049584641456604005, 0.004863039970397949, 0.004848639965057373, 0.004867072105407715, 0.004924160003662109, 0.0048887357711791994, 0.004886688232421875, 0.004840447902679444, 0.004827712059020996, 0.004900576114654541, 0.004849535942077637, 0.00491593599319458, 0.004839424133300781, 0.004877696037292481, 0.004896512031555176, 0.00489356803894043, 0.004829184055328369, 0.00489625597000122, 0.004882815837860107, 0.004862080097198487, 0.0048438081741333005, 0.004841184139251709, 0.004837376117706299, 0.004911104202270508, 0.004893824100494385, 0.0048475837707519535, 0.004864352226257324, 0.004919871807098388, 0.004889984130859375, 0.0048789758682250975, 0.004851103782653809, 0.0048700799942016605, 0.004954559803009033, 0.0051833920478820805, 0.004998688220977783, 0.004903520107269287, 0.004933824062347412, 0.004966559886932373, 0.0049788479804992675, 0.004851647853851318, 0.004918687820434571, 0.004899136066436768, 0.004849696159362793, 0.00485811185836792, 0.004857600212097168, 0.004946144104003907, 0.004891679763793945, 0.0048830718994140624, 0.004870272159576416, 0.004988704204559326, 0.004925375938415528, 0.0048765759468078615, 0.004846975803375244, 0.004870656013488769, 0.0047511358261108395, 0.0049259839057922365, 0.004946432113647461, 0.004858943939208984, 0.0048514242172241215, 0.004862656116485595, 0.004942368030548096, 0.004863615989685059, 0.004861983776092529, 0.004835328102111816, 0.0049276800155639644, 0.004873856067657471, 0.0048397121429443355, 0.004855167865753174, 0.004842048168182373, 0.004939199924468994, 0.0048808960914611815, 0.004878719806671142, 0.004861599922180175, 0.004921120166778564, 0.004869599819183349, 0.004850431919097901, 0.004835328102111816, 0.004975711822509766, 0.004887104034423828, 0.004880832195281982, 0.004865151882171631, 0.004864607810974121, 0.004903135776519775, 0.004865983963012696, 0.004878719806671142, 0.004869791984558105, 0.0048373441696166995, 0.0049316477775573735, 0.004923359870910644, 0.004851712226867676, 0.0048455681800842285, 0.004880383968353271, 0.004882431983947754, 0.004915200233459473, 0.00486195182800293, 0.004822656154632569, 0.004917439937591553, 0.004862143993377686, 0.004872191905975342, 0.0048447041511535645, 0.0048362560272216795, 0.004953216075897217, 0.0048566398620605465, 0.004963647842407226, 0.004846271991729737, 0.005027743816375732, 0.0048787841796875, 0.0048689918518066404, 0.00484991979598999, 0.004882976055145264, 0.00491315221786499, 0.004882431983947754, 0.0048839359283447265, 0.004860447883605957, 0.004978687763214112, 0.004892096042633056, 0.00485865592956543, 0.004909023761749267, 0.0048475837707519535, 0.0048716158866882325, 0.00492796802520752, 0.004890399932861328, 0.004831679821014404, 0.004849567890167237, 0.004900896072387696, 0.004876287937164306, 0.004872191905975342, 0.004853248119354248, 0.004870399951934814, 0.005066688060760498, 0.004914624214172363, 0.005008255958557129, 0.004863071918487549, 0.005094016075134277, 0.0050301761627197265, 0.004882431983947754, 0.004893887996673584, 0.00505452823638916, 0.004903552055358887, 0.004886655807495117, 0.004850719928741455, 0.004836319923400879, 0.004898272037506104, 0.004863679885864258, 0.00484003210067749, 0.0048559999465942385, 0.004892735958099365, 0.0048518719673156735, 0.004857567787170411, 0.004851840019226074, 0.004824192047119141, 0.00492249584197998, 0.004867712020874024, 0.004822720050811768, 
0.0048393278121948245, 0.004975135803222656, 0.004901088237762451, 0.0048537278175354, 0.0049313921928405766, 0.004834303855895996, 0.004914112091064453, 0.004866112232208252, 0.0048269758224487305, 0.00483958387374878, 0.004835328102111816, 0.004951615810394287, 0.004885119915008545, 0.004865407943725586, 0.004847455978393555, 0.004895328044891357, 0.004879807949066162, 0.00486684799194336, 0.004855584144592285, 0.004853856086730957, 0.0049284157752990726, 0.004860928058624267, 0.004843423843383789, 0.004840832233428955, 0.004958943843841553, 0.00487014389038086, 0.004818175792694092, 0.004866399765014648, 0.004850080013275146, 0.004843935966491699, 0.0048364481925964355, 0.004892799854278564, 0.004862527847290039, 0.004829504013061523, 0.004870016098022461, 0.004907872200012207, 0.0048831038475036625, 0.00488643217086792, 0.004865503787994384, 0.004851967811584473, 0.004920000076293945, 0.004898816108703613, 0.004851712226867676, 0.0048865280151367185, 0.005033984184265137, 0.004937727928161621, 0.0048651199340820316, 0.004891136169433594, 0.004854207992553711, 0.004932640075683593, 0.004854015827178955, 0.004876992225646973, 0.004849696159362793, 0.005011519908905029, 0.004878047943115234, 0.004849376201629639, 0.004841951847076416, 0.00487014389038086, 0.004953887939453125, 0.004868319988250732, 0.004859007835388183, 0.004841375827789307, 0.004895679950714112, 0.00486732816696167, 0.004973343849182129, 0.004840735912322998, 0.004847424030303955, 0.004965087890625, 0.004892831802368164, 0.004852960109710694, 0.004835904121398926, 0.004928031921386719, 0.004926208019256592, 0.004842048168182373, 0.004896480083465576, 0.004853631973266602, 0.004913504123687744, 0.004885151863098145, 0.004844480037689209, 0.004834176063537598, 0.005119967937469483, 0.004868127822875976, 0.004845439910888672, 0.004860032081604004, 0.004859776020050049, 0.0049168958663940426, 0.004858143806457519, 0.004850016117095947, 0.00484335994720459, 0.00473635196685791, 0.004838367938995361, 0.00490783977508545, 0.0048546562194824215, 0.004834688186645507, 0.004835040092468262, 0.00482806396484375, 0.00489404821395874, 0.004854432106018066, 0.004859903812408447, 0.004837376117706299, 0.005025792121887207, 0.004882431983947754, 0.0048784961700439455, 0.004872032165527344, 0.004843264102935791, 0.0050032639503479, 0.004960512161254883, 0.004872384071350097, 0.004854752063751221, 0.004933856010437012, 0.004896512031555176, 0.004850560188293457, 0.004857664108276367, 0.004836991786956787, 0.004899168014526367, 0.004851935863494873, 0.004841472148895264, 0.0048698558807373045, 0.004888864040374756, 0.005224448204040527, 0.004878592014312744, 0.004865056037902832, 0.004909183979034424, 0.004923999786376953, 0.004892672061920166, 0.004881696224212646, 0.004891488075256347, 0.00524889612197876, 0.005791744232177734, 0.005873663902282715, 0.005629951953887939, 0.005095680236816406, 0.005721343994140625, 0.00491161584854126, 0.004867231845855713, 0.0048932480812072755, 0.004991263866424561, 0.0049231362342834475, 0.004882847785949707, 0.004898655891418457, 0.00493126392364502, 0.004998688220977783, 0.004901663780212402, 0.0048447041511535645, 0.004888800144195557, 0.00498089599609375, 0.004891456127166748, 0.0049079999923706055, 0.004881087779998779, 0.004966047763824463, 0.004940127849578858, 0.004888576030731201, 0.005240511894226074, 0.0049015040397644045, 0.004937759876251221, 0.00488259220123291, 0.004908703804016114, 0.004878335952758789, 0.004919104099273682, 0.004912735939025879, 0.004858751773834228, 
0.004853663921356201, 0.004890624046325683, 0.004914400100708008, 0.004856704235076904, 0.004855711936950683, 0.004932767868041992, 0.004908991813659668, 0.005355679988861084, 0.004873983860015869, 0.00487721586227417, 0.004935359954833985, 0.0048644161224365235, 0.0048496642112731934, 0.005135392189025879, 0.0056737599372863766, 0.004878528118133545, 0.004861792087554931, 0.0048596482276916505, 0.004948383808135987, 0.004907008171081543, 0.0048802242279052735, 0.00488486385345459, 0.0049147200584411625, 0.004910624027252198, 0.004870880126953125, 0.004847360134124756, 0.00486630392074585, 0.0049051837921142575, 0.004890399932861328, 0.004863999843597412, 0.00493174409866333, 0.004883391857147217, 0.005004191875457763, 0.004902912139892578, 0.004847616195678711, 0.004857439994812012, 0.004913504123687744, 0.004862016201019287, 0.004855519771575928, 0.004831552028656006, 0.004909023761749267, 0.00486195182800293, 0.004878335952758789, 0.004876287937164306, 0.004863999843597412, 0.004933631896972656, 0.004933407783508301, 0.004857855796813965, 0.0048949441909790035, 0.004943871974945068, 0.005029888153076172, 0.0048577280044555665, 0.004882336139678955, 0.004868319988250732, 0.004787968158721924, 0.004929823875427246, 0.004912447929382324, 0.004878015995025635, 0.004877280235290527, 0.00484716796875, 0.004923840045928955, 0.0048980159759521484, 0.004866911888122559, 0.004841119766235352, 0.004900320053100586, 0.004885312080383301, 0.004854015827178955, 0.004857120037078858, 0.004860383987426758, 0.005024096012115478, 0.00489846420288086, 0.004843391895294189, 0.004841599941253662, 0.004898079872131348, 0.0048666238784790036, 0.0048514242172241215, 0.0048681597709655765, 0.004851456165313721, 0.00493017578125, 0.00488150405883789, 0.004851744174957275, 0.0048447041511535645, 0.004885983943939209, 0.004921599864959717, 0.005138336181640625, 0.005271488189697265, 0.005001311779022217, 0.004997183799743652, 0.0050455999374389646, 0.005534687995910645, 0.004924863815307617, 0.004897119998931884, 0.004861311912536621, 0.0048576960563659665, 0.004924320220947266, 0.004906816005706787, 0.004869823932647705, 0.0048496642112731934, 0.004909120082855225, 0.00489731216430664, 0.00487497615814209, 0.004844096183776856, 0.004839104175567627, 0.004901408195495605, 0.004863264083862305, 0.00482806396484375, 0.004868063926696777, 0.004835360050201416, 0.004966400146484375, 0.00487824010848999, 0.004843008041381836, 0.004837632179260254, 0.004864352226257324, 0.004877984046936035, 0.004856160163879394, 0.004831232070922851, 0.004921343803405762, 0.004834752082824707, 0.004880864143371582, 0.004927231788635254, 0.004890495777130127, 0.004869311809539795, 0.004849376201629639, 0.004947968006134033, 0.0049049282073974606, 0.0048640317916870115, 0.0048455681800842285, 0.004896128177642822, 0.004912831783294678, 0.004865151882171631, 0.004855072021484375, 0.004858208179473877, 0.004947487831115723, 0.004893184185028076, 0.004849823951721191, 0.0048558077812194825, 0.004865024089813232, 0.004963327884674072, 0.004951456069946289, 0.004907423973083496, 0.004903135776519775, 0.00497046422958374, 0.004931359767913818, 0.004882688045501709, 0.004896063804626465, 0.00495900821685791, 0.004910848140716553, 0.00487340784072876, 0.004883135795593262, 0.0048707199096679685, 0.004943552017211914, 0.004866335868835449, 0.004854944229125977, 0.004874815940856933, 0.004925439834594727, 0.004888576030731201, 0.004866047859191895, 0.004839424133300781, 0.004923391819000244, 0.004943647861480713, 0.004909408092498779, 
0.0048715839385986325, 0.00486243200302124, 0.004927487850189209, 0.004915200233459473, 0.004866047859191895, 0.004853759765625, 0.004836959838867188, 0.004917183876037598, 0.004851359844207764, 0.004834239959716797, 0.004878176212310791, 0.00489635181427002, 0.005007808208465576, 0.004873600006103516, 0.004841792106628418, 0.004838719844818116, 0.004899328231811524, 0.004874752044677734, 0.0048876161575317385, 0.004749663829803467, 0.004864128112792969, 0.004868319988250732, 0.0049357438087463375, 0.004865375995635987, 0.004863872051239014, 0.004834080219268799, 0.00481708812713623, 0.0049172801971435545, 0.00485152006149292, 0.004925407886505127, 0.004863999843597412, 0.004910975933074951, 0.004868224143981934, 0.004834688186645507, 0.004841824054718017, 0.004837823867797852, 0.0048802242279052735, 0.004907008171081543, 0.004843520164489746, 0.004839424133300781, 0.004886464118957519, 0.004886591911315918, 0.004863999843597412, 0.004852799892425537, 0.004848576068878173, 0.004906335830688477, 0.004862815856933594, 0.004857120037078858, 0.004860415935516358, 0.004853759765625, 0.004947999954223633, 0.004873824119567871, 0.004929855823516845, 0.004841567993164063, 0.004898208141326904, 0.005237696170806885, 0.005007359981536865, 0.005416416168212891, 0.005245120048522949, 0.005334432125091553, 0.004899424076080322, 0.00506060791015625, 0.005293856143951416, 0.005279967784881592, 0.005105311870574951, 0.004967840194702149, 0.004891583919525147, 0.00487014389038086, 0.004872191905975342, 0.004965472221374512, 0.004920224189758301, 0.004898816108703613, 0.0048635520935058595, 0.004946623802185059, 0.004924543857574463, 0.004887167930603027, 0.004867968082427979, 0.004918943881988525, 0.004938432216644287, 0.004875040054321289, 0.004844543933868409, 0.004871456146240234, 0.004811295986175537, 0.004902751922607422, 0.004927360057830811, 0.004872320175170898, 0.004855231761932373, 0.004833856105804443, 0.004917344093322754, 0.004881728172302246, 0.004884352207183838, 0.004862656116485595, 0.0048537921905517575, 0.004921055793762207, 0.004869664192199707, 0.004834080219268799, 0.004849823951721191, 0.004939008235931397, 0.004901440143585205, 0.0048455681800842285, 0.00483513593673706, 0.004835296154022217, 0.004912384033203125, 0.004887519836425781, 0.004856128215789795, 0.004843391895294189, 0.004849408149719238, 0.0048681597709655765, 0.00484991979598999, 0.0048429441452026364, 0.004937759876251221, 0.004919583797454834, 0.004887807846069336, 0.004852479934692383, 0.0048410239219665524, 0.004856256008148194, 0.004978432178497315, 0.004858272075653076, 0.004846784114837646, 0.0048455362319946285, 0.0049179520606994626, 0.004863999843597412, 0.004854015827178955, 0.0049417920112609865, 0.004895711898803711, 0.004885536193847656, 0.004849760055541992, 0.004929056167602539, 0.0048776321411132815, 0.004891488075256347, 0.004874239921569825, 0.004830304145812988, 0.004830239772796631, 0.004847487926483154, 0.0049309759140014646, 0.004940383911132813, 0.0048455681800842285, 0.004828671932220459, 0.004891136169433594, 0.0048863677978515626, 0.004848896026611328, 0.004830399990081787, 0.004846752166748047, 0.004895455837249756, 0.004924928188323975, 0.0048345279693603515, 0.004856575965881347, 0.0048405442237854, 0.004829440116882324, 0.004909759998321534, 0.004863935947418213, 0.004851456165313721, 0.0048642559051513675, 0.00485811185836792, 0.004933440208435059, 0.0048919677734375, 0.00502239990234375, 0.004892896175384522, 0.0049284157752990726, 0.004883327960968018, 0.0048559679985046384, 
0.004854688167572022, 0.00484438419342041, 0.004926623821258545, 0.004876959800720215, 0.004843520164489746, 0.004858143806457519, 0.0048947839736938475, 0.004883552074432373, 0.004878367900848389, 0.004877120018005371, 0.0048534078598022464, 0.004927840232849121, 0.004884479999542236, 0.004853375911712646, 0.004863455772399903, 0.004916128158569336, 0.004890336036682129, 0.004866335868835449, 0.004866335868835449, 0.004865983963012696, 0.0049376959800720216, 0.005026624202728271, 0.004875199794769287, 0.004860000133514404, 0.004935647964477539, 0.004879392147064209, 0.00487721586227417, 0.004853983879089355, 0.004858719825744629, 0.004966400146484375, 0.004883456230163574, 0.004869408130645752, 0.004893119812011719, 0.004944064140319824, 0.004880767822265625, 0.0048657598495483395, 0.00486569595336914, 0.004892831802368164, 0.004972064018249512, 0.004872159957885742, 0.004863840103149414, 0.004859936237335205, 0.004944447994232178, 0.004902495861053467, 0.004853888034820557, 0.0048830718994140624, 0.0048715839385986325, 0.004759647846221924, 0.004935647964477539, 0.004886176109313965, 0.004858431816101075, 0.004849184036254883, 0.004839871883392334, 0.004912896156311035, 0.004861087799072266, 0.0048297600746154785, 0.004831456184387207, 0.004873439788818359, 0.004898784160614014, 0.004862720012664795, 0.004827328205108642, 0.004824480056762695, 0.004933631896972656, 0.004888192176818847, 0.004840352058410644, 0.004847936153411865, 0.004853119850158691, 0.004896512031555176, 0.004862527847290039, 0.004868288040161133, 0.004847424030303955, 0.004857855796813965, 0.004876512050628662, 0.004918975830078125, 0.004829216003417969, 0.004829247951507568, 0.004902783870697021, 0.004874144077301025, 0.004835552215576172, 0.004826879978179931, 0.004831776142120361, 0.004890111923217774, 0.004843776226043701, 0.004851647853851318, 0.004849696159362793, 0.004904960155487061, 0.004858975887298584, 0.004860288143157959, 0.004907711982727051, 0.004841311931610108, 0.004896768093109131, 0.004824895858764649, 0.004994400024414062, 0.004836160182952881, 0.004856031894683838, 0.004896575927734375, 0.004874239921569825, 0.004825088024139404, 0.004833280086517334, 0.00495036792755127, 0.004875936031341553, 0.004822815895080566, 0.004815072059631348, 0.004820991992950439, 0.004903103828430176, 0.004883840084075928, 0.004843967914581299, 0.004853919982910156, 0.004929408073425293, 0.00490067195892334, 0.00484496021270752, 0.004913760185241699, 0.0049030079841613766, 0.004878560066223145, 0.004906688213348389, 0.004873600006103516, 0.004957888126373291, 0.00490723180770874, 0.00488918399810791, 0.004950143814086914, 0.004906144142150879, 0.00488640022277832, 0.004873119831085205, 0.005005375862121582, 0.0049147200584411625, 0.004866528034210205, 0.004866047859191895, 0.004978687763214112, 0.004930560111999512, 0.0048855037689208985, 0.004859744071960449, 0.004888768196105957, 0.00495305585861206, 0.004919936180114746, 0.004891007900238037, 0.004875391960144043, 0.004932479858398437, 0.004918591976165771, 0.004883135795593262, 0.004865600109100342, 0.0048419198989868165, 0.004924736022949219, 0.004878464221954345, 0.004856383800506592, 0.004849760055541992, 0.004863232135772705, 0.004889503955841064, 0.004873631954193115, 0.004938079833984375, 0.00491315221786499, 0.004950079917907715, 0.004908895969390869, 0.0048529281616210935, 0.0048484477996826175, 0.004877471923828125, 0.004870463848114013, 0.004856448173522949, 0.004869631767272949, 0.004827648162841797, 0.004907008171081543, 0.004896768093109131, 
0.0048661441802978515, 0.0048536000251770016, 0.004861087799072266, 0.0049407038688659664, 0.004857247829437256, 0.004849696159362793, 0.004862815856933594, 0.004921055793762207, 0.004883872032165527, 0.004878943920135498, 0.005155968189239502, 0.00515123176574707, 0.005212192058563232, 0.005036799907684326, 0.004914463996887207, 0.004877344131469726, 0.00493126392364502, 0.004896224021911621, 0.004862463951110839, 0.004853536128997802, 0.004905216217041016, 0.004896768093109131, 0.004882431983947754, 0.0048681597709655765, 0.0048512001037597655, 0.004915647983551026, 0.00489686393737793, 0.004861599922180175, 0.004869376182556152, 0.004903935909271241, 0.004912255764007568, 0.00487824010848999, 0.004850399971008301, 0.00484991979598999, 0.004920671939849854, 0.00513097620010376, 0.004863808155059815, 0.004855936050415039, 0.004900224208831787, 0.004964384078979492, 0.00487388801574707, 0.0048501439094543455, 0.00487062406539917, 0.004911104202270508, 0.004876287937164306, 0.004847263813018799, 0.004848159790039062, 0.004904160022735596, 0.004920000076293945, 0.004968544006347656, 0.004860735893249511, 0.004828159809112549, 0.004909311771392822, 0.004877696037292481, 0.004864575862884521, 0.004894527912139892, 0.0049192957878112795, 0.004902912139892578, 0.004889887809753418, 0.004860640048980713, 0.004868095874786377, 0.0050332479476928715, 0.004897503852844238, 0.0048784961700439455, 0.0048475518226623535, 0.004914080142974854, 0.004891647815704346, 0.004834943771362304, 0.0048559679985046384, 0.004847263813018799, 0.00505299186706543, 0.004911327838897705, 0.004887360095977783, 0.0048846077919006345, 0.004983424186706543, 0.004765727996826172, 0.004900063991546631, 0.004887296199798584, 0.004846879959106445, 0.004855743885040283, 0.004909952163696289, 0.004861408233642578, 0.004839871883392334, 0.00485584020614624, 0.004831200122833252, 0.0048887357711791994, 0.004872032165527344, 0.004843520164489746, 0.004847839832305908, 0.004890560150146484, 0.004912896156311035, 0.0048763837814331055, 0.004860928058624267, 0.004856895923614502, 0.004930784225463867, 0.004878079891204834, 0.004847712039947509, 0.004842368125915528, 0.004855584144592285, 0.004955840110778808, 0.004847936153411865, 0.004851935863494873, 0.004831232070922851, 0.004916959762573242, 0.004888864040374756, 0.004851391792297363, 0.004837696075439453, 0.0048429441452026364, 0.00492409610748291, 0.004880256175994873, 0.004855616092681885, 0.0048848319053649905, 0.004935520172119141, 0.0049183998107910154, 0.004883711814880371, 0.004882048130035401, 0.004904960155487061, 0.004964352130889893, 0.004904096126556397, 0.004850527763366699, 0.004842879772186279, 0.0048830718994140624, 0.004869599819183349, 0.004872255802154541, 0.004854559898376465, 0.004849120140075684, 0.004914463996887207, 0.00489577579498291, 0.005908639907836914, 0.005014272212982178, 0.004927552223205566, 0.004872992038726807, 0.004884640216827393, 0.004934656143188477, 0.004899871826171875, 0.0051233282089233395, 0.00491977596282959, 0.004884736061096191, 0.004759679794311524, 0.004925439834594727, 0.0049060797691345215, 0.004852640151977539, 0.004853568077087402, 0.004839615821838379, 0.004943039894104004, 0.0049795198440551755, 0.0048551359176635745, 0.004854015827178955, 0.004933311939239502, 0.004908063888549805, 0.00487388801574707, 0.004855264186859131, 0.0048707518577575685, 0.004929056167602539, 0.004893119812011719, 0.004887904167175293, 0.004883264064788818, 0.0049576001167297365, 0.004898431777954101, 0.0048587841987609865, 0.004881408214569092, 
0.004854688167572022, 0.004917600154876709, 0.004875936031341553, 0.004852992057800293, 0.004852479934692383, 0.004907008171081543, 0.004874239921569825, 0.004859615802764892, 0.0048540477752685544, 0.004869760036468506, 0.0049565439224243165, 0.004939167976379394, 0.004919680118560791, 0.00488265609741211, 0.004933311939239502, 0.004896543979644776, 0.004852255821228027, 0.004837056159973144, 0.004854080200195313, 0.004927584171295166, 0.004867616176605224, 0.004848000049591065, 0.004839424133300781, 0.004857855796813965, 0.004880640029907226, 0.0048596482276916505, 0.004841087818145752, 0.004848127841949463, 0.004898240089416504, 0.00486240005493164, 0.0048240962028503415, 0.004834271907806396, 0.00483897590637207, 0.004905439853668213, 0.004857823848724365, 0.0048455362319946285, 0.004835360050201416, 0.00486579179763794, 0.004876800060272217, 0.004861695766448975]",tokens/s,204.04037442252584,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return 
model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3846, in from_pretrained hf_quantizer.preprocess_model( File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 182, in preprocess_model return self._process_model_before_weight_loading(model, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_gptq.py"", line 76, in _process_model_before_weight_loading model = self.optimum_quantizer.convert_model(model) File ""/usr/local/lib/python3.10/dist-packages/optimum/gptq/quantizer.py"", line 218, in convert_model self.block_name_to_quantize = get_block_name_with_pattern(model) File ""/usr/local/lib/python3.10/dist-packages/optimum/gptq/utils.py"", line 77, in get_block_name_with_pattern raise ValueError(""Block pattern could not be match. Pass `block_name_to_quantize` argument in `quantize_model`"") ValueError: Block pattern could not be match. Pass `block_name_to_quantize` argument in `quantize_model` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( 
File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,7434.821632,8041.463808,0.0,7646.216192,7627.584,s,1,13.1314404296875,13.1314404296875,0.0,13.1314404296875,13.1314404296875,13.1314404296875,13.1314404296875,[13.1314404296875],,kWh,0.00017221434909583307,1.8989107224086042e-05,5.507671072799841e-05,0.0002462801670479175,,MB,1756.71296,8725.13536,0.0,8315.20768,8191.863296,s,10,3.325114532470703,0.33251145324707027,0.00033927029372321884,0.3324805603027344,0.3330387817382812,0.33309107666015625,0.3331329125976562,"[0.33262384033203124, 0.3321598205566406, 0.33248501586914064, 0.3324128112792969, 0.3324761047363281, 0.3325360107421875, 0.33198587036132815, 0.33302716064453125, 0.3322645263671875, 0.3331433715820312]",tokens/s,769.8982922244818,kWh,9.734977347083315e-06,1.0735828873529782e-06,6.439773670333278e-06,1.724833390476957e-05,tokens/kWh,14842013.229417477,MB,1763.782656,9039.70816,0.0,8629.78048,8480.067584,s,10,26.70110595703125,2.6701105957031253,0.0035851400564946515,2.6707076416015623,2.6737568847656252,2.6743437744140626,2.6748132861328124,"[2.663013671875, 2.666642333984375, 2.66614990234375, 2.67047509765625, 2.669856201171875, 2.673041748046875, 2.6724296875, 2.670940185546875, 2.67362646484375, 2.6749306640625]",tokens/s,23.59452829458927,kWh,7.812808360291664e-05,8.617620039500167e-06,5.196985639066686e-05,0.00013871556003308366,tokens/kWh,454166.7855067917,,s,630,26.691571811676045,0.04236757430424766,0.000393671327118452,0.0423721923828125,0.04286623229980469,0.042984235000610355,0.04324060020446777,"[0.041927009582519534, 0.04157680130004883, 0.041426399230957034, 0.0416278076171875, 0.0417143669128418, 0.041723838806152345, 0.04161856079101563, 0.04187599945068359, 0.041927009582519534, 0.04173110580444336, 0.04178969573974609, 0.04169180679321289, 0.04173830413818359, 0.041983039855957034, 0.041898239135742185, 0.041853633880615235, 0.04182342529296875, 0.04184352111816406, 0.04213555145263672, 0.04209664154052734, 0.042259681701660154, 0.04209539031982422, 0.04245913696289062, 0.04239974212646484, 0.04214374542236328, 0.04191641616821289, 0.04220278549194336, 0.041968990325927734, 0.04209151840209961, 0.042178558349609374, 0.042478622436523436, 0.04244102478027344, 0.04234675216674805, 0.04231209564208984, 0.042434558868408204, 0.04232720184326172, 0.042296161651611326, 0.04229683303833008, 0.04227462387084961, 0.04238380813598633, 0.04246268844604492, 0.04268316650390625, 0.04272470474243164, 0.04268304061889648, 0.04268409729003906, 0.04245945739746094, 0.042510337829589843, 0.042387454986572266, 0.04243395233154297, 0.04242639923095703, 0.04258041763305664, 0.04266201782226563, 0.04284995269775391, 0.042826080322265626, 0.042698047637939454, 0.04258272171020508, 0.04248086547851562, 0.04256857681274414, 0.04271503829956055, 0.04291123199462891, 0.0429156494140625, 0.04270560073852539, 
0.04282720184326172, 0.04180511856079101, 0.041666366577148437, 0.041623615264892576, 0.04158342361450195, 0.041869312286376956, 0.041850879669189454, 0.041744384765625, 0.041562110900878906, 0.04169254302978516, 0.041775615692138675, 0.04173580932617187, 0.04190188980102539, 0.04201337432861328, 0.04211507034301758, 0.042031265258789065, 0.041944766998291014, 0.04196905517578125, 0.041888160705566405, 0.0421830062866211, 0.04215795135498047, 0.04209382247924805, 0.0421662712097168, 0.042275585174560544, 0.04227459335327149, 0.042252799987792966, 0.04200374221801758, 0.04205001449584961, 0.0421453742980957, 0.04223027038574219, 0.04225830459594727, 0.042229888916015625, 0.04231516647338867, 0.04233814239501953, 0.04233606338500977, 0.042516864776611325, 0.04276076889038086, 0.04254860687255859, 0.04248579025268555, 0.042396255493164066, 0.04244678497314453, 0.04245100784301758, 0.04271104049682617, 0.04272742462158203, 0.04276224136352539, 0.042575870513916016, 0.042842113494873046, 0.04261068725585938, 0.042487648010253905, 0.04245753479003906, 0.04244438552856445, 0.04295395278930664, 0.042678688049316404, 0.0429349136352539, 0.04279225540161133, 0.042709121704101564, 0.0425968017578125, 0.042665985107421874, 0.04289913558959961, 0.04272364807128906, 0.04276220703125, 0.04272335815429688, 0.04295270538330078, 0.04296633529663086, 0.042206401824951174, 0.04178425598144531, 0.041774528503417965, 0.04158303833007813, 0.04165631866455078, 0.04204748916625976, 0.04200243377685547, 0.041678848266601565, 0.04156415939331055, 0.041772289276123045, 0.041857791900634767, 0.04194918441772461, 0.041744384765625, 0.04239155197143555, 0.042246143341064454, 0.04211916732788086, 0.04206143951416016, 0.04201692962646485, 0.04198806381225586, 0.04207632064819336, 0.04202681732177734, 0.04201910400390625, 0.04217446517944336, 0.04223311996459961, 0.04225059127807617, 0.042113407135009766, 0.042057727813720705, 0.04208838272094727, 0.042196990966796875, 0.04205491256713867, 0.041995006561279295, 0.04205779266357422, 0.04202310562133789, 0.04218828964233398, 0.04222140884399414, 0.04227734375, 0.042681407928466794, 0.04279391860961914, 0.042597824096679685, 0.042528350830078124, 0.04249264144897461, 0.04236854553222656, 0.042638240814208986, 0.042686046600341795, 0.04260067367553711, 0.04252467346191406, 0.043038719177246096, 0.04281756973266602, 0.04265580749511719, 0.04258332824707031, 0.042621566772460935, 0.04274585723876953, 0.04293017578125, 0.042788864135742184, 0.04258377456665039, 0.04250377655029297, 0.04268233489990234, 0.04274454498291016, 0.0427803840637207, 0.04259161758422852, 0.04290876770019531, 0.042936126708984376, 0.04284592056274414, 0.04192160034179687, 0.04171846389770508, 0.041963905334472654, 0.041729759216308594, 0.04205583953857422, 0.04188275146484375, 0.04182720184326172, 0.041839839935302735, 0.04180252838134765, 0.04187136077880859, 0.04187136077880859, 0.041799678802490234, 0.042162174224853514, 0.04217036819458008, 0.04211916732788086, 0.04203519821166992, 0.04244275283813476, 0.04232396697998047, 0.042262527465820314, 0.042074111938476565, 0.04208003234863281, 0.04195145416259766, 0.042280960083007815, 0.04228300857543945, 0.04214169692993164, 0.04222540664672852, 0.04210918426513672, 0.042315006256103516, 0.04231654357910156, 0.04226588821411133, 0.04215267181396484, 0.04217446517944336, 0.042288192749023436, 0.042453567504882814, 0.04263734436035156, 0.0426295051574707, 0.042618846893310545, 0.04255539321899414, 0.042487777709960935, 0.042624095916748046, 
0.04264441680908203, 0.0425346565246582, 0.04261814498901367, 0.042820575714111325, 0.042856449127197264, 0.04266368103027344, 0.04261065673828125, 0.0427542724609375, 0.042507392883300785, 0.04242345428466797, 0.042788192749023436, 0.04283843231201172, 0.04267625427246094, 0.04257398223876953, 0.042676063537597654, 0.04284396743774414, 0.042877120971679686, 0.04275404739379883, 0.04267929458618164, 0.04285702514648437, 0.04296748733520508, 0.04285161590576172, 0.043114559173583984, 0.04197580718994141, 0.041880672454833984, 0.041864097595214846, 0.041987808227539065, 0.04195923233032227, 0.04186710357666015, 0.041667198181152346, 0.041678367614746095, 0.04180748748779297, 0.04172208023071289, 0.04168374252319336, 0.04180774307250976, 0.042082271575927734, 0.04252057647705078, 0.04234204864501953, 0.042207584381103516, 0.0420285758972168, 0.042005985260009766, 0.042017345428466794, 0.04199468612670899, 0.04205344009399414, 0.04207360076904297, 0.04204105758666992, 0.04202364730834961, 0.04209910583496094, 0.042159969329833985, 0.04230963134765625, 0.042364158630371095, 0.04226284790039062, 0.0423328971862793, 0.042569377899169925, 0.04230368041992188, 0.04230131149291992, 0.04230758285522461, 0.04231782531738281, 0.0425241584777832, 0.042608448028564457, 0.04263324737548828, 0.04249196624755859, 0.042472000122070315, 0.04258544158935547, 0.04244140625, 0.042401790618896484, 0.042674175262451174, 0.042611839294433594, 0.042494911193847656, 0.04250137710571289, 0.04238528060913086, 0.042775360107421875, 0.042616832733154295, 0.042536609649658205, 0.04278665542602539, 0.04324169540405273, 0.042938495635986326, 0.04284630584716797, 0.042848033905029295, 0.04286288070678711, 0.04271104049682617, 0.04332748794555664, 0.04276633453369141, 0.04303436660766601, 0.043036415100097654, 0.04294844818115234, 0.041867008209228514, 0.04173574447631836, 0.041955486297607425, 0.04199603271484375, 0.04190902328491211, 0.04210883331298828, 0.04204553604125977, 0.041998336791992184, 0.042057727813720705, 0.04193894577026367, 0.041875583648681644, 0.04183395385742188, 0.04192524719238281, 0.04215727996826172, 0.042248767852783205, 0.042229759216308595, 0.04220723342895508, 0.04214726257324219, 0.042011070251464847, 0.04241420745849609, 0.04222908782958985, 0.042062496185302736, 0.04187347030639649, 0.041766849517822266, 0.0420843505859375, 0.04235059356689453, 0.042278911590576174, 0.04214988708496094, 0.0424898567199707, 0.04251004791259766, 0.04244438552856445, 0.04237107086181641, 0.04236470413208008, 0.042296127319335936, 0.04255267333984375, 0.042574592590332035, 0.042509761810302735, 0.042500606536865236, 0.04255478286743164, 0.04237955093383789, 0.042508544921875, 0.04239369583129883, 0.04259638214111328, 0.04272851181030273, 0.04280825424194336, 0.04259209442138672, 0.04305459213256836, 0.04279119873046875, 0.04281078338623047, 0.04254719924926758, 0.042554622650146486, 0.042774177551269534, 0.04287897491455078, 0.042831745147705075, 0.042870944976806644, 0.042729503631591795, 0.04327328109741211, 0.04318636703491211, 0.04281779098510742, 0.04299020767211914, 0.0431328010559082, 0.04307712173461914, 0.043028640747070315, 0.042335422515869144, 0.042154239654541015, 0.04211974334716797, 0.042049537658691405, 0.04212678527832031, 0.04160528182983399, 0.04165795135498047, 0.04182041549682617, 0.041779777526855466, 0.04172934341430664, 0.04185494232177735, 0.04206460952758789, 0.04210812759399414, 0.04244969558715821, 0.04226047897338867, 0.04216169738769531, 0.042016288757324216, 0.041927009582519534, 
0.04183849716186523, 0.041984703063964846, 0.04212268829345703, 0.04195180892944336, 0.04214988708496094, 0.042240001678466796, 0.04215145492553711, 0.042590335845947264, 0.04241852951049805, 0.04220633697509766, 0.04223241424560547, 0.042152225494384764, 0.04214374542236328, 0.04222127914428711, 0.04233833694458008, 0.042467582702636716, 0.04269667053222656, 0.042729633331298825, 0.04262899017333984, 0.0430489616394043, 0.04265155029296875, 0.04245721435546875, 0.04231164932250977, 0.04263731384277344, 0.042659839630126956, 0.04257724761962891, 0.04266870498657226, 0.04298342514038086, 0.04278636932373047, 0.04257583999633789, 0.042498687744140624, 0.042675262451171876, 0.04284284973144531, 0.04288108825683594, 0.042644992828369144, 0.04269635009765625, 0.04328944015502929, 0.042947742462158205, 0.04269667053222656, 0.042853248596191405, 0.04299980926513672, 0.04298489761352539, 0.04282799911499023, 0.043294910430908204, 0.04306972885131836, 0.04217116928100586, 0.041768959045410156, 0.04166156768798828, 0.04189273452758789, 0.041783294677734374, 0.04159196853637695, 0.04169539260864258, 0.04176761627197265, 0.04173619079589844, 0.041760768890380856, 0.04199423980712891, 0.04200003051757813, 0.042133377075195315, 0.04237564849853516, 0.04209814453125, 0.04211334228515625, 0.0419730224609375, 0.04205049514770508, 0.04205545425415039, 0.042074337005615234, 0.041875232696533204, 0.04217164611816406, 0.042850753784179685, 0.042227745056152344, 0.042342910766601564, 0.042417247772216796, 0.04224911880493164, 0.04230348968505859, 0.04217446517944336, 0.0420906867980957, 0.04194300842285156, 0.04215792083740234, 0.04257318496704102, 0.04254294586181641, 0.04240806579589844, 0.04275820922851563, 0.042740318298339845, 0.04277657699584961, 0.042602497100830077, 0.04245280075073242, 0.04235225677490234, 0.042637630462646486, 0.04275580978393555, 0.04259280014038086, 0.04287078475952148, 0.04274176025390625, 0.0426761589050293, 0.04259436798095703, 0.04246252822875977, 0.04282371139526367, 0.042823806762695316, 0.042777118682861326, 0.042874496459960935, 0.04279715347290039, 0.04269903945922852, 0.04248112106323242, 0.04250672149658203, 0.042549312591552736, 0.04286572647094727, 0.043119552612304685, 0.04332284927368164, 0.043237918853759764, 0.04299980926513672, 0.04216915130615234, 0.04190537643432617, 0.041861919403076174, 0.04169321441650391, 0.042076126098632816, 0.042071361541748044, 0.04181472015380859, 0.04171798324584961, 0.041667903900146484, 0.04170595169067383, 0.041813087463378903, 0.042107807159423825, 0.042426368713378904, 0.04256697463989258, 0.04231647872924805, 0.042156032562255856, 0.04213324737548828, 0.04211328125, 0.04209664154052734, 0.04247065734863281, 0.04213772964477539, 0.04197849655151367, 0.04215750503540039, 0.04227129745483398, 0.04228915023803711, 0.042291393280029295, 0.04321859359741211, 0.04245110321044922, 0.04231167984008789, 0.04234035110473633, 0.04213759994506836, 0.042147838592529296, 0.04227686309814453, 0.042469375610351565, 0.042503646850585934, 0.04244089508056641, 0.04292643356323242, 0.043276287078857424, 0.042648990631103514, 0.0424884147644043, 0.04230144119262695, 0.04263033676147461, 0.04266067123413086, 0.04263724899291992, 0.04262508773803711, 0.042510337829589843, 0.04254848098754883, 0.04268518447875977, 0.04275404739379883, 0.04284620666503906, 0.04272537612915039, 0.04282108688354492, 0.04296707153320312, 0.04277936172485351, 0.042624801635742185, 0.04258998489379883, 0.0425588493347168, 0.04265865707397461, 0.04302438354492188, 
0.043235328674316405, 0.04308377456665039, 0.04298342514038086, 0.0429189453125, 0.0421847038269043, 0.04186521530151367, 0.0417628173828125, 0.04191231918334961, 0.041975902557373046, 0.041996192932128903, 0.04210073471069336, 0.041744384765625, 0.04187059020996094, 0.04187827301025391, 0.04204544067382812, 0.04222956848144531, 0.042348735809326174, 0.042284641265869144, 0.04222995376586914, 0.04205590438842773, 0.041992416381835936, 0.04210979080200195, 0.042091457366943356, 0.04237331390380859, 0.04260787200927734, 0.04246291351318359, 0.04239011383056641, 0.04249446487426758, 0.04237443161010742, 0.042430912017822266, 0.04236038589477539, 0.04219084930419922, 0.042076576232910154, 0.042051681518554686, 0.04219062423706055, 0.042402015686035154, 0.0424898567199707, 0.042456192016601564, 0.04285440063476562, 0.04267433547973633, 0.04248649597167969, 0.042235008239746095, 0.04235968017578125, 0.0427883186340332, 0.042635807037353514, 0.042603679656982425, 0.042655902862548827, 0.04265251159667969, 0.04271638488769531, 0.04279769515991211, 0.0425984001159668, 0.04246112060546875, 0.04268230438232422, 0.042952831268310544, 0.04297299194335937, 0.04288735961914063, 0.04283087921142578, 0.04267827224731445, 0.04261526489257812, 0.04253747177124023, 0.04294246292114258, 0.043157215118408206, 0.04305692672729492, 0.04304470443725586, 0.0429595832824707, 0.04298854446411133, 0.042895870208740236]",tokens/s,23.602956185757904,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File 
""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 106, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. 
If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1044.66432,904.855552,0.0,509.607936,491.434496,s,1,7.832982421875,7.832982421875,0.0,7.832982421875,7.832982421875,7.832982421875,7.832982421875,[7.832982421875],,kWh,2.3440985341665054e-05,2.5785837086470675e-06,7.252228023994778e-06,3.32717970743069e-05,,MB,1364.054016,1018.10176,0.0,608.17408,592.24832,s,10,0.19564134216308593,0.019564134216308596,0.0001360158079278712,0.019515311241149903,0.019726105117797852,0.01979231662750244,0.019845285835266114,"[0.01948374366760254, 0.01985852813720703, 0.01954537582397461, 0.019519615173339843, 0.019469696044921873, 0.019433792114257813, 0.019511007308959962, 0.019693599700927735, 0.01971139144897461, 0.01941459274291992]",tokens/s,13085.168869195308,kWh,5.661638348514217e-07,6.243810454996636e-08,3.5134718030232953e-07,9.799491197037176e-07,tokens/kWh,261238052.9280951,MB,1377.755136,1032.781824,0.0,622.854144,605.085696,s,10,10.322046630859376,1.0322046630859376,0.0030857666669471323,1.031349365234375,1.036057580566406,1.0378003967285157,1.0391946496582032,"[1.0313995361328125, 1.0314732666015625, 1.039543212890625, 1.029095947265625, 1.03034423828125, 1.0333880615234374, 1.0356702880859374, 1.0310013427734375, 1.0312991943359375, 1.02883154296875]",tokens/s,61.034407470754516,kWh,2.9547575426814806e-05,3.2583578491944143e-06,1.1555023455897596e-05,4.436095673190681e-05,tokens/kWh,1420167.747524863,,s,630,10.316964745521545,0.016376134516700867,0.00027373448692247595,0.01631972789764404,0.01657235870361328,0.01676669759750366,0.017506927680969238,"[0.01589008045196533, 0.01655571174621582, 0.016296735763549806, 0.016491935729980468, 0.016374048233032228, 0.016637279510498048, 0.016554975509643556, 0.01639423942565918, 0.016363519668579102, 0.016556032180786134, 0.0162938232421875, 0.01633286476135254, 0.016242687225341796, 0.016315616607666016, 0.016407487869262695, 0.01624662399291992, 0.016274656295776367, 0.016319263458251954, 0.016244735717773438, 0.016373760223388673, 0.016338943481445312, 0.016291839599609375, 0.01638387107849121, 0.01637366485595703, 0.01640470314025879, 0.016252927780151367, 0.016507999420166015, 0.016323680877685546, 0.016408479690551758, 0.016320415496826172, 0.01641881561279297, 0.016322208404541017, 0.016439008712768554, 0.016444032669067382, 0.016453632354736326, 0.016314367294311523, 0.01636092758178711, 0.016400800704956055, 0.016386016845703125, 0.01629427146911621, 0.016303903579711915, 0.01630780792236328, 0.016297664642333985, 0.016306655883789063, 
0.016320512771606444, 0.01627996826171875, 0.01625657653808594, 0.016433120727539063, 0.016367679595947267, 0.016238847732543946, 0.01624678421020508, 0.01624678421020508, 0.016477279663085938, 0.016282527923583985, 0.01660211181640625, 0.016473087310791015, 0.016373760223388673, 0.0163155517578125, 0.016337696075439452, 0.016558143615722658, 0.01658608055114746, 0.016371488571166992, 0.01631558418273926, 0.01604812812805176, 0.01625075149536133, 0.016340511322021484, 0.01664044761657715, 0.016416927337646485, 0.016283647537231445, 0.016572128295898436, 0.016339231491088867, 0.01641414451599121, 0.016333375930786133, 0.01642518424987793, 0.016690975189208986, 0.016390016555786133, 0.016395488739013673, 0.016222240447998047, 0.016234912872314454, 0.016248319625854494, 0.016312768936157226, 0.01622275161743164, 0.016349184036254884, 0.016513023376464844, 0.016289791107177733, 0.01628329658508301, 0.016264896392822265, 0.016220832824707033, 0.016242687225341796, 0.01626300811767578, 0.016438688278198242, 0.01634377670288086, 0.01638198471069336, 0.016310272216796876, 0.016472095489501952, 0.01649843215942383, 0.01700009536743164, 0.016464448928833007, 0.016457120895385743, 0.016357152938842774, 0.01650281524658203, 0.016400800704956055, 0.016320863723754884, 0.016377344131469726, 0.016329216003417968, 0.016230432510375977, 0.016280895233154298, 0.01630009651184082, 0.01632057571411133, 0.016351808547973634, 0.016259071350097656, 0.016240640640258788, 0.016497888565063477, 0.01622505569458008, 0.016316415786743164, 0.01655129623413086, 0.01645427131652832, 0.016324031829833986, 0.01633951950073242, 0.01627136039733887, 0.016347040176391603, 0.01628988838195801, 0.016404127120971678, 0.016284000396728514, 0.016482080459594727, 0.0163350715637207, 0.01602457618713379, 0.016837631225585938, 0.01623859214782715, 0.01637990379333496, 0.016441503524780274, 0.016617311477661132, 0.016373151779174804, 0.016709503173828126, 0.016227039337158203, 0.016326656341552736, 0.016254655838012694, 0.016330463409423828, 0.01614499282836914, 0.01639958381652832, 0.01623734474182129, 0.016191488265991212, 0.01618943977355957, 0.016175104141235352, 0.01630975914001465, 0.016402944564819336, 0.0162795524597168, 0.017154048919677735, 0.016134143829345703, 0.017587839126586916, 0.016257408142089844, 0.01620732879638672, 0.016146976470947264, 0.01619558334350586, 0.016267263412475585, 0.01652236747741699, 0.016947967529296875, 0.01635545539855957, 0.016216064453125, 0.01618943977355957, 0.016186656951904296, 0.016251615524291992, 0.016574432373046875, 0.016181280136108398, 0.016242111206054687, 0.01633296012878418, 0.016350784301757813, 0.01618627166748047, 0.01646169662475586, 0.01634716796875, 0.016249887466430663, 0.016274431228637695, 0.01694099235534668, 0.017860832214355468, 0.016742240905761718, 0.016536703109741212, 0.01646860885620117, 0.016277631759643554, 0.01640665626525879, 0.016351232528686522, 0.016695295333862305, 0.017180192947387696, 0.01777302360534668, 0.017038976669311524, 0.016998783111572265, 0.016712959289550782, 0.01658470344543457, 0.01649126434326172, 0.016997983932495117, 0.016075679779052734, 0.01642214393615723, 0.016253440856933594, 0.01625894355773926, 0.01627564811706543, 0.01630182456970215, 0.016217920303344728, 0.016203296661376952, 0.016157503128051757, 0.016092992782592772, 0.016193824768066405, 0.0164388484954834, 0.016941215515136717, 0.016179487228393553, 0.016238176345825195, 0.01634739112854004, 0.0167794246673584, 0.016582592010498047, 0.01636070442199707, 
0.016284032821655272, 0.01641721534729004, 0.016424959182739257, 0.01640233612060547, 0.016323936462402343, 0.016380672454833985, 0.01658880043029785, 0.016479711532592773, 0.016226783752441406, 0.016345151901245115, 0.016363264083862305, 0.01624892807006836, 0.01633296012878418, 0.016220159530639648, 0.016326528549194336, 0.016232576370239258, 0.016265216827392577, 0.016289535522460936, 0.01620992088317871, 0.016119487762451173, 0.016193376541137696, 0.01623520088195801, 0.016377887725830077, 0.016318464279174806, 0.01638604736328125, 0.016280736923217774, 0.01627440071105957, 0.016353151321411134, 0.016216064453125, 0.016242687225341796, 0.016270751953125, 0.01631702423095703, 0.01628483200073242, 0.016503648757934572, 0.016469856262207032, 0.016246208190917967, 0.0162390079498291, 0.016218431472778322, 0.01627136039733887, 0.016295936584472655, 0.0164138240814209, 0.016575359344482423, 0.016571487426757812, 0.0162224006652832, 0.015857919692993164, 0.01627369689941406, 0.016307296752929686, 0.0163110408782959, 0.01635305595397949, 0.016271743774414062, 0.016253183364868164, 0.016262912750244142, 0.016203647613525392, 0.016303903579711915, 0.016375200271606445, 0.01637606430053711, 0.016388799667358397, 0.01636944007873535, 0.016313568115234375, 0.016256000518798826, 0.016252607345581056, 0.016214111328125, 0.01622345542907715, 0.01620889663696289, 0.016166912078857423, 0.016326656341552736, 0.016154783248901367, 0.016232288360595704, 0.01617919921875, 0.01615667152404785, 0.016347135543823242, 0.01663599967956543, 0.01656412887573242, 0.016348224639892578, 0.01626755142211914, 0.01622697639465332, 0.016316255569458007, 0.01626128005981445, 0.016248832702636717, 0.016269088745117188, 0.016187360763549805, 0.01617945671081543, 0.01634819221496582, 0.01655894470214844, 0.016606592178344728, 0.017510528564453124, 0.016335487365722656, 0.016556032180786134, 0.016702816009521483, 0.016214687347412108, 0.016189407348632813, 0.016252639770507813, 0.016312128067016603, 0.016271871566772463, 0.016887807846069337, 0.016408512115478516, 0.016545055389404296, 0.016268064498901367, 0.016332799911499024, 0.016250879287719726, 0.01618636894226074, 0.01657980728149414, 0.016766239166259765, 0.016314495086669923, 0.01639846420288086, 0.016381343841552733, 0.016244895935058595, 0.018520992279052736, 0.01722947120666504, 0.016412832260131835, 0.016259008407592774, 0.016239999771118164, 0.01622515106201172, 0.01634662437438965, 0.01620774459838867, 0.01629644775390625, 0.016306655883789063, 0.016248096466064454, 0.016199071884155272, 0.016337888717651367, 0.016306175231933593, 0.016447200775146484, 0.016314655303955077, 0.016250879287719726, 0.016244735717773438, 0.016303936004638673, 0.016203968048095704, 0.016262880325317385, 0.016266592025756837, 0.016364479064941408, 0.016258655548095705, 0.01655049514770508, 0.016504287719726562, 0.016623968124389647, 0.01660495948791504, 0.016408159255981446, 0.016162464141845703, 0.01623334312438965, 0.016248672485351563, 0.016337247848510743, 0.016368671417236327, 0.016246816635131837, 0.016353311538696288, 0.016419647216796875, 0.016324607849121094, 0.016305856704711914, 0.016437568664550782, 0.016519168853759765, 0.01631023979187012, 0.016250911712646483, 0.016211360931396485, 0.01629654312133789, 0.01633452796936035, 0.016767072677612304, 0.01632896041870117, 0.016334815979003905, 0.01635327911376953, 0.01632383918762207, 0.01625369644165039, 0.016264863967895508, 0.016615776062011717, 0.01641267204284668, 0.016892032623291017, 0.016344959259033204, 
0.01645903968811035, 0.01633875274658203, 0.016511903762817384, 0.01617683219909668, 0.016224479675292967, 0.01620591926574707, 0.0163852481842041, 0.01625894355773926, 0.016591808319091798, 0.016443359375, 0.016144384384155275, 0.01630745506286621, 0.016548608779907225, 0.01622220802307129, 0.016281600952148437, 0.016416959762573242, 0.016254751205444336, 0.01635740852355957, 0.016512960433959962, 0.016254688262939455, 0.016246463775634764, 0.016423583984375, 0.016416671752929688, 0.016226400375366212, 0.016375423431396485, 0.0163056640625, 0.0162938232421875, 0.016248895645141603, 0.01622105598449707, 0.016354400634765624, 0.016357376098632814, 0.016382879257202148, 0.016231712341308595, 0.016202207565307616, 0.01618092727661133, 0.016265024185180665, 0.016388864517211915, 0.016293600082397462, 0.01627779197692871, 0.016590848922729492, 0.016390144348144533, 0.01621196746826172, 0.016416160583496094, 0.016392383575439453, 0.01628816032409668, 0.016332799911499024, 0.016311391830444336, 0.016442144393920898, 0.016467136383056642, 0.016359359741210937, 0.016570783615112303, 0.016382015228271485, 0.016247360229492188, 0.016204992294311524, 0.016526016235351562, 0.016527456283569338, 0.01643929672241211, 0.01626316833496094, 0.016924127578735352, 0.016529951095581055, 0.01961369514465332, 0.017346559524536134, 0.01659459114074707, 0.016368095397949218, 0.016355039596557618, 0.01629404830932617, 0.01638105583190918, 0.016532352447509766, 0.01622220802307129, 0.016263391494750975, 0.016357919692993165, 0.016408575057983397, 0.016643743515014648, 0.016209375381469725, 0.016196479797363283, 0.01621401596069336, 0.016199680328369142, 0.016236543655395508, 0.01635308837890625, 0.01636751937866211, 0.016435232162475586, 0.0163371524810791, 0.016259071350097656, 0.016373760223388673, 0.016330623626708986, 0.016523391723632812, 0.016474111557006836, 0.01640243148803711, 0.016467903137207033, 0.01656972885131836, 0.01637984085083008, 0.016373855590820312, 0.01620240020751953, 0.016184640884399415, 0.01617990493774414, 0.01625212860107422, 0.016125823974609373, 0.016212352752685545, 0.016275999069213867, 0.016324480056762694, 0.0162346248626709, 0.016244735717773438, 0.01637580871582031, 0.016385440826416017, 0.01636751937866211, 0.016353343963623045, 0.01625766372680664, 0.016248863220214845, 0.01627280044555664, 0.01637843132019043, 0.016358400344848634, 0.016325632095336915, 0.01625823974609375, 0.016172096252441405, 0.016190240859985352, 0.016317216873168946, 0.016234111785888673, 0.01633683204650879, 0.0163571834564209, 0.01659782409667969, 0.0164003849029541, 0.016320512771606444, 0.016383392333984375, 0.01626576042175293, 0.016162879943847658, 0.016193119049072266, 0.016302431106567383, 0.01624275207519531, 0.0162729606628418, 0.016296384811401367, 0.01695088005065918, 0.018268928527832032, 0.01609004783630371, 0.016308223724365235, 0.016156063079833985, 0.01612041664123535, 0.01613350486755371, 0.016205759048461915, 0.01619811248779297, 0.016141632080078124, 0.01615555191040039, 0.01615683174133301, 0.016176511764526367, 0.016169439315795897, 0.016236543655395508, 0.016541696548461913, 0.01623619270324707, 0.01618569564819336, 0.01621811294555664, 0.016250656127929686, 0.016176671981811525, 0.01622604751586914, 0.016236959457397462, 0.016587295532226563, 0.016570367813110352, 0.016358816146850585, 0.016244512557983398, 0.016318399429321288, 0.016292192459106444, 0.016290336608886718, 0.0163154239654541, 0.01647305679321289, 0.01642291259765625, 0.016312288284301757, 0.016246816635131837, 
0.016232448577880858, 0.016268800735473633, 0.016189952850341797, 0.016320192337036132, 0.017247936248779298, 0.016769664764404297, 0.016990207672119142, 0.016428800582885743, 0.016400224685668947, 0.01636761665344238, 0.016562271118164062, 0.016336223602294923, 0.016427999496459962, 0.016953344345092772, 0.017498111724853514, 0.01648988723754883, 0.016499296188354492, 0.0162093448638916, 0.01622038459777832, 0.01622256088256836, 0.016285696029663087, 0.0162857608795166, 0.016400096893310546, 0.01634867286682129, 0.016214303970336914, 0.016326271057128906, 0.016264320373535156, 0.01639516830444336, 0.01653651237487793, 0.01633059120178223, 0.016312896728515627, 0.016328256607055665, 0.01617910385131836, 0.016161216735839843, 0.016201183319091796, 0.0162289924621582, 0.01623859214782715, 0.016260448455810546, 0.01638262367248535, 0.016347135543823242, 0.016467872619628905, 0.016500255584716798, 0.016267744064331055, 0.01633417510986328, 0.01645792007446289, 0.016740928649902342, 0.016668415069580077, 0.01642288017272949, 0.0163056640625, 0.01615542411804199, 0.01616022491455078, 0.016254720687866212, 0.016337696075439452, 0.01641372871398926, 0.016260032653808595, 0.016267295837402343, 0.016277503967285157, 0.01619126319885254, 0.016209888458251952, 0.01616307258605957, 0.016166208267211914, 0.016222911834716795, 0.016313600540161132, 0.017076128005981444, 0.01637977600097656, 0.01621295928955078, 0.01621196746826172, 0.016236223220825196, 0.016322080612182616, 0.016201663970947265, 0.016188255310058595, 0.016215999603271483, 0.0163656005859375, 0.016230432510375977, 0.01618534469604492, 0.016867136001586912, 0.01631007957458496, 0.016283615112304688, 0.016465599060058594, 0.016384960174560547, 0.016362560272216796, 0.01633353614807129, 0.0162478084564209, 0.016260095596313476, 0.016390144348144533, 0.016504831314086914, 0.016248512268066406, 0.016345407485961912, 0.01637171173095703, 0.01635647964477539, 0.016089984893798828, 0.016295679092407228, 0.016170751571655272]",tokens/s,61.064471532043804,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, 
q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in 
_wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = 
backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, 
in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1171.64032,1109.262336,0.0,706.740224,681.6384,s,1,8.11124267578125,8.11124267578125,0.0,8.11124267578125,8.11124267578125,8.11124267578125,8.11124267578125,[8.11124267578125],,kWh,3.0492379691653087e-05,3.3563227701532543e-06,1.0048341371998726e-05,4.3897043833805065e-05,,MB,1471.512576,1413.349376,0.0,996.1472,949.238272,s,10,0.27525939369201663,0.02752593936920166,0.002131410169120802,0.02677203178405762,0.027904111862182612,0.030904280662536615,0.03330441570281983,"[0.033904449462890625, 0.02671004867553711, 0.026694944381713867, 0.026753408432006836, 0.026736160278320313, 0.026794815063476564, 0.026786304473876952, 0.02723740768432617, 0.026884096145629883, 
0.026757759094238283]",tokens/s,9300.318385734525,kWh,7.888891874437568e-07,8.700050791072737e-08,5.028667686594466e-07,1.3787564640139308e-06,tokens/kWh,185674560.14292413,MB,1491.873792,1421.737984,0.0,1004.535808,949.240832,s,10,13.042000366210939,1.3042000366210937,0.08030272311483772,1.278968017578125,1.3109853759765624,1.427837170410156,1.5213186059570314,"[1.54468896484375, 1.285018310546875, 1.2828385009765626, 1.2757518310546876, 1.270318603515625, 1.277839111328125, 1.27610498046875, 1.269203857421875, 1.280096923828125, 1.2801392822265625]",tokens/s,48.30547326407049,kWh,3.715347994506116e-05,4.097577592889477e-06,1.5384284829941432e-05,5.663534236789207e-05,tokens/kWh,1112379.609021596,,s,630,13.039928680419937,0.020698299492730034,0.0016840006295560596,0.02020580768585205,0.020875152778625488,0.02679142370223999,0.0272004386138916,"[0.026969024658203125, 0.031070207595825194, 0.027200735092163086, 0.027015840530395508, 0.02678131294250488, 0.026876415252685547, 0.026869632720947265, 0.026756479263305665, 0.02700364875793457, 0.02683616065979004, 0.026823488235473633, 0.027015008926391602, 0.026839199066162108, 0.02675916862487793, 0.027100799560546875, 0.026810752868652345, 0.028041215896606447, 0.02686476707458496, 0.026909311294555663, 0.0270830078125, 0.026796031951904296, 0.026785791397094725, 0.02696396827697754, 0.027219968795776366, 0.027828224182128908, 0.0270882568359375, 0.02706496047973633, 0.02691276741027832, 0.026918912887573244, 0.02676940727233887, 0.02692300796508789, 0.026870847702026367, 0.02683180809020996, 0.02670796775817871, 0.027031328201293944, 0.0271997127532959, 0.027322368621826174, 0.02752102470397949, 0.020911584854125975, 0.020648479461669922, 0.020833759307861327, 0.020515296936035158, 0.02022310447692871, 0.02031715202331543, 0.020701152801513672, 0.020357120513916017, 0.02022604751586914, 0.020595808029174805, 0.02023891258239746, 0.020373760223388673, 0.02039756774902344, 0.02053977584838867, 0.02027337646484375, 0.020194816589355468, 0.02020604705810547, 0.020301151275634765, 0.020371904373168947, 0.02304435157775879, 0.021390399932861327, 0.020444095611572264, 0.020325759887695312, 0.02019571113586426, 0.020463872909545898, 0.020435552597045898, 0.020365312576293947, 0.020152320861816408, 0.020289535522460937, 0.021549184799194335, 0.020514688491821288, 0.020336448669433595, 0.02035430335998535, 0.020369472503662108, 0.020192127227783203, 0.02025881576538086, 0.020301599502563477, 0.0202324161529541, 0.020178943634033202, 0.020172639846801756, 0.020186975479125978, 0.02010963249206543, 0.02022604751586914, 0.02023219108581543, 0.02020351982116699, 0.020213727951049806, 0.02017283248901367, 0.020333984375, 0.020248575210571287, 0.020322912216186522, 0.020610048294067384, 0.02087424087524414, 0.02123161506652832, 0.02045747184753418, 0.020402368545532228, 0.020533056259155275, 0.020305919647216796, 0.020546911239624023, 0.02016489601135254, 0.02074179267883301, 0.020107999801635742, 0.020025312423706056, 0.02022403144836426, 0.020109312057495117, 0.020147552490234377, 0.02027996826171875, 0.02060492706298828, 0.02044108772277832, 0.02248899269104004, 0.020410400390625, 0.020383424758911133, 0.020383583068847657, 0.020185375213623048, 0.02014028739929199, 0.020369375228881836, 0.020285472869873047, 0.020172800064086914, 0.02013520050048828, 0.02164201545715332, 0.02019321632385254, 0.020397056579589845, 0.02024880027770996, 0.02041116714477539, 0.02022585678100586, 0.02026710319519043, 0.020164703369140623, 0.020322303771972656, 
0.020231935501098634, 0.020520256042480468, 0.020148128509521485, 0.02027190399169922, 0.020168703079223634, 0.02028544044494629, 0.02024038314819336, 0.020183040618896485, 0.020368896484375, 0.020080480575561523, 0.0201181755065918, 0.020179040908813478, 0.020206655502319336, 0.02028371238708496, 0.02037980842590332, 0.02071180725097656, 0.02015011215209961, 0.02015043258666992, 0.020460832595825196, 0.020246240615844728, 0.020127840042114258, 0.02019139289855957, 0.020160480499267577, 0.020318784713745118, 0.02039193534851074, 0.020355295181274415, 0.02035660743713379, 0.020234752655029296, 0.02022604751586914, 0.02019660758972168, 0.020165376663208008, 0.020164159774780272, 0.02026540756225586, 0.020323360443115234, 0.02029257583618164, 0.020479808807373046, 0.023942399978637695, 0.021115840911865233, 0.0209039363861084, 0.020852703094482422, 0.020989984512329102, 0.020545536041259766, 0.020883359909057618, 0.0205515193939209, 0.020510976791381835, 0.020406047821044923, 0.02037196731567383, 0.020210975646972655, 0.020357568740844725, 0.020496383666992187, 0.02019327926635742, 0.02004377555847168, 0.02006947135925293, 0.01999555206298828, 0.020010784149169923, 0.020166879653930665, 0.020033536911010744, 0.020058143615722657, 0.020028768539428712, 0.020040319442749022, 0.020053375244140626, 0.020334400177001954, 0.02002617645263672, 0.02004377555847168, 0.020256351470947266, 0.020071136474609376, 0.02017043113708496, 0.021268735885620116, 0.020029312133789064, 0.02014361572265625, 0.020142719268798827, 0.020275136947631837, 0.02052511978149414, 0.023498687744140625, 0.020352319717407228, 0.02022230339050293, 0.02092073631286621, 0.020180320739746092, 0.0200731201171875, 0.02000230407714844, 0.019991039276123047, 0.02019705581665039, 0.020018720626831056, 0.02035081672668457, 0.020033632278442383, 0.020043712615966797, 0.020032543182373047, 0.020397951126098633, 0.02003558349609375, 0.020321599960327147, 0.020782943725585937, 0.02026358413696289, 0.020109504699707032, 0.02025257682800293, 0.02026915168762207, 0.020191232681274415, 0.020190271377563476, 0.02010412788391113, 0.020083808898925783, 0.020131839752197265, 0.020006015777587892, 0.02006403160095215, 0.019992576599121094, 0.019986431121826173, 0.020079999923706054, 0.020031808853149414, 0.0200392951965332, 0.020045888900756835, 0.020144287109375, 0.020273632049560546, 0.02013132858276367, 0.020247039794921876, 0.020060159683227538, 0.020119487762451174, 0.020080320358276366, 0.020399744033813477, 0.02013465690612793, 0.02003468894958496, 0.020210559844970704, 0.020059423446655275, 0.02012233543395996, 0.02015382385253906, 0.020099615097045897, 0.020156415939331054, 0.020146175384521483, 0.020268608093261718, 0.02051251220703125, 0.020311391830444336, 0.02019596862792969, 0.020146207809448244, 0.02046156883239746, 0.020205568313598633, 0.020197023391723634, 0.020113759994506836, 0.020164575576782227, 0.020158496856689453, 0.02006982421875, 0.02015622329711914, 0.021019392013549805, 0.020352672576904297, 0.020363616943359374, 0.020314111709594726, 0.02020886421203613, 0.02018511962890625, 0.020185440063476563, 0.020138656616210938, 0.020086528778076172, 0.02020147132873535, 0.020068351745605468, 0.020305248260498048, 0.020138656616210938, 0.02004991912841797, 0.02005606460571289, 0.02004582405090332, 0.020135936737060548, 0.02008406448364258, 0.020012928009033204, 0.02003228759765625, 0.020125696182250977, 0.0201744327545166, 0.020081056594848632, 0.02004400062561035, 0.02005574417114258, 0.020055231094360353, 0.020335519790649414, 
0.02026905632019043, 0.020164159774780272, 0.020168672561645506, 0.02022038459777832, 0.020184160232543946, 0.02020025634765625, 0.020082015991210938, 0.02008483123779297, 0.020055904388427734, 0.020101951599121093, 0.02002252769470215, 0.020069183349609374, 0.019998655319213868, 0.019991968154907228, 0.020032096862792968, 0.02000214385986328, 0.019980960845947266, 0.02015180778503418, 0.019990591049194335, 0.020120000839233397, 0.020471647262573243, 0.020099231719970703, 0.02020694351196289, 0.020195999145507813, 0.02020911979675293, 0.020174848556518556, 0.020167903900146486, 0.020009759902954102, 0.020185375213623048, 0.0201312313079834, 0.020127359390258788, 0.02004652786254883, 0.02000806427001953, 0.020017696380615235, 0.020097503662109373, 0.020116992950439453, 0.020045536041259766, 0.019974048614501954, 0.02007526397705078, 0.020279296875, 0.02020966339111328, 0.020268608093261718, 0.020144575119018553, 0.020145471572875977, 0.02019603157043457, 0.020209760665893556, 0.020224191665649413, 0.020405824661254884, 0.020227584838867187, 0.020275680541992188, 0.020369472503662108, 0.02031420707702637, 0.02023632049560547, 0.02024038314819336, 0.020248575210571287, 0.0202794246673584, 0.020300895690917968, 0.020330463409423828, 0.0203143367767334, 0.020191167831420897, 0.020295551300048828, 0.020187936782836913, 0.02026905632019043, 0.0202324161529541, 0.020227872848510742, 0.020178943634033202, 0.02026700782775879, 0.020250335693359375, 0.02016694450378418, 0.020147327423095704, 0.02008153533935547, 0.020329952239990234, 0.025277984619140624, 0.020311040878295897, 0.02079539108276367, 0.020238336563110353, 0.02008883285522461, 0.020142080307006836, 0.020154239654541016, 0.020221439361572266, 0.020075136184692383, 0.020139455795288086, 0.02020924758911133, 0.020065120697021484, 0.020185216903686524, 0.020116832733154295, 0.020203424453735352, 0.020170944213867188, 0.02008304023742676, 0.022324800491333008, 0.020322751998901368, 0.020438079833984376, 0.020267967224121095, 0.020348800659179687, 0.020122976303100587, 0.020427072525024414, 0.020176671981811525, 0.020181695938110353, 0.020183040618896485, 0.02048409652709961, 0.020274911880493164, 0.02032467269897461, 0.020186687469482423, 0.020227519989013672, 0.021617631912231445, 0.020353023529052734, 0.020137407302856444, 0.020121440887451172, 0.02013257598876953, 0.020271104812622072, 0.020142080307006836, 0.020076543807983398, 0.020377599716186523, 0.020178943634033202, 0.02022400093078613, 0.02031545639038086, 0.020195072174072265, 0.02013279914855957, 0.020254432678222658, 0.02012598419189453, 0.020113407135009767, 0.0202445125579834, 0.020188928604125977, 0.020195552825927734, 0.020000383377075194, 0.0201046085357666, 0.020087776184082032, 0.019998720169067383, 0.020092927932739257, 0.020053983688354492, 0.020042816162109376, 0.020097280502319338, 0.01999945640563965, 0.02014224052429199, 0.02010095977783203, 0.0200581111907959, 0.020165664672851562, 0.020048864364624025, 0.02004694366455078, 0.02012828826904297, 0.020103551864624022, 0.020202911376953125, 0.020259424209594725, 0.02020966339111328, 0.020440479278564454, 0.020212320327758788, 0.020305919647216796, 0.02021174430847168, 0.020165792465209963, 0.020521760940551758, 0.02031184005737305, 0.02025644874572754, 0.020724767684936522, 0.020133184432983398, 0.020453279495239257, 0.020113279342651367, 0.020074752807617186, 0.02012611198425293, 0.020180864334106444, 0.02018707275390625, 0.020365503311157225, 0.020148223876953125, 0.020214944839477538, 0.02005193519592285, 
0.02010316848754883, 0.020044864654541014, 0.02019945526123047, 0.020073759078979493, 0.020081151962280275, 0.0200949764251709, 0.02026278305053711, 0.020309823989868164, 0.020175167083740234, 0.020068351745605468, 0.020164608001708984, 0.020066015243530272, 0.020023679733276366, 0.019989471435546875, 0.01997091293334961, 0.020044895172119142, 0.02006118392944336, 0.020121440887451172, 0.02010691261291504, 0.020029951095581054, 0.020033151626586913, 0.020021631240844728, 0.02002467155456543, 0.020010879516601562, 0.02017132759094238, 0.020031904220581053, 0.020000064849853515, 0.02011801528930664, 0.02004582405090332, 0.020082687377929686, 0.020105279922485352, 0.02003046417236328, 0.019999904632568358, 0.020051071166992188, 0.020019519805908204, 0.02003388786315918, 0.020113407135009767, 0.02026905632019043, 0.02046940803527832, 0.020199392318725588, 0.020172607421875, 0.020177024841308594, 0.02033679962158203, 0.02029939270019531, 0.02013599967956543, 0.0201693115234375, 0.020160512924194338, 0.02016364860534668, 0.020118463516235353, 0.020411968231201172, 0.02010643196105957, 0.0201592960357666, 0.02003558349609375, 0.020207168579101563, 0.020425151824951172, 0.020516864776611327, 0.020312063217163084, 0.020175872802734376, 0.02016972732543945, 0.020100223541259767, 0.020102016448974608, 0.020082015991210938, 0.02028982353210449, 0.020460960388183593, 0.020362207412719727, 0.02031830406188965, 0.020389440536499024, 0.020395519256591797, 0.02018931198120117, 0.020077375411987303, 0.020207359313964845, 0.020099231719970703, 0.020164352416992187, 0.020121856689453124, 0.02027270317077637, 0.020392383575439453, 0.020230144500732423, 0.020246528625488282, 0.02037555122375488, 0.02025267219543457, 0.02067251205444336, 0.02025881576538086, 0.02031001663208008, 0.020295679092407228, 0.020361215591430663, 0.02050662422180176, 0.022425600051879883, 0.02103500747680664, 0.020445024490356446, 0.02034499168395996, 0.020174848556518556, 0.020227167129516603, 0.020177824020385742, 0.020570112228393556, 0.020246528625488282, 0.020202880859375, 0.02105196762084961, 0.020237951278686522, 0.02027769660949707, 0.020319807052612306, 0.02017695999145508, 0.02022028732299805, 0.020154144287109373, 0.02026927947998047, 0.020180992126464844, 0.020317567825317382, 0.0201177921295166, 0.02025712013244629, 0.020178783416748048, 0.020174400329589844, 0.02012015914916992, 0.020178016662597657, 0.020170879364013673, 0.020103424072265626, 0.020619264602661135, 0.020092832565307618, 0.020173919677734374, 0.02047488021850586, 0.020442432403564453, 0.020374143600463867, 0.020158527374267578, 0.020381696701049806, 0.020514368057250976, 0.02064633560180664, 0.02059040069580078, 0.020496576309204102, 0.02011244773864746, 0.02017375946044922, 0.020215423583984374, 0.02017523193359375, 0.020133407592773437, 0.020134368896484376, 0.02012876892089844, 0.02100022315979004, 0.021195743560791017, 0.02171062469482422, 0.020363487243652344, 0.020186624526977538, 0.020117631912231447, 0.02011788749694824, 0.020180608749389647, 0.020515199661254882, 0.020307647705078126, 0.020686527252197266, 0.02038438415527344, 0.02081702423095703, 0.020246944427490234, 0.020165088653564454, 0.020238336563110353, 0.020200639724731444, 0.020107295989990233, 0.020263200759887696, 0.02024323272705078, 0.020356832504272462, 0.020316160202026368, 0.020073759078979493, 0.020397951126098633, 0.02015318489074707, 0.02022400093078613, 0.020163999557495118, 0.02013654327392578, 0.020213760375976563, 0.02014806365966797, 0.02015452766418457, 
0.020132095336914062, 0.020094720840454102, 0.020213375091552733, 0.02022345542907715, 0.02018556785583496, 0.020171199798583984, 0.020125696182250977, 0.02015785598754883, 0.020277856826782226, 0.020158239364624023, 0.020403776168823242, 0.02026767921447754]",tokens/s,48.31314767434083,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1172.39808,1109.262336,0.0,706.740224,681.6384,s,1,8.467322265625,8.467322265625,0.0,8.467322265625,8.467322265625,8.467322265625,8.467322265625,[8.467322265625],,kWh,3.163328099167302e-05,3.4820551652054004e-06,1.003139691399535e-05,4.514673307087377e-05,,MB,1511.624704,1413.349376,0.0,996.1472,949.238272,s,10,0.36979677009582523,0.036979677009582525,0.0037964822348549284,0.03911412811279297,0.03977917366027832,0.04022387447357177,0.040579635124206544,"[0.03968035125732422, 0.039476638793945314, 0.039167713165283204, 0.04066857528686523, 0.039113121032714845, 0.039115135192871096, 0.038862945556640625, 0.031132608413696288, 0.030735904693603516, 0.03184377670288086]",tokens/s,6922.721362159622,kWh,9.00259352461571e-07,9.92822468878701e-08,5.695483188860297e-07,1.5690899182354707e-06,tokens/kWh,163151899.0880308,MB,1545.0112,1423.835136,0.0,1006.63296,949.240832,s,10,18.18659765625,1.818659765625,0.23351107890337508,2.003505126953125,2.0106150268554686,2.0155365173339845,2.019473709716797,"[2.0204580078125, 2.0069830322265627, 2.0095213623046875, 2.00326171875, 2.00374853515625, 2.009217041015625, 1.5757489013671875, 1.519130126953125, 1.5216429443359376, 1.516885986328125]",tokens/s,34.64089391032931,kWh,4.400386707379863e-05,4.853239254597283e-06,1.762668846031665e-05,6.648379478871257e-05,tokens/kWh,947599.3390602302,,s,630,18.184453041076672,0.028864211176312157,0.003827032328238246,0.031614912033081054,0.03209693832397461,0.03231874008178711,0.033272937164306654,"[0.03209875106811524, 0.032194561004638675, 0.032043041229248045, 0.03238220977783203, 0.03228067016601562, 0.03224822235107422, 0.032309505462646486, 0.03233542251586914, 0.03234838485717773, 0.03219068908691406, 0.03229004669189453, 0.032537345886230466, 0.03229216003417969, 0.03238982391357422, 0.032412990570068356, 0.03234201431274414, 0.03254281616210938, 0.032440929412841796, 0.032118175506591795, 0.0346399040222168, 0.03242860794067383, 0.03239084625244141, 0.03253884887695312, 0.032327777862548826, 0.03239251327514649, 0.03237139129638672, 0.03216300964355469, 0.03207673645019531, 0.03212275314331055, 0.03218950271606445, 0.03212963104248047, 0.03216419219970703, 0.032036865234375, 0.03183171272277832, 0.03183651161193848, 0.03196879959106445, 0.031772703170776365, 0.031846847534179684, 0.03241708755493164, 0.031702976226806644, 0.031677120208740236, 0.031580223083496096, 0.03158566474914551, 0.0315068473815918, 0.03148822402954102, 0.03153676795959472, 0.03159049606323242, 0.031559968948364256, 0.031643648147583005, 0.03182131195068359, 
0.0316441593170166, 0.03157206344604492, 0.031508384704589845, 0.03147983932495117, 0.03200201416015625, 0.031565471649169924, 0.03177507209777832, 0.032679935455322266, 0.031660032272338864, 0.031934240341186526, 0.03172735977172852, 0.03185094451904297, 0.03170483207702637, 0.031739776611328124, 0.03201935958862305, 0.03208396911621094, 0.03205734252929687, 0.03277619171142578, 0.03187455940246582, 0.03180390357971191, 0.03187001609802246, 0.031777727127075196, 0.03180339241027832, 0.0317255687713623, 0.03169075202941894, 0.03197452735900879, 0.03174652862548828, 0.03174646377563477, 0.031733760833740236, 0.031925952911376954, 0.03200188827514648, 0.03193302345275879, 0.03189132881164551, 0.03177382469177246, 0.03177712059020996, 0.03189929580688477, 0.031752479553222655, 0.032317825317382816, 0.0320714225769043, 0.03224009704589844, 0.031781951904296876, 0.03187808036804199, 0.03172352027893066, 0.031674335479736325, 0.031805471420288084, 0.03171891212463379, 0.031649599075317385, 0.03160883140563965, 0.03170995140075684, 0.0319008960723877, 0.03204169464111328, 0.03216332626342774, 0.031746847152709964, 0.03179484748840332, 0.03180668830871582, 0.03153215980529785, 0.03169593620300293, 0.03180611228942871, 0.03167212867736816, 0.031815391540527344, 0.031725343704223634, 0.03167708778381348, 0.03164777565002441, 0.032008190155029294, 0.03171750450134277, 0.03178073692321777, 0.03176243209838867, 0.03173785591125488, 0.031682559967041016, 0.03179644775390625, 0.03273603057861328, 0.03160883140563965, 0.03183369636535645, 0.03180175971984863, 0.031779903411865235, 0.031941568374633786, 0.03199590492248535, 0.03168412780761719, 0.03158473587036133, 0.03167436790466309, 0.03182284736633301, 0.03177103996276855, 0.03189411163330078, 0.03171039962768555, 0.03208480072021484, 0.032276702880859376, 0.03218204879760742, 0.032349281311035157, 0.03233679962158203, 0.03220479965209961, 0.03200204849243164, 0.031866880416870115, 0.031676416397094724, 0.03182355117797851, 0.03171155166625977, 0.03184220886230469, 0.03210659027099609, 0.031757984161376956, 0.03176278305053711, 0.031784959793090824, 0.035009632110595705, 0.03204143905639648, 0.03172127914428711, 0.031695487976074216, 0.031605791091918946, 0.031689695358276364, 0.03157606315612793, 0.031631359100341795, 0.031859935760498045, 0.031689504623413085, 0.031645599365234374, 0.031587936401367187, 0.03169945526123047, 0.031647743225097655, 0.0316231689453125, 0.0315043830871582, 0.03148524856567383, 0.03164368057250976, 0.03146575927734375, 0.031674144744873046, 0.03158012771606445, 0.0317957763671875, 0.03164752006530762, 0.03177068710327149, 0.031714752197265626, 0.031587104797363284, 0.03166399955749512, 0.03184227180480957, 0.03181702423095703, 0.031729631423950196, 0.03165056037902832, 0.033814655303955075, 0.03337571334838867, 0.03207171249389648, 0.031770751953125, 0.031684991836547854, 0.03181977653503418, 0.03176819229125977, 0.03179523277282715, 0.03160678482055664, 0.031532703399658205, 0.0315316162109375, 0.03165888023376465, 0.03164454460144043, 0.03221417617797852, 0.03159708786010742, 0.031627264022827145, 0.03149456024169922, 0.03151823997497558, 0.03170518493652344, 0.03156732749938965, 0.03150543975830078, 0.03168579292297363, 0.03154803276062012, 0.03151872062683105, 0.031680511474609374, 0.03155254364013672, 0.03167136001586914, 0.03220387268066406, 0.032072513580322266, 0.031888608932495115, 0.03175708770751953, 0.03158582305908203, 0.03161532783508301, 0.03168092727661133, 0.031629024505615236, 0.03193795204162598, 
0.031759967803955076, 0.031681535720825195, 0.03183375930786133, 0.03169657516479492, 0.03183888053894043, 0.031663263320922855, 0.0319719352722168, 0.03177312088012695, 0.03172742462158203, 0.032400928497314456, 0.03210636901855469, 0.03195171165466309, 0.03173107147216797, 0.03168294334411621, 0.03157401657104492, 0.03156377601623535, 0.03167231941223145, 0.03157401657104492, 0.03166358375549316, 0.0316912956237793, 0.03180748748779297, 0.03163750457763672, 0.03155907249450684, 0.03168012809753418, 0.03486796951293945, 0.032030113220214845, 0.03222940826416015, 0.03168544006347656, 0.03169481658935547, 0.03172761535644531, 0.03179315185546875, 0.03182803153991699, 0.031757823944091795, 0.032159614562988284, 0.03179311943054199, 0.031690080642700194, 0.0317138557434082, 0.031719423294067385, 0.03166108894348144, 0.031760799407958985, 0.03178348731994629, 0.031792543411254884, 0.03163811111450195, 0.0315513916015625, 0.03163324737548828, 0.031606208801269534, 0.03168924713134766, 0.0315283203125, 0.03164393615722656, 0.03160128021240234, 0.03164512062072754, 0.03165267181396485, 0.03162291145324707, 0.03167436790466309, 0.03160000038146973, 0.03168646430969238, 0.03167315292358398, 0.031616031646728514, 0.03158320045471191, 0.03161257553100586, 0.03165539169311524, 0.03261695861816406, 0.032280864715576174, 0.031929567337036134, 0.03176499176025391, 0.03192665672302246, 0.03210444641113281, 0.03198310470581055, 0.03183689689636231, 0.03170281600952148, 0.03172966384887695, 0.03160054397583008, 0.03209574508666992, 0.03200640106201172, 0.03188489532470703, 0.03193318367004395, 0.0315863037109375, 0.03182937622070312, 0.032142047882080076, 0.03207107162475586, 0.03195750427246094, 0.03195084762573242, 0.03184639930725098, 0.031662080764770506, 0.03206480026245117, 0.03190585517883301, 0.031875743865966796, 0.031749120712280275, 0.032096736907958986, 0.03175068855285645, 0.03177459144592285, 0.03164777565002441, 0.03177628707885742, 0.031666751861572265, 0.031698944091796875, 0.03194470405578613, 0.031784959793090824, 0.03231948852539063, 0.032055393218994144, 0.031726367950439455, 0.03159459114074707, 0.03144409561157226, 0.03157843208312988, 0.03147423934936523, 0.03167814445495606, 0.03175356864929199, 0.03166511917114258, 0.03174502372741699, 0.0318342399597168, 0.03168550491333008, 0.03224576187133789, 0.040529918670654294, 0.031737119674682616, 0.03147776031494141, 0.031646432876586916, 0.03164768028259277, 0.031514656066894534, 0.03168054389953613, 0.032290206909179685, 0.031717952728271485, 0.031651872634887696, 0.03180102348327637, 0.031826240539550785, 0.03169004821777344, 0.03166073608398438, 0.031634944915771485, 0.03157423973083496, 0.03167465591430664, 0.031488000869750975, 0.03150028800964356, 0.03180748748779297, 0.031686016082763675, 0.031629695892333984, 0.03164521598815918, 0.03197942352294922, 0.03158095932006836, 0.03179136085510254, 0.031844127655029295, 0.03171036720275879, 0.03178377532958984, 0.0316231689453125, 0.03152239990234375, 0.031576511383056644, 0.03167814445495606, 0.03162092781066895, 0.03164137649536133, 0.031715072631835935, 0.031671232223510745, 0.03302131271362305, 0.0315350399017334, 0.03175699234008789, 0.03150233650207519, 0.03155353546142578, 0.03168460845947266, 0.0316144962310791, 0.03159087944030762, 0.03197747230529785, 0.03155695915222168, 0.03396470260620117, 0.03184982490539551, 0.03165855979919434, 0.03161311912536621, 0.03159696006774902, 0.031641504287719724, 0.03176761627197266, 0.03147372817993164, 0.031600927352905275, 
0.03145379257202149, 0.031676416397094724, 0.024221696853637696, 0.02404662322998047, 0.023991296768188477, 0.024186847686767578, 0.02386720085144043, 0.02388559913635254, 0.023844255447387695, 0.023931264877319336, 0.0238384952545166, 0.023841632843017577, 0.024132768630981447, 0.02398294448852539, 0.024030752182006836, 0.02388755226135254, 0.023849407196044923, 0.02410531234741211, 0.02393836784362793, 0.024000192642211916, 0.02391756820678711, 0.024130880355834963, 0.024099519729614258, 0.024050880432128906, 0.023978815078735352, 0.024129535675048826, 0.023973888397216796, 0.023900224685668946, 0.02387727928161621, 0.023875871658325196, 0.023812095642089845, 0.02481939125061035, 0.024087072372436524, 0.024213279724121094, 0.024045568466186523, 0.023932928085327147, 0.023908351898193358, 0.02473369598388672, 0.02396099281311035, 0.023806560516357423, 0.024024543762207032, 0.023996448516845702, 0.024002687454223633, 0.023990655899047853, 0.024250368118286132, 0.024055168151855467, 0.024095359802246093, 0.02404751968383789, 0.0239268798828125, 0.024084672927856446, 0.023942975997924804, 0.024029184341430664, 0.024012224197387695, 0.024664224624633788, 0.024119712829589843, 0.02433856010437012, 0.024284383773803712, 0.02449068832397461, 0.024059104919433593, 0.023964672088623046, 0.023914079666137695, 0.024023231506347657, 0.02404351997375488, 0.02400217628479004, 0.023935359954833986, 0.02421958351135254, 0.024168512344360352, 0.023994367599487306, 0.02415782356262207, 0.02385923194885254, 0.023922655105590822, 0.023816576004028322, 0.023829631805419922, 0.023906368255615235, 0.023869535446166993, 0.02401968002319336, 0.024092416763305664, 0.024344703674316407, 0.024653024673461914, 0.024386463165283204, 0.02444857597351074, 0.024426368713378905, 0.02453116798400879, 0.024369375228881836, 0.02431398391723633, 0.02429952049255371, 0.024344415664672853, 0.024264448165893553, 0.023904191970825196, 0.023818399429321287, 0.023839040756225584, 0.02395484733581543, 0.024003168106079102, 0.02388374328613281, 0.023887903213500976, 0.023997631072998047, 0.023861503601074217, 0.02462774467468262, 0.025712671279907225, 0.024462911605834962, 0.02415782356262207, 0.024289247512817382, 0.02395222473144531, 0.023975263595581053, 0.02434662437438965, 0.02397772789001465, 0.02400089645385742, 0.02400284767150879, 0.024119712829589843, 0.02390399932861328, 0.023916543960571288, 0.023902496337890624, 0.023898208618164062, 0.02384252738952637, 0.02385081672668457, 0.023881919860839845, 0.02421286392211914, 0.023964000701904298, 0.023929119110107422, 0.024174591064453126, 0.023918367385864257, 0.023791616439819335, 0.023764991760253908, 0.02380803108215332, 0.023938175201416015, 0.023888736724853515, 0.023879680633544922, 0.02415398406982422, 0.02397177505493164, 0.023871007919311522, 0.023765920639038086, 0.023828224182128908, 0.023799072265625, 0.023861024856567385, 0.024033376693725586, 0.023857696533203125, 0.02394463920593262, 0.024848575592041015, 0.02424083137512207, 0.026019327163696288, 0.024252511978149413, 0.024139808654785155, 0.024060447692871093, 0.024686431884765624, 0.02404761505126953, 0.023960927963256835, 0.024046239852905275, 0.02422761535644531, 0.024234207153320312, 0.02408038330078125, 0.024029184341430664, 0.02397337532043457, 0.024043167114257812, 0.024093183517456054, 0.024006399154663086, 0.024453279495239257, 0.02472991943359375, 0.024395904541015624, 0.024407615661621095, 0.02436966323852539, 0.024093727111816406, 0.023984479904174804, 0.024005184173583983, 0.02389414405822754, 
0.02404118347167969, 0.024329631805419923, 0.024888063430786134, 0.024577823638916016, 0.024501535415649416, 0.02432713508605957, 0.0241561279296875, 0.024116735458374023, 0.02403955268859863, 0.02402547264099121, 0.02400611114501953, 0.024195167541503908, 0.02406172752380371, 0.024246047973632813, 0.023892864227294922, 0.02408857536315918, 0.02445516777038574, 0.024115360260009766, 0.02396726417541504, 0.02407846450805664, 0.024120384216308594, 0.024098976135253906, 0.02396022415161133, 0.024510751724243163, 0.02412646484375, 0.023889856338500978, 0.023961984634399414, 0.023984128952026368, 0.024005151748657225, 0.023797760009765623, 0.023805696487426756, 0.023939327239990236, 0.02390630340576172, 0.023996416091918944, 0.02388742446899414, 0.02428767967224121, 0.02397804832458496, 0.023934783935546874, 0.023920415878295898, 0.02390457534790039, 0.02379747200012207, 0.023985952377319337, 0.023955936431884765, 0.023997888565063477, 0.02396019172668457, 0.024194751739501953, 0.02392915153503418, 0.02394688034057617, 0.024117631912231444, 0.02409062385559082, 0.023883775711059572, 0.02405580711364746, 0.024092159271240234, 0.028621120452880858, 0.024277183532714845, 0.02413680076599121, 0.024257440567016602, 0.023973888397216796, 0.023774368286132812, 0.023955904006958007, 0.02384118461608887, 0.02389360046386719, 0.023951776504516603, 0.02384390449523926, 0.023923648834228515, 0.024807424545288087, 0.0238919677734375, 0.023872512817382813, 0.023838815689086915, 0.023822975158691407, 0.024374719619750976, 0.02384899139404297, 0.023818944931030272, 0.02392630386352539, 0.024074079513549805, 0.023933120727539062, 0.024078847885131836, 0.024212959289550782, 0.023927391052246092, 0.023949216842651368, 0.023840864181518553, 0.02390425682067871]",tokens/s,34.644979344547785,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, 
prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,4972.498944,7346.192384,0.0,6943.670272,6539.1744,s,1,11.7941640625,11.7941640625,0.0,11.7941640625,11.7941640625,11.7941640625,11.7941640625,[11.7941640625],,kWh,0.00013259486651253003,1.4618991521099659e-05,4.208364477800708e-05,0.00018929750281163678,,MB,4797.804544,7641.890816,0.0,7224.68864,6917.39904,s,10,2.0134595489501956,0.20134595489501955,0.00051771705743223,0.20138206481933596,0.2017876007080078,0.20210072479248048,0.2023512240600586,"[0.2003088684082031, 0.20104896545410156, 0.20091384887695313, 0.20132870483398438, 0.20151091003417967, 0.2014354248046875, 0.201718017578125, 0.20131452941894531, 0.2014664306640625, 0.20241384887695313]",tokens/s,1271.4434721744308,kWh,5.904086264416644e-06,6.511160666742724e-07,3.9020475660794805e-06,1.0457249897170397e-05,tokens/kWh,24480623.731605615,MB,4797.804544,7643.987968,0.0,7226.785792,6917.4016,s,10,18.930143188476563,1.8930143188476563,0.014283110508362238,1.8886383666992188,1.9184749267578125,1.92010947265625,1.9214171093749999,"[1.8801324462890625, 1.885781982421875, 1.8883280029296876, 1.8926431884765624, 1.8868673095703126, 1.8760447998046874, 1.88894873046875, 1.891541015625, 1.9217440185546875, 1.9181116943359375]",tokens/s,33.28025539624565,kWh,5.4961760128917076e-05,6.0621266488289e-06,3.6594512608919554e-05,9.761839938666552e-05,tokens/kWh,645370.1391933054,,s,630,18.92765817832947,0.030043901870364233,0.0005085053205969684,0.02993129634857178,0.03053910484313965,0.03077317123413086,0.03214753639221192,"[0.030493183135986326, 0.03035545539855957, 0.030366912841796875, 0.030017440795898437, 0.030159679412841797, 0.030275232315063478, 0.030035808563232423, 0.030014047622680663, 0.030494047164916993, 0.029865760803222657, 0.029780864715576172, 0.029750816345214842, 0.02958790397644043, 0.029610015869140624, 0.02954444885253906, 0.029655040740966795, 0.029544416427612304, 0.02955267143249512, 0.02958131217956543, 0.02997200012207031, 0.02998524856567383, 0.0297574405670166, 0.029954048156738283, 0.029576992034912108, 0.029720064163208007, 0.029856000900268555, 0.02970467185974121, 0.02974515151977539, 0.029816831588745117, 0.02993561553955078, 0.03064793586730957, 0.029877952575683594, 0.029772480010986327, 0.029815807342529296, 0.02993040084838867, 0.029843168258666994, 0.029827455520629882, 0.029879936218261717, 0.029612415313720702, 0.029684768676757813, 0.029610719680786133, 0.0296507511138916, 0.03015920066833496, 0.029795808792114256, 0.029815423965454103, 0.0295765438079834, 0.029581247329711916, 0.029538368225097655, 0.029542144775390626, 0.029680543899536133, 0.02958745574951172, 0.029577215194702147, 0.02966032028198242, 0.029647712707519532, 0.029702144622802733, 0.02993078422546387, 0.029727455139160155, 0.02999488067626953, 0.030124160766601564, 
0.029971616744995117, 0.02986684799194336, 0.02970377540588379, 0.0298438720703125, 0.03011177635192871, 0.029791200637817383, 0.02949331283569336, 0.029543359756469725, 0.029795360565185548, 0.029754335403442383, 0.030302207946777345, 0.030217599868774415, 0.02985843276977539, 0.029933055877685546, 0.02962483215332031, 0.02978755187988281, 0.02972732734680176, 0.029777536392211913, 0.029792640686035158, 0.02977791976928711, 0.02977561569213867, 0.029745407104492187, 0.029672447204589843, 0.029880672454833983, 0.030077600479125978, 0.030117887496948242, 0.02994175910949707, 0.029867040634155274, 0.029726720809936522, 0.029903839111328125, 0.029997055053710937, 0.030627744674682617, 0.02990812873840332, 0.02976249694824219, 0.02964672088623047, 0.02993984031677246, 0.029918943405151367, 0.03004572868347168, 0.02982067108154297, 0.02981171226501465, 0.029661184310913087, 0.03010755157470703, 0.029871807098388672, 0.02979596710205078, 0.030241567611694335, 0.02985753631591797, 0.029827327728271485, 0.029667327880859375, 0.030085119247436523, 0.03022960090637207, 0.030000032424926756, 0.02995574378967285, 0.029800800323486327, 0.029829120635986327, 0.029925376892089843, 0.029836416244506836, 0.029671808242797852, 0.02995609664916992, 0.0298438720703125, 0.030248992919921874, 0.030352928161621093, 0.03022435188293457, 0.030024255752563477, 0.030736383438110353, 0.029831167221069335, 0.030738143920898436, 0.029753631591796875, 0.030517696380615234, 0.030015039443969726, 0.029956960678100587, 0.029785600662231446, 0.03006924819946289, 0.029984607696533203, 0.02985331153869629, 0.029700639724731446, 0.02972572708129883, 0.02989676856994629, 0.030077407836914063, 0.03019615936279297, 0.029970016479492188, 0.02987254333496094, 0.029853439331054686, 0.029677215576171874, 0.029805152893066407, 0.02965862464904785, 0.029616479873657228, 0.029747360229492186, 0.029677536010742186, 0.030015520095825195, 0.030135616302490235, 0.030853824615478517, 0.03060086441040039, 0.030261600494384765, 0.02976348876953125, 0.02981395149230957, 0.02973695945739746, 0.030126848220825196, 0.029989023208618164, 0.030005247116088866, 0.030119935989379884, 0.029906944274902345, 0.029999103546142578, 0.029949951171875, 0.029851648330688478, 0.0303308162689209, 0.030219903945922853, 0.029905344009399416, 0.02953011131286621, 0.029779264450073242, 0.029883392333984377, 0.029975744247436525, 0.030079488754272462, 0.03017692756652832, 0.029929536819458008, 0.030008895874023438, 0.029915327072143554, 0.030251167297363282, 0.02989094352722168, 0.029890560150146486, 0.029869152069091798, 0.0297271671295166, 0.02971900749206543, 0.029752416610717772, 0.030515552520751953, 0.029948480606079103, 0.03060860824584961, 0.03014735984802246, 0.02972991943359375, 0.029825920104980468, 0.029693056106567382, 0.031082496643066407, 0.030416032791137696, 0.030864063262939452, 0.03056656074523926, 0.030199392318725586, 0.030131872177124024, 0.029950176239013672, 0.030140960693359375, 0.029789728164672853, 0.029788127899169924, 0.029712896347045898, 0.029982048034667967, 0.029883039474487304, 0.0297227840423584, 0.0296343994140625, 0.029908992767333983, 0.02940108871459961, 0.029691680908203125, 0.030107872009277344, 0.032718849182128903, 0.030158624649047852, 0.02980998420715332, 0.029811136245727538, 0.029921760559082033, 0.029742559432983397, 0.0297989444732666, 0.02999091148376465, 0.030263296127319338, 0.029929471969604493, 0.02974515151977539, 0.02951763153076172, 0.032061630249023435, 0.029995008468627928, 0.030220287322998047, 
0.030062591552734375, 0.030517248153686522, 0.030105600357055663, 0.030212095260620117, 0.030050304412841795, 0.02996633529663086, 0.029742176055908204, 0.030178207397460938, 0.02950547218322754, 0.029509248733520507, 0.02945430374145508, 0.029510112762451173, 0.029539424896240233, 0.029668256759643553, 0.029326847076416016, 0.029675392150878905, 0.02940787124633789, 0.029648895263671874, 0.031909887313842776, 0.03117251205444336, 0.029927263259887694, 0.030468128204345704, 0.029506975173950196, 0.0295350399017334, 0.029715967178344727, 0.03086089515686035, 0.02952412796020508, 0.029589536666870118, 0.029454912185668945, 0.03030281639099121, 0.029698047637939453, 0.029625568389892578, 0.02971113586425781, 0.029498559951782227, 0.029614912033081055, 0.02944819259643555, 0.029612031936645508, 0.029542400360107423, 0.02954649543762207, 0.02974060821533203, 0.029540735244750975, 0.02952592086791992, 0.029838752746582032, 0.02989104080200195, 0.02993180847167969, 0.030310047149658202, 0.02990835189819336, 0.02963657569885254, 0.030422016143798827, 0.029661056518554687, 0.029719968795776368, 0.0298687686920166, 0.029861471176147462, 0.029745567321777345, 0.029667104721069336, 0.029549087524414062, 0.029640384674072266, 0.02954854393005371, 0.029664863586425783, 0.02956729507446289, 0.029895967483520507, 0.029567455291748045, 0.030314176559448243, 0.030546592712402343, 0.029752895355224608, 0.02970992088317871, 0.0296210880279541, 0.029560831069946288, 0.030033824920654296, 0.030205631256103517, 0.030114208221435547, 0.030105600357055663, 0.030482431411743165, 0.030074880599975585, 0.02987731170654297, 0.029651103973388673, 0.032624736785888675, 0.0305916805267334, 0.030038015365600586, 0.030182464599609375, 0.030004159927368164, 0.03057459259033203, 0.029716480255126954, 0.029691743850708007, 0.029675359725952147, 0.0313449592590332, 0.030728191375732423, 0.030242368698120116, 0.03015235137939453, 0.02998147201538086, 0.02993561553955078, 0.029792255401611328, 0.030646976470947267, 0.029970432281494142, 0.02970857620239258, 0.0297139835357666, 0.029604000091552736, 0.02965020751953125, 0.029631200790405272, 0.02965488052368164, 0.02976736068725586, 0.029652799606323242, 0.02963862419128418, 0.029864639282226563, 0.029626079559326172, 0.02989619255065918, 0.029909088134765626, 0.030066911697387694, 0.030611936569213866, 0.029913087844848633, 0.029724672317504884, 0.029755392074584962, 0.03011577606201172, 0.029953567504882813, 0.02979414367675781, 0.0297838077545166, 0.02967353630065918, 0.029723264694213866, 0.030176607131958008, 0.02997465515136719, 0.02974390411376953, 0.029870080947875976, 0.029800447463989257, 0.030168256759643554, 0.02973369598388672, 0.02994175910949707, 0.0295731201171875, 0.029652992248535157, 0.029442047119140623, 0.029607263565063477, 0.029548288345336914, 0.029524896621704103, 0.029472768783569334, 0.02976486396789551, 0.02983193588256836, 0.030007295608520508, 0.029913087844848633, 0.029783071517944334, 0.029572063446044922, 0.02994175910949707, 0.02955183982849121, 0.029630783081054688, 0.030065120697021483, 0.02970163154602051, 0.02950553512573242, 0.02976793670654297, 0.03031884765625, 0.02974412727355957, 0.029512704849243163, 0.029674528121948242, 0.02949129676818848, 0.029505695343017578, 0.029387487411499023, 0.029491199493408202, 0.029378559112548826, 0.03003705596923828, 0.030148992538452147, 0.029557279586791992, 0.029626399993896484, 0.02947587203979492, 0.029543392181396483, 0.029456384658813478, 0.029454336166381836, 0.029381792068481447, 
0.029569887161254884, 0.029528032302856444, 0.029640064239501954, 0.029854528427124022, 0.029577056884765626, 0.030136320114135744, 0.02974412727355957, 0.02933452796936035, 0.029471744537353517, 0.029608959197998046, 0.029658687591552733, 0.029868000030517577, 0.029790624618530274, 0.0297096004486084, 0.029866720199584963, 0.030195520401000975, 0.029804800033569338, 0.029728736877441406, 0.030042144775390626, 0.02970751953125, 0.03029376029968262, 0.030086143493652344, 0.030052352905273437, 0.029789695739746092, 0.029659648895263672, 0.02974710464477539, 0.03012144088745117, 0.03027827262878418, 0.030397695541381838, 0.03017804718017578, 0.03017942428588867, 0.030322208404541015, 0.03024729537963867, 0.030054399490356445, 0.030083072662353515, 0.03093708801269531, 0.03017318344116211, 0.029871231079101564, 0.030669408798217772, 0.0298351993560791, 0.03218262481689453, 0.03114188766479492, 0.030824415206909178, 0.030007328033447266, 0.03018547248840332, 0.030136320114135744, 0.029998495101928712, 0.030079328536987304, 0.030346847534179686, 0.029788448333740235, 0.02977555274963379, 0.030005247116088866, 0.02983123207092285, 0.029883007049560546, 0.030998367309570313, 0.03014313507080078, 0.030203231811523436, 0.029942495346069336, 0.029928607940673826, 0.029893375396728514, 0.029754623413085938, 0.03002582359313965, 0.030601184844970705, 0.030883712768554686, 0.029615039825439452, 0.030105600357055663, 0.029638656616210936, 0.02962019157409668, 0.029837343215942384, 0.029702144622802733, 0.029638656616210936, 0.029928831100463866, 0.029802879333496093, 0.030429439544677736, 0.030205440521240235, 0.030527999877929687, 0.03025529670715332, 0.029921152114868163, 0.030022783279418944, 0.03009609603881836, 0.03012179183959961, 0.030015552520751953, 0.029894880294799805, 0.029915136337280275, 0.02971820831298828, 0.029767200469970702, 0.029714752197265625, 0.02995846366882324, 0.02971459197998047, 0.030006464004516602, 0.02966531181335449, 0.02975382423400879, 0.029702463150024415, 0.029797632217407225, 0.029702239990234375, 0.03132646369934082, 0.030064287185668944, 0.030062528610229493, 0.03051807975769043, 0.03002572822570801, 0.029767679214477538, 0.02979430389404297, 0.030672224044799804, 0.03004483222961426, 0.02965692710876465, 0.02960915184020996, 0.029578208923339844, 0.02957926368713379, 0.029656736373901368, 0.030007648468017577, 0.03036947250366211, 0.030242399215698244, 0.03025971221923828, 0.03037971115112305, 0.03043712043762207, 0.030198047637939453, 0.029876735687255858, 0.030816160202026367, 0.02976367950439453, 0.029427711486816405, 0.029706239700317383, 0.033320384979248045, 0.030476863861083985, 0.03020595169067383, 0.029714239120483397, 0.02966547203063965, 0.030289920806884765, 0.029988479614257813, 0.031218048095703124, 0.030121984481811522, 0.030072832107543947, 0.030055456161499024, 0.029884960174560545, 0.02978656005859375, 0.03332710266113281, 0.030219776153564453, 0.030765567779541016, 0.030662656784057617, 0.03077939224243164, 0.03059712028503418, 0.030364864349365233, 0.030797632217407226, 0.030634815216064454, 0.030813791275024413, 0.030444128036499023, 0.030383583068847655, 0.030455808639526367, 0.030366239547729493, 0.030686304092407225, 0.030331104278564454, 0.030397119522094725, 0.030678367614746092, 0.03040323257446289, 0.031154176712036134, 0.033298431396484376, 0.03065782356262207, 0.03060960006713867, 0.030292512893676758, 0.030534847259521485, 0.030358335494995118, 0.030423040390014647, 0.03178848075866699, 0.030431808471679686, 0.030246912002563478, 
0.0302587833404541, 0.030376352310180665, 0.030332319259643553, 0.030115936279296877, 0.030413312911987303, 0.030238719940185548, 0.03011577606201172, 0.030281791687011717, 0.0301977596282959, 0.03013212776184082, 0.030666624069213867, 0.030285823822021486, 0.030363872528076173, 0.03012112045288086, 0.030241632461547853, 0.029913087844848633, 0.03099852752685547, 0.03079523277282715, 0.030538272857666016, 0.031471616744995115, 0.0303470401763916, 0.030373119354248048, 0.030317119598388672, 0.030187936782836915, 0.030087039947509765, 0.030062719345092772, 0.029947519302368164, 0.029995391845703125, 0.029970048904418945, 0.030009727478027343, 0.02989798355102539, 0.029928192138671875, 0.02999091148376465, 0.02989606475830078, 0.030360191345214844, 0.030246912002563478, 0.030296064376831053, 0.030651935577392576, 0.03030790328979492, 0.030407583236694336, 0.030363071441650392, 0.030389856338500977, 0.030350303649902342, 0.030427135467529298, 0.03033497619628906, 0.03022233581542969, 0.030148223876953126, 0.0337817268371582, 0.03048899269104004, 0.030631519317626952, 0.030654111862182618, 0.03064854431152344, 0.03037424087524414, 0.03047372817993164, 0.030525344848632813, 0.030477088928222658, 0.031715328216552735, 0.030494112014770508, 0.03030076789855957, 0.030453760147094725, 0.030341119766235353, 0.03041689682006836, 0.030323871612548826, 0.03051811218261719, 0.030487775802612305, 0.030484703063964842, 0.03042745590209961, 0.030372095108032227, 0.030537696838378908, 0.030569599151611327, 0.030627904891967775, 0.030484735488891603, 0.030188127517700194, 0.03037593650817871, 0.030119935989379884, 0.030110879898071288, 0.030333215713500977, 0.03035798454284668, 0.03039344024658203]",tokens/s,33.284624757292775,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,4974.399488,7346.192384,0.0,6943.670272,6539.1744,s,1,11.6430400390625,11.6430400390625,0.0,11.6430400390625,11.6430400390625,11.6430400390625,11.6430400390625,[11.6430400390625],,kWh,0.0001315777272250822,1.4506553731726868e-05,4.096031054598437e-05,0.00018704459150279345,,MB,4998.361088,7641.890816,0.0,7224.68864,6917.39904,s,10,2.0815584869384764,0.20815584869384765,0.000453871607753706,0.20819725036621095,0.20859234619140626,0.20877018737792968,0.20891246032714844,"[0.20725570678710936, 0.20804966735839844, 0.20759536743164062, 0.20847407531738282, 0.20827040100097657, 0.20816304016113282, 0.2085528259277344, 0.20894802856445313, 0.20801791381835938, 0.20823146057128905]",tokens/s,1229.8477395968862,kWh,6.11945123915013e-06,6.748611463750618e-07,4.059250932582706e-06,1.0853563318107898e-05,tokens/kWh,23586723.778806727,MB,5003.59168,7643.987968,0.0,7226.785792,6917.4016,s,10,21.9069091796875,2.19069091796875,0.011229853992387764,2.188249633789063,2.2043486328125,2.2094287109375004,2.2134927734375003,"[2.199599365234375, 2.18250732421875, 2.184108154296875, 2.190656494140625, 2.1799404296875, 
2.2032197265625, 2.2145087890625, 2.18830078125, 2.17586962890625, 2.188198486328125]",tokens/s,28.758050477707183,kWh,6.39527449554342e-05,7.05387187779224e-06,4.247406407181902e-05,0.00011348068090504545,tokens/kWh,555160.5744480422,,s,630,21.90452645874025,0.03476908961704799,0.0004906550635814626,0.034681968688964844,0.03518411064147949,0.035453441619873044,0.0366552529525757,"[0.03605731201171875, 0.03514614486694336, 0.03532620620727539, 0.035020320892333985, 0.03482806396484375, 0.03461328125, 0.03512313461303711, 0.035039424896240234, 0.03488822555541992, 0.03494857788085937, 0.03509446334838867, 0.03498636627197266, 0.03486947250366211, 0.03474431991577148, 0.035250175476074216, 0.034953216552734374, 0.034870849609375, 0.034533119201660155, 0.03476723098754883, 0.034670143127441405, 0.0347143669128418, 0.034988033294677735, 0.034723838806152346, 0.03475251388549805, 0.03512115097045899, 0.03506995010375977, 0.03489936065673828, 0.03524655914306641, 0.03473977661132813, 0.03499679946899414, 0.03505955123901367, 0.03475471878051758, 0.03481353759765625, 0.03477926254272461, 0.03464771270751953, 0.03507606506347656, 0.034941600799560546, 0.03487539291381836, 0.034881504058837894, 0.034925662994384765, 0.034953216552734374, 0.034638782501220704, 0.03467264175415039, 0.03491849517822266, 0.03464796829223633, 0.03465334320068359, 0.03486956787109375, 0.03456902313232422, 0.0346638069152832, 0.03458707046508789, 0.034503902435302734, 0.034400959014892575, 0.03486310577392578, 0.03522057723999023, 0.03516713714599609, 0.034936702728271485, 0.036371742248535156, 0.03508310317993164, 0.03495017623901367, 0.035183582305908206, 0.034802688598632815, 0.034579456329345705, 0.0344002571105957, 0.03526591873168945, 0.034756446838378904, 0.034487071990966796, 0.03435481643676758, 0.03434739303588867, 0.034351104736328124, 0.034465824127197266, 0.03425481414794922, 0.03438387298583984, 0.034272415161132816, 0.03441340637207031, 0.034446945190429686, 0.03443548965454102, 0.034533374786376955, 0.03497475051879883, 0.03445654296875, 0.03442483139038086, 0.03459686279296875, 0.03438528060913086, 0.034361278533935544, 0.03424121475219727, 0.03424870300292969, 0.03424428939819336, 0.03450207901000977, 0.034438014984130856, 0.03434454345703125, 0.034314655303955076, 0.03473612976074219, 0.03449958419799805, 0.03440332794189453, 0.03457228851318359, 0.03428518295288086, 0.03444060897827148, 0.034159584045410155, 0.03429158401489258, 0.03429526519775391, 0.03434972763061524, 0.03440435028076172, 0.034353153228759765, 0.034508190155029296, 0.03450057601928711, 0.034579071044921875, 0.03504908752441406, 0.03499385452270508, 0.03461600112915039, 0.03497369766235352, 0.03471558380126953, 0.034398273468017576, 0.035288928985595706, 0.03471308898925781, 0.0346610221862793, 0.03475894546508789, 0.03472339248657227, 0.03474038314819336, 0.03467411041259766, 0.03481363296508789, 0.03487424087524414, 0.0350682258605957, 0.035092159271240236, 0.035232864379882815, 0.03617849731445313, 0.03706447982788086, 0.03496352005004883, 0.0356577262878418, 0.03526611328125, 0.03573190307617188, 0.03524607849121094, 0.0349224967956543, 0.03474822235107422, 0.03471299362182617, 0.034538272857666016, 0.03472995376586914, 0.03469929504394531, 0.034754302978515624, 0.03475024032592773, 0.03468540954589844, 0.03455945587158203, 0.03477686309814453, 0.034617183685302734, 0.034560798645019535, 0.034375198364257814, 0.03450518417358398, 0.03445568084716797, 0.03473612976074219, 0.03453849411010742, 0.03451087951660156, 
0.03432723236083984, 0.03437968063354492, 0.03435558319091797, 0.03437948989868164, 0.034418304443359374, 0.03455414581298828, 0.03462387084960938, 0.03571916961669922, 0.03565347290039062, 0.03534864044189453, 0.03473993682861328, 0.034498241424560545, 0.03449654388427734, 0.03443564987182617, 0.03484672164916992, 0.03442892837524414, 0.03424991989135742, 0.0342147216796875, 0.034285568237304685, 0.03433219146728515, 0.03429219055175781, 0.034293407440185546, 0.034242366790771486, 0.03432502365112305, 0.03438383865356445, 0.034756641387939456, 0.03468825531005859, 0.03489049530029297, 0.034676353454589845, 0.0345109748840332, 0.03455516815185547, 0.034710494995117185, 0.03456739044189453, 0.034626335144042966, 0.034531326293945314, 0.03453071975708008, 0.03447663879394531, 0.03484627151489258, 0.03490041732788086, 0.03470883178710937, 0.03577897644042969, 0.035053569793701174, 0.03458793640136719, 0.03449740982055664, 0.034445152282714844, 0.034705150604248045, 0.03478076934814453, 0.034433055877685546, 0.034529918670654296, 0.034664447784423826, 0.03515801620483398, 0.03547676849365235, 0.034922592163085936, 0.0349315185546875, 0.035038848876953126, 0.03464764785766602, 0.034605663299560545, 0.03638995361328125, 0.03472889709472656, 0.034748416900634765, 0.03454073715209961, 0.0348919677734375, 0.0344951057434082, 0.03486515045166016, 0.034856319427490234, 0.03529110336303711, 0.03501123046875, 0.034912158966064456, 0.03497510528564453, 0.034721694946289065, 0.03488441467285156, 0.03444076919555664, 0.03461503982543945, 0.034406944274902346, 0.03443097686767578, 0.03443920135498047, 0.03441990280151367, 0.03447084808349609, 0.03501465606689453, 0.034586208343505856, 0.03492441558837891, 0.03478787231445313, 0.034531326293945314, 0.03454771041870117, 0.03458793640136719, 0.034450145721435545, 0.034533374786376955, 0.03445280075073242, 0.03431414413452148, 0.034261600494384765, 0.034398143768310546, 0.03424812698364258, 0.0346561279296875, 0.03448108673095703, 0.03449446487426758, 0.034168033599853515, 0.03443571090698242, 0.034239967346191405, 0.034367359161376954, 0.0342597770690918, 0.034403968811035156, 0.039963008880615235, 0.034525184631347655, 0.035682239532470704, 0.03506499099731445, 0.03468339157104492, 0.03452134323120117, 0.03476249694824219, 0.034836734771728516, 0.034687103271484374, 0.03458047866821289, 0.0355860481262207, 0.03444460678100586, 0.03449721527099609, 0.034574337005615234, 0.03466239929199219, 0.03457843017578125, 0.03549209594726563, 0.034487777709960935, 0.034545505523681644, 0.03460655975341797, 0.03439510345458984, 0.034334400177001956, 0.03438214492797852, 0.03421286392211914, 0.03421014404296875, 0.03436150360107422, 0.03437750244140625, 0.03426287841796875, 0.03582860946655273, 0.03449814224243164, 0.034431392669677735, 0.03530547332763672, 0.034543617248535156, 0.03452444839477539, 0.034243297576904294, 0.03441049575805664, 0.03431219100952149, 0.034301952362060545, 0.03452928161621094, 0.034351104736328124, 0.03440790557861328, 0.034496799468994144, 0.03444883346557617, 0.03455055999755859, 0.034603038787841794, 0.03439580917358399, 0.034289249420166014, 0.03444771194458008, 0.034345375061035154, 0.03502412796020508, 0.03442134475708008, 0.03432243347167969, 0.0344447021484375, 0.03446656036376953, 0.034648063659667966, 0.03481087875366211, 0.034640830993652345, 0.03446102523803711, 0.03461503982543945, 0.03447087860107422, 0.03446710586547851, 0.034533729553222654, 0.03467305755615235, 0.03457020950317383, 0.03500851058959961, 0.03676361465454102, 
0.03507379150390625, 0.03475977706909179, 0.03473171234130859, 0.03464396667480469, 0.03451084899902344, 0.03465241622924805, 0.034938625335693356, 0.034816001892089846, 0.03480575942993164, 0.0349977912902832, 0.03506147384643555, 0.03487615966796875, 0.03496527862548828, 0.0348895378112793, 0.03525273513793945, 0.03497289657592773, 0.03486995315551758, 0.03498303985595703, 0.03494182586669922, 0.03491804885864258, 0.03499248123168945, 0.035135486602783206, 0.03512934494018555, 0.03501670455932617, 0.03509622573852539, 0.03543280029296875, 0.035198974609375, 0.03525836944580078, 0.03544473648071289, 0.03513958358764648, 0.034816001892089846, 0.034934078216552734, 0.03475667190551758, 0.034804096221923826, 0.034953472137451175, 0.03495116806030273, 0.034811550140380856, 0.03497132873535156, 0.03473884963989258, 0.034803009033203124, 0.03485327911376953, 0.03485520172119141, 0.03475251388549805, 0.03504537582397461, 0.03469481658935547, 0.03472623825073242, 0.034713470458984375, 0.034643360137939457, 0.03503177642822266, 0.03489107131958008, 0.0351607666015625, 0.0353546257019043, 0.03515801620483398, 0.035297279357910154, 0.03488153457641602, 0.03485440063476562, 0.034722305297851565, 0.035087711334228514, 0.034914974212646485, 0.03501670455932617, 0.0347770881652832, 0.03472588729858399, 0.03574208068847656, 0.03530569458007812, 0.03515094375610352, 0.03498294448852539, 0.03851161575317383, 0.03489449691772461, 0.03488742446899414, 0.03475199890136719, 0.035006656646728515, 0.034822719573974606, 0.03499622344970703, 0.03477932739257812, 0.0349029426574707, 0.037933025360107425, 0.03545388793945312, 0.035163486480712894, 0.03481052780151367, 0.03561881637573242, 0.035645439147949216, 0.034754558563232424, 0.03508224105834961, 0.03509471893310547, 0.03500969696044922, 0.03486991882324219, 0.03484672164916992, 0.034711456298828124, 0.03490825653076172, 0.034872447967529294, 0.0350047378540039, 0.034838623046875, 0.034961887359619144, 0.03498112106323242, 0.03534467315673828, 0.034885215759277347, 0.03505433654785156, 0.03518886566162109, 0.0352542724609375, 0.03508019256591797, 0.03514348983764649, 0.03470892715454101, 0.03492736053466797, 0.035699935913085935, 0.03601475143432617, 0.03501500701904297, 0.03491721725463867, 0.0349617919921875, 0.035165985107421874, 0.0349334716796875, 0.03545289611816406, 0.0349752311706543, 0.03506358337402344, 0.03519062423706055, 0.0352407341003418, 0.034929248809814455, 0.0349315185546875, 0.03478102493286133, 0.03468326568603516, 0.03494873428344727, 0.035011520385742186, 0.035064830780029296, 0.03496448135375976, 0.034772544860839846, 0.03468067169189453, 0.03605833435058594, 0.0354815673828125, 0.03536518478393555, 0.03490252685546875, 0.034869247436523435, 0.034729057312011716, 0.03457664108276367, 0.03459084701538086, 0.034447200775146486, 0.034288318634033206, 0.03442483139038086, 0.034463455200195316, 0.034496063232421874, 0.034458335876464845, 0.03441609573364258, 0.0345318717956543, 0.03453129577636719, 0.034497760772705076, 0.03434988784790039, 0.03420310211181641, 0.034396095275878905, 0.03477766418457031, 0.03481744003295899, 0.034679424285888674, 0.03483363342285156, 0.035054367065429685, 0.034836383819580076, 0.034723934173583985, 0.03788390350341797, 0.03502262496948242, 0.034848255157470705, 0.03475939178466797, 0.034751968383789064, 0.034462238311767576, 0.03460300827026367, 0.03495935821533203, 0.035216926574707035, 0.03478764724731445, 0.03482371139526367, 0.034796257019042966, 0.034779041290283204, 0.034799583435058595, 
0.034555713653564454, 0.034649761199951175, 0.03464044952392578, 0.03459411239624023, 0.03458220672607422, 0.03455487823486328, 0.034508800506591795, 0.03578060913085938, 0.034551807403564457, 0.03461737442016601, 0.03453235244750977, 0.03451500701904297, 0.034744350433349606, 0.03434310531616211, 0.03433132934570313, 0.03442435073852539, 0.03434543991088867, 0.03427123260498047, 0.034356575012207034, 0.03452380752563477, 0.034344959259033206, 0.035000736236572266, 0.03475491333007812, 0.03462083053588867, 0.034664703369140626, 0.034602367401123046, 0.03455279922485351, 0.0348590087890625, 0.03444736099243164, 0.034434368133544925, 0.034508575439453126, 0.034544063568115235, 0.03422198486328125, 0.03428409576416016, 0.03456630325317383, 0.03448947143554688, 0.03491913604736328, 0.03472588729858399, 0.03461939239501953, 0.03448419189453125, 0.03464195251464844, 0.03442179107666016, 0.03449654388427734, 0.03474937438964844, 0.034751838684082034, 0.03454742431640625, 0.035093441009521484, 0.03463577651977539, 0.03436937713623047, 0.034465953826904296, 0.034326526641845705, 0.034493953704833984, 0.034242721557617185, 0.03440995025634765, 0.03432128143310547, 0.03441459274291992, 0.03443097686767578, 0.03458009719848633, 0.034474369049072265, 0.034571712493896484, 0.03433260726928711, 0.03443289566040039, 0.03444947052001953, 0.03438633728027344, 0.03457974243164062, 0.034356224060058595, 0.03431615829467773, 0.03441436767578125, 0.03426339340209961, 0.03439206314086914, 0.0344268798828125, 0.034369537353515625, 0.03468697738647461, 0.03511075210571289, 0.03467689514160156, 0.03464518356323242, 0.03464684677124023, 0.034514400482177736, 0.03436912155151367, 0.0344378547668457, 0.034422111511230466, 0.034546558380126956, 0.03450198364257812, 0.034615966796875, 0.035519710540771486, 0.034775264739990236, 0.03448684692382813, 0.03472320175170898, 0.034425472259521486, 0.034541568756103515, 0.034632991790771485, 0.03437161636352539, 0.03469513702392578, 0.03421014404296875, 0.03440473556518555, 0.035811328887939455, 0.03480739212036133, 0.03461062240600586, 0.03491526412963867, 0.0369183349609375, 0.034604000091552733, 0.03457843017578125, 0.0343818244934082, 0.034576385498046876, 0.034543617248535156, 0.03430403137207031, 0.0345327033996582, 0.03437363052368164, 0.034603649139404294, 0.0344035530090332, 0.03437811279296875, 0.03466486358642578, 0.03442844772338867, 0.03464230346679688, 0.03467887878417969, 0.03512319946289062, 0.03460300827026367, 0.03446076965332031, 0.034756641387939456, 0.03461324691772461, 0.03445235061645508, 0.035104351043701174, 0.03462390518188477, 0.03474227142333984, 0.035149822235107424, 0.03465811157226562, 0.03480595016479492, 0.03451289749145508, 0.034699264526367186, 0.034631679534912106, 0.03467647933959961, 0.0353702392578125, 0.034776065826416014, 0.03472588729858399, 0.03470070266723633, 0.03449711990356445, 0.03439187240600586, 0.034369728088378904, 0.034383071899414065, 0.034298656463623046, 0.03497369766235352, 0.03506585693359375, 0.03538639831542969, 0.035049888610839845, 0.03528553771972656, 0.03466243362426758, 0.034852863311767575]",tokens/s,28.76117870827655,, 
4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,3167.440896,4431.151104,0.0,4028.628992,3944.723968,s,1,10.4510712890625,10.4510712890625,0.0,10.4510712890625,10.4510712890625,10.4510712890625,10.4510712890625,[10.4510712890625],,kWh,9.542317954166417e-05,1.0518519734284388e-05,3.059169114000282e-05,0.00013653339041595137,,MB,3221.356544,4770.889728,0.0,4353.687552,4305.05728,s,10,1.1044366760253905,0.11044366760253906,0.0002848389871099568,0.11044403076171874,0.11082803268432617,0.11088694343566895,0.11093407203674317,"[0.11081494140625, 0.11040764617919922, 0.1105552978515625, 0.10989884948730469, 0.11029542541503906, 0.11020684814453124, 0.11029376220703126, 0.11048041534423828, 0.11053763580322265, 0.11094585418701172]",tokens/s,2317.923748433311,kWh,3.307156384222962e-06,3.647112465421859e-07,2.195594765213451e-06,5.8674623959785986e-06,tokens/kWh,43630445.79125987,MB,3221.356544,4770.889728,0.0,4353.687552,4305.05984,s,10,22.064403320312493,2.20644033203125,0.02121619752585357,2.201143310546875,2.2305120117187496,2.23901015625,2.245808671875,"[2.17440966796875, 2.191793701171875, 2.18486376953125, 2.195716552734375, 2.19678369140625, 2.2055029296875, 2.21407568359375, 2.22512548828125, 2.24750830078125, 2.22862353515625]",tokens/s,28.55277756004495,kWh,6.443335558952629e-05,7.106840502124574e-06,3.5765296714585635e-05,0.00010730549280623651,tokens/kWh,587108.8082486164,,s,630,22.061627162933306,0.035018455814179915,0.0007061827530035264,0.03489764785766602,0.03583762092590332,0.03606394004821777,0.037804643249511743,"[0.034930912017822266, 0.03487385559082031, 0.034369537353515625, 0.03424374389648437, 0.03439049530029297, 0.03438937759399414, 0.03450556945800781, 0.03436076736450195, 0.034350975036621094, 0.0344277458190918, 0.03423027038574219, 0.03426508712768555, 0.03424019241333008, 0.03431046295166015, 0.034127872467041014, 0.03433881759643555, 0.034592769622802735, 0.034414657592773436, 0.034242462158203125, 0.0344598388671875, 0.03416035079956055, 0.034283294677734374, 0.03441052627563477, 0.039233566284179684, 0.034801952362060545, 0.03440662384033203, 0.03442051315307617, 0.03619574356079101, 0.03471414566040039, 0.03429321670532227, 0.03460156631469727, 0.03425894546508789, 0.03419334411621094, 0.03423379135131836, 0.034156478881835935, 0.034189537048339845, 0.03401161575317383, 0.03408406448364258, 0.03418806457519531, 0.03419136047363281, 0.03421184158325195, 0.03419750213623047, 0.03413212966918945, 0.03407593536376953, 0.03412844848632812, 0.0343469123840332, 0.034145824432373045, 0.034136001586914065, 0.034460575103759765, 0.034002880096435546, 0.03408259201049805, 0.03435059356689453, 0.03478704071044922, 0.03865657424926758, 0.03477875137329101, 0.03438361740112305, 0.034415454864501954, 0.034574207305908204, 0.03422035217285156, 0.03425491333007812, 0.03429667282104492, 0.0342367057800293, 0.03420419311523438, 0.035590145111083986, 
0.034772991180419925, 0.03468492889404297, 0.034598911285400394, 0.03451206588745117, 0.034319168090820314, 0.0341907844543457, 0.03426902389526367, 0.03425158309936523, 0.03419884872436523, 0.03433123016357422, 0.034359294891357424, 0.03413520050048828, 0.03426300811767578, 0.034296703338623044, 0.03416640090942383, 0.03450099182128906, 0.03444940948486328, 0.03449001693725586, 0.038003040313720704, 0.03472793579101562, 0.034334495544433595, 0.03486313629150391, 0.034406593322753906, 0.03426003265380859, 0.03444623947143555, 0.03434870529174805, 0.034486686706542966, 0.03436067199707031, 0.03439680099487305, 0.03439820861816406, 0.03511865615844727, 0.03501715087890625, 0.03504531097412109, 0.035157344818115235, 0.034888416290283206, 0.03482624053955078, 0.0348686408996582, 0.0345337905883789, 0.03427756881713867, 0.03441392135620117, 0.034362014770507814, 0.034199710845947265, 0.03403913497924805, 0.03506134414672851, 0.035959392547607424, 0.03574406433105469, 0.03539286422729492, 0.03495920181274414, 0.0361313591003418, 0.034621376037597656, 0.03507440185546875, 0.0351539192199707, 0.035143680572509765, 0.03511904144287109, 0.035114208221435544, 0.03525923156738281, 0.03527065658569336, 0.035348480224609374, 0.03517424011230469, 0.03525759887695312, 0.034951774597167966, 0.03464156723022461, 0.03520153427124023, 0.03472022247314453, 0.03489737701416016, 0.034605792999267575, 0.03473183822631836, 0.03454329681396484, 0.035555553436279294, 0.03652579116821289, 0.035888511657714846, 0.035148799896240236, 0.03516758346557617, 0.03520169448852539, 0.034996063232421874, 0.03534864044189453, 0.03500646209716797, 0.034602592468261716, 0.03450851058959961, 0.03444806289672851, 0.03445753479003906, 0.03437993621826172, 0.034490272521972655, 0.034459648132324217, 0.034325729370117186, 0.03424361419677734, 0.034491233825683594, 0.034395038604736326, 0.03439734268188477, 0.03438844680786133, 0.034417022705078126, 0.034277374267578126, 0.034221534729003904, 0.03460764694213867, 0.03411763381958008, 0.03419945526123047, 0.034740318298339845, 0.034361343383789066, 0.03430809783935547, 0.034311233520507814, 0.03449766540527344, 0.03425471878051758, 0.03497148895263672, 0.03433465576171875, 0.03417449569702148, 0.034119422912597654, 0.03420969772338867, 0.03434729766845703, 0.03478537750244141, 0.03441916656494141, 0.03588313674926758, 0.03458780670166016, 0.034210655212402345, 0.03432969665527344, 0.03469321441650391, 0.03445638275146484, 0.034395263671875, 0.034361312866210934, 0.03479580688476563, 0.0345852165222168, 0.034907424926757816, 0.035050209045410154, 0.03508428955078125, 0.0350904312133789, 0.03534131240844727, 0.03566726303100586, 0.03503484725952148, 0.03513647842407226, 0.03527475357055664, 0.034988033294677735, 0.035533023834228517, 0.03512847900390625, 0.035033729553222655, 0.0353546257019043, 0.035141632080078124, 0.03514566421508789, 0.03535651016235351, 0.034732257843017575, 0.03520307159423828, 0.035149822235107424, 0.03503513717651367, 0.03503513717651367, 0.034648063659667966, 0.03462144088745117, 0.034475200653076174, 0.0344543342590332, 0.03450172805786133, 0.034560352325439456, 0.03436374282836914, 0.03452918243408203, 0.03460537719726563, 0.034678592681884765, 0.03457231903076172, 0.03462566375732422, 0.03453142547607422, 0.03457763290405273, 0.0349969596862793, 0.03492659378051758, 0.03510172653198242, 0.03472662353515625, 0.03456230545043945, 0.03439311981201172, 0.03437871932983398, 0.03445555114746094, 0.0342806396484375, 0.034406208038330076, 0.03439923095703125, 
0.03455126571655273, 0.03472848129272461, 0.03467673492431641, 0.034840576171875, 0.034815807342529294, 0.03479158401489258, 0.03475049591064453, 0.03483561706542969, 0.034711456298828124, 0.03484972763061524, 0.035095680236816404, 0.035137889862060546, 0.035195423126220704, 0.035076095581054685, 0.03486873626708984, 0.034781631469726564, 0.034826305389404295, 0.034620639801025394, 0.035007137298583985, 0.035038719177246096, 0.03589379119873047, 0.035388992309570315, 0.03505027389526367, 0.03480316925048828, 0.03495993423461914, 0.03501001739501953, 0.035047969818115234, 0.03480780792236328, 0.03483612823486328, 0.03474028778076172, 0.03469062423706055, 0.03486793518066406, 0.034770942687988284, 0.03468697738647461, 0.034936832427978515, 0.03493379211425781, 0.03466953659057617, 0.034516990661621096, 0.03445731353759766, 0.03464774322509766, 0.034586654663085935, 0.03461552047729492, 0.034574687957763674, 0.034598911285400394, 0.03474444961547852, 0.03475033569335938, 0.03489791870117188, 0.03472566223144531, 0.034827617645263674, 0.034665184020996095, 0.034506591796875, 0.034570560455322266, 0.0345263671875, 0.034406593322753906, 0.03444598388671875, 0.03438387298583984, 0.034514942169189454, 0.03478966522216797, 0.0347685432434082, 0.03464380645751953, 0.034613632202148435, 0.034813953399658204, 0.03504867172241211, 0.035170177459716796, 0.03483430480957031, 0.0347902717590332, 0.03499734497070312, 0.0347097282409668, 0.03464262390136719, 0.03473408126831055, 0.03476598358154297, 0.03711676788330078, 0.03487948989868164, 0.03597875213623047, 0.0356704330444336, 0.037318912506103516, 0.03496451187133789, 0.03497625732421875, 0.0348570556640625, 0.03459708786010742, 0.03460015869140625, 0.03469619369506836, 0.034622528076171874, 0.03476076889038086, 0.03558617782592773, 0.035128833770751954, 0.03504115295410156, 0.03498035049438476, 0.034854911804199216, 0.03485696029663086, 0.03475251388549805, 0.035194881439208986, 0.03498767852783203, 0.03513353729248047, 0.03505721664428711, 0.034912033081054686, 0.035035839080810545, 0.035481822967529296, 0.035323902130126955, 0.03514572906494141, 0.03505379104614258, 0.034796737670898435, 0.03478511810302734, 0.03457510375976563, 0.03501055908203125, 0.034856128692626956, 0.034904895782470705, 0.03474816131591797, 0.03460137557983398, 0.034807647705078125, 0.03592396926879883, 0.035018047332763674, 0.034861759185791014, 0.03498534393310547, 0.034828929901123046, 0.03462348937988281, 0.03486105728149414, 0.0348221435546875, 0.034631679534912106, 0.03467843246459961, 0.03489007949829102, 0.03522150421142578, 0.03531103897094726, 0.03498614501953125, 0.034917919158935544, 0.03491929626464844, 0.038319583892822265, 0.034860862731933596, 0.03538198471069336, 0.03479132843017578, 0.034781280517578124, 0.035028640747070315, 0.03523209762573242, 0.035120254516601564, 0.034958206176757815, 0.03478227233886719, 0.034817985534667965, 0.03504844665527344, 0.034955265045166016, 0.03504246520996094, 0.03492918395996094, 0.03459257507324219, 0.034974369049072265, 0.03474723052978516, 0.03453763198852539, 0.03449878311157226, 0.03475724792480469, 0.03703647994995117, 0.03562099075317383, 0.03499008178710938, 0.035004417419433595, 0.03487539291381836, 0.03468198394775391, 0.03465692901611328, 0.034963680267333985, 0.03516783905029297, 0.03485532760620117, 0.034783233642578126, 0.034748416900634765, 0.034893505096435545, 0.03522089767456055, 0.035261344909667966, 0.03526873779296875, 0.03516198348999024, 0.035012702941894534, 0.03497942352294922, 0.03487776184082031, 
0.03503104019165039, 0.03525996780395508, 0.03520556640625, 0.03518259048461914, 0.03514080047607422, 0.03493952178955078, 0.03489811325073242, 0.03489369583129883, 0.03525151824951172, 0.03876665496826172, 0.035402496337890624, 0.03537676620483399, 0.03530790328979492, 0.035460094451904296, 0.03510492706298828, 0.03542716979980469, 0.035320095062255856, 0.035057376861572266, 0.03493478393554687, 0.03465420913696289, 0.035176448822021485, 0.03554064178466797, 0.03541167831420899, 0.03525475311279297, 0.03530534362792969, 0.03510646438598633, 0.03519120025634766, 0.03517462539672851, 0.03506335830688476, 0.03521376037597656, 0.03510444641113281, 0.034853214263916014, 0.03503916931152344, 0.03501379013061524, 0.0353059196472168, 0.03501100921630859, 0.03485081481933594, 0.034260990142822266, 0.034710784912109376, 0.03419350433349609, 0.034957374572753906, 0.03451257705688476, 0.03489238357543945, 0.03473657608032227, 0.034297439575195314, 0.03434131240844727, 0.03419289779663086, 0.03409766387939453, 0.034385055541992185, 0.03427363204956055, 0.03425331115722656, 0.034209121704101564, 0.03426899337768555, 0.034062240600585936, 0.034134654998779296, 0.03487472152709961, 0.03484156799316406, 0.034522720336914066, 0.034519454956054685, 0.03452099227905273, 0.03513695907592773, 0.03844681549072266, 0.03542486572265625, 0.03494947052001953, 0.03450265502929688, 0.03457404708862305, 0.03455414581298828, 0.03446739196777344, 0.034447647094726565, 0.035035392761230466, 0.035149505615234375, 0.035130592346191404, 0.034948097229003904, 0.03530047988891601, 0.03544153594970703, 0.03544063949584961, 0.03548681640625, 0.035640224456787106, 0.03574169540405273, 0.03559219360351563, 0.03551776123046875, 0.03566201782226563, 0.035769855499267575, 0.03602329635620117, 0.03621478271484375, 0.03571507263183594, 0.035777729034423826, 0.036109119415283206, 0.03597340774536133, 0.03602608108520508, 0.03628646469116211, 0.03618611145019531, 0.03605913543701172, 0.03604889678955078, 0.03609328079223633, 0.036168350219726565, 0.036016128540039063, 0.03605846405029297, 0.0365656623840332, 0.035800193786621096, 0.03572358322143555, 0.03589734268188476, 0.035664447784423826, 0.03551232147216797, 0.03648102569580078, 0.035522560119628906, 0.03604275131225586, 0.03527475357055664, 0.035454975128173825, 0.035555328369140625, 0.03521331024169922, 0.03632707214355469, 0.03536931228637695, 0.03544678497314453, 0.035389438629150394, 0.03549798583984375, 0.03560857772827149, 0.03531158447265625, 0.035330078125, 0.0355860481262207, 0.03581657409667969, 0.035692447662353514, 0.03596144104003906, 0.03552499389648438, 0.03525344085693359, 0.035624897003173825, 0.035748737335205075, 0.03572124862670899, 0.03580105590820312, 0.03584329605102539, 0.03592428970336914, 0.03610201644897461, 0.036020641326904294, 0.03618998336791992, 0.03610051345825195, 0.0359147834777832, 0.035990272521972656, 0.03597321701049805, 0.03587699127197266, 0.03599359893798828, 0.03585228729248047, 0.03589120101928711, 0.03586975860595703, 0.03597334289550781, 0.03568921661376953, 0.03541167831420899, 0.03565132904052734, 0.0355230712890625, 0.03575398254394531, 0.03540991973876953, 0.03551228713989258, 0.035366943359375, 0.03548364639282227, 0.03551641464233399, 0.03582566452026367, 0.03546047973632813, 0.03532662582397461, 0.03542422485351562, 0.03538479995727539, 0.035506526947021486, 0.03550348663330078, 0.0357589111328125, 0.035604480743408204, 0.035520511627197264, 0.03583699035644531, 0.035498207092285156, 0.03557449722290039, 0.0358171501159668, 
0.03576863861083984, 0.03624755096435547, 0.03548160171508789, 0.03548108673095703, 0.036453887939453124, 0.03563600158691406, 0.035370559692382814, 0.03594112014770508, 0.0360134391784668, 0.035947040557861326, 0.03609190368652344, 0.03636825561523437, 0.035724544525146486, 0.035459007263183594, 0.03534329605102539, 0.03542425537109375, 0.035448833465576174, 0.03573049545288086, 0.03535763168334961, 0.036317024230957035, 0.03567827224731445, 0.03545609664916992, 0.03606787109375, 0.03543497467041016, 0.03526838302612305, 0.0349760627746582, 0.03486300659179688, 0.03476070404052734, 0.0346907844543457, 0.035084415435791015, 0.03537526321411133, 0.03512934494018555, 0.03511705780029297, 0.03539961624145508, 0.035102783203125, 0.034829727172851564, 0.034780799865722654, 0.03489238357543945, 0.03487343978881836, 0.035295040130615234, 0.03520764923095703, 0.03480121612548828, 0.034666942596435546, 0.03440841674804687, 0.034530494689941404, 0.03442319869995117, 0.03452105712890625, 0.03457660675048828, 0.03458483123779297, 0.034754112243652345, 0.03476144027709961, 0.035069473266601564, 0.03455609512329102, 0.03465830230712891, 0.03451903915405274, 0.03507382583618164, 0.0351923828125, 0.03521807861328125, 0.03576627349853516, 0.03555084609985352, 0.03553958511352539, 0.03582336044311524, 0.04110704040527344, 0.03608358383178711]",tokens/s,28.556370540904123,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,3158.24128,4431.151104,0.0,4028.628992,3944.723968,s,1,10.468568359375,10.468568359375,0.0,10.468568359375,10.468568359375,10.468568359375,10.468568359375,[10.468568359375],,kWh,9.744984121242244e-05,1.0741945159657262e-05,3.0471135488013834e-05,0.00013866292186009353,,MB,2980.560896,4770.889728,0.0,4353.687552,4305.05728,s,10,1.1536016311645507,0.1153601631164551,0.00015453403989926204,0.1153535041809082,0.11555985946655274,0.11557176933288574,0.11558129722595215,"[0.11535424041748046, 0.11558367919921875, 0.11516553497314454, 0.11529641723632812, 0.11506012725830078, 0.11532889556884765, 0.11546454620361328, 0.11543820953369141, 0.11535276794433594, 0.11555721282958985]",tokens/s,2219.136945407837,kWh,3.4513731909318464e-06,3.806244922671959e-07,2.295939745247176e-06,6.127937428446218e-06,tokens/kWh,41775883.48269258,MB,2984.689664,4770.889728,0.0,4353.687552,4305.05984,s,10,25.45242407226562,2.5452424072265627,0.010921404254275176,2.54685986328125,2.5569438720703124,2.5578602416992187,2.5585933374023435,"[2.5409130859375, 2.551753173828125, 2.55426025390625, 2.556740234375, 2.527989990234375, 2.52950341796875, 2.535803955078125, 2.541966552734375, 2.558776611328125, 
2.554716796875]",tokens/s,24.75206283736577,kWh,7.40796593303255e-05,8.170885498793493e-06,3.88408431641512e-05,0.00012109138799327019,tokens/kWh,520268.2126618394,,s,630,25.449817668914807,0.04039653598240444,0.0006701905993121197,0.04035440063476563,0.04085800514221191,0.04105314445495605,0.04386141094207764,"[0.04106208038330078, 0.040908832550048825, 0.043471134185791016, 0.04086374282836914, 0.040578624725341794, 0.04029280090332031, 0.04080025482177734, 0.040118431091308596, 0.0404826545715332, 0.039979007720947264, 0.039946239471435545, 0.04038655853271484, 0.04025548934936524, 0.040268959045410155, 0.0402762565612793, 0.03991545486450195, 0.04027619171142578, 0.04063216018676758, 0.04064896011352539, 0.04031875228881836, 0.040258079528808596, 0.0403394546508789, 0.04021452713012695, 0.04071219253540039, 0.040515583038330076, 0.04030451202392578, 0.040673408508300785, 0.04037222290039062, 0.04026118469238281, 0.04007980728149414, 0.04029439926147461, 0.040101886749267575, 0.0398045768737793, 0.0396475830078125, 0.04419372940063476, 0.03988828659057617, 0.03996307373046875, 0.03963926315307617, 0.03953855895996094, 0.03944643020629883, 0.039504096984863284, 0.039512065887451174, 0.03975968170166016, 0.03998527908325195, 0.04018316650390625, 0.039768768310546876, 0.039847934722900394, 0.03944243240356445, 0.03978649520874023, 0.03997491073608399, 0.04287648010253906, 0.04073625564575195, 0.04035878372192383, 0.039951744079589846, 0.039817920684814455, 0.04006694412231445, 0.039882881164550785, 0.04040281677246094, 0.04015526580810547, 0.040114177703857425, 0.04027971267700195, 0.04032931137084961, 0.04018832015991211, 0.04092768096923828, 0.04038246536254883, 0.040478080749511716, 0.04066368103027344, 0.040439231872558594, 0.04025811386108399, 0.04053807830810547, 0.04048278427124023, 0.04053225708007813, 0.040823745727539065, 0.0408328971862793, 0.04035404968261719, 0.04038051223754883, 0.040443710327148434, 0.04036214447021484, 0.04022060775756836, 0.0401640625, 0.04034764862060547, 0.04045619201660156, 0.040425472259521485, 0.04078182220458984, 0.0409989128112793, 0.04087744140625, 0.04097526550292969, 0.04121712112426758, 0.04081523132324219, 0.04061347198486328, 0.040692127227783204, 0.04073267364501953, 0.04023196792602539, 0.04039779281616211, 0.04048691177368164, 0.04090060806274414, 0.04074700927734375, 0.04075110244750976, 0.04036812973022461, 0.040235008239746094, 0.04019564819335938, 0.03996307373046875, 0.03986943817138672, 0.039785472869873044, 0.04006911849975586, 0.03975372695922851, 0.04039241409301758, 0.040429855346679686, 0.04058281707763672, 0.040589664459228514, 0.040292350769042966, 0.040510494232177736, 0.0406343994140625, 0.04063452911376953, 0.04064131164550781, 0.040568801879882814, 0.04057014465332031, 0.04081449508666992, 0.04056729507446289, 0.040358238220214844, 0.04017356872558594, 0.040007038116455076, 0.04041766357421875, 0.041142528533935546, 0.04036812973022461, 0.04079990386962891, 0.041070751190185543, 0.040422176361083986, 0.04000732803344727, 0.04115251159667969, 0.040140064239501956, 0.04033200073242187, 0.04057907104492187, 0.04040233612060547, 0.040342113494873044, 0.03989641571044922, 0.04076134490966797, 0.04068214416503906, 0.04084940719604492, 0.040812545776367185, 0.04029644775390625, 0.04025059127807617, 0.04030953598022461, 0.040271137237548826, 0.04030332946777344, 0.040321025848388675, 0.040022239685058594, 0.041644065856933594, 0.04010147094726563, 0.04007571029663086, 0.04048457717895508, 0.040089599609375, 0.04014899063110351, 
0.03995238494873047, 0.04001587295532227, 0.04029782485961914, 0.04039952087402344, 0.04047052764892578, 0.04061983871459961, 0.04059360122680664, 0.040564735412597655, 0.040357887268066404, 0.040432926177978515, 0.04097014236450195, 0.040873886108398434, 0.0403988151550293, 0.04039680099487305, 0.041669567108154296, 0.04055859375, 0.04087376022338867, 0.04085782241821289, 0.04071014404296875, 0.04068096160888672, 0.04098303985595703, 0.04071971130371094, 0.04076723098754883, 0.04044073486328125, 0.040400894165039065, 0.040591358184814456, 0.04250995254516601, 0.04156646347045898, 0.040683712005615234, 0.04033529663085938, 0.040051807403564454, 0.04024208068847656, 0.04005887985229492, 0.04004044723510742, 0.04025139236450195, 0.04090265655517578, 0.04108835220336914, 0.040599552154541016, 0.04084326553344726, 0.040721057891845706, 0.040976383209228515, 0.04096614456176758, 0.04043571090698242, 0.040716289520263675, 0.04036403274536133, 0.040065025329589846, 0.040275966644287106, 0.040032257080078126, 0.04013238525390625, 0.04017484664916992, 0.04033020782470703, 0.040390655517578124, 0.04057702255249023, 0.0404213752746582, 0.04053343963623047, 0.04028812789916992, 0.040624576568603514, 0.04045235061645508, 0.04014899063110351, 0.040140159606933595, 0.04024111938476563, 0.04023305511474609, 0.04020809555053711, 0.043995967864990236, 0.04038864135742187, 0.04009910583496094, 0.04070678329467774, 0.04050080108642578, 0.04054880142211914, 0.040648704528808595, 0.04040499114990234, 0.0403614387512207, 0.040403488159179685, 0.04037222290039062, 0.04051148986816406, 0.041009151458740234, 0.04085964965820312, 0.0404213752746582, 0.04058515167236328, 0.040527294158935544, 0.0407740478515625, 0.041019615173339845, 0.04087795257568359, 0.040540065765380856, 0.04071142578125, 0.04058415985107422, 0.040570465087890625, 0.04556841659545898, 0.04151500701904297, 0.04004044723510742, 0.039965953826904294, 0.04003097534179687, 0.040234657287597654, 0.04032956695556641, 0.04009081649780273, 0.03992671966552734, 0.039888065338134764, 0.03987036895751953, 0.0396431999206543, 0.03996819305419922, 0.03992633438110352, 0.03991321563720703, 0.04006447982788086, 0.0401431999206543, 0.039973312377929685, 0.04025753784179688, 0.04041676712036133, 0.04034928131103516, 0.040856479644775394, 0.04061145782470703, 0.04000806427001953, 0.03974863815307617, 0.03992031860351562, 0.039980926513671876, 0.039969184875488284, 0.040376319885253906, 0.03964313507080078, 0.0395489273071289, 0.03941785430908203, 0.03942956924438477, 0.03946758270263672, 0.039782398223876955, 0.03954278564453125, 0.03983747100830078, 0.03949619293212891, 0.03960992050170899, 0.03940572738647461, 0.039610366821289066, 0.03994825744628906, 0.04005007934570313, 0.040067710876464845, 0.039948287963867186, 0.04015718460083008, 0.043896480560302736, 0.04035184097290039, 0.04060124969482422, 0.04004048156738281, 0.04010649490356445, 0.04016134262084961, 0.04012851333618164, 0.04076464080810547, 0.04032531356811524, 0.03995881652832031, 0.039768383026123046, 0.03964723205566406, 0.040032257080078126, 0.040232769012451174, 0.039944385528564455, 0.039815166473388675, 0.03979612731933594, 0.03965935897827148, 0.03974220657348633, 0.04027395248413086, 0.040468448638916014, 0.040398975372314454, 0.04059689712524414, 0.040425952911376954, 0.040409088134765625, 0.04088150405883789, 0.040562496185302735, 0.04056671905517578, 0.04069468688964844, 0.04099737548828125, 0.04057088088989258, 0.04035353469848633, 0.04014652633666992, 0.040210689544677734, 
0.040021568298339846, 0.04033622360229492, 0.04056876754760742, 0.040400894165039065, 0.04020780944824219, 0.04031935882568359, 0.03999513626098633, 0.04001007843017578, 0.03991712188720703, 0.0397762565612793, 0.039989856719970705, 0.04016489410400391, 0.040016353607177736, 0.04016316986083984, 0.039919776916503905, 0.03978854370117187, 0.03954687881469727, 0.040103935241699216, 0.0409804801940918, 0.0405948486328125, 0.040835678100585936, 0.04024115371704102, 0.04027532958984375, 0.03997350311279297, 0.03966566467285156, 0.03970457458496094, 0.03982534408569336, 0.03998099136352539, 0.03970060729980469, 0.040114177703857425, 0.04009292984008789, 0.04010470581054688, 0.04050118255615234, 0.040538177490234376, 0.0400423698425293, 0.04037235260009766, 0.03999683380126953, 0.039637439727783205, 0.03967606353759766, 0.04089401626586914, 0.04068601608276367, 0.040202144622802735, 0.04012812805175781, 0.04030636978149414, 0.039869216918945315, 0.03988275146484375, 0.03970041656494141, 0.04020844650268555, 0.04034764862060547, 0.040570209503173825, 0.04014147186279297, 0.039898143768310544, 0.04112892913818359, 0.039823360443115234, 0.03964044952392578, 0.03977280044555664, 0.039927806854248044, 0.03974457550048828, 0.04050207901000977, 0.0400711669921875, 0.03991142272949219, 0.040115646362304684, 0.04114076614379883, 0.041139999389648435, 0.03975193786621094, 0.03987251281738281, 0.041717758178710936, 0.04673535919189453, 0.043407360076904294, 0.03999129486083984, 0.03990095901489258, 0.03995260620117187, 0.03997081756591797, 0.039695743560791016, 0.040263614654541015, 0.03995625686645508, 0.03975030517578125, 0.03977036666870117, 0.039753536224365234, 0.03975596618652344, 0.03984502410888672, 0.03953062438964844, 0.03993673706054687, 0.04001171112060547, 0.039908512115478516, 0.039750560760498044, 0.0404398078918457, 0.040285888671875, 0.039868736267089845, 0.039820510864257815, 0.040220542907714846, 0.03976688003540039, 0.039870529174804686, 0.0397022705078125, 0.03972121429443359, 0.04016332626342774, 0.040474624633789064, 0.04281744003295898, 0.040460384368896485, 0.040217823028564456, 0.040114974975585936, 0.04003772735595703, 0.04014556884765625, 0.039894367218017576, 0.04008617782592774, 0.03992089462280273, 0.03980556869506836, 0.040244640350341795, 0.03996950531005859, 0.03989254379272461, 0.040044670104980466, 0.040634654998779295, 0.0401387825012207, 0.03999948883056641, 0.0397127685546875, 0.04015024185180664, 0.03967001724243164, 0.0399486083984375, 0.0396956787109375, 0.03967478561401367, 0.03984371185302735, 0.04105215835571289, 0.04064051055908203, 0.040678462982177734, 0.04064710235595703, 0.04053657531738281, 0.040371646881103514, 0.040378944396972656, 0.040062721252441404, 0.04012419128417969, 0.040126529693603516, 0.03988111877441406, 0.0399441909790039, 0.04464345550537109, 0.04030963134765625, 0.04014694213867188, 0.04003635025024414, 0.04016515350341797, 0.03968841552734375, 0.04018175888061523, 0.03984086227416992, 0.039826335906982424, 0.039746910095214846, 0.03999164962768555, 0.039839134216308594, 0.040502174377441406, 0.040277183532714846, 0.040385345458984374, 0.04005379104614258, 0.040372383117675784, 0.04049798583984375, 0.04029849624633789, 0.04055244827270508, 0.04056883239746094, 0.04025680160522461, 0.04032284927368164, 0.04016428756713867, 0.04048271942138672, 0.043775550842285155, 0.04048284912109375, 0.04012019348144531, 0.04045270538330078, 0.04011420822143555, 0.04016880035400391, 0.039870590209960935, 0.039645854949951174, 0.03960176086425781, 
0.03965161514282227, 0.03951411056518555, 0.040072799682617184, 0.0400654411315918, 0.04049071884155273, 0.04006467056274414, 0.04028889465332031, 0.04024943923950195, 0.04056399917602539, 0.04035033416748047, 0.04033059310913086, 0.040056896209716794, 0.04022332763671875, 0.0405032958984375, 0.04078291320800781, 0.04036111831665039, 0.04029212951660156, 0.04111721420288086, 0.04066352081298828, 0.0405032958984375, 0.04030831909179688, 0.040308769226074216, 0.040333694458007815, 0.04041424179077149, 0.04039574432373047, 0.04027376174926758, 0.04066934585571289, 0.04043971252441406, 0.040287521362304686, 0.04043036651611328, 0.04046236801147461, 0.04072652816772461, 0.040460289001464846, 0.04064255905151367, 0.0404881591796875, 0.040921886444091796, 0.044075263977050784, 0.041053951263427736, 0.04089452743530273, 0.04089644622802734, 0.040597503662109374, 0.04088627243041992, 0.04064156723022461, 0.0410711669921875, 0.0405951042175293, 0.04048358535766602, 0.04049417495727539, 0.04085238265991211, 0.040377761840820314, 0.040450656890869144, 0.040322975158691404, 0.04042351913452148, 0.04053811264038086, 0.04036812973022461, 0.0401162223815918, 0.04014694213867188, 0.040149055480957034, 0.04038444900512695, 0.04051968002319336, 0.04061539077758789, 0.04038915252685547, 0.04039465713500977, 0.04054153442382812, 0.04031702423095703, 0.04077840042114258, 0.04068320083618164, 0.04079568099975586, 0.04096659088134766, 0.040551841735839846, 0.04045481491088867, 0.04038412857055664, 0.040595169067382815, 0.04035475158691406, 0.040313983917236326, 0.04032144165039062, 0.040968448638916015, 0.04082710266113281, 0.041603073120117184, 0.04032627105712891, 0.04015359878540039, 0.04078182220458984, 0.04055039978027344, 0.04057843017578125, 0.04048550415039062, 0.04070809555053711, 0.040474624633789064, 0.04049286270141601, 0.04056857681274414, 0.040384960174560544, 0.040687614440917966, 0.040572223663330076, 0.040524478912353515, 0.04050495910644531, 0.04068511962890625, 0.04072911834716797, 0.040542209625244144, 0.04100124740600586, 0.04037836837768555, 0.040629440307617185, 0.04038943862915039, 0.04066255950927734, 0.04036412811279297, 0.04053619384765625, 0.04033561706542969, 0.0404727668762207, 0.04049692916870117, 0.040587295532226564, 0.04074291229248047, 0.041025054931640624, 0.040581600189208984, 0.040687614440917966, 0.040588863372802736, 0.040685760498046876, 0.04041475296020508, 0.04058595275878906, 0.04038006210327148, 0.04048931121826172, 0.04022886276245117, 0.04014448165893555, 0.04025708770751953, 0.04043862533569336, 0.040560222625732424, 0.040470943450927735, 0.04037555313110352, 0.04033203125, 0.04042342376708984, 0.04050057601928711, 0.04054390335083008, 0.0404431037902832, 0.04056156921386719, 0.04044275283813477, 0.040576385498046874, 0.04065548706054688, 0.04054009628295899, 0.04043721771240234, 0.041783905029296874, 0.04043571090698242, 0.04045228958129883, 0.04035728073120117, 0.04053238296508789, 0.040460384368896485, 0.040626014709472656, 0.040556190490722656]",tokens/s,24.754597781244684,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 
8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, 
in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1849.36448,2745.040896,0.0,2365.587456,2314.318336,s,1,8.9284794921875,8.9284794921875,0.0,8.9284794921875,8.9284794921875,8.9284794921875,8.9284794921875,[8.9284794921875],,kWh,5.5911116945814375e-05,6.160209576682485e-06,1.722529155800956e-05,7.929661808050642e-05,,MB,1901.912064,3099.459584,0.0,2682.257408,2607.60832,s,10,0.5095027809143067,0.05095027809143067,0.0002655836749891441,0.05082716751098633,0.051276120376586916,0.051431324577331544,0.05155548793792725,"[0.05158652877807617, 0.0511148796081543, 0.05124163055419922, 0.050816158294677734, 0.050872478485107425, 0.05082032012939453, 0.050745792388916015, 0.050738784790039064, 0.050834014892578126, 
0.05073219299316406]",tokens/s,5024.506432341861,kWh,1.55596752468974e-06,1.7159585863836332e-07,1.0371137138509626e-06,2.764677097179066e-06,tokens/kWh,92596708.76617353,MB,1906.229248,3099.459584,0.0,2682.257408,2607.61088,s,10,17.19343530273438,1.7193435302734374,0.22054194164928356,1.5809989624023437,2.0618133544921875,2.0663018188476565,2.0698925903320315,"[2.06081591796875, 2.070790283203125, 2.03573388671875, 1.562585205078125, 1.5716207275390626, 1.566328369140625, 1.5710460205078125, 1.5799976806640625, 1.5925169677734374, 1.582000244140625]",tokens/s,36.641892030722175,kWh,4.591858439613902e-05,5.064410582656288e-06,2.1991932487149806e-05,7.297492746594513e-05,tokens/kWh,863310.2106117188,,s,630,17.191366809844972,0.02728788382515075,0.0035817107658509817,0.02500704002380371,0.032768848037719726,0.03301007080078125,0.03351466503143311,"[0.03260588836669922, 0.03253193664550781, 0.03265647888183594, 0.03264486312866211, 0.03263862228393555, 0.03267324829101562, 0.03269718551635742, 0.03250771331787109, 0.03239913558959961, 0.03298358535766602, 0.03254886245727539, 0.032479137420654294, 0.03261849594116211, 0.0326098861694336, 0.032732639312744144, 0.0326255989074707, 0.03259772872924805, 0.03271820831298828, 0.032682910919189456, 0.03288246536254883, 0.03271072006225586, 0.03261190414428711, 0.03260681533813477, 0.0325511360168457, 0.03248083114624024, 0.032549087524414065, 0.03287449645996094, 0.032755294799804685, 0.03254508972167969, 0.03263091278076172, 0.03272236633300781, 0.032710464477539065, 0.032621280670166015, 0.03270041656494141, 0.032866302490234374, 0.032664737701416015, 0.032643199920654294, 0.03271753692626953, 0.032797695159912106, 0.0328917121887207, 0.032989376068115236, 0.032612350463867186, 0.03279872131347656, 0.03265500640869141, 0.03266204833984375, 0.03255686569213867, 0.03279264068603516, 0.03267987060546875, 0.032615585327148436, 0.032610912322998044, 0.03269862365722656, 0.032644287109375, 0.032664382934570316, 0.03255081558227539, 0.032714847564697266, 0.033099777221679685, 0.033159168243408206, 0.0333507194519043, 0.03320649719238281, 0.032932575225830076, 0.03264921569824219, 0.03267951965332031, 0.03279254531860352, 0.032989662170410154, 0.032909473419189456, 0.032457889556884764, 0.032608448028564455, 0.03293225479125977, 0.03412198257446289, 0.03322675323486328, 0.03264284896850586, 0.03252409744262695, 0.03251673507690429, 0.0326448974609375, 0.032612350463867186, 0.03308319854736328, 0.033644031524658204, 0.03267558288574219, 0.03276243209838867, 0.03278220748901367, 0.03278435134887695, 0.03287622451782227, 0.032734046936035155, 0.032679935455322266, 0.03274300765991211, 0.032741886138916015, 0.0327710075378418, 0.033022945404052734, 0.033535999298095705, 0.0332014389038086, 0.033097633361816405, 0.033139518737792965, 0.032994335174560546, 0.03289715194702148, 0.033131359100341796, 0.032671745300292966, 0.0327081298828125, 0.03270470428466797, 0.03413020706176758, 0.034401729583740236, 0.033116607666015624, 0.033043582916259764, 0.03279280090332031, 0.03281999969482422, 0.03272911834716797, 0.03316118240356445, 0.03279052734375, 0.03252633666992188, 0.03254233551025391, 0.032506240844726565, 0.032595966339111326, 0.03255078506469727, 0.03265139389038086, 0.032671615600585936, 0.03256742477416992, 0.03258566284179688, 0.03254399871826172, 0.032578369140625, 0.03269539260864258, 0.03284819030761719, 0.03275862503051758, 0.032656673431396485, 0.032622112274169925, 0.032573631286621094, 0.032766719818115235, 0.03278438568115234, 
0.03269968032836914, 0.032688575744628905, 0.03257753753662109, 0.03261219024658203, 0.03346243286132813, 0.03281283187866211, 0.03317334365844726, 0.03314470291137695, 0.033262081146240234, 0.03308316802978516, 0.03320035171508789, 0.032976863861083984, 0.03264924621582031, 0.03263068771362305, 0.03264521789550781, 0.032702465057373044, 0.032652736663818356, 0.03319046401977539, 0.03264889526367187, 0.03269254302978516, 0.03273468780517578, 0.03267023849487305, 0.032860160827636715, 0.032873886108398434, 0.032768608093261715, 0.03272246551513672, 0.03263638305664063, 0.0327435188293457, 0.032762783050537106, 0.03362406539916992, 0.03482998275756836, 0.03316361618041992, 0.03317375946044922, 0.033031486511230466, 0.03277180862426758, 0.03272518539428711, 0.03249375915527344, 0.03258607864379883, 0.032796607971191404, 0.0325939826965332, 0.0324956169128418, 0.03242598342895508, 0.032487422943115234, 0.032415393829345704, 0.03244271850585938, 0.032481281280517575, 0.03249900817871094, 0.03241030502319336, 0.03238326263427734, 0.03242364883422852, 0.03249683380126953, 0.0324780158996582, 0.03253657531738281, 0.0325049934387207, 0.032645633697509766, 0.03264483261108399, 0.03235903930664062, 0.03245651245117188, 0.03252774429321289, 0.02765500831604004, 0.02487424087524414, 0.02498396873474121, 0.024819295883178712, 0.025044063568115234, 0.024728479385375975, 0.0247271671295166, 0.02473750305175781, 0.0248407039642334, 0.02465987205505371, 0.024899391174316405, 0.02476611137390137, 0.02487376022338867, 0.02481305694580078, 0.024700544357299806, 0.024660287857055666, 0.02460643196105957, 0.024714080810546876, 0.024832000732421877, 0.024770559310913084, 0.02515488052368164, 0.024736448287963866, 0.02468454360961914, 0.02469273567199707, 0.024685951232910158, 0.024772991180419923, 0.024709375381469726, 0.024649696350097658, 0.024668191909790037, 0.02481545639038086, 0.024793119430541993, 0.024724992752075195, 0.024772735595703126, 0.024879264831542968, 0.02495318412780762, 0.02485043144226074, 0.024807424545288087, 0.02496451187133789, 0.025219680786132813, 0.025026559829711914, 0.024890464782714845, 0.024877983093261717, 0.024732864379882813, 0.02472208023071289, 0.02483830451965332, 0.024805376052856445, 0.02532745552062988, 0.024834144592285157, 0.02501420783996582, 0.02490492820739746, 0.024748064041137694, 0.02479372787475586, 0.02489369583129883, 0.024823808670043947, 0.024784160614013673, 0.024712032318115234, 0.02469055938720703, 0.024696832656860353, 0.024849695205688478, 0.024617696762084963, 0.024657472610473633, 0.024770624160766603, 0.024725696563720704, 0.02466975975036621, 0.024674623489379884, 0.024731967926025392, 0.024666112899780275, 0.024838144302368165, 0.024977407455444335, 0.024767871856689452, 0.02477939224243164, 0.02467840003967285, 0.02476032066345215, 0.02513283157348633, 0.0255053768157959, 0.02506831932067871, 0.025184095382690428, 0.025040544509887696, 0.02478121566772461, 0.024770496368408203, 0.024852479934692383, 0.02485968017578125, 0.024845279693603516, 0.024893152236938478, 0.02488675117492676, 0.024842111587524415, 0.02482067108154297, 0.024929344177246095, 0.02485139274597168, 0.02487001609802246, 0.024768831253051758, 0.024786624908447265, 0.02482211112976074, 0.024853023529052734, 0.024685951232910158, 0.024768640518188476, 0.024799455642700197, 0.024908000946044923, 0.02470710372924805, 0.024752416610717774, 0.024815359115600587, 0.024862655639648436, 0.024867008209228516, 0.024724992752075195, 0.024729055404663088, 0.024794015884399414, 0.02476416015625, 
0.02472166442871094, 0.024766016006469726, 0.024783296585083006, 0.02474505615234375, 0.02469366455078125, 0.027510784149169923, 0.025131263732910157, 0.024964864730834962, 0.025001535415649413, 0.025012096405029296, 0.025160255432128905, 0.025036735534667967, 0.02484000015258789, 0.025004480361938478, 0.026435392379760742, 0.024838144302368165, 0.02490982437133789, 0.024810880661010743, 0.02536716842651367, 0.02472310447692871, 0.02490403175354004, 0.024796255111694337, 0.02491596794128418, 0.024913919448852538, 0.02476032066345215, 0.02473936080932617, 0.024784448623657227, 0.024757247924804687, 0.02496713638305664, 0.024806655883789063, 0.02466886329650879, 0.024698879241943358, 0.024788991928100586, 0.02483328056335449, 0.02472012710571289, 0.02469068717956543, 0.02476025581359863, 0.024860736846923828, 0.024788991928100586, 0.024716863632202147, 0.02479897689819336, 0.02485728073120117, 0.024694688796997072, 0.024755647659301758, 0.024770368576049806, 0.02500489616394043, 0.024737152099609375, 0.024857215881347657, 0.02507161521911621, 0.024966848373413085, 0.024899904251098632, 0.024893152236938478, 0.024908000946044923, 0.025317312240600586, 0.024977439880371093, 0.02490787124633789, 0.02481337547302246, 0.024817472457885743, 0.024803712844848634, 0.024751968383789062, 0.024787103652954102, 0.024858463287353517, 0.024852479934692383, 0.02474723243713379, 0.024931264877319337, 0.02485043144226074, 0.024997568130493163, 0.024903072357177734, 0.02483292770385742, 0.02493622398376465, 0.024858848571777344, 0.02490982437133789, 0.02553241539001465, 0.024870912551879884, 0.024821983337402344, 0.024794015884399414, 0.02489833641052246, 0.02492425537109375, 0.024780799865722656, 0.02481260871887207, 0.024850400924682617, 0.02490777587890625, 0.024859840393066407, 0.024833824157714842, 0.024800352096557617, 0.025110496520996093, 0.024927936553955077, 0.025153823852539062, 0.025102304458618163, 0.024905248641967772, 0.024750591278076172, 0.02492572784423828, 0.025033184051513672, 0.02469228744506836, 0.02463142395019531, 0.024660287857055666, 0.024748031616210937, 0.02473574447631836, 0.024634912490844728, 0.02493507194519043, 0.024743743896484375, 0.024670015335083006, 0.024651968002319335, 0.02465177536010742, 0.024764415740966796, 0.024616960525512696, 0.02465692710876465, 0.024658943176269533, 0.02471241569519043, 0.024661823272705077, 0.024753087997436522, 0.02509823989868164, 0.02512873649597168, 0.02510870361328125, 0.025157375335693358, 0.025179840087890624, 0.02503327941894531, 0.024813568115234375, 0.024758272171020508, 0.024765792846679686, 0.024783519744873046, 0.024768287658691407, 0.02474233627319336, 0.024886207580566408, 0.025008703231811525, 0.02494451141357422, 0.02485203170776367, 0.02503167915344238, 0.025112415313720705, 0.024936447143554686, 0.02750668716430664, 0.025780223846435548, 0.02528665542602539, 0.024952831268310546, 0.025161727905273438, 0.025006080627441408, 0.025141248703002928, 0.024688640594482423, 0.024750175476074218, 0.02489334487915039, 0.025140575408935547, 0.02498419189453125, 0.024838176727294922, 0.024893440246582032, 0.024831872940063476, 0.024727039337158203, 0.02497395133972168, 0.024836095809936523, 0.024966623306274412, 0.024976160049438475, 0.024846208572387694, 0.0246712646484375, 0.024806175231933594, 0.025053375244140624, 0.02502979278564453, 0.025398111343383788, 0.025106399536132813, 0.02503273582458496, 0.025382144927978516, 0.02522598457336426, 0.025030080795288085, 0.024927936553955077, 0.024882047653198243, 0.024970239639282226, 
0.024968191146850584, 0.025085664749145507, 0.025144927978515624, 0.026033952713012697, 0.027518976211547853, 0.026250112533569337, 0.025417760848999025, 0.02511039924621582, 0.024909952163696288, 0.024811519622802734, 0.025063455581665037, 0.024987520217895506, 0.02493449592590332, 0.025204736709594725, 0.02485875129699707, 0.024962368011474608, 0.02495136070251465, 0.024975584030151366, 0.02503376007080078, 0.02523417663574219, 0.02508185577392578, 0.024991743087768553, 0.02497331237792969, 0.024961023330688475, 0.024864767074584963, 0.02482316780090332, 0.024879104614257814, 0.024912031173706054, 0.02502000045776367, 0.024787200927734374, 0.024892032623291014, 0.024949920654296874, 0.024896352767944337, 0.024779903411865235, 0.024756128311157227, 0.024816511154174804, 0.02511471939086914, 0.025277503967285155, 0.025063999176025392, 0.024961376190185548, 0.02484841537475586, 0.025016319274902343, 0.025091264724731447, 0.024945472717285155, 0.024993343353271483, 0.02510220718383789, 0.025309024810791017, 0.024889663696289064, 0.025665695190429688, 0.02547110366821289, 0.03313919830322266, 0.02542505645751953, 0.025211679458618165, 0.025284671783447267, 0.02530508804321289, 0.025118719100952147, 0.024983327865600587, 0.024887744903564452, 0.024958751678466798, 0.02482745552062988, 0.02484783935546875, 0.02493539237976074, 0.025014368057250977, 0.02508348846435547, 0.024723167419433593, 0.02484694480895996, 0.025135103225708007, 0.025194208145141603, 0.025051424026489258, 0.02491164779663086, 0.025081344604492187, 0.024832735061645506, 0.02490982437133789, 0.02500809669494629, 0.025046848297119142, 0.024820032119750975, 0.024778656005859375, 0.024856191635131836, 0.024994112014770507, 0.031006784439086915, 0.025231552124023438, 0.02520841598510742, 0.026127967834472656, 0.024942272186279296, 0.02509417533874512, 0.025069631576538087, 0.0251167049407959, 0.025032960891723632, 0.024893600463867186, 0.024934816360473632, 0.025236608505249024, 0.02497830390930176, 0.02490563201904297, 0.025331199645996092, 0.024973215103149413, 0.024818368911743164, 0.02488934326171875, 0.02488528060913086, 0.025173919677734375, 0.025025951385498048, 0.025033119201660157, 0.025016191482543946, 0.025007999420166016, 0.024936960220336913, 0.02490310478210449, 0.025018943786621093, 0.02507776069641113, 0.02485843276977539, 0.024901824951171873, 0.024962976455688478, 0.02534204864501953, 0.025253311157226562, 0.025148319244384765, 0.025240991592407228, 0.025032703399658202, 0.024971872329711913, 0.02501593589782715, 0.025100799560546876, 0.024995712280273436, 0.02492166328430176, 0.02510047912597656, 0.02499942398071289, 0.025122943878173827, 0.02522380828857422, 0.025236768722534178, 0.02513564872741699, 0.0249182071685791, 0.024832000732421877, 0.025226303100585937, 0.025201215744018554, 0.024885120391845702, 0.024863231658935548, 0.02490982437133789, 0.024786943435668944, 0.02470848083496094, 0.0247957763671875, 0.02490982437133789, 0.024930303573608398, 0.02492416000366211, 0.02485980796813965, 0.025113439559936522, 0.026128543853759765, 0.02492748832702637, 0.024858688354492186, 0.024992288589477538, 0.02489139175415039, 0.024773759841918944, 0.02495372772216797, 0.024951871871948243, 0.02484284782409668, 0.02493020820617676, 0.02485875129699707, 0.02558188819885254, 0.02513920021057129, 0.02496291160583496, 0.02522537612915039, 0.025278656005859376, 0.025220767974853515, 0.025605440139770508, 0.026491167068481446, 0.02570707130432129, 0.025649152755737304, 0.02498067283630371, 0.02529676818847656, 
0.025541568756103517, 0.024938495635986328, 0.024805376052856445, 0.024780799865722656, 0.026757312774658204, 0.024968544006347657, 0.024820192337036133, 0.024740928649902343, 0.024976320266723633, 0.024860671997070313]",tokens/s,36.646300842072556,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,4969.734144,7346.192384,0.0,6943.670272,6539.1744,s,1,11.756984375,11.756984375,0.0,11.756984375,11.756984375,11.756984375,11.756984375,[11.756984375],,kWh,0.00013279523331669526,1.4641012119514684e-05,4.293447879200052e-05,0.00019037072422821048,,MB,5058.154496,7641.890816,0.0,7224.68864,6917.39904,s,10,2.0122382202148437,0.20122382202148437,0.0006060728743618355,0.2010496368408203,0.20204302520751952,0.20231422958374023,0.2025311930847168,"[0.20076329040527344, 0.20133564758300782, 0.2011254119873047, 0.20132041931152345, 0.20258543395996093, 0.20038633728027344, 0.20092022705078125, 0.20198275756835937, 0.20084483337402342, 0.20097386169433593]",tokens/s,1272.2151752622374,kWh,5.900931269333342e-06,6.507662158810033e-07,3.933125368720259e-06,1.0484822853934602e-05,tokens/kWh,24416244.658242535,MB,5062.479872,7643.987968,0.0,7226.785792,6917.4016,s,10,19.582733764648438,1.958273376464844,0.02183788982488931,1.9564053955078125,1.9850491088867186,1.9903446838378906,1.9945811437988281,"[1.92741796875, 1.9416356201171876, 1.9384376220703126, 1.936445068359375, 1.9584119873046875, 1.9543988037109374, 1.983872314453125, 1.9956402587890625, 1.9820599365234375, 1.9644141845703125]",tokens/s,32.17119772813855,kWh,5.622335264525129e-05,6.201267903624188e-06,3.7160018616878563e-05,9.958463916575402e-05,tokens/kWh,632627.687641057,,s,630,19.579840072631836,0.03107911122639974,0.0007084460346753492,0.031024640083312988,0.03181113929748535,0.03200096435546875,0.032820097312927246,"[0.031295999526977536, 0.03064473533630371, 0.030342527389526367, 0.03030672073364258, 0.030359615325927736, 0.030328128814697267, 0.030259904861450197, 0.030259199142456054, 0.030373344421386717, 0.030900768280029297, 0.030535680770874023, 0.03041279983520508, 0.030271488189697264, 0.030484479904174806, 0.030265344619750976, 0.030273536682128906, 0.030321983337402342, 0.030414527893066406, 0.030537919998168947, 0.030482271194458007, 0.030521440505981445, 0.030699392318725587, 0.03036467170715332, 0.03034316825866699, 0.03056169509887695, 0.030266111373901367, 0.030356607437133788, 0.030251039505004882, 0.030630592346191407, 0.03155763244628906, 0.030877344131469725, 0.03136739158630371, 0.032847232818603515, 0.03070342445373535, 0.03068560028076172, 0.03046028709411621, 0.03051644706726074, 0.030753728866577148, 0.03057868766784668, 0.03055411148071289, 0.030582048416137694, 0.030397151947021483, 0.030441471099853516, 0.030631168365478516, 0.030535839080810548, 0.03054355239868164, 0.03041702461242676, 0.03036649513244629, 0.030369792938232422, 0.03047603225708008, 0.030771360397338868, 0.03052060890197754, 0.030612607955932618, 0.030951040267944336, 0.030668224334716797, 0.030501312255859375, 
0.03067103958129883, 0.030707712173461913, 0.030455808639526367, 0.030468095779418947, 0.03094223976135254, 0.03068003273010254, 0.030486528396606444, 0.03146169662475586, 0.030797088623046875, 0.030644800186157225, 0.030489919662475586, 0.030522016525268553, 0.030382144927978517, 0.03049456024169922, 0.030545408248901368, 0.030421056747436524, 0.030468671798706055, 0.030765056610107422, 0.03079987144470215, 0.030881696701049805, 0.030933088302612304, 0.030842880249023437, 0.03080396842956543, 0.030893280029296876, 0.030690080642700197, 0.03075065612792969, 0.03058633613586426, 0.030779903411865234, 0.030482528686523437, 0.030552000045776368, 0.030399648666381836, 0.030585760116577147, 0.030426368713378907, 0.030746656417846678, 0.0302271671295166, 0.030244415283203124, 0.030304704666137695, 0.03042508888244629, 0.030328832626342773, 0.03050044822692871, 0.03047430419921875, 0.03054739189147949, 0.030847871780395508, 0.031014368057250975, 0.03096633529663086, 0.03093280029296875, 0.030965951919555663, 0.031053823471069338, 0.031171680450439453, 0.031089216232299804, 0.031082752227783204, 0.03093948745727539, 0.030899648666381837, 0.031053632736206056, 0.031164255142211914, 0.031223840713500976, 0.030935680389404297, 0.030910463333129884, 0.032753662109375, 0.031660032272338864, 0.030879743576049806, 0.031014720916748048, 0.030940895080566407, 0.031173088073730468, 0.030830591201782227, 0.03118284797668457, 0.030920703887939452, 0.03076300811767578, 0.03105574417114258, 0.03075676727294922, 0.03169705581665039, 0.03103059196472168, 0.031031839370727538, 0.03103740882873535, 0.034828319549560546, 0.03141222381591797, 0.03098419189453125, 0.03079510307312012, 0.030941856384277343, 0.031057920455932617, 0.030717023849487303, 0.030716127395629882, 0.030636192321777344, 0.03088150405883789, 0.031818464279174806, 0.032149055480957034, 0.030869279861450195, 0.030737152099609377, 0.030615488052368165, 0.030541887283325197, 0.030517087936401368, 0.031059455871582032, 0.030696096420288085, 0.030672224044799804, 0.03035103988647461, 0.030198751449584962, 0.030308351516723633, 0.030281280517578123, 0.03034671974182129, 0.030349727630615234, 0.030226816177368165, 0.03024505615234375, 0.030301664352416994, 0.030171520233154298, 0.030195871353149415, 0.030342880249023436, 0.03040412712097168, 0.030563072204589845, 0.03096575927734375, 0.030728191375732423, 0.030588735580444337, 0.030938880920410156, 0.030504735946655273, 0.030657184600830077, 0.03060736083984375, 0.03048758316040039, 0.03045680046081543, 0.03066783905029297, 0.03053664016723633, 0.030643295288085938, 0.030907295227050782, 0.030410751342773438, 0.030909856796264647, 0.030310720443725587, 0.030662944793701172, 0.03029631996154785, 0.030371583938598633, 0.030398624420166016, 0.030527328491210936, 0.03053919982910156, 0.030712383270263672, 0.03201180648803711, 0.030597312927246094, 0.03168316841125488, 0.03148988723754883, 0.030879104614257812, 0.031044384002685547, 0.03133369636535645, 0.03110403251647949, 0.030727840423583983, 0.03156582450866699, 0.03205295944213867, 0.03103267288208008, 0.03135548782348633, 0.031033695220947264, 0.031005792617797852, 0.030905248641967774, 0.031068159103393556, 0.03081363105773926, 0.03055059242248535, 0.03262416076660156, 0.03073004722595215, 0.03080463981628418, 0.030616928100585937, 0.030525344848632813, 0.030563072204589845, 0.03037321662902832, 0.030462080001831055, 0.030480127334594726, 0.03035830307006836, 0.030537727355957032, 0.030473663330078126, 0.03032940864562988, 0.030271488189697264, 
0.030302207946777345, 0.030492671966552733, 0.03055001640319824, 0.030326431274414062, 0.03037628746032715, 0.030402559280395508, 0.0307509765625, 0.030234367370605468, 0.03057663917541504, 0.0302893123626709, 0.03017788887023926, 0.030074144363403322, 0.030597856521606445, 0.030298112869262695, 0.030248064041137696, 0.03049555206298828, 0.030572608947753908, 0.030279680252075194, 0.03038003158569336, 0.030524448394775392, 0.030441696166992188, 0.03030035209655762, 0.030146432876586915, 0.030222976684570312, 0.03047327995300293, 0.03091961669921875, 0.030924863815307617, 0.03096985626220703, 0.031268863677978515, 0.031018688201904298, 0.031239551544189455, 0.031465696334838866, 0.031987712860107424, 0.031082080841064452, 0.03112940788269043, 0.03120128059387207, 0.03079347229003906, 0.03048854446411133, 0.030513792037963866, 0.030662399291992187, 0.031203840255737306, 0.030791807174682616, 0.03103321647644043, 0.030532863616943358, 0.0311157112121582, 0.031162687301635742, 0.03077440071105957, 0.030757759094238283, 0.030393407821655272, 0.03040662384033203, 0.03037830352783203, 0.03037843132019043, 0.0303569278717041, 0.030290719985961913, 0.030451711654663087, 0.03041257667541504, 0.030654399871826173, 0.030275232315063478, 0.03027577590942383, 0.030541471481323242, 0.03038697624206543, 0.030664703369140626, 0.030682655334472658, 0.03034307289123535, 0.03078201675415039, 0.030384031295776368, 0.030718048095703124, 0.030621376037597656, 0.030861120223999023, 0.03089254379272461, 0.03081625556945801, 0.03097599983215332, 0.030699520111083983, 0.031094783782958983, 0.03241296005249023, 0.03208793640136719, 0.031927072525024414, 0.03226835250854492, 0.031850496292114255, 0.03204915237426758, 0.031895328521728515, 0.03179542350769043, 0.03175766372680664, 0.0320148811340332, 0.0318853759765625, 0.03170515251159668, 0.03183206367492676, 0.03167430305480957, 0.03180550384521484, 0.03163750457763672, 0.03189913558959961, 0.03143935966491699, 0.03139583969116211, 0.03162112045288086, 0.03117171287536621, 0.031715232849121096, 0.03126198387145996, 0.031084928512573242, 0.03148355293273926, 0.0312512321472168, 0.03232707214355469, 0.031154783248901367, 0.03159859275817871, 0.03122115135192871, 0.031103584289550783, 0.03119276809692383, 0.031077951431274415, 0.031306495666503904, 0.031212575912475587, 0.03141526412963867, 0.03138323211669922, 0.031694944381713865, 0.03151619148254395, 0.03139574432373047, 0.03149084854125977, 0.031531007766723636, 0.031491327285766604, 0.03137366485595703, 0.0318939208984375, 0.031595903396606444, 0.03143539237976074, 0.031108928680419923, 0.03141836738586426, 0.03103968048095703, 0.03125609588623047, 0.030885408401489258, 0.031071168899536133, 0.03127228736877442, 0.03137398338317871, 0.03078348731994629, 0.030681087493896485, 0.03062131118774414, 0.030775680541992187, 0.03052505683898926, 0.031015520095825196, 0.030981664657592774, 0.03098838424682617, 0.03070787239074707, 0.030925920486450195, 0.030902624130249023, 0.030654560089111327, 0.030572704315185547, 0.030333248138427735, 0.030262720108032225, 0.030371679306030273, 0.030186208724975586, 0.03016012763977051, 0.030335168838500976, 0.030336639404296876, 0.030423999786376953, 0.03055961608886719, 0.03103603172302246, 0.030695423126220703, 0.03068262481689453, 0.030697248458862306, 0.03039664077758789, 0.030498592376708985, 0.03040643119812012, 0.031819135665893554, 0.030986879348754885, 0.03119308853149414, 0.030932992935180665, 0.030803295135498048, 0.030937759399414063, 0.030840831756591795, 
0.030652416229248046, 0.030547296524047852, 0.03048431968688965, 0.030446304321289062, 0.030630111694335938, 0.03095075225830078, 0.03105846405029297, 0.03088969612121582, 0.030954784393310546, 0.030640384674072266, 0.03050729560852051, 0.030562784194946287, 0.030793727874755858, 0.030529407501220702, 0.030572832107543944, 0.03087753677368164, 0.030709760665893555, 0.030947328567504883, 0.030709728240966797, 0.030754848480224608, 0.03222719955444336, 0.03201241683959961, 0.0313375358581543, 0.03125139236450195, 0.03164800071716309, 0.03192806434631348, 0.03165711975097656, 0.031597408294677734, 0.03177881622314453, 0.03161203193664551, 0.03171017646789551, 0.03189545631408691, 0.031680160522460935, 0.031785152435302735, 0.031797407150268554, 0.031698944091796875, 0.03171536064147949, 0.031897567749023435, 0.03184556770324707, 0.03162553596496582, 0.031732000350952146, 0.031860960006713866, 0.03175984001159668, 0.03614569473266602, 0.03178863906860351, 0.03144262313842774, 0.031353567123413084, 0.03148595237731933, 0.031135744094848632, 0.03135078430175781, 0.03130316734313965, 0.03192268753051758, 0.039360511779785154, 0.031899648666381834, 0.031464864730834964, 0.03111382484436035, 0.03204009628295899, 0.031572608947753905, 0.031530559539794924, 0.03154396820068359, 0.03157926368713379, 0.0313885440826416, 0.03155052757263183, 0.03166624069213867, 0.03183500862121582, 0.03190169525146484, 0.032045055389404296, 0.031800575256347656, 0.03228863906860351, 0.03195583915710449, 0.03198134422302246, 0.03186710357666016, 0.032398880004882814, 0.03172108840942383, 0.03169571113586426, 0.03190732765197754, 0.03178137588500977, 0.031268192291259767, 0.03166060829162597, 0.03253193664550781, 0.0312669448852539, 0.03136700820922852, 0.031328672409057616, 0.034261249542236326, 0.031649791717529296, 0.031054880142211912, 0.03132028770446777, 0.03126963233947754, 0.031139839172363282, 0.031047679901123046, 0.03126873588562012, 0.031131071090698244, 0.030974048614501953, 0.03111996841430664, 0.031031295776367186, 0.031057920455932617, 0.031100927352905275, 0.03116646385192871, 0.03115945625305176, 0.03109676742553711, 0.03140441513061523, 0.031226303100585936, 0.031247583389282227, 0.0316300163269043, 0.03150457572937012, 0.031416032791137694, 0.03287187194824219, 0.03442697525024414, 0.03202646255493164, 0.03204393768310547, 0.031815200805664065, 0.03177302360534668, 0.03159401512145996, 0.03181625556945801, 0.031511999130249026, 0.031433311462402344, 0.032220191955566406, 0.031534048080444334, 0.03150543975830078, 0.031819328308105466, 0.031054176330566407, 0.03113577651977539, 0.03144313621520996, 0.031245344161987303, 0.03104630470275879, 0.031238048553466798, 0.03107254409790039, 0.03130339241027832, 0.03142070388793945, 0.03160268783569336, 0.03127619171142578, 0.031155040740966797, 0.031168256759643555, 0.03117487907409668, 0.031153215408325195, 0.03130672073364258, 0.03128934478759766, 0.031455039978027344, 0.031496383666992187, 0.03161692810058594, 0.03158569526672363, 0.03189369583129883, 0.031758848190307616, 0.031696319580078125, 0.031855167388916014, 0.03179520034790039, 0.03178512001037598, 0.03173155212402344, 0.03266489410400391, 0.031735744476318356, 0.03166080093383789, 0.03181068801879883, 0.031978336334228516, 0.03152694320678711, 0.03140403175354004, 0.03148185539245606, 0.03156604766845703, 0.031399711608886716, 0.031270912170410156, 0.03145107269287109, 0.031590463638305664, 0.03143270492553711, 0.03142022323608398, 0.031303199768066406, 0.031294111251831055, 0.0316824951171875, 
0.03156505584716797, 0.03142073631286621, 0.03136063957214356, 0.031142528533935548, 0.03149372863769531, 0.031324832916259766, 0.031107072830200196, 0.03231439971923828, 0.031140735626220703, 0.03104572868347168, 0.031121408462524414, 0.031088640213012695, 0.031071359634399415, 0.03122265625, 0.03153715133666992, 0.031514623641967776, 0.03217203140258789, 0.03169926452636719, 0.0316183032989502, 0.03146329689025879, 0.031857215881347656, 0.031936511993408204, 0.03134259223937988, 0.03120947265625, 0.03154876708984375, 0.031666847229003904, 0.03129360008239746, 0.031321952819824216, 0.031238048553466798, 0.031204639434814455, 0.031396127700805666, 0.03121331214904785, 0.031265567779541016, 0.03144195175170898, 0.03125551986694336, 0.031156223297119142, 0.03132563209533691, 0.03138611221313477, 0.03112454414367676, 0.03124083137512207, 0.03105734443664551, 0.031099359512329103, 0.0311177921295166, 0.031303680419921875, 0.03123200035095215, 0.031054079055786134, 0.03113484764099121, 0.0314968318939209, 0.03126198387145996, 0.031091424942016603, 0.03138070487976074, 0.031140640258789064, 0.031479711532592776, 0.030826847076416017, 0.03103104019165039, 0.03100057601928711, 0.030901248931884766, 0.03061619186401367, 0.030416351318359375, 0.03034204864501953, 0.030631935119628906, 0.0307640323638916, 0.03263590240478516, 0.030451711654663087, 0.030846975326538087, 0.030980064392089845, 0.030516895294189453, 0.030780927658081055, 0.0306014404296875, 0.030575071334838867, 0.030860607147216796, 0.03128790473937988, 0.03108246421813965, 0.031115583419799805, 0.03101286315917969, 0.030971519470214842, 0.031474048614501954, 0.031006816864013673, 0.0311724796295166]",tokens/s,32.1759522888339,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( 
File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,3162.947584,4431.151104,0.0,4028.628992,3944.723968,s,1,10.3563037109375,10.3563037109375,0.0,10.3563037109375,10.3563037109375,10.3563037109375,10.3563037109375,[10.3563037109375],,kWh,9.635384677909921e-05,1.0621268716066966e-05,2.9896412806013073e-05,0.00013687152830117925,,MB,2949.7344,4770.889728,0.0,4353.687552,4305.05728,s,10,1.151768440246582,0.11517684402465819,0.001460502679056841,0.11562615966796876,0.11584229583740234,0.11585278091430665,0.11586116897583008,"[0.11551551818847657, 0.11560066986083985, 0.1158399658203125, 0.11566355133056641, 0.1156400604248047, 0.11081043243408203, 0.11586326599121094, 0.11561225891113282, 0.11546774291992187, 0.11575497436523438]",tokens/s,2222.6689936493917,kWh,3.4484402547555047e-06,3.7996639105771827e-07,2.2964397456467565e-06,6.12484639145998e-06,tokens/kWh,41796966.59118618,MB,2951.626752,4770.889728,0.0,4353.687552,4305.05984,s,10,26.14082861328125,2.614082861328125,0.011914775413420791,2.610779541015625,2.6306078857421875,2.634568347167969,2.637736716308594,"[2.611528564453125, 2.6104423828125, 2.59789453125, 2.61111669921875, 2.63852880859375, 2.629727783203125, 2.6220380859375, 2.608635498046875, 2.600720947265625, 2.6101953125]",tokens/s,24.10023068970043,kWh,7.610696786274925e-05,8.394867219130538e-06,3.956487152115584e-05,0.00012406670660303562,tokens/kWh,507791.34648568596,,s,630,26.13824554824831,0.04148927864801316,0.0006184958777199364,0.041419855117797846,0.04189420585632324,0.04236502590179443,0.04382623149871827,"[0.04195667266845703, 0.04202963256835938, 0.041267200469970705, 0.04137078475952148, 0.0412660140991211, 0.042557086944580075, 0.04126927947998047, 0.04149484634399414, 0.041197856903076174, 0.04129148864746094, 0.04126617431640625, 0.041118881225585935, 0.041162464141845705, 0.04125699234008789, 0.04129391860961914, 0.04084086227416992, 0.041008670806884764, 0.04100566482543945, 0.041231998443603514, 0.04145180892944336, 0.0422935676574707, 0.044232704162597655, 0.04164931106567383, 0.04157891082763672, 0.041103233337402345, 0.041818687438964844, 0.04180748748779297, 0.04140428924560547, 
0.04101603317260742, 0.04107651138305664, 0.0413941764831543, 0.04100710296630859, 0.04099407958984375, 0.04083375930786133, 0.04077151870727539, 0.041072704315185546, 0.040858848571777344, 0.04132534408569336, 0.041379711151123044, 0.04089251327514649, 0.040884254455566406, 0.040855457305908206, 0.041457664489746096, 0.04174800109863281, 0.041439712524414064, 0.046988800048828126, 0.04187811279296875, 0.041504032135009764, 0.04126793670654297, 0.04129740905761719, 0.041056766510009765, 0.041002399444580076, 0.041101921081542966, 0.041560062408447264, 0.04127651214599609, 0.041304832458496095, 0.04122639846801758, 0.04103372955322265, 0.04098428726196289, 0.041533729553222654, 0.04110559844970703, 0.041401248931884765, 0.04152028656005859, 0.04128121566772461, 0.041234752655029294, 0.04130799865722656, 0.041205921173095704, 0.04147814559936523, 0.04184896087646484, 0.041495487213134764, 0.04142176055908203, 0.0414865608215332, 0.04095772933959961, 0.04122608184814453, 0.04103798294067383, 0.041267425537109374, 0.041757568359375, 0.04132547378540039, 0.04164108657836914, 0.04168918228149414, 0.04189263916015625, 0.041783294677734374, 0.04181388854980469, 0.04174860763549805, 0.04185414505004883, 0.04139705657958984, 0.041094432830810546, 0.04138873672485351, 0.04115459060668945, 0.042151710510253904, 0.04319168090820313, 0.041517345428466794, 0.04183436965942383, 0.0419947509765625, 0.04145980834960938, 0.04141884613037109, 0.0413040657043457, 0.04101324844360352, 0.04084121704101563, 0.04096409606933594, 0.04149248123168945, 0.041014270782470705, 0.04082995223999023, 0.04078182220458984, 0.04091494369506836, 0.04139606475830078, 0.04163190460205078, 0.04177651214599609, 0.041849472045898437, 0.04139212799072266, 0.04104121780395508, 0.04083520126342773, 0.04112831878662109, 0.0414455680847168, 0.0430772476196289, 0.04150416183471679, 0.041157600402832034, 0.041226112365722656, 0.04152127838134766, 0.0418240966796875, 0.04128492736816406, 0.04090351867675781, 0.04076755142211914, 0.04133881759643555, 0.04142489624023438, 0.04114022445678711, 0.04160086441040039, 0.04104540634155274, 0.040891136169433594, 0.041119487762451175, 0.04092684936523437, 0.04109888076782227, 0.040941566467285154, 0.041073375701904294, 0.0413350715637207, 0.04105625534057617, 0.04153139114379883, 0.04155091094970703, 0.04127840042114258, 0.041267070770263675, 0.041181121826171875, 0.041316001892089844, 0.04127340698242187, 0.04132912063598633, 0.04118735885620117, 0.04121392059326172, 0.041728000640869144, 0.04143907165527344, 0.04128729629516602, 0.04127961730957031, 0.04143779373168945, 0.04115647888183594, 0.04159481430053711, 0.0413675537109375, 0.041064640045166016, 0.04096736145019531, 0.0409381103515625, 0.04096614456176758, 0.04293222427368164, 0.04140982437133789, 0.04238016128540039, 0.04139596939086914, 0.04135523223876953, 0.04134105682373047, 0.041232383728027344, 0.041529342651367186, 0.04123648071289063, 0.04115251159667969, 0.04118527984619141, 0.04100710296630859, 0.041240577697753904, 0.04130524826049805, 0.0412369613647461, 0.04092892837524414, 0.040813247680664064, 0.04087811279296875, 0.040736766815185545, 0.04090265655517578, 0.04134092712402344, 0.04091648101806641, 0.04093094253540039, 0.04081139373779297, 0.04093132781982422, 0.040681472778320314, 0.04148652648925781, 0.041312065124511715, 0.04105011367797851, 0.04110732650756836, 0.0414474868774414, 0.04245888137817383, 0.04284988784790039, 0.041732769012451175, 0.04717500686645508, 0.04209040069580078, 0.041661151885986326, 
0.04148358535766602, 0.04119209671020508, 0.04107475280761719, 0.041342815399169924, 0.041418910980224606, 0.04107193756103516, 0.041022144317626956, 0.04152051162719726, 0.041468544006347655, 0.041390079498291016, 0.041826305389404295, 0.041436607360839844, 0.04120025634765625, 0.041271232604980466, 0.04088835144042969, 0.04126857757568359, 0.04126537704467773, 0.04102595138549805, 0.04132863998413086, 0.041644031524658204, 0.04127743911743164, 0.041265151977539063, 0.04096819305419922, 0.04093286514282227, 0.04085785675048828, 0.04082412719726562, 0.04081760025024414, 0.040818367004394535, 0.04082454299926758, 0.04116540908813476, 0.04097433471679687, 0.041035423278808596, 0.040829280853271484, 0.04089206314086914, 0.04097196960449219, 0.04098729705810547, 0.04107468795776367, 0.04119337463378906, 0.04117308807373047, 0.04121331024169922, 0.04113398361206055, 0.04108771133422852, 0.0409535026550293, 0.04130031967163086, 0.041207809448242184, 0.04159078216552734, 0.041766910552978515, 0.04173183822631836, 0.04177123260498047, 0.04184681701660156, 0.041715614318847655, 0.04176700973510742, 0.04167411041259766, 0.041874046325683596, 0.04177305603027344, 0.04168473434448242, 0.0417446403503418, 0.04325580978393555, 0.04167270278930664, 0.04229526519775391, 0.04178083038330078, 0.042840511322021484, 0.04359065628051758, 0.04166329574584961, 0.04171583938598633, 0.04182636642456055, 0.04230144119262695, 0.04182015991210938, 0.04172390365600586, 0.04234652709960938, 0.041770977020263673, 0.04189532852172852, 0.04167283248901367, 0.04177897644042969, 0.04202550506591797, 0.04169334411621094, 0.0417628173828125, 0.042237918853759764, 0.04176899337768555, 0.041578495025634765, 0.042266849517822266, 0.04587046432495117, 0.04177961730957031, 0.04220723342895508, 0.04155187225341797, 0.04184822463989258, 0.04149308776855469, 0.041582561492919924, 0.04239567947387695, 0.04180377578735352, 0.04188774490356445, 0.04262092971801758, 0.0416255989074707, 0.0417259521484375, 0.04148223876953125, 0.04351379013061524, 0.04166662216186524, 0.041404415130615234, 0.04155958557128906, 0.041412639617919925, 0.041320030212402346, 0.04124553680419922, 0.04131164932250977, 0.0417163200378418, 0.041875137329101565, 0.04139235305786133, 0.04144342422485352, 0.04141875076293945, 0.04134297561645508, 0.04148771286010742, 0.04122000122070312, 0.04111027145385742, 0.04134092712402344, 0.04151289749145508, 0.041412033081054685, 0.04159961700439453, 0.04177305603027344, 0.04169113540649414, 0.04173971176147461, 0.04158067321777344, 0.0421558723449707, 0.04183260726928711, 0.041621505737304686, 0.0415939826965332, 0.042621822357177735, 0.04240588760375977, 0.041788894653320315, 0.04158428955078125, 0.04176780700683594, 0.0413941764831543, 0.04171891021728515, 0.041702110290527346, 0.041968894958496095, 0.041745311737060545, 0.04151433563232422, 0.04140099334716797, 0.04169318389892578, 0.04173619079589844, 0.04228643035888672, 0.04189977645874023, 0.04183516693115234, 0.04171596908569336, 0.04177612686157227, 0.04172902297973633, 0.04173004913330078, 0.0414837760925293, 0.04160099029541016, 0.04161180877685547, 0.043265727996826174, 0.04388691329956055, 0.04172528076171875, 0.04184521484375, 0.04179167938232422, 0.041662464141845705, 0.04178700637817383, 0.041516990661621095, 0.041549697875976566, 0.04146201705932617, 0.041394111633300784, 0.041511295318603515, 0.04171980667114258, 0.04179763031005859, 0.041678848266601565, 0.04160102462768555, 0.041640960693359375, 0.041517375946044925, 0.04160550308227539, 0.041443359375, 
0.04154390335083008, 0.041633857727050784, 0.04168703842163086, 0.04150207901000977, 0.041509502410888674, 0.041668159484863285, 0.04177145767211914, 0.04185059356689453, 0.04178963088989258, 0.04154377746582031, 0.041374752044677734, 0.04143155288696289, 0.041288158416748044, 0.041297824859619144, 0.041240352630615235, 0.042524833679199216, 0.04174393463134766, 0.04138848114013672, 0.04145971298217774, 0.04146176147460937, 0.04140188980102539, 0.04137622451782227, 0.04150886535644531, 0.04131804656982422, 0.04339315032958985, 0.04182777786254883, 0.041947166442871095, 0.04165708923339844, 0.04184665679931641, 0.041766944885253905, 0.04146966552734375, 0.04189408111572265, 0.04165840148925781, 0.04147011184692383, 0.0413873291015625, 0.04153414535522461, 0.04152489471435547, 0.041643585205078125, 0.041597728729248044, 0.0417259521484375, 0.0418581771850586, 0.04154025650024414, 0.04154185485839844, 0.041322494506835936, 0.0413675537109375, 0.04158399963378906, 0.04163993453979492, 0.04145420837402344, 0.041404415130615234, 0.04136470413208008, 0.041597728729248044, 0.04147609710693359, 0.04146921539306641, 0.04155839920043945, 0.041656383514404295, 0.04152348709106445, 0.04142655944824219, 0.04146342468261719, 0.041501697540283204, 0.04168220901489258, 0.04194761657714844, 0.04165206527709961, 0.04155615997314453, 0.041688190460205075, 0.041753440856933596, 0.041680736541748045, 0.041799102783203125, 0.0414502067565918, 0.04143539047241211, 0.04136067199707031, 0.041562591552734375, 0.041713600158691404, 0.04153760147094727, 0.04139199829101563, 0.041767040252685544, 0.04155206298828125, 0.04139206314086914, 0.04157632064819336, 0.04217446517944336, 0.04154508972167969, 0.04158118438720703, 0.041441280364990236, 0.04150067138671875, 0.04141670227050781, 0.04158259201049805, 0.041695232391357424, 0.041551456451416016, 0.04165264129638672, 0.041545726776123046, 0.04134089660644531, 0.04157817459106445, 0.04142524719238281, 0.04148223876953125, 0.04152896118164062, 0.041551456451416016, 0.04153731155395508, 0.04151193618774414, 0.04143308639526367, 0.04189574432373047, 0.041631935119628906, 0.04155327987670898, 0.041495166778564456, 0.04135116958618164, 0.041215999603271485, 0.04132361602783203, 0.04130902481079102, 0.04121401596069336, 0.04137472152709961, 0.04149116897583008, 0.041509151458740234, 0.041646080017089845, 0.04132863998413086, 0.041439231872558595, 0.04152524948120117, 0.041420799255371094, 0.04118527984619141, 0.04132863998413086, 0.04125900650024414, 0.0413675537109375, 0.041457313537597656, 0.042084705352783205, 0.04253696060180664, 0.041565567016601565, 0.04181670379638672, 0.04205363082885742, 0.04164198303222656, 0.041232383728027344, 0.04110707092285156, 0.04090099334716797, 0.04080230331420898, 0.04088627243041992, 0.040864990234375, 0.0407949104309082, 0.04093952178955078, 0.04098867034912109, 0.04085321426391601, 0.040828224182128905, 0.04123235321044922, 0.04130099105834961, 0.04086191940307617, 0.04070345687866211, 0.04162358474731445, 0.041076736450195314, 0.043443359375, 0.04140323257446289, 0.041076736450195314, 0.04140828704833984, 0.041326305389404294, 0.04130867385864258, 0.041479969024658205, 0.04140662384033203, 0.04117715072631836, 0.04106854248046875, 0.04105625534057617, 0.041060352325439455, 0.04103577423095703, 0.04108492660522461, 0.041025535583496094, 0.040976383209228515, 0.04091670227050781, 0.0415332145690918, 0.04091107177734375, 0.04096387100219727, 0.041337345123291014, 0.04109900665283203, 0.04262732696533203, 0.041398273468017575, 
0.04101103973388672, 0.04135913467407227, 0.041183361053466795, 0.041453823089599606, 0.04111123275756836, 0.041100704193115234, 0.04078192138671875, 0.040775615692138674, 0.040771617889404296, 0.04081734466552735, 0.0413059196472168, 0.04179180908203125, 0.04178716659545898, 0.04197196960449219, 0.04163987350463867, 0.04173625564575195, 0.04138598251342773, 0.04157030487060547, 0.04190947341918945, 0.041543872833251956, 0.04121782302856445, 0.04128031921386719, 0.04367766571044922, 0.04148227310180664, 0.04111974334716797, 0.04088623809814453, 0.04075113677978515, 0.04100492858886719, 0.041381759643554686, 0.041197822570800784, 0.04097558212280274, 0.0407949104309082, 0.040521728515625, 0.040538047790527346, 0.04066019058227539, 0.04064716720581055, 0.04050495910644531, 0.04201232147216797, 0.0413842887878418, 0.04179558563232422, 0.041797439575195314, 0.04211321640014649, 0.04227859115600586, 0.043055423736572264, 0.04301824188232422, 0.04143692779541015, 0.041707775115966794, 0.04136140823364258, 0.04100259017944336, 0.04166902542114258, 0.04116656112670898, 0.04101763153076172, 0.04076512145996094, 0.04118355178833008, 0.041248767852783204, 0.04183555221557617, 0.042054271697998045, 0.041816062927246093, 0.04214204788208008, 0.041681919097900394, 0.041247455596923825, 0.04117942428588867, 0.04102143859863281, 0.041201663970947267, 0.04103577423095703, 0.04122592163085938, 0.04132675170898437, 0.040900768280029295, 0.041088863372802736, 0.04082089614868164, 0.04102089691162109, 0.04123654556274414, 0.04095024108886719, 0.04426259231567383, 0.04100998306274414, 0.040771583557128906, 0.04094889450073242, 0.04098748779296875, 0.041100353240966794, 0.04105926513671875, 0.04122320175170899, 0.04106953430175781, 0.041166847229003906, 0.04106752014160156, 0.04101011276245117, 0.0409431037902832, 0.04098515319824219, 0.04100505447387695, 0.040771488189697266, 0.04126115036010742, 0.04394803237915039, 0.041422847747802735, 0.04249948883056641, 0.040849567413330075, 0.040632030487060544, 0.04167478561401367, 0.041267200469970705, 0.04157884979248047, 0.04094153594970703, 0.040683841705322264]",tokens/s,24.102612351586114,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = 
scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = 
backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,8221.282304,11251.089408,0.0,10848.567296,10616.027648,s,1,14.7554013671875,14.7554013671875,0.0,14.7554013671875,14.7554013671875,14.7554013671875,14.7554013671875,[14.7554013671875],,kWh,0.0002209302226541543,2.436287976282825e-05,6.9959778189993e-05,0.00031525288060697555,,MB,4064.768,11683.10272,0.0,11265.900544,11070.470656,s,10,3.879580230712891,0.38795802307128907,0.0011890479509772249,0.3883685150146484,0.3889663269042969,0.38932593688964845,0.3896136248779297,"[0.38569003295898435, 0.3864775695800781, 0.38696759033203126, 0.3882878112792969, 0.3875245361328125, 0.38844921875, 0.3887919921875, 0.389685546875, 0.38888641357421877, 0.38881951904296874]",tokens/s,659.8652039036677,kWh,1.137961524022779e-05,1.254076140663074e-06,7.593948382845729e-06,2.0227639763736594e-05,tokens/kWh,12655950.125182072,MB,4069.093376,11685.199872,0.0,11267.997696,11070.473216,s,10,29.413074218749998,2.941307421875,0.003709578984283107,2.941876342773438,2.9450629150390624,2.945871179199219,2.9465177905273436,"[2.933745849609375, 2.940679443359375, 2.93713037109375, 2.93880419921875, 2.944797119140625, 2.942601806640625, 2.942174560546875, 2.941578125, 2.94488330078125, 2.946679443359375]",tokens/s,21.41904635042851,kWh,8.608231636060668e-05,9.495982635104732e-06,5.7154992305154565e-05,0.000152733291300866,tokens/kWh,412483.7451181332,,s,630,29.4103555870056,0.04668310410635813,0.0005255910670798737,0.0466583194732666,0.04706921768188477,0.04724175472259521,0.048495202293395996,"[0.04829600143432617, 0.046514175415039063, 0.046229343414306644, 0.04603305435180664, 0.04623462295532227, 0.04603740692138672, 0.045771358489990234, 0.04579328155517578, 0.046129150390625, 0.04637843322753906, 0.04619244766235352, 0.046760704040527346, 0.04653862380981445, 0.046192798614501956, 0.046122974395751956, 0.04616191864013672, 0.04629449462890625, 0.046120830535888675, 0.04599465560913086, 0.0466431999206543, 0.04670803070068359, 0.04671939086914063, 0.04651651382446289, 0.04636467361450195, 0.04610047912597656, 0.04639744186401367, 0.046473217010498044, 0.046069759368896485, 0.046378273010253906, 0.04660092926025391, 0.046476894378662106, 0.0466981430053711, 0.046932735443115235, 0.04674150466918945, 0.04641334533691406, 0.04662255859375, 0.04647942352294922, 0.046137920379638674, 0.04638105773925781, 0.04660019302368164, 0.046530208587646484, 0.04653910446166992, 0.04682547378540039, 0.046884864807128904, 0.04677632141113281, 0.046820449829101565, 0.04669647979736328, 0.04642873764038086, 0.046897377014160156, 0.04684502410888672, 0.04650086212158203, 0.046721023559570314, 0.04682137680053711, 0.046698497772216796, 0.04672512054443359, 0.046925121307373044, 0.04910559844970703, 
0.04643840026855469, 0.046671520233154296, 0.04653705596923828, 0.046800224304199216, 0.047094337463378905, 0.0469095344543457, 0.04945030212402344, 0.046708927154541016, 0.04609891128540039, 0.04603715133666992, 0.04636876678466797, 0.04570486450195312, 0.05181475067138672, 0.04499987030029297, 0.04573430252075195, 0.04624832153320312, 0.04659817504882813, 0.046029983520507814, 0.046076286315917966, 0.04653641510009766, 0.046285377502441404, 0.04629318237304687, 0.046773887634277346, 0.04653683090209961, 0.046168319702148436, 0.04622454452514648, 0.046793567657470704, 0.04655923080444336, 0.046276256561279296, 0.046592353820800785, 0.046745246887207034, 0.04664297485351562, 0.0469318733215332, 0.04649027252197266, 0.04614963150024414, 0.04642406463623047, 0.04652646255493164, 0.04603696060180664, 0.04611689758300781, 0.046884864807128904, 0.04630697631835937, 0.046428352355957034, 0.046778144836425783, 0.046965118408203124, 0.046542686462402345, 0.046536865234375, 0.046704254150390624, 0.04649203109741211, 0.046714366912841795, 0.04700739288330078, 0.046728031158447265, 0.04670217514038086, 0.04725372695922852, 0.046953983306884765, 0.04758393478393555, 0.04649574279785156, 0.046561279296875, 0.0466165771484375, 0.04693401718139648, 0.04668390274047852, 0.04667391967773438, 0.0469153938293457, 0.046815681457519534, 0.04658995056152344, 0.04704460906982422, 0.04695040130615234, 0.04686643218994141, 0.046833568572998044, 0.04683990478515625, 0.04942233657836914, 0.04676198577880859, 0.04587519836425781, 0.04580556869506836, 0.04603871917724609, 0.04590169525146484, 0.04571590423583984, 0.04631955337524414, 0.046359840393066405, 0.046113601684570314, 0.04618972778320313, 0.04638803100585937, 0.046317569732666014, 0.04597488021850586, 0.04650380706787109, 0.04635084915161133, 0.04623750305175781, 0.04629497528076172, 0.04658771133422852, 0.046461471557617186, 0.04665676879882812, 0.04743008041381836, 0.04673129653930664, 0.04617855834960938, 0.04678255844116211, 0.04662239837646484, 0.046219711303710935, 0.04641177749633789, 0.04650608062744141, 0.04638505554199219, 0.04642816162109375, 0.04669007873535156, 0.04639059066772461, 0.046488544464111325, 0.04674460983276367, 0.04697100830078125, 0.04639004898071289, 0.04655513763427734, 0.046575489044189455, 0.04659622573852539, 0.04673126220703125, 0.04710400009155274, 0.047233024597167966, 0.04712243270874023, 0.046809215545654294, 0.046646686553955076, 0.046524513244628904, 0.04679894256591797, 0.04665727996826172, 0.04689891052246094, 0.04712019348144531, 0.04648819351196289, 0.04659260940551758, 0.04669827270507813, 0.04675318527221679, 0.046672481536865235, 0.046841312408447265, 0.04689974212646485, 0.046729217529296874, 0.04729596710205078, 0.046878719329833986, 0.0469398078918457, 0.04704886245727539, 0.048091583251953125, 0.04640473556518555, 0.046363521575927734, 0.04606771087646484, 0.04585007858276367, 0.04629353713989258, 0.04628585433959961, 0.04609737777709961, 0.04614947128295899, 0.04623756790161133, 0.04603548812866211, 0.046257568359375, 0.046852447509765624, 0.04653875350952148, 0.04605132675170898, 0.04638719940185547, 0.04664934539794922, 0.046114303588867187, 0.04650390243530273, 0.04653110504150391, 0.04660838317871094, 0.04658380889892578, 0.046894367218017576, 0.04673353576660156, 0.04650774383544922, 0.04660713577270508, 0.04670444869995117, 0.04628704071044922, 0.04620431900024414, 0.04658646392822265, 0.04629417419433594, 0.04644905471801758, 0.04682387161254883, 0.046772224426269535, 0.0467432975769043, 
0.046784767150878905, 0.04690124893188476, 0.046514175415039063, 0.04650131225585938, 0.046699073791503905, 0.04652032089233398, 0.046827518463134765, 0.04708508682250977, 0.046928352355957034, 0.04696473693847656, 0.04702207946777344, 0.04698432159423828, 0.04692671966552735, 0.04689920043945312, 0.04646912002563477, 0.046680065155029295, 0.04673126220703125, 0.046746910095214846, 0.04665212631225586, 0.047065086364746093, 0.046827518463134765, 0.047006942749023437, 0.047121185302734375, 0.0471176643371582, 0.04703913497924805, 0.04687036895751953, 0.0468966064453125, 0.04718422317504883, 0.04805686569213867, 0.046181598663330076, 0.046041057586669924, 0.0462564811706543, 0.04610496139526367, 0.04617020797729492, 0.04639539337158203, 0.047232894897460936, 0.04614771270751953, 0.046331455230712894, 0.046322113037109376, 0.04604108810424805, 0.04619417572021484, 0.04626483154296875, 0.04629462432861328, 0.0468504638671875, 0.046657470703125, 0.046569534301757816, 0.0465797119140625, 0.04675958251953125, 0.04655129623413086, 0.04619068908691406, 0.05128963088989258, 0.04571798324584961, 0.046427520751953125, 0.04671142578125, 0.046682113647460936, 0.04661840057373047, 0.046696670532226564, 0.04678451156616211, 0.04664710235595703, 0.046631103515625, 0.04646092987060547, 0.04645280075073242, 0.04637664031982422, 0.04681548690795898, 0.04649574279785156, 0.04681727981567383, 0.046943519592285154, 0.046758079528808595, 0.046836158752441404, 0.046929054260253907, 0.04675884628295898, 0.04671855926513672, 0.04821238327026367, 0.046698497772216796, 0.046413822174072264, 0.046650623321533205, 0.04716550445556641, 0.046610977172851564, 0.046934177398681644, 0.046968223571777344, 0.04665200042724609, 0.04673072052001953, 0.046995998382568356, 0.046626304626464846, 0.04695644760131836, 0.04686211013793945, 0.046771007537841795, 0.047065086364746093, 0.04719804763793945, 0.047115550994873044, 0.047137569427490235, 0.04835184097290039, 0.04641286468505859, 0.046349246978759764, 0.046080001831054686, 0.045856769561767576, 0.046358528137207033, 0.04624492645263672, 0.046140159606933594, 0.04638329696655273, 0.04625408172607422, 0.04628591918945312, 0.046443233489990236, 0.04643859100341797, 0.046369953155517576, 0.04664713668823242, 0.046430206298828124, 0.0464967041015625, 0.04662025451660156, 0.04647164916992187, 0.04671692657470703, 0.046651103973388675, 0.04656771087646484, 0.046607967376708984, 0.04638351821899414, 0.04640768051147461, 0.04667801666259765, 0.0468131217956543, 0.046470623016357425, 0.046700958251953126, 0.04656515121459961, 0.04626668930053711, 0.04656547164916992, 0.046811134338378906, 0.046460769653320313, 0.04639350509643555, 0.04671599960327148, 0.04675267028808594, 0.04673331069946289, 0.046999233245849606, 0.04676217651367188, 0.048410751342773437, 0.04656947326660156, 0.04686438369750977, 0.04684185409545898, 0.046704639434814454, 0.04702544021606445, 0.04688275146484375, 0.04683769607543945, 0.046965599060058594, 0.04697907257080078, 0.046843902587890625, 0.046751552581787106, 0.04665116882324219, 0.04695024108886719, 0.046774463653564455, 0.046258560180664064, 0.04680704116821289, 0.04688000106811523, 0.04687744140625, 0.04700364685058594, 0.04717148971557617, 0.04712403106689453, 0.04849692916870117, 0.04849097442626953, 0.04662435150146484, 0.046093921661376956, 0.04587177658081055, 0.04598515319824219, 0.04611663818359375, 0.04631232070922851, 0.046096511840820316, 0.04637286376953125, 0.04629094314575195, 0.04627865600585938, 0.0463419189453125, 0.047530208587646484, 
0.046159870147705076, 0.04601036834716797, 0.04645862579345703, 0.046573825836181644, 0.046647071838378906, 0.0467355842590332, 0.04678451156616211, 0.04673843383789063, 0.04711244964599609, 0.04705104064941406, 0.04659791946411133, 0.046557888031005856, 0.04648550415039063, 0.04600627136230469, 0.04639641571044922, 0.046601215362548826, 0.04674950408935547, 0.04667724609375, 0.04648236846923828, 0.04693196868896484, 0.04674150466918945, 0.04664476776123047, 0.04692793655395508, 0.046110462188720704, 0.04660086441040039, 0.04693715286254883, 0.046787521362304685, 0.04681011199951172, 0.04710092926025391, 0.047058944702148435, 0.04697087860107422, 0.04683161544799805, 0.04703615951538086, 0.04656083297729492, 0.046871231079101565, 0.04668758392333985, 0.04655904006958008, 0.0468939208984375, 0.04678425598144531, 0.046661888122558594, 0.04700774383544922, 0.046929920196533206, 0.04675993728637695, 0.04675337600708008, 0.047497631072998044, 0.0468109130859375, 0.0470786247253418, 0.04710707092285156, 0.047168544769287106, 0.04705174255371094, 0.04799676895141602, 0.04636483383178711, 0.04616556930541992, 0.04591865539550781, 0.04605094528198242, 0.0463691520690918, 0.04638006210327148, 0.04674012756347656, 0.04618703842163086, 0.04644540786743164, 0.04647417449951172, 0.046411361694335934, 0.04647756958007813, 0.04626243209838867, 0.046279903411865234, 0.046647777557373045, 0.04662713623046875, 0.04635027313232422, 0.046537952423095705, 0.0465437126159668, 0.04663075256347656, 0.0465838737487793, 0.04677846527099609, 0.04639888000488281, 0.046441055297851565, 0.046659168243408204, 0.04667843246459961, 0.04629232025146485, 0.04635919952392578, 0.046561279296875, 0.046488670349121096, 0.04634307098388672, 0.04669615936279297, 0.04686249542236328, 0.04675801467895508, 0.04698112106323242, 0.04680214309692383, 0.046717121124267576, 0.04682198333740235, 0.04677571105957031, 0.046885471343994144, 0.04725350570678711, 0.04699504089355469, 0.046858657836914064, 0.04695449447631836, 0.04684799957275391, 0.04657766342163086, 0.04682547378540039, 0.04690118408203125, 0.046732990264892575, 0.04668460845947266, 0.04693119812011719, 0.04671900939941406, 0.046639328002929685, 0.04693446350097656, 0.04675174331665039, 0.04699545669555664, 0.04698505783081055, 0.04686249542236328, 0.047075294494628904, 0.04726732635498047, 0.047298080444335935, 0.04749414443969727, 0.0482529296875, 0.04642201614379883, 0.04611638259887695, 0.04609276962280273, 0.04594483184814453, 0.04662886428833008, 0.046179454803466795, 0.0458474235534668, 0.04632166290283203, 0.046929920196533206, 0.04665139389038086, 0.04672512054443359, 0.046635009765625, 0.046429759979248045, 0.04653919982910156, 0.04658713531494141, 0.0464719352722168, 0.046243839263916016, 0.04653827285766601, 0.046872638702392576, 0.04671321487426758, 0.04706467056274414, 0.0467685775756836, 0.04639334487915039, 0.046357601165771485, 0.04656793594360351, 0.046504352569580076, 0.046480960845947265, 0.046825408935546875, 0.04690995025634766, 0.04672249603271485, 0.04676051330566406, 0.04689481735229492, 0.04661423873901367, 0.046688480377197264, 0.04681763076782226, 0.046478431701660154, 0.046494529724121096, 0.04681299209594727, 0.047057022094726564, 0.04720655822753906, 0.04724889755249023, 0.04686899185180664, 0.04693302536010742, 0.046817310333251955, 0.04666054534912109, 0.04656947326660156, 0.04689673614501953, 0.046594017028808596, 0.04667596817016602, 0.046987617492675784, 0.04716553497314453, 0.04718307113647461, 0.04712323379516602, 0.04698051071166992, 
0.0469692497253418, 0.04698336029052735, 0.046931072235107424, 0.04698406219482422, 0.04711199951171875, 0.04695391845703125, 0.047128894805908206, 0.04729651260375976, 0.049423583984375, 0.046863136291503904, 0.0461143684387207, 0.04599443054199219, 0.04634979248046875, 0.046217025756835936, 0.0458221435546875, 0.04631196975708008, 0.046534656524658206, 0.04654883193969726, 0.046543006896972654, 0.046508033752441405, 0.046446273803710934, 0.04648505783081055, 0.04655795288085938, 0.046233505249023435, 0.046336063385009764, 0.04646710586547852, 0.046548065185546876, 0.046535457611083984, 0.046983295440673825, 0.04687577438354492, 0.04660831832885742, 0.0465847053527832, 0.04685120010375977, 0.04669945526123047, 0.046704063415527346, 0.04653878402709961, 0.046714622497558596, 0.04644124984741211, 0.046745376586914064, 0.04666799926757813, 0.04632073593139648, 0.04641788864135742, 0.04682841491699219, 0.04682150268554688, 0.04680409622192383, 0.0468078727722168, 0.04660224151611328, 0.04668822479248047, 0.04706854248046875, 0.04689168167114258, 0.04682342529296875, 0.046884159088134765, 0.046905792236328125, 0.04687283325195313, 0.04692172622680664, 0.04688016128540039, 0.04698992156982422, 0.046855327606201175, 0.04713558578491211, 0.04704051208496094, 0.04698316955566406, 0.04688876724243164, 0.04687686538696289, 0.04705043029785156, 0.04703263854980469, 0.047527935028076174, 0.04738396835327149, 0.04708822250366211, 0.047167198181152344, 0.04733161544799805, 0.04726784133911133]",tokens/s,21.421026282264773,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 
133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1843.441664,2768.109568,0.0,2365.587456,2314.318336,s,1,9.0314384765625,9.0314384765625,0.0,9.0314384765625,9.0314384765625,9.0314384765625,9.0314384765625,[9.0314384765625],,kWh,5.576951102924189e-05,6.143028031684534e-06,1.747556953599072e-05,7.938810859691714e-05,,MB,1802.203136,3099.459584,0.0,2682.257408,2607.60832,s,10,0.5094228477478027,0.050942284774780265,0.000164140763643313,0.0509212646484375,0.05103930244445801,0.05121025829315186,0.051347022972106936,"[0.0513812141418457, 0.050778465270996095, 0.050880321502685545, 0.05085750579833984, 0.051001312255859375, 0.05083763122558594, 0.05079107284545899, 0.05096323013305664, 0.05096988677978516, 0.05096220779418945]",tokens/s,5025.294823971786,kWh,1.5732816776659665e-06,1.7349392900307585e-07,1.0388986805806188e-06,2.785674287249661e-06,tokens/kWh,91898755.41865762,MB,1802.203136,3099.459584,0.0,2682.257408,2607.61088,s,10,16.15611767578125,1.615611767578125,0.011111971023395102,1.6170904541015627,1.62726630859375,1.6286459716796875,1.6297497021484375,"[1.610062255859375, 1.5920372314453124, 1.6093472900390624, 1.6120970458984376, 1.6062078857421875, 1.626959716796875, 1.630025634765625, 1.6220838623046876, 1.6231119384765624, 1.624184814453125]",tokens/s,38.99451666809771,kWh,4.756923224399482e-05,5.246551573431485e-06,2.2803148170818837e-05,7.561893198824516e-05,tokens/kWh,833124.6996425876,,s,630,16.15389964866638,0.025641110553438703,0.0006638667839047792,0.025539455413818357,0.025951209449768066,0.02636874895095825,0.029257596092224142,"[0.025554975509643554, 0.025382911682128906, 0.025636320114135743, 0.025473567962646486, 0.025359712600708007, 0.02520319938659668, 
0.025341503143310545, 0.025606399536132814, 0.02529110336303711, 0.025198591232299804, 0.02512895965576172, 0.025296096801757813, 0.025166624069213866, 0.025239551544189453, 0.025364479064941405, 0.025204736709594725, 0.025143295288085937, 0.029410688400268555, 0.025628576278686522, 0.02553932762145996, 0.025427936553955078, 0.0254334716796875, 0.025604991912841796, 0.026100608825683595, 0.028091264724731446, 0.026148832321166993, 0.02590105628967285, 0.028327968597412108, 0.025540607452392578, 0.025577472686767577, 0.025573375701904297, 0.025409536361694338, 0.025350143432617187, 0.02570240020751953, 0.025421472549438478, 0.025373023986816408, 0.02524569511413574, 0.025369983673095703, 0.025249664306640624, 0.02525823974609375, 0.02522163200378418, 0.02529689598083496, 0.02529280090332031, 0.025470176696777345, 0.02529795265197754, 0.025230623245239257, 0.02535036849975586, 0.025449951171875, 0.025445152282714843, 0.025536512374877928, 0.02529427146911621, 0.02525446319580078, 0.025181791305541993, 0.02530124855041504, 0.025196224212646483, 0.025329280853271484, 0.025613151550292968, 0.025433887481689454, 0.025241792678833006, 0.025327648162841797, 0.025255935668945313, 0.02534604835510254, 0.02520591926574707, 0.02554252815246582, 0.025452768325805664, 0.025509792327880858, 0.025296319961547853, 0.02553286361694336, 0.02535321617126465, 0.025218048095703126, 0.02533990478515625, 0.025297920227050782, 0.025084928512573244, 0.02513907241821289, 0.025146656036376953, 0.025074527740478515, 0.025020095825195314, 0.02520848083496094, 0.025094432830810545, 0.025137535095214845, 0.025140735626220705, 0.025035263061523438, 0.02509552001953125, 0.02516649627685547, 0.025163616180419922, 0.02523971176147461, 0.025286048889160157, 0.025368671417236328, 0.025086336135864258, 0.02508559989929199, 0.02549603271484375, 0.02531123161315918, 0.025104639053344726, 0.02537446403503418, 0.02532147216796875, 0.025188352584838865, 0.024968767166137697, 0.025176511764526368, 0.025525663375854494, 0.02523535919189453, 0.025600799560546873, 0.025216928482055666, 0.025364383697509766, 0.025143104553222655, 0.02506985664367676, 0.025309183120727538, 0.0251494083404541, 0.02504889678955078, 0.02530121612548828, 0.025405439376831054, 0.025435903549194335, 0.025058847427368164, 0.025078496932983398, 0.025433504104614257, 0.025506399154663087, 0.02553753662109375, 0.025539583206176757, 0.02547030448913574, 0.025391775131225584, 0.025177824020385743, 0.025076192855834963, 0.025124671936035157, 0.025192447662353516, 0.025829376220703124, 0.025208160400390624, 0.02532419204711914, 0.025854560852050783, 0.02523360061645508, 0.025069568634033205, 0.025116672515869142, 0.026666431427001952, 0.025267776489257814, 0.025244064331054687, 0.02504313659667969, 0.025049375534057616, 0.025100223541259764, 0.025241600036621094, 0.02519171142578125, 0.025297983169555664, 0.02526608085632324, 0.02555897521972656, 0.025460575103759767, 0.025335136413574218, 0.02522195243835449, 0.02523766326904297, 0.025073503494262694, 0.02511052894592285, 0.025075712203979493, 0.025350143432617187, 0.025056671142578125, 0.025098848342895507, 0.025135103225708007, 0.025274431228637696, 0.025137088775634767, 0.025251840591430662, 0.02532147216796875, 0.025650848388671876, 0.02584815979003906, 0.025609792709350584, 0.025670368194580077, 0.02665238380432129, 0.025738975524902345, 0.02562220764160156, 0.025666303634643554, 0.025579359054565428, 0.027954303741455078, 0.027660287857055665, 0.02556198310852051, 0.025446495056152343, 0.027240224838256836, 
0.026191999435424804, 0.025650943756103516, 0.025628320693969725, 0.02544905662536621, 0.025464832305908205, 0.02548121643066406, 0.025450496673583983, 0.025657567977905273, 0.025644832611083985, 0.02612656021118164, 0.025417184829711913, 0.025221439361572267, 0.025624544143676757, 0.025507583618164062, 0.025454879760742188, 0.02524492835998535, 0.025281280517578126, 0.025126976013183595, 0.02527225685119629, 0.025606271743774413, 0.025235456466674806, 0.02520265579223633, 0.02522319984436035, 0.02513920021057129, 0.025276191711425783, 0.025286880493164063, 0.02521673583984375, 0.0257989444732666, 0.025289823532104492, 0.025186943054199218, 0.025387039184570314, 0.025138559341430663, 0.02556198310852051, 0.02591859245300293, 0.0295118408203125, 0.025297632217407228, 0.0251693115234375, 0.02547567939758301, 0.0251146240234375, 0.025238975524902344, 0.025882368087768556, 0.025215520858764648, 0.025155872344970704, 0.025726688385009765, 0.025530656814575194, 0.025490560531616212, 0.02525484848022461, 0.025345375061035156, 0.02554038429260254, 0.02542880058288574, 0.02520479965209961, 0.02514259147644043, 0.025037120819091797, 0.025106496810913086, 0.025176319122314453, 0.025133056640625, 0.025126911163330077, 0.025479167938232423, 0.025415679931640626, 0.025374496459960937, 0.025413856506347657, 0.02545254325866699, 0.025273408889770508, 0.025233760833740234, 0.025283039093017576, 0.02526630401611328, 0.02535206413269043, 0.02614630317687988, 0.02537651252746582, 0.025392192840576172, 0.025437311172485353, 0.02566828727722168, 0.0256484489440918, 0.026626752853393554, 0.03148956871032715, 0.02593235206604004, 0.02576688003540039, 0.025658239364624025, 0.025462848663330078, 0.0256975040435791, 0.02550454330444336, 0.02574127960205078, 0.025986623764038087, 0.02555254364013672, 0.025346879959106446, 0.025298751831054688, 0.02553251266479492, 0.02535433578491211, 0.025590848922729493, 0.025391359329223633, 0.025268543243408204, 0.02525404739379883, 0.025237695693969726, 0.025204736709594725, 0.025146560668945314, 0.02517888069152832, 0.025231424331665038, 0.02520230484008789, 0.025302623748779295, 0.026015743255615235, 0.02535513687133789, 0.025153472900390626, 0.025204511642456056, 0.02522502326965332, 0.026120576858520508, 0.02523664093017578, 0.025242240905761718, 0.025229536056518554, 0.025308256149291993, 0.025152416229248048, 0.025262176513671877, 0.025644927978515624, 0.025688095092773436, 0.02976972770690918, 0.02569011116027832, 0.025642431259155274, 0.02567625617980957, 0.025563167572021483, 0.02576799964904785, 0.02566886329650879, 0.02572159957885742, 0.025554943084716796, 0.025632255554199217, 0.02562873649597168, 0.025441856384277345, 0.025186239242553712, 0.025207744598388673, 0.025218463897705077, 0.02544246482849121, 0.02551785659790039, 0.025461408615112306, 0.025234432220458985, 0.025355104446411134, 0.026912927627563477, 0.025201663970947266, 0.02516080093383789, 0.02514678382873535, 0.02522572708129883, 0.025219072341918947, 0.025112319946289062, 0.025223424911499023, 0.025178112030029298, 0.02524278450012207, 0.025825567245483398, 0.025258047103881836, 0.026087551116943358, 0.025666271209716797, 0.02553772735595703, 0.02569215965270996, 0.025486112594604492, 0.026117792129516603, 0.030194080352783204, 0.025702207565307618, 0.025510080337524416, 0.025346336364746095, 0.025541727066040038, 0.02561199951171875, 0.025365440368652344, 0.02573030471801758, 0.025551584243774413, 0.025753599166870117, 0.025825279235839844, 0.02575564765930176, 0.025568416595458984, 
0.025479391098022462, 0.025502336502075194, 0.02557257652282715, 0.025481248855590822, 0.025485439300537108, 0.025427743911743163, 0.02542854309082031, 0.025388832092285155, 0.025304800033569337, 0.025373472213745116, 0.0254935359954834, 0.02560406494140625, 0.025397247314453125, 0.025390464782714842, 0.025436447143554686, 0.025829727172851563, 0.026572799682617186, 0.02592972755432129, 0.026097471237182618, 0.026316415786743163, 0.02637606430053711, 0.025706367492675783, 0.025680479049682618, 0.025872095108032227, 0.025727487564086913, 0.025831424713134765, 0.02602614402770996, 0.02888278388977051, 0.02614236831665039, 0.025808576583862305, 0.026438304901123047, 0.02599078369140625, 0.025923967361450195, 0.025665023803710937, 0.025604639053344726, 0.02575152015686035, 0.025599744796752928, 0.025764095306396485, 0.0256265926361084, 0.025691808700561522, 0.02558195114135742, 0.025475072860717773, 0.025472160339355468, 0.02554147148132324, 0.025894912719726562, 0.02549068832397461, 0.02548931121826172, 0.025567487716674806, 0.02567206382751465, 0.025491680145263672, 0.02553241539001465, 0.02533171272277832, 0.025570720672607423, 0.02618003273010254, 0.025532447814941406, 0.02550592041015625, 0.02555084800720215, 0.025619712829589844, 0.025668352127075196, 0.025530080795288086, 0.025522016525268556, 0.025403263092041016, 0.027214431762695314, 0.025632736206054687, 0.028835840225219726, 0.030126079559326172, 0.025823232650756835, 0.02569830322265625, 0.02566531181335449, 0.025718687057495117, 0.02618400001525879, 0.02557542419433594, 0.025554943084716796, 0.025479135513305665, 0.025622047424316407, 0.02562713623046875, 0.02637648010253906, 0.02574300765991211, 0.025657024383544922, 0.025479551315307616, 0.025604095458984375, 0.025540128707885742, 0.025546527862548827, 0.025465087890625, 0.025550271987915037, 0.02559404754638672, 0.025633216857910156, 0.025718879699707032, 0.025606559753417968, 0.025548671722412108, 0.02554265594482422, 0.02549065589904785, 0.026178272247314453, 0.025806880950927733, 0.02590444755554199, 0.02584239959716797, 0.025915391921997072, 0.025866239547729493, 0.0256092472076416, 0.025572383880615234, 0.02551100730895996, 0.025725088119506835, 0.02565315246582031, 0.02544233512878418, 0.025725055694580078, 0.028067935943603517, 0.02755436706542969, 0.026952959060668944, 0.025617151260375975, 0.02555423927307129, 0.025725055694580078, 0.025662015914916993, 0.025795711517333984, 0.025860895156860353, 0.025661535263061523, 0.02578358459472656, 0.02604310417175293, 0.02570579147338867, 0.02569696044921875, 0.02545180892944336, 0.025725664138793944, 0.025663135528564453, 0.025712799072265625, 0.02565497589111328, 0.02582579231262207, 0.025777984619140625, 0.02558585548400879, 0.02548067283630371, 0.026247711181640626, 0.025571264266967774, 0.025485376358032226, 0.025632768630981444, 0.025707712173461916, 0.025674367904663088, 0.02549295997619629, 0.02567651176452637, 0.025677600860595704, 0.025706367492675783, 0.025441823959350587, 0.025453407287597655, 0.02553788757324219, 0.025599807739257813, 0.025489952087402342, 0.027807136535644532, 0.027714208602905275, 0.025868255615234374, 0.025565439224243165, 0.025556320190429686, 0.02552662467956543, 0.025454912185668945, 0.025338207244873047, 0.02556096076965332, 0.02552835273742676, 0.02546233558654785, 0.02538844871520996, 0.025594655990600585, 0.025581567764282227, 0.02592767906188965, 0.025833471298217774, 0.025911296844482422, 0.02589286422729492, 0.025751264572143554, 0.025635103225708007, 0.025607456207275392, 
0.02573695945739746, 0.025487712860107422, 0.02551251220703125, 0.026218015670776366, 0.025487903594970704, 0.025577280044555666, 0.025834911346435546, 0.02557939147949219, 0.02561916732788086, 0.0260067195892334, 0.02689311981201172, 0.025577280044555666, 0.025790687561035155, 0.025650144577026367, 0.02567065620422363, 0.025860095977783205, 0.025634815216064453, 0.025683679580688477, 0.026175647735595702, 0.025950143814086914, 0.025837568283081053, 0.025849184036254882, 0.025713504791259764, 0.025605791091918944, 0.02555120086669922, 0.025488447189331055, 0.025457599639892577, 0.02548310470581055, 0.02557145690917969, 0.02541324806213379, 0.02547881507873535, 0.025514720916748047, 0.0257322883605957, 0.025604223251342772, 0.02565177536010742, 0.025900896072387696, 0.025935392379760742, 0.02568272018432617, 0.025636863708496094, 0.025932960510253907, 0.026614112854003905, 0.02576972770690918, 0.025711360931396483, 0.02570172882080078, 0.025733760833740234, 0.025538047790527343, 0.02556368064880371, 0.025605247497558593, 0.025830432891845702, 0.02676675224304199, 0.02588857650756836, 0.025794687271118163, 0.025739168167114256, 0.025487360000610353, 0.02565795135498047, 0.025734624862670898, 0.025707040786743164, 0.026023935317993165, 0.02634137535095215, 0.025894912719726562, 0.025813024520874025, 0.02561395263671875, 0.02555939292907715, 0.025559072494506837, 0.025577600479125977, 0.02541916847229004, 0.02556153678894043, 0.02588057518005371, 0.025821184158325194, 0.025907199859619142, 0.02565385627746582, 0.025479167938232423, 0.025638912200927736, 0.025517311096191406, 0.025844064712524414, 0.025931392669677734, 0.025785120010375976, 0.026359807968139647, 0.025757055282592773, 0.02571676826477051, 0.02564566421508789, 0.025608192443847655, 0.025660512924194336, 0.025767839431762696, 0.025607168197631838, 0.025640031814575196, 0.025960800170898437, 0.02563539123535156, 0.02540278434753418, 0.025399007797241212, 0.02545961570739746, 0.025526304244995118, 0.025536447525024413, 0.025788415908813478, 0.02977382469177246, 0.02631270408630371, 0.025806528091430664, 0.025757951736450194, 0.025654367446899414, 0.025439199447631837, 0.025417472839355467, 0.025481472015380858, 0.025298944473266603, 0.025392831802368163, 0.025284576416015624, 0.02546928024291992, 0.02607923126220703, 0.025622528076171876, 0.02548940849304199, 0.02556662368774414, 0.02560438346862793, 0.025727296829223634, 0.025665056228637694, 0.02558950424194336, 0.025688800811767578, 0.02572697639465332, 0.026355136871337892, 0.025649728775024413, 0.025483264923095703, 0.025460224151611328, 0.025790912628173828, 0.02600559997558594, 0.028651487350463866, 0.025843711853027345, 0.02566147232055664, 0.025777408599853516, 0.025567743301391603, 0.02556540870666504, 0.025466495513916016, 0.025462207794189454, 0.025564191818237304, 0.025575199127197266]",tokens/s,38.999870848647426,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1172.3776,1109.262336,0.0,706.740224,681.6384,s,1,8.13638427734375,8.13638427734375,0.0,8.13638427734375,8.13638427734375,8.13638427734375,8.13638427734375,[8.13638427734375],,kWh,3.217124067499147e-05,3.541478614607225e-06,1.0050563596014639e-05,4.576328288561333e-05,,MB,1481.433088,1413.349376,0.0,996.1472,949.238272,s,10,0.34973065567016604,0.03497306556701659,0.000400760519890848,0.03498417663574219,0.035526938629150386,0.0355604606628418,0.035587278289794924,"[0.035085376739501954, 0.03483337783813477, 0.03515129470825195, 0.0355939826965332, 0.035519489288330076, 0.03450864028930664, 0.03451801681518555, 0.03488297653198242, 0.03438310241699219, 0.03525439834594726]",tokens/s,7319.918796064474,kWh,8.555630113025822e-07,9.435329168395995e-08,5.256635029677828e-07,1.4755798059543247e-06,tokens/kWh,173491124.61893114,MB,1502.16704,1421.737984,0.0,1004.535808,949.240832,s,10,17.58772900390625,1.7587729003906252,0.014309907393559867,1.7535071411132814,1.7797772338867188,1.7799074768066405,1.780011671142578,"[1.7414859619140626, 1.7516700439453126, 1.7760692138671874, 1.7800377197265624, 1.75534423828125, 1.74103076171875, 1.7500540771484374, 1.76288037109375, 1.7494083251953125, 1.779748291015625]",tokens/s,35.82042911055068,kWh,3.7213702561613084e-05,4.104216324588106e-06,1.5281126920232873e-05,5.659904580643406e-05,tokens/kWh,1113092.9700733274,,s,630,17.585252761840813,0.027913099621969556,0.0005253528450554025,0.027864671707153323,0.028449827957153322,0.028616238594055175,0.029959557762146004,"[0.027993919372558594, 0.027707136154174805, 0.02775904083251953, 0.027623424530029295, 0.027604991912841798, 0.027577760696411133, 0.027920064926147462, 0.02764687919616699, 0.02768403244018555, 0.0276693115234375, 0.02767395210266113, 0.027678752899169923, 0.027896448135375975, 0.027696767807006837, 0.027701631546020507, 0.02772921562194824, 0.028506175994873047, 0.028480127334594728, 0.02822889518737793, 0.028449504852294923, 0.027875328063964845, 0.028096511840820314, 0.027810815811157227, 0.02803993606567383, 0.02788991928100586, 0.028103967666625977, 0.027929311752319337, 0.027885087966918944, 0.028112991333007813, 0.028617088317871093, 0.027934207916259765, 0.02742937660217285, 0.0272992000579834, 0.027703935623168946, 0.027608831405639647, 0.02789606475830078, 0.027678720474243163, 0.02774220848083496, 0.027409599304199218, 0.027263551712036132, 0.02734467124938965, 0.02734886360168457, 0.027380319595336915, 0.02716057586669922, 0.02712918472290039, 0.02734707260131836, 0.027117631912231446, 0.027101663589477538, 0.027149408340454102, 0.027163135528564454, 0.027203744888305664, 0.02753561592102051, 0.02750070381164551, 0.027371360778808595, 0.027340799331665038, 0.027207616806030274, 0.02742851257324219, 0.027199392318725587, 0.027275423049926757, 0.027382368087768554, 0.027305728912353517, 0.027502016067504884, 0.027222591400146483, 0.027187488555908204, 0.027778112411499023, 0.027245439529418946, 0.027362464904785156, 0.027118783950805664, 0.027061983108520506, 0.02710483169555664, 0.02713209533691406, 0.026970367431640625, 0.027030912399291993, 0.02742255973815918, 0.02727510452270508, 0.027077568054199218, 0.02732784080505371, 0.02741321563720703, 0.027285215377807617, 0.027285728454589844, 0.027237951278686525, 0.027113248825073242, 0.02714067268371582, 0.027175008773803713, 0.027348991394042968, 0.027441343307495116, 0.02722969627380371, 0.027105600357055663, 0.027670495986938475, 0.027207040786743165, 
0.027329120635986328, 0.03029408073425293, 0.027883167266845702, 0.027612735748291015, 0.027648319244384767, 0.027903839111328124, 0.027742847442626953, 0.027800735473632814, 0.028267360687255858, 0.028178112030029297, 0.02841779136657715, 0.029064992904663085, 0.02893084716796875, 0.02957107162475586, 0.028497919082641602, 0.028291072845458985, 0.02853193664550781, 0.028427040100097656, 0.028388799667358397, 0.028452735900878906, 0.02847567939758301, 0.028452959060668945, 0.028039487838745117, 0.027955039978027344, 0.027974912643432617, 0.027956127166748047, 0.02791164779663086, 0.02792019271850586, 0.02780022430419922, 0.02781804847717285, 0.02775654411315918, 0.028073984146118162, 0.02779702377319336, 0.0279003849029541, 0.027735584259033202, 0.02885478401184082, 0.027784767150878905, 0.027712383270263673, 0.027758079528808592, 0.02783292770385742, 0.028278751373291014, 0.027962528228759765, 0.028310400009155273, 0.028041215896606447, 0.028092416763305664, 0.02842844772338867, 0.029219839096069337, 0.028148384094238282, 0.028145856857299804, 0.02817638397216797, 0.02860032081604004, 0.028479040145874022, 0.0285413761138916, 0.02845699119567871, 0.02857980728149414, 0.028374624252319337, 0.028530464172363282, 0.028508800506591797, 0.028073728561401366, 0.02825587272644043, 0.02789344024658203, 0.02832009506225586, 0.027992671966552734, 0.027873056411743164, 0.028100032806396485, 0.027904800415039063, 0.02778848075866699, 0.028070560455322267, 0.02780931282043457, 0.027748992919921875, 0.028231456756591798, 0.028411935806274415, 0.027844480514526367, 0.02786911964416504, 0.027909952163696287, 0.02801273536682129, 0.028032928466796874, 0.027804128646850584, 0.027805696487426756, 0.027984128952026368, 0.027850591659545898, 0.028288000106811522, 0.02806399917602539, 0.02819715118408203, 0.028059392929077148, 0.02808310317993164, 0.028020448684692383, 0.028214847564697266, 0.028341791152954102, 0.028447647094726563, 0.02834636878967285, 0.02854911994934082, 0.02870681571960449, 0.028794527053833008, 0.028501728057861327, 0.02857638359069824, 0.02835171127319336, 0.02848204803466797, 0.02822083282470703, 0.028351200103759765, 0.02818771171569824, 0.02800147247314453, 0.028296319961547852, 0.028054079055786132, 0.028247615814208985, 0.028207616806030275, 0.02818662452697754, 0.02792448043823242, 0.027868511199951172, 0.027924736022949218, 0.027873983383178712, 0.028382688522338866, 0.028449024200439453, 0.028132768630981447, 0.028064224243164064, 0.028052928924560547, 0.028862720489501954, 0.028239360809326174, 0.027902816772460936, 0.028045375823974608, 0.028165504455566405, 0.02802140808105469, 0.028203008651733398, 0.028203008651733398, 0.028241567611694336, 0.028161632537841798, 0.028535776138305664, 0.029062944412231444, 0.02853273582458496, 0.028409311294555664, 0.028254304885864258, 0.02832649612426758, 0.028362592697143554, 0.028347936630249024, 0.028467679977416994, 0.02855731201171875, 0.028292160034179687, 0.028095264434814454, 0.028217504501342774, 0.030248960494995116, 0.029665279388427734, 0.028116960525512696, 0.028481504440307618, 0.02790153694152832, 0.02808470344543457, 0.028867967605590822, 0.028070528030395506, 0.028030975341796875, 0.028078367233276367, 0.027966400146484376, 0.02804374313354492, 0.028311647415161133, 0.028366655349731446, 0.028241823196411133, 0.02798028755187988, 0.028056768417358397, 0.027976512908935547, 0.027867136001586915, 0.027800832748413086, 0.02802560043334961, 0.027865087509155274, 0.027981151580810548, 0.02844700813293457, 0.02836992073059082, 
0.028320480346679687, 0.028230688095092774, 0.028101600646972657, 0.028182559967041016, 0.028251775741577147, 0.028125343322753907, 0.028327968597412108, 0.028272224426269532, 0.02836128044128418, 0.029766687393188475, 0.029630624771118164, 0.02863577651977539, 0.02811087989807129, 0.028125343322753907, 0.028004352569580077, 0.028229631423950196, 0.027910144805908203, 0.028303136825561525, 0.028470943450927735, 0.027914751052856446, 0.02801446342468262, 0.027951295852661134, 0.028395519256591797, 0.02801161575317383, 0.028072799682617187, 0.028180639266967775, 0.028024288177490236, 0.02788761520385742, 0.02774003219604492, 0.02796121597290039, 0.027651872634887695, 0.027896223068237306, 0.02781439971923828, 0.028240991592407227, 0.02767660713195801, 0.027726816177368163, 0.0279466552734375, 0.02764771270751953, 0.02736137580871582, 0.02723484802246094, 0.02728473663330078, 0.027237119674682616, 0.02736128044128418, 0.027440736770629883, 0.027124128341674804, 0.027167903900146485, 0.027165536880493165, 0.027256288528442384, 0.027134687423706054, 0.027300895690917967, 0.02729792022705078, 0.02743510437011719, 0.027441728591918946, 0.027662111282348634, 0.027389663696289063, 0.027396608352661132, 0.02721177673339844, 0.027364479064941407, 0.027236736297607422, 0.027392511367797853, 0.027299327850341795, 0.0273305606842041, 0.027322368621826174, 0.027340799331665038, 0.027265024185180665, 0.02731772804260254, 0.027396543502807617, 0.02751456069946289, 0.02765158462524414, 0.027314592361450195, 0.02751535987854004, 0.027430944442749024, 0.02794268798828125, 0.027654367446899412, 0.0279300479888916, 0.027882144927978515, 0.027977088928222656, 0.027783231735229494, 0.031492191314697264, 0.028143999099731445, 0.027894880294799803, 0.028187551498413087, 0.027690208435058594, 0.027796255111694337, 0.027572223663330078, 0.027798688888549805, 0.027937631607055664, 0.027666431427001953, 0.027877376556396483, 0.027954463958740235, 0.027714271545410157, 0.027830047607421873, 0.027461856842041017, 0.02750985527038574, 0.027657119750976563, 0.027730016708374022, 0.027668384552001952, 0.027502592086791993, 0.027684064865112306, 0.02794576072692871, 0.027711488723754882, 0.027737247467041017, 0.02744563293457031, 0.02735487937927246, 0.027447296142578126, 0.02722006416320801, 0.027152992248535155, 0.027187360763549804, 0.027230079650878907, 0.02726911926269531, 0.027410432815551757, 0.027251743316650392, 0.02721686363220215, 0.027082496643066407, 0.02714035224914551, 0.027335807800292967, 0.027302783966064455, 0.02792038345336914, 0.027421695709228516, 0.027775999069213866, 0.027379711151123046, 0.027203264236450194, 0.02750454330444336, 0.027767423629760743, 0.027615232467651366, 0.027698976516723633, 0.027717632293701173, 0.02757040023803711, 0.027665855407714844, 0.02806227111816406, 0.027744064331054686, 0.027715232849121092, 0.02782467269897461, 0.027620960235595703, 0.027719968795776366, 0.027582592010498046, 0.027594751358032226, 0.027578367233276366, 0.027717632293701173, 0.027844608306884764, 0.027672767639160156, 0.027624288558959962, 0.02757526397705078, 0.02769715118408203, 0.02755583953857422, 0.02762678337097168, 0.028141504287719728, 0.028145471572875978, 0.027673791885375977, 0.027690784454345703, 0.02775040054321289, 0.02783171272277832, 0.02829955291748047, 0.027637632369995117, 0.02785228729248047, 0.02782489585876465, 0.02793199920654297, 0.027962303161621092, 0.027908000946044922, 0.02803875160217285, 0.02784009552001953, 0.02790073585510254, 0.027674623489379883, 0.02751692771911621, 
0.02769273567199707, 0.02775196838378906, 0.028338623046875, 0.027934431076049804, 0.027836448669433595, 0.027828832626342774, 0.027673824310302735, 0.0277860164642334, 0.02775593566894531, 0.027716192245483398, 0.027665407180786132, 0.027614208221435548, 0.02774963188171387, 0.02778009605407715, 0.027638879776000977, 0.027729568481445314, 0.02760601615905762, 0.027596799850463868, 0.02794905662536621, 0.028231136322021483, 0.027863584518432617, 0.02772687911987305, 0.027697856903076173, 0.02776473617553711, 0.030619104385375975, 0.027863744735717774, 0.02801033592224121, 0.02792588806152344, 0.027961984634399414, 0.02781123161315918, 0.02798396873474121, 0.027843008041381834, 0.028610624313354493, 0.02777497673034668, 0.027885568618774413, 0.027801631927490234, 0.02782169532775879, 0.028403615951538085, 0.02803321647644043, 0.02788083267211914, 0.027827072143554687, 0.027708831787109374, 0.027774816513061525, 0.027859872817993164, 0.027719520568847657, 0.027673919677734374, 0.02792518424987793, 0.02778678321838379, 0.027738048553466798, 0.027733695983886718, 0.027980640411376954, 0.027648000717163085, 0.02775359916687012, 0.02794175910949707, 0.02775654411315918, 0.02806755256652832, 0.02803536033630371, 0.029105344772338868, 0.027928703308105467, 0.027637887954711914, 0.027542335510253906, 0.02763132858276367, 0.028170272827148436, 0.030996320724487304, 0.028483743667602538, 0.02785411262512207, 0.028299999237060548, 0.027836416244506838, 0.027864255905151368, 0.027841087341308593, 0.027771135330200196, 0.027653535842895507, 0.027830879211425782, 0.028520448684692383, 0.027994144439697267, 0.027991231918334962, 0.027828031539916993, 0.02783535957336426, 0.027842559814453126, 0.02771958351135254, 0.027674720764160155, 0.0277258243560791, 0.027752447128295898, 0.027711488723754882, 0.027399871826171877, 0.02730006408691406, 0.027394048690795897, 0.027312128067016602, 0.027262432098388672, 0.027916479110717773, 0.027328863143920898, 0.027707391738891602, 0.02765779113769531, 0.028074432373046875, 0.027881471633911133, 0.027815359115600586, 0.027634239196777342, 0.02759440040588379, 0.02789321517944336, 0.027661184310913085, 0.02757427215576172, 0.027580064773559572, 0.027578271865844727, 0.027543359756469727, 0.0275599365234375, 0.02758428764343262, 0.027947872161865235, 0.02778112030029297, 0.027910432815551757, 0.02798348808288574, 0.02778041648864746, 0.027920576095581056, 0.027828832626342774, 0.027889663696289063, 0.027807743072509765, 0.02791219139099121, 0.027694847106933592, 0.027801151275634765, 0.027696992874145506, 0.027904863357543944, 0.027696416854858397, 0.027914976119995116, 0.027778112411499023, 0.027867935180664063, 0.027791040420532227, 0.02791881561279297, 0.027772607803344725, 0.027936159133911134, 0.028008575439453124, 0.02818076705932617, 0.027980031967163085, 0.027779327392578126, 0.027677791595458984, 0.027590944290161134, 0.02742927932739258, 0.027358943939208985, 0.02742323112487793, 0.02795315170288086, 0.02759065628051758, 0.027805696487426756, 0.027691007614135742, 0.02793267250061035, 0.027809791564941407, 0.02790755271911621, 0.02780828857421875, 0.028100608825683594, 0.027939935684204102, 0.028105791091918946, 0.027999103546142576, 0.028047359466552735, 0.027962591171264647, 0.028123455047607424, 0.02783020782470703, 0.027637535095214844, 0.02775948715209961, 0.027592416763305663, 0.02765020751953125, 0.027586559295654296, 0.027639711380004883, 0.02755183982849121, 0.027596799850463868, 0.027467775344848632, 0.02749849510192871, 0.02938275146484375, 
0.028225439071655273, 0.02796659278869629, 0.02788150405883789, 0.027779935836791992, 0.027883295059204102, 0.02790403175354004, 0.027860671997070312, 0.028099071502685546, 0.028438528060913085, 0.02833558464050293, 0.028543071746826174, 0.028420543670654295, 0.028337631225585937, 0.02861520004272461, 0.028641151428222655, 0.02865951919555664, 0.028494144439697267, 0.02863532829284668, 0.028256063461303712, 0.02834774398803711, 0.028354911804199218, 0.02861292839050293, 0.02832758331298828, 0.02895292854309082, 0.029161088943481444, 0.028442975997924804, 0.028233760833740233, 0.028107967376708985, 0.02837945556640625, 0.028272192001342775, 0.028138431549072265, 0.028117151260375978, 0.028229087829589845, 0.02878892707824707, 0.03170291137695312, 0.030038335800170898, 0.028227584838867188, 0.028014495849609376, 0.028026847839355468, 0.028000127792358397, 0.027883359909057617, 0.027856319427490235, 0.028637567520141603, 0.02861942481994629, 0.027985343933105467, 0.02801100730895996, 0.02791628837585449, 0.027807743072509765]",tokens/s,35.8254731127363,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( 
File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1846.5792,2768.109568,0.0,2365.587456,2314.318336,s,1,9.036365234375,9.036365234375,0.0,9.036365234375,9.036365234375,9.036365234375,9.036365234375,[9.036365234375],,kWh,5.618690720416453e-05,6.190405181315809e-06,1.7523347351999785e-05,7.990065973748012e-05,,MB,1898.520576,3099.459584,0.0,2682.257408,2607.60832,s,10,0.48352464675903317,0.048352464675903324,0.0003477804899576911,0.04826857757568359,0.048533411788940425,0.048943969154357905,0.04927241504669189,"[0.04935452651977539, 0.04826486587524414, 0.04807254409790039, 0.048272289276123044, 0.04824908828735352, 0.04844217681884765, 0.0482872314453125, 0.048120929718017576, 0.048173152923583984, 0.048287841796875]",tokens/s,5294.456067874008,kWh,1.484856418464715e-06,1.6375179276808438e-07,9.805789006497542e-07,2.6291871118825534e-06,tokens/kWh,97368497.98289883,MB,1898.520576,3099.459584,0.0,2682.257408,2607.61088,s,10,13.706199218750001,1.370619921875,0.010775047816095241,1.3732237548828126,1.3850309814453126,1.385641162109375,1.386129306640625,"[1.359454345703125, 1.376085693359375, 1.3724990234375, 1.3862513427734375, 1.373948486328125, 1.3848953857421875, 1.3637572021484374, 1.352936279296875, 1.3774613037109376, 1.35891015625]",tokens/s,45.964602582031915,kWh,3.923490884486323e-05,4.327100776787809e-06,2.002191012354968e-05,6.358391974520073e-05,tokens/kWh,990816.5500406288,,s,630,13.703950056076053,0.021752301676311193,0.00038698172565860715,0.02173595237731934,0.02205950412750244,0.02218456687927246,0.02328615364074707,"[0.022113216400146483, 0.021847488403320313, 0.021741695404052734, 0.02152620887756348, 0.021475488662719727, 0.021395647048950195, 0.021684032440185547, 0.02177903938293457, 0.021575679779052736, 0.02152448081970215, 0.022189376831054687, 0.02159401512145996, 0.021440927505493163, 0.02174208068847656, 0.02150592041015625, 0.021481184005737303, 0.021358528137207032, 0.02136016082763672, 0.021324607849121095, 0.02169036865234375, 0.021366687774658204, 0.0213668155670166, 0.02159212875366211, 0.02167353630065918, 0.021545024871826173, 0.02181158447265625, 0.021507871627807616, 
0.021455072402954103, 0.021639167785644533, 0.02146892738342285, 0.021688575744628905, 0.021890687942504882, 0.021784608840942382, 0.021705055236816408, 0.021489664077758788, 0.02146406364440918, 0.02140585517883301, 0.021485824584960938, 0.021361087799072264, 0.02125555229187012, 0.02128348731994629, 0.02136793518066406, 0.021346368789672852, 0.021375936508178713, 0.02146713638305664, 0.021370975494384766, 0.021511743545532227, 0.021590368270874023, 0.021419103622436524, 0.02140611267089844, 0.021342144012451172, 0.021410367965698243, 0.02153267288208008, 0.021444063186645507, 0.02155120086669922, 0.02159222412109375, 0.022290367126464844, 0.02169625663757324, 0.022049375534057617, 0.021871871948242187, 0.021784959793090822, 0.021670143127441407, 0.02155708885192871, 0.021880895614624023, 0.021630975723266603, 0.0215882568359375, 0.02152988815307617, 0.021589824676513672, 0.021264415740966797, 0.02130156707763672, 0.021420320510864257, 0.02157788848876953, 0.02173936080932617, 0.0216463680267334, 0.02170159912109375, 0.02160358428955078, 0.021596799850463866, 0.02200998306274414, 0.02166169548034668, 0.02147737693786621, 0.0213338565826416, 0.021708959579467772, 0.021520191192626954, 0.021563583374023438, 0.021388864517211913, 0.02153228759765625, 0.021490495681762697, 0.021413888931274414, 0.02133225631713867, 0.021624544143676757, 0.02145715141296387, 0.02160111999511719, 0.021435264587402345, 0.021477407455444335, 0.02148524856567383, 0.02147974395751953, 0.02135980796813965, 0.021373695373535156, 0.021458911895751952, 0.021544160842895507, 0.022178688049316407, 0.02151571273803711, 0.02149782371520996, 0.022001632690429686, 0.023290496826171875, 0.021934080123901366, 0.021843967437744142, 0.02202828788757324, 0.021788576126098632, 0.02231100845336914, 0.02183123207092285, 0.023376319885253908, 0.025311040878295898, 0.02213497543334961, 0.022189823150634766, 0.025481056213378907, 0.021620351791381835, 0.02407846450805664, 0.021790815353393556, 0.021451328277587892, 0.021331968307495116, 0.02149580764770508, 0.021757951736450197, 0.021604352951049805, 0.021565216064453125, 0.021655647277832032, 0.022220895767211913, 0.021987360000610353, 0.021792032241821288, 0.02164192008972168, 0.021485055923461914, 0.021321407318115236, 0.021478208541870117, 0.021550912857055664, 0.02177039909362793, 0.02163100814819336, 0.02169046401977539, 0.02163907241821289, 0.021639167785644533, 0.021583871841430666, 0.02161836814880371, 0.02159823989868164, 0.021625120162963866, 0.021606399536132814, 0.022487039566040038, 0.021807104110717773, 0.021699647903442382, 0.021861312866210937, 0.02188697624206543, 0.021835199356079103, 0.02184454345703125, 0.021896383285522462, 0.021809343338012696, 0.021741695404052734, 0.0217271671295166, 0.02168275260925293, 0.02173084831237793, 0.021811679840087892, 0.02176790428161621, 0.021667680740356444, 0.021731807708740235, 0.021850080490112306, 0.02214297676086426, 0.022058464050292968, 0.02196944046020508, 0.021794815063476563, 0.021817312240600586, 0.021723167419433594, 0.021790847778320313, 0.021987199783325195, 0.022108160018920898, 0.021843551635742187, 0.021857791900634766, 0.021967775344848634, 0.021772287368774415, 0.021685472488403322, 0.0217423038482666, 0.021757535934448242, 0.0217640323638916, 0.02161033630371094, 0.021787328720092772, 0.021817344665527344, 0.02178793525695801, 0.021744287490844727, 0.0219169921875, 0.021811967849731446, 0.02176345634460449, 0.02177292823791504, 0.021760000228881835, 0.02267225646972656, 0.02319548797607422, 
0.022157312393188477, 0.02213408088684082, 0.02198940849304199, 0.02187868881225586, 0.022054655075073242, 0.022266431808471678, 0.022024576187133788, 0.0217825927734375, 0.021893119812011717, 0.022142080307006835, 0.02184441566467285, 0.021795263290405275, 0.021741472244262695, 0.021828927993774415, 0.021928735733032226, 0.02170863914489746, 0.021809312820434572, 0.02187161636352539, 0.021752832412719726, 0.02185740852355957, 0.021852319717407226, 0.021889759063720704, 0.022147071838378905, 0.022116352081298828, 0.022144128799438476, 0.022041343688964845, 0.021997312545776367, 0.021926271438598634, 0.02188287925720215, 0.021931232452392577, 0.02183247947692871, 0.021792543411254882, 0.02208188819885254, 0.021955808639526366, 0.021880960464477538, 0.021770784378051758, 0.02185539245605469, 0.021836671829223633, 0.022155231475830078, 0.02184124755859375, 0.0218753604888916, 0.021764095306396485, 0.022568063735961916, 0.022780351638793946, 0.022839744567871093, 0.02205286407470703, 0.02191564750671387, 0.02190937614440918, 0.021950592041015626, 0.021843967437744142, 0.02203647994995117, 0.021873695373535156, 0.021896160125732422, 0.021821407318115233, 0.02189014434814453, 0.02193027114868164, 0.021974815368652343, 0.021952447891235353, 0.022079999923706056, 0.021768096923828126, 0.022030879974365234, 0.022345727920532226, 0.022107391357421874, 0.022027008056640623, 0.021941728591918945, 0.021836256027221678, 0.02177030372619629, 0.02183193588256836, 0.021939968109130858, 0.02188630485534668, 0.021770591735839843, 0.02189958381652832, 0.022007295608520508, 0.021850624084472657, 0.021774335861206053, 0.021882207870483398, 0.021962432861328124, 0.021905471801757812, 0.021838752746582032, 0.021833728790283204, 0.021831680297851562, 0.021733375549316408, 0.021731103897094727, 0.021780704498291014, 0.021587520599365233, 0.021322175979614257, 0.021784576416015625, 0.022792192459106447, 0.023480319976806642, 0.02150601577758789, 0.021346336364746095, 0.021358591079711914, 0.021348352432250976, 0.021327871322631836, 0.021547008514404296, 0.021589536666870118, 0.02161097526550293, 0.02170591926574707, 0.021949247360229494, 0.021703744888305666, 0.021541536331176756, 0.021635360717773437, 0.02166783905029297, 0.021950464248657226, 0.021691455841064453, 0.021668575286865235, 0.02165782356262207, 0.02171238327026367, 0.02176870346069336, 0.0216494083404541, 0.021698463439941407, 0.0218768310546875, 0.021832927703857422, 0.021756704330444337, 0.02162816047668457, 0.021785343170166015, 0.022032159805297852, 0.021858528137207033, 0.021786624908447266, 0.021812864303588867, 0.021899200439453124, 0.021762208938598634, 0.021700416564941406, 0.021862911224365233, 0.022339679718017577, 0.021975648880004882, 0.022191551208496092, 0.022165983200073243, 0.02191548728942871, 0.021868831634521486, 0.02187414360046387, 0.021846176147460938, 0.021784128189086913, 0.02170150375366211, 0.02170675277709961, 0.022673408508300782, 0.021925888061523437, 0.021803007125854493, 0.021898815155029297, 0.02197110366821289, 0.02198944091796875, 0.021792800903320312, 0.021921983718872072, 0.02180019187927246, 0.02171500778198242, 0.021869056701660155, 0.022068864822387697, 0.021942047119140624, 0.022038719177246095, 0.02196131134033203, 0.02213478469848633, 0.022033824920654296, 0.02193164825439453, 0.02195964813232422, 0.02197699165344238, 0.021977439880371093, 0.021862112045288085, 0.021907487869262696, 0.021984800338745118, 0.021932287216186522, 0.022001888275146483, 0.02202828788757324, 0.022037599563598635, 
0.021942655563354493, 0.02189523124694824, 0.021840351104736328, 0.02189030456542969, 0.02200150489807129, 0.021864799499511717, 0.02180726432800293, 0.02202822494506836, 0.021942432403564454, 0.02194576072692871, 0.022332319259643553, 0.02327552032470703, 0.02210358428955078, 0.022129119873046874, 0.022108160018920898, 0.02194396781921387, 0.021948768615722657, 0.021897216796875, 0.022040576934814454, 0.022024192810058595, 0.021977088928222657, 0.02172313690185547, 0.021704704284667968, 0.021784576416015625, 0.02228486442565918, 0.022071296691894532, 0.0221265926361084, 0.022103935241699218, 0.02204649543762207, 0.02178492736816406, 0.02189833641052246, 0.02177305603027344, 0.021874048233032226, 0.02167046356201172, 0.02180512046813965, 0.021911712646484376, 0.021716543197631836, 0.02171129608154297, 0.02177872085571289, 0.021934911727905272, 0.021728160858154297, 0.021731456756591796, 0.021692287445068358, 0.021858047485351563, 0.021780736923217775, 0.02166374397277832, 0.021755712509155273, 0.02176630401611328, 0.02165353584289551, 0.021562400817871093, 0.02171388816833496, 0.02143027114868164, 0.021374975204467773, 0.02126643180847168, 0.021567520141601564, 0.021467103958129882, 0.02137606430053711, 0.02143942451477051, 0.021342079162597657, 0.021622432708740234, 0.02148121643066406, 0.021607135772705077, 0.021505184173583984, 0.021606815338134765, 0.021805503845214843, 0.02160767936706543, 0.021561376571655272, 0.02154364776611328, 0.021448703765869142, 0.02137638473510742, 0.02144895935058594, 0.021465471267700195, 0.021575679779052736, 0.021274560928344725, 0.02138323211669922, 0.021546207427978515, 0.02137107276916504, 0.021258495330810548, 0.02210646438598633, 0.02153023910522461, 0.021516511917114258, 0.02161065673828125, 0.021554399490356445, 0.02161267280578613, 0.021481695175170897, 0.021497631072998048, 0.02149033546447754, 0.021733024597167968, 0.021656448364257813, 0.021641088485717774, 0.02155465507507324, 0.021490207672119142, 0.021474720001220703, 0.021463647842407226, 0.021525632858276366, 0.02156224060058594, 0.021831680297851562, 0.02177964782714844, 0.021539648056030272, 0.021391359329223633, 0.021364736557006835, 0.021522432327270507, 0.021598207473754884, 0.021538816452026367, 0.021362688064575194, 0.021481472015380858, 0.021468576431274415, 0.021344064712524414, 0.02149238395690918, 0.021493888854980468, 0.021432191848754882, 0.02138934326171875, 0.021987424850463868, 0.021657791137695313, 0.021700416564941406, 0.021602304458618164, 0.021331968307495116, 0.02137014389038086, 0.0215817928314209, 0.021410560607910155, 0.021368831634521485, 0.021309440612792968, 0.021493759155273438, 0.021263935089111327, 0.021410240173339843, 0.021399551391601563, 0.021449823379516602, 0.02150432014465332, 0.021281375885009765, 0.02140105628967285, 0.021383712768554688, 0.021347360610961916, 0.02125632095336914, 0.02147190475463867, 0.021403839111328125, 0.02127257537841797, 0.021483327865600584, 0.021546207427978515, 0.021508960723876952, 0.02139967918395996, 0.021321727752685548, 0.021407743453979493, 0.02142617607116699, 0.021468832015991212, 0.021340511322021485, 0.021440511703491212, 0.021415935516357423, 0.021431423187255858, 0.021441312789916993, 0.02129929542541504, 0.021865215301513672, 0.021549087524414062, 0.02187868881225586, 0.02158758354187012, 0.02158016014099121, 0.022040416717529297, 0.021681983947753905, 0.0214814395904541, 0.021481855392456055, 0.021415647506713868, 0.021596128463745118, 0.021593984603881837, 0.021856704711914064, 0.0215897274017334, 
0.021627456665039062, 0.021655263900756835, 0.021704704284667968, 0.021532127380371094, 0.021784223556518555, 0.02189299201965332, 0.021816320419311523, 0.021952512741088868, 0.021805055618286134, 0.021987327575683592, 0.021798912048339843, 0.021741535186767576, 0.02201935958862305, 0.02189516830444336, 0.02190617561340332, 0.021909183502197265, 0.022010175704956055, 0.02203830337524414, 0.022242624282836913, 0.02203126335144043, 0.02206924819946289, 0.02211840057373047, 0.021856000900268555, 0.02194047927856445, 0.02191702461242676, 0.021805728912353516, 0.021851327896118163, 0.021794815063476563, 0.022108352661132813, 0.02333555221557617, 0.02200124740600586, 0.021759904861450196, 0.02176438331604004, 0.021584096908569335, 0.021825023651123047, 0.02156729507446289, 0.021649280548095704, 0.021608800888061525, 0.022323488235473633, 0.021681663513183593, 0.021723648071289063, 0.021819583892822264, 0.021608448028564452, 0.022855680465698244, 0.022665216445922853, 0.021786624908447266, 0.021866079330444335, 0.02173583984375, 0.02204876708984375, 0.02254198455810547, 0.022151840209960937, 0.021970943450927736, 0.022011775970458985, 0.021886911392211914, 0.02177452850341797, 0.022042623519897463, 0.02189516830444336, 0.02213478469848633, 0.021888959884643556, 0.021878463745117187, 0.021909759521484374, 0.021761760711669922, 0.02175222396850586, 0.02190336036682129, 0.021871904373168945, 0.021736064910888673, 0.02174166488647461, 0.021548479080200196, 0.021510976791381836, 0.02157542419433594, 0.021407743453979493, 0.021522432327270507, 0.02184579277038574, 0.021383487701416015, 0.021511520385742187, 0.02141971206665039, 0.021407743453979493, 0.021452768325805664, 0.021328800201416014, 0.021307392120361326, 0.02147737693786621, 0.021329919815063478, 0.021360639572143555, 0.021403167724609377, 0.021352415084838868, 0.021305055618286134, 0.02138175964355469, 0.02141814422607422, 0.02184601593017578, 0.021569536209106444, 0.02128099250793457, 0.021372095108032226, 0.021475936889648436, 0.02126438331604004, 0.021395456314086913, 0.02130646324157715, 0.021375616073608397, 0.021270015716552734, 0.02126313591003418, 0.02132156753540039, 0.021452959060668946, 0.021331968307495116, 0.021200063705444337, 0.022085664749145507, 0.02141263961791992, 0.0212807674407959, 0.02122444725036621, 0.02125312042236328, 0.021364736557006835, 0.021311487197875977, 0.02130121612548828, 0.02163871955871582]",tokens/s,45.97214652870622,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1174.13888,1109.262336,0.0,706.740224,681.6384,s,1,8.08989892578125,8.08989892578125,0.0,8.08989892578125,8.08989892578125,8.08989892578125,8.08989892578125,[8.08989892578125],,kWh,3.102778032082369e-05,3.4153921805478033e-06,9.87973012597787e-06,4.432290262734936e-05,,MB,1535.066112,1413.349376,0.0,996.1472,949.238272,s,10,0.31854323005676266,0.03185432300567627,0.002472548886372582,0.030983920097351077,0.032472121429443356,0.03585321197509765,0.0385580844116211,"[0.031720767974853514, 0.039234302520751954, 0.031104320526123046, 0.03102582359313965, 0.030994176864624023, 0.030785856246948243, 0.03096700859069824, 0.030973663330078127, 0.03077731132507324, 0.030959999084472656]",tokens/s,8036.585802008166,kWh,9.126173521354274e-07,1.0064308611548481e-07,5.729535833624323e-07,1.5862140216133447e-06,tokens/kWh,161390579.3996332,MB,1568.722944,1423.835136,0.0,1006.63296,949.240832,s,10,15.38084375,1.538084375,0.0555053323866767,1.5145836791992187,1.5985275512695312,1.6442640197753906,1.6808531945800782,"[1.5883638916015625, 1.69000048828125, 1.5092664794921875, 1.508010009765625, 1.5152440185546876, 1.51392333984375, 1.512622314453125, 1.5093424072265624, 1.5177325439453124, 1.5163382568359376]",tokens/s,40.960041610201,kWh,4.8426861233699076e-05,5.341145395471431e-06,1.8901839774239284e-05,7.26698464034098e-05,tokens/kWh,866934.5418768345,,s,630,15.378701023101828,0.02441063654460604,0.0017473389796751598,0.02392523193359375,0.02450701103210449,0.029884153652191152,0.03190432140350342,"[0.024265663146972656, 0.02428927993774414, 0.024591360092163086, 0.02390323257446289, 0.023883775711059572, 0.02391769599914551, 0.023864191055297853, 0.023863296508789062, 0.023937023162841797, 0.024133087158203125, 0.023974111557006836, 0.024223072052001953, 0.02394211196899414, 0.02405990409851074, 0.02397920036315918, 0.023853504180908203, 0.023927135467529295, 0.023978015899658204, 0.023963647842407225, 0.023910400390625, 0.0238919677734375, 0.0238919677734375, 0.024436735153198243, 0.024303615570068358, 0.02403055953979492, 0.023990943908691408, 0.02415542411804199, 0.023904991149902344, 0.024164287567138672, 0.02397100830078125, 0.02391155242919922, 0.023995199203491212, 0.024101823806762696, 0.024001728057861327, 0.023939903259277345, 0.02387139129638672, 0.023848543167114256, 0.02387388801574707, 0.024029375076293946, 0.023827743530273438, 0.024133567810058595, 0.02407222366333008, 0.024097503662109374, 0.02405580711364746, 0.024002559661865236, 0.024057695388793945, 0.02400668716430664, 0.024029184341430664, 0.02391872024536133, 0.023975936889648438, 0.02395356750488281, 0.024039264678955077, 0.024084224700927734, 0.029773696899414063, 0.031717279434204104, 0.031771392822265626, 0.031771839141845705, 0.031709728240966795, 0.03168220710754394, 0.031668031692504886, 0.03167900848388672, 0.03175833511352539, 0.031596256256103517, 0.03155580711364746, 0.031887168884277346, 0.031462528228759765, 0.03152508735656738, 0.031406719207763674, 0.03155532836914063, 0.0314483528137207, 0.03206460952758789, 0.03159187126159668, 0.031529407501220706, 0.0315570240020752, 0.03164630317687988, 0.031643648147583005, 0.03159244728088379, 0.031366559982299806, 0.03140873527526856, 0.031911327362060544, 0.032248001098632816, 0.032093727111816406, 0.032121185302734376, 0.03221148681640625, 0.03214950561523437, 0.02997452735900879, 0.02409676742553711, 0.024131839752197265, 0.023906047821044923, 0.02386105537414551, 0.02381843185424805, 0.02395110321044922, 
0.023961856842041017, 0.02386534309387207, 0.02395136070251465, 0.02397750473022461, 0.023879487991333007, 0.026251935958862306, 0.025161727905273438, 0.02390630340576172, 0.02395955276489258, 0.023889120101928712, 0.023978784561157228, 0.023875167846679687, 0.024652191162109375, 0.025300064086914063, 0.02384783935546875, 0.02391811180114746, 0.02394563293457031, 0.023863359451293944, 0.02386534309387207, 0.02391801643371582, 0.02397145652770996, 0.024025184631347656, 0.023827295303344726, 0.023903583526611327, 0.023796384811401367, 0.02369945526123047, 0.023656160354614257, 0.023904544830322266, 0.023759008407592774, 0.02379724884033203, 0.02427120018005371, 0.02389756774902344, 0.023783008575439454, 0.023819263458251954, 0.0240120964050293, 0.02381862449645996, 0.023783744812011717, 0.023752927780151367, 0.02386288070678711, 0.02380348777770996, 0.02379427146911621, 0.023851007461547852, 0.02392412757873535, 0.023925344467163087, 0.023836671829223634, 0.024184831619262694, 0.02429702377319336, 0.025004159927368163, 0.02395359992980957, 0.023990400314331056, 0.023977983474731446, 0.023834400177001953, 0.02413590431213379, 0.023810047149658203, 0.02392064094543457, 0.024147968292236328, 0.02469660758972168, 0.02395683288574219, 0.02391075134277344, 0.0238573112487793, 0.024217151641845704, 0.02400499153137207, 0.023901920318603515, 0.024064096450805664, 0.02394537544250488, 0.024002368927001954, 0.02383647918701172, 0.023900672912597655, 0.023849376678466795, 0.023970880508422852, 0.023737279891967774, 0.023918399810791014, 0.023799936294555665, 0.024031488418579102, 0.023789312362670897, 0.02390425682067871, 0.02393087959289551, 0.024016895294189454, 0.02395267105102539, 0.023835487365722656, 0.02391539192199707, 0.02385817527770996, 0.023874656677246094, 0.023829408645629883, 0.023893888473510743, 0.023997983932495116, 0.023939680099487305, 0.0238919677734375, 0.023828031539916993, 0.02408083152770996, 0.02385651206970215, 0.023814144134521483, 0.023898399353027344, 0.023979999542236327, 0.023986560821533203, 0.02389811134338379, 0.023873151779174803, 0.023885055541992186, 0.023841632843017577, 0.023764991760253908, 0.023850656509399413, 0.02414124870300293, 0.023943872451782228, 0.023838943481445312, 0.023887712478637694, 0.023860544204711915, 0.023836896896362304, 0.023865760803222655, 0.023812320709228514, 0.024219200134277342, 0.023967552185058593, 0.023999103546142576, 0.023953407287597657, 0.023874752044677733, 0.023869632720947265, 0.023853696823120118, 0.023977983474731446, 0.023969343185424805, 0.024125888824462892, 0.0240883846282959, 0.023930335998535158, 0.023937759399414064, 0.0239554557800293, 0.023967744827270508, 0.024048864364624025, 0.024410911560058594, 0.025051136016845704, 0.02417804718017578, 0.024146591186523438, 0.024279008865356444, 0.02390163230895996, 0.023875520706176757, 0.02381273651123047, 0.023908287048339843, 0.023959775924682618, 0.02397987174987793, 0.023870559692382814, 0.0239401912689209, 0.024012607574462892, 0.02389811134338379, 0.023840608596801757, 0.02378563117980957, 0.023830528259277343, 0.02397558403015137, 0.02390870475769043, 0.023870784759521483, 0.023761600494384767, 0.02380303955078125, 0.02378428840637207, 0.023789567947387694, 0.023858720779418946, 0.02382464027404785, 0.02374870491027832, 0.023741664886474608, 0.023847135543823242, 0.023808319091796874, 0.023773248672485352, 0.023765216827392577, 0.02370915222167969, 0.02388435173034668, 0.023916383743286133, 0.02388582420349121, 0.02381318473815918, 0.023850976943969728, 
0.02379465675354004, 0.023895423889160158, 0.024009344100952148, 0.023908351898193358, 0.023884992599487304, 0.0238907527923584, 0.02393824005126953, 0.02403536033630371, 0.02405855941772461, 0.02399001693725586, 0.02400265693664551, 0.02401417541503906, 0.024049823760986327, 0.024054527282714844, 0.024130783081054687, 0.023945951461791994, 0.023995744705200196, 0.024001087188720703, 0.024045087814331054, 0.024306304931640627, 0.02407151985168457, 0.02405407905578613, 0.02408041572570801, 0.024035776138305664, 0.024057727813720703, 0.02405295944213867, 0.023984928131103516, 0.023975936889648438, 0.02402639961242676, 0.024552160263061524, 0.023946399688720702, 0.023941984176635744, 0.024071903228759767, 0.024481184005737306, 0.02427788734436035, 0.02410495948791504, 0.024205184936523436, 0.02444816017150879, 0.02452374458312988, 0.02429952049255371, 0.024254016876220703, 0.02409926414489746, 0.02409062385559082, 0.024002559661865236, 0.023852031707763673, 0.02402799987792969, 0.02388803291320801, 0.024940256118774415, 0.0237857608795166, 0.023882911682128905, 0.023826656341552736, 0.023879743576049803, 0.023832351684570312, 0.023890623092651365, 0.023865440368652343, 0.023797760009765623, 0.024252416610717774, 0.023848960876464844, 0.024412160873413087, 0.024151935577392578, 0.023816192626953125, 0.023784671783447266, 0.023800607681274413, 0.02384649658203125, 0.023839136123657227, 0.027742048263549805, 0.025999519348144533, 0.024276895523071287, 0.023881824493408203, 0.023855104446411132, 0.023869056701660157, 0.024076000213623047, 0.02382054328918457, 0.023994720458984375, 0.023871551513671874, 0.024057855606079103, 0.023834623336791993, 0.023876863479614256, 0.024295200347900392, 0.024384479522705078, 0.024184831619262694, 0.024157760620117187, 0.02397612762451172, 0.023887519836425782, 0.023947872161865235, 0.023755840301513672, 0.02383148765563965, 0.02393907165527344, 0.024051328659057618, 0.023925119400024415, 0.023842464447021483, 0.023901983261108397, 0.024240703582763673, 0.02394060707092285, 0.02385971260070801, 0.023962623596191408, 0.024146944046020507, 0.02393087959289551, 0.02378748893737793, 0.02386128044128418, 0.023815839767456055, 0.024026912689208986, 0.02382912063598633, 0.023941055297851562, 0.023901920318603515, 0.023951648712158203, 0.02387459182739258, 0.023862239837646484, 0.023805952072143553, 0.023832576751708984, 0.023717952728271485, 0.023830463409423828, 0.023842975616455077, 0.023865184783935546, 0.023873567581176758, 0.023906272888183595, 0.02386944007873535, 0.023992191314697264, 0.023896192550659178, 0.023863296508789062, 0.023950687408447267, 0.023980031967163085, 0.02413529586791992, 0.024267744064331055, 0.02430156707763672, 0.025257984161376954, 0.02378758430480957, 0.023916479110717773, 0.023768543243408203, 0.02392323112487793, 0.02367692756652832, 0.023745695114135743, 0.024021856307983397, 0.023824127197265624, 0.02386089515686035, 0.023876192092895508, 0.02400841522216797, 0.026515296936035156, 0.023976383209228517, 0.02408857536315918, 0.023891199111938478, 0.024255231857299806, 0.023858720779418946, 0.023822687149047853, 0.02424838447570801, 0.023961664199829102, 0.02397929573059082, 0.02381916809082031, 0.023899967193603516, 0.02390425682067871, 0.02388387107849121, 0.02380124855041504, 0.023857887268066407, 0.02381932830810547, 0.023761632919311524, 0.023795616149902343, 0.023844480514526367, 0.02386582374572754, 0.023822463989257813, 0.02377289581298828, 0.023855072021484375, 0.023890111923217775, 0.023875583648681642, 0.02380931282043457, 
0.023769792556762696, 0.02452694320678711, 0.023961599349975587, 0.02390115165710449, 0.02395369529724121, 0.023906143188476562, 0.02402118492126465, 0.023859872817993164, 0.02386124801635742, 0.02393497657775879, 0.024321216583251953, 0.023911231994628905, 0.023908351898193358, 0.024176288604736328, 0.02393942451477051, 0.023812095642089845, 0.023754751205444336, 0.023826431274414063, 0.023785120010375978, 0.02502195167541504, 0.024014911651611327, 0.024398399353027345, 0.023921728134155273, 0.023877887725830077, 0.02369753646850586, 0.023763519287109375, 0.023785472869873047, 0.02369264030456543, 0.0237410888671875, 0.023688352584838868, 0.02366361618041992, 0.023875423431396484, 0.02368511962890625, 0.023737920761108398, 0.02373472023010254, 0.023752704620361328, 0.024254335403442382, 0.02376710319519043, 0.023722047805786132, 0.023969791412353517, 0.02382009506225586, 0.023769567489624024, 0.023772991180419922, 0.02387548828125, 0.023949312210083007, 0.023863296508789062, 0.024199039459228515, 0.024029312133789064, 0.023871488571166992, 0.023965696334838867, 0.023901695251464843, 0.023943679809570313, 0.023870752334594725, 0.023931488037109375, 0.024074464797973632, 0.024040735244750977, 0.023876096725463865, 0.0241329288482666, 0.023944000244140624, 0.02395955276489258, 0.023967744827270508, 0.023777280807495117, 0.02376851272583008, 0.023971839904785155, 0.024076032638549804, 0.024099039077758788, 0.02444963264465332, 0.024543424606323243, 0.024422208786010743, 0.024451072692871095, 0.024475135803222657, 0.02449443244934082, 0.024174400329589844, 0.023976287841796874, 0.023879680633544922, 0.02393087959289551, 0.023816192626953125, 0.02380771255493164, 0.02384899139404297, 0.023918848037719726, 0.023848960876464844, 0.024045568466186523, 0.02388198471069336, 0.023975936889648438, 0.023785408020019532, 0.025782047271728517, 0.02493062400817871, 0.023984064102172853, 0.023846559524536133, 0.023855072021484375, 0.023769535064697266, 0.023795488357543946, 0.023791711807250978, 0.02372825622558594, 0.023834623336791993, 0.023980031967163085, 0.023924192428588866, 0.02393247985839844, 0.0240631046295166, 0.023845727920532227, 0.024020992279052734, 0.023893247604370116, 0.023866111755371094, 0.02376803207397461, 0.023846431732177733, 0.023951839447021485, 0.024034912109375, 0.023785919189453126, 0.023861408233642578, 0.023854047775268554, 0.023722335815429686, 0.023808544158935546, 0.023758111953735353, 0.023874271392822267, 0.023808000564575195, 0.024319999694824217, 0.023791616439819335, 0.023932640075683593, 0.02374684715270996, 0.023771135330200196, 0.02385305595397949, 0.02386124801635742, 0.023829952239990234, 0.023788095474243164, 0.023713151931762697, 0.023811840057373048, 0.024259456634521483, 0.028274688720703125, 0.029049087524414062, 0.024027135848999022, 0.023875328063964845, 0.02386944007873535, 0.02371942329406738, 0.023804416656494142, 0.023721343994140626, 0.025045631408691406, 0.024223487854003908, 0.023953664779663087, 0.02381011199951172, 0.02383251190185547, 0.023698816299438475, 0.023947904586791992, 0.023967744827270508, 0.023775232315063476, 0.02376825523376465, 0.023787712097167967, 0.02398681640625, 0.023945568084716796, 0.02392278480529785, 0.02419705581665039, 0.02395577621459961, 0.02389401626586914, 0.023887903213500976, 0.023948831558227537, 0.023906751632690428, 0.02384486389160156, 0.023848960876464844, 0.02388172721862793, 0.023797760009765623, 0.024112575531005858, 0.023982400894165038, 0.023941375732421874, 0.02383577537536621, 0.024505151748657226, 
0.023908416748046876, 0.024051071166992188, 0.02397792053222656, 0.02453945541381836, 0.024195455551147462, 0.024131584167480468, 0.024032800674438477, 0.024852479934692383, 0.024039392471313478, 0.024000032424926758, 0.024856704711914063, 0.02396860885620117, 0.023863296508789062, 0.023899391174316408, 0.023965536117553712, 0.02413481521606445, 0.023918336868286132, 0.02380419158935547, 0.02399411201477051, 0.023857120513916016, 0.023742464065551756, 0.023873823165893555, 0.024063711166381837, 0.02403708839416504, 0.024017248153686523, 0.024018239974975587, 0.023850784301757813, 0.024879968643188477, 0.023795007705688476, 0.023741119384765624, 0.0242108154296875, 0.024304256439208985, 0.023758464813232422, 0.02380633544921875, 0.02373219108581543, 0.024098207473754883, 0.02375129508972168, 0.023836671829223634, 0.023777280807495117, 0.023967744827270508, 0.02389811134338379, 0.02383625602722168, 0.025031232833862306, 0.025331743240356447, 0.024405216217041014, 0.02496771240234375]",tokens/s,40.96574860605049,, 4bit-gptq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File 
""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1844.191232,2768.109568,0.0,2365.587456,2314.318336,s,1,9.0901767578125,9.0901767578125,0.0,9.0901767578125,9.0901767578125,9.0901767578125,9.0901767578125,[9.0901767578125],,kWh,5.615339820413586e-05,6.1869261097778005e-06,1.844723697999595e-05,8.078756129390961e-05,,MB,1896.177664,3099.459584,0.0,2682.257408,2607.60832,s,10,0.4847334403991699,0.04847334403991699,0.0004979703883766398,0.04830492782592774,0.04874606323242187,0.04933855247497558,0.04981254386901855,"[0.049931041717529295, 0.048318401336669925, 0.048207008361816406, 0.04824272155761719, 0.048214462280273436, 0.04831631851196289, 0.04832207870483399, 0.04829353713989258, 0.04827347183227539, 0.04861439895629883]",tokens/s,5281.253131395026,kWh,1.4958004754465124e-06,1.6495918835224292e-07,9.940795934488052e-07,2.65483925724756e-06,tokens/kWh,96427683.6351333,MB,1896.177664,3099.459584,0.0,2682.257408,2607.61088,s,10,13.784520263671874,1.3784520263671873,0.013034876196884818,1.372310363769531,1.393464599609375,1.3998223876953124,1.4049086181640624,"[1.3683370361328124, 1.3920517578125, 1.36633935546875, 1.364835205078125, 1.3732437744140624, 1.371376953125, 1.3841221923828124, 1.3689954833984375, 1.389038330078125, 1.40618017578125]",tokens/s,45.70344037727018,kWh,3.957900431537869e-05,4.365166276127635e-06,2.003079833755413e-05,6.397496892906045e-05,tokens/kWh,984760.1500183368,,s,630,13.78239563369751,0.021876818466186523,0.0004994660624197005,0.02175241661071777,0.02238352870941162,0.022705539417266843,0.023641034183502205,"[0.02203241539001465, 0.022072383880615234, 0.02175200080871582, 0.021639104843139648, 0.02176902389526367, 0.021731327056884766, 0.021679359436035155, 0.021725215911865235, 0.02219059181213379, 0.02244233512878418, 0.021636735916137694, 0.02168556785583496, 0.02158278465270996, 0.02177020835876465, 0.021730720520019533, 0.02190332794189453, 0.02186739158630371, 0.021752895355224608, 0.021527263641357423, 0.02164246368408203, 0.021545183181762694, 0.021539392471313475, 0.02175699234008789, 0.02151308822631836, 0.021424192428588867, 0.021693727493286134, 0.021680896759033202, 
0.02169647979736328, 0.021547008514404296, 0.02185215950012207, 0.02185625648498535, 0.021702335357666015, 0.02152435111999512, 0.021524927139282227, 0.021598207473754884, 0.02167747116088867, 0.021502559661865234, 0.02150399971008301, 0.02170675277709961, 0.021550495147705077, 0.021561887741088866, 0.021507904052734374, 0.021459199905395507, 0.021618688583374023, 0.021549055099487305, 0.02168230438232422, 0.022791744232177735, 0.02167430305480957, 0.021563392639160156, 0.02177039909362793, 0.021681184768676757, 0.021658432006835936, 0.021656639099121095, 0.021957088470458984, 0.021742048263549803, 0.022648448944091796, 0.022011392593383788, 0.02162345504760742, 0.021509920120239258, 0.021604799270629884, 0.02140348815917969, 0.02151260757446289, 0.02142742347717285, 0.021674240112304687, 0.02145280075073242, 0.021405759811401366, 0.02162719917297363, 0.021796064376831056, 0.021789472579956056, 0.02169856071472168, 0.021838016510009765, 0.022156415939331056, 0.02200953674316406, 0.022178464889526368, 0.022154592514038087, 0.02210508728027344, 0.021869951248168946, 0.022231679916381836, 0.022052928924560546, 0.022099807739257814, 0.022003807067871094, 0.022266944885253905, 0.022164096832275392, 0.02211846351623535, 0.022141183853149414, 0.02200371170043945, 0.021739391326904296, 0.021953887939453125, 0.022156063079833983, 0.023690784454345703, 0.022280672073364257, 0.022103168487548827, 0.021846912384033204, 0.021938175201416017, 0.021940223693847655, 0.021975040435791016, 0.022691839218139647, 0.02335683250427246, 0.02283958435058594, 0.02202025604248047, 0.02212428855895996, 0.022126272201538087, 0.021961023330688476, 0.02184774398803711, 0.021946367263793946, 0.02198192024230957, 0.02184592056274414, 0.021964895248413087, 0.022092992782592774, 0.02217884826660156, 0.022045503616333006, 0.022385631561279297, 0.021884223937988282, 0.02203513526916504, 0.023199743270874023, 0.022349119186401367, 0.022008224487304686, 0.021986879348754883, 0.0222906551361084, 0.02279257583618164, 0.02223321533203125, 0.021895103454589844, 0.02188038444519043, 0.021860864639282225, 0.021733375549316408, 0.021815296173095702, 0.021661407470703126, 0.021635360717773437, 0.02159324836730957, 0.021481695175170897, 0.021511808395385742, 0.022188127517700194, 0.022752159118652342, 0.02289356803894043, 0.02190438461303711, 0.021673055648803712, 0.021709632873535157, 0.021581920623779297, 0.02142348861694336, 0.02149849510192871, 0.02145894432067871, 0.021563392639160156, 0.022326751708984374, 0.021581663131713866, 0.021690975189208983, 0.02174166488647461, 0.02184121513366699, 0.02145555114746094, 0.021489664077758788, 0.021449920654296874, 0.02159452819824219, 0.0214304313659668, 0.021528831481933595, 0.021525951385498048, 0.021525056838989257, 0.02175939178466797, 0.021746623992919923, 0.021597856521606444, 0.021720895767211913, 0.021734624862670898, 0.021941215515136718, 0.021751808166503905, 0.021816896438598632, 0.021854656219482422, 0.022241247177124022, 0.021702816009521484, 0.021651327133178713, 0.021444095611572265, 0.021540416717529296, 0.021519296646118163, 0.021534303665161132, 0.021696287155151366, 0.02154764747619629, 0.02170252799987793, 0.021592191696166992, 0.0215001277923584, 0.02164873504638672, 0.021559743881225585, 0.02166783905029297, 0.021569183349609375, 0.021538400650024415, 0.021416704177856447, 0.02139948844909668, 0.021454559326171876, 0.02147567939758301, 0.021535776138305665, 0.02154185676574707, 0.02169856071472168, 0.022326271057128907, 0.021752832412719726, 0.021625471115112305, 
0.02183964729309082, 0.02165519905090332, 0.021514848709106447, 0.021518688201904296, 0.02148054313659668, 0.02148406410217285, 0.02147724723815918, 0.021361152648925782, 0.021788671493530275, 0.021456064224243163, 0.021445440292358398, 0.021436159133911132, 0.02154092788696289, 0.021643455505371095, 0.02152448081970215, 0.021549055099487305, 0.02153843116760254, 0.02229088020324707, 0.021484928131103517, 0.021526304244995118, 0.021457439422607423, 0.021335744857788087, 0.021360479354858398, 0.021272703170776366, 0.021627487182617186, 0.021452032089233398, 0.021363264083862306, 0.021394752502441407, 0.022018943786621094, 0.023388320922851563, 0.02173526382446289, 0.021984832763671875, 0.02187129592895508, 0.02152627182006836, 0.02147327995300293, 0.021778432846069336, 0.021713247299194337, 0.021451520919799804, 0.021617183685302733, 0.021438848495483397, 0.02166783905029297, 0.022193824768066406, 0.021618175506591796, 0.021531679153442382, 0.021458751678466798, 0.02167100715637207, 0.021527456283569335, 0.021760000228881835, 0.021727231979370116, 0.021640768051147462, 0.021782976150512695, 0.02168422317504883, 0.021794336318969727, 0.0218239688873291, 0.02208083152770996, 0.021676736831665037, 0.02188287925720215, 0.021811199188232423, 0.021712736129760744, 0.02176425552368164, 0.021626880645751953, 0.022841344833374022, 0.02205695915222168, 0.021933984756469727, 0.021768287658691408, 0.02172662353515625, 0.021807712554931642, 0.021823488235473632, 0.021774335861206053, 0.021734687805175783, 0.02207539176940918, 0.021903743743896486, 0.022088159561157228, 0.022112127304077148, 0.021743295669555664, 0.02193440055847168, 0.021526399612426757, 0.021510080337524416, 0.02151456069946289, 0.021593984603881837, 0.02169241523742676, 0.021538816452026367, 0.02176838493347168, 0.02164512062072754, 0.02172217559814453, 0.022262720108032225, 0.021440448760986327, 0.021645376205444336, 0.02158083152770996, 0.02159715270996094, 0.021474527359008788, 0.021515039443969725, 0.021510143280029297, 0.022331392288208008, 0.021526527404785157, 0.021501344680786134, 0.02147327995300293, 0.021420255661010742, 0.021864383697509766, 0.021819391250610352, 0.021612159729003905, 0.02157241630554199, 0.02158768081665039, 0.021796512603759765, 0.021602943420410158, 0.021727231979370116, 0.02194384002685547, 0.022538719177246095, 0.022340768814086913, 0.021865312576293944, 0.02170675277709961, 0.021719039916992186, 0.022509567260742186, 0.02174492835998535, 0.02154364776611328, 0.021792768478393554, 0.02212236785888672, 0.021743743896484376, 0.021589696884155272, 0.021667360305786133, 0.021480224609375, 0.02230665588378906, 0.021847583770751952, 0.021870880126953124, 0.02186150360107422, 0.02176089668273926, 0.021579776763916016, 0.021547008514404296, 0.02172867202758789, 0.022466943740844725, 0.021548479080200196, 0.021487424850463867, 0.021418336868286134, 0.021508384704589843, 0.021653087615966796, 0.02149247932434082, 0.021405696868896484, 0.021462432861328123, 0.021606559753417968, 0.021671775817871095, 0.02154147148132324, 0.02147727966308594, 0.02167203140258789, 0.02152448081970215, 0.02148521614074707, 0.021559648513793946, 0.021644832611083985, 0.021563871383666992, 0.02145894432067871, 0.021372928619384765, 0.021630975723266603, 0.021557247161865235, 0.022114303588867186, 0.02186617660522461, 0.021843679428100587, 0.021853919982910155, 0.021920352935791015, 0.02175823974609375, 0.02169148826599121, 0.021904287338256837, 0.02192793655395508, 0.021841567993164064, 0.021780832290649414, 0.02184294319152832, 
0.021832704544067383, 0.021882688522338867, 0.021946559906005858, 0.02188083267211914, 0.021837791442871093, 0.021890560150146485, 0.022040992736816405, 0.02197491264343262, 0.021968896865844727, 0.021731584548950196, 0.021913568496704103, 0.022327327728271486, 0.021659040451049806, 0.021518047332763673, 0.021812095642089843, 0.02202841567993164, 0.022069120407104494, 0.021995519638061522, 0.02204876708984375, 0.022044416427612304, 0.021944576263427735, 0.021802047729492187, 0.021975072860717773, 0.021887712478637696, 0.021617664337158202, 0.021580799102783203, 0.021547008514404296, 0.02146918487548828, 0.021411840438842773, 0.021562688827514647, 0.021689376831054687, 0.021955360412597658, 0.022092672348022462, 0.02206105613708496, 0.021876735687255858, 0.021976608276367188, 0.021746143341064453, 0.02209587287902832, 0.021800800323486327, 0.022319007873535156, 0.021907199859619142, 0.021858816146850587, 0.021740671157836913, 0.021638015747070312, 0.021657312393188476, 0.021977312088012697, 0.022845504760742187, 0.021592063903808592, 0.021622783660888673, 0.021784576416015625, 0.021661792755126953, 0.02618953514099121, 0.025788223266601563, 0.022838687896728514, 0.02209686470031738, 0.021600448608398437, 0.024810848236083986, 0.02226633644104004, 0.022140928268432617, 0.021663135528564453, 0.021404064178466797, 0.021498079299926757, 0.02157379150390625, 0.021503807067871094, 0.021591903686523438, 0.02149737548828125, 0.02145692825317383, 0.021809856414794923, 0.021606239318847656, 0.021596223831176757, 0.021544607162475585, 0.02268400001525879, 0.02351923179626465, 0.021683456420898438, 0.02150067138671875, 0.021476543426513672, 0.02158675193786621, 0.021448703765869142, 0.022063104629516602, 0.021550432205200195, 0.0215251522064209, 0.02150726318359375, 0.021664575576782228, 0.02146099281311035, 0.02128281593322754, 0.021501951217651367, 0.021645471572875975, 0.021466175079345704, 0.021426719665527345, 0.0217542724609375, 0.02143846321105957, 0.021292959213256836, 0.0215184326171875, 0.02171900749206543, 0.021473087310791016, 0.02160201644897461, 0.021723104476928712, 0.021448640823364257, 0.02139174461364746, 0.021252256393432617, 0.02147260856628418, 0.021395328521728516, 0.021506879806518556, 0.021481664657592773, 0.02195644760131836, 0.02192793655395508, 0.02180054473876953, 0.021854623794555664, 0.02192083168029785, 0.02159507179260254, 0.021646944046020508, 0.022032703399658203, 0.02231235122680664, 0.0223832950592041, 0.02229452705383301, 0.022380544662475587, 0.022358015060424806, 0.022163455963134765, 0.022080959320068358, 0.021967424392700195, 0.022116352081298828, 0.02203411293029785, 0.02226777648925781, 0.021821887969970703, 0.021677280426025392, 0.021930112838745117, 0.0219116153717041, 0.021809759140014647, 0.0219015998840332, 0.02196886444091797, 0.02157529640197754, 0.021563295364379884, 0.021548479080200196, 0.02142083168029785, 0.021552127838134767, 0.021295167922973632, 0.02149843215942383, 0.02162112045288086, 0.021720319747924804, 0.021463808059692384, 0.021737279891967772, 0.02160054397583008, 0.02185206413269043, 0.021765247344970703, 0.021734272003173828, 0.02133955192565918, 0.02150806427001953, 0.02137868881225586, 0.02148643112182617, 0.022290271759033205, 0.021716672897338866, 0.021447999954223633, 0.02157788848876953, 0.021385759353637696, 0.021299392700195312, 0.021993087768554687, 0.021717695236206053, 0.02179257583618164, 0.021796863555908205, 0.021914623260498048, 0.021617664337158202, 0.02160576057434082, 0.021376640319824218, 0.021473983764648437, 
0.02136636734008789, 0.02135491180419922, 0.02129657554626465, 0.02182643127441406, 0.021393407821655275, 0.021442432403564454, 0.02159014320373535, 0.02143574333190918, 0.02155120086669922, 0.02135443115234375, 0.021268320083618165, 0.021508895874023437, 0.02129715156555176, 0.022017183303833007, 0.022366975784301756, 0.021620927810668947, 0.02148659133911133, 0.022016063690185547, 0.021672576904296876, 0.021751487731933594, 0.022131231307983397, 0.022008928298950195, 0.022020416259765627, 0.022028671264648438, 0.022040800094604494, 0.02233263969421387, 0.02240342330932617, 0.022792608261108398, 0.022934656143188476, 0.02282383918762207, 0.022761472702026365, 0.022788095474243163, 0.022726688385009765, 0.022588672637939452, 0.022538911819458007, 0.02261612892150879, 0.022744991302490233, 0.022583391189575194, 0.022534143447875975, 0.023016767501831056, 0.022715072631835937, 0.02260700798034668, 0.022491968154907227, 0.022640447616577148, 0.023998687744140625, 0.022468608856201173, 0.022392255783081055, 0.022417856216430665, 0.022952320098876954, 0.0226779842376709, 0.02280195236206055, 0.022354400634765625, 0.0224849910736084, 0.022417407989501953, 0.022421503067016603, 0.023382015228271484, 0.024639392852783205, 0.024649696350097658, 0.022325376510620117, 0.022650623321533205, 0.021991647720336915, 0.022067232131958006, 0.02226790428161621, 0.02205606460571289, 0.02204761505126953, 0.02211840057373047, 0.022599456787109375, 0.022255008697509765, 0.023241535186767578, 0.021941343307495118, 0.022153760910034178, 0.022279647827148436, 0.022141984939575195, 0.02194819259643555, 0.022065248489379883, 0.02191535949707031, 0.022069536209106445, 0.02189926338195801, 0.022058784484863283, 0.022417312622070314, 0.022233312606811523, 0.022127744674682617, 0.022319616317749022, 0.022290624618530274, 0.02251580810546875, 0.022294208526611327, 0.022262271881103517, 0.022148096084594726, 0.022246400833129884, 0.022427648544311524, 0.022173696517944336, 0.02269388771057129, 0.022212608337402344, 0.02267673683166504, 0.022170368194580077, 0.022245439529418945, 0.02217158317565918, 0.022173696517944336, 0.02208563232421875, 0.022021951675415038, 0.02212063980102539, 0.021962751388549806, 0.02184556770324707, 0.021791168212890625, 0.02183286476135254, 0.02236851119995117, 0.021842527389526366, 0.021874048233032226, 0.02194905662536621, 0.02209689521789551, 0.021508703231811522]",tokens/s,45.71048580695728,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,4975.325184,7346.192384,0.0,6943.670272,6539.1744,s,1,11.610884765625,11.610884765625,0.0,11.610884765625,11.610884765625,11.610884765625,11.610884765625,[11.610884765625],,kWh,0.00013150182546660288,1.4495563407165315e-05,4.1937255772006576e-05,0.00018793464464577476,,MB,5058.383872,7641.890816,0.0,7224.68864,6917.39904,s,10,2.0870283813476562,0.20870283813476562,0.0003155553283194226,0.2087106246948242,0.20905210113525388,0.20906222915649414,0.2090703315734863,"[0.20811203002929687, 0.20895455932617188, 0.2085798034667969, 0.20858934020996095, 0.20856150817871094, 0.20899449157714844, 0.20904985046386718, 0.20828253173828126, 0.2088319091796875, 0.20907235717773437]",tokens/s,1226.6244306399572,kWh,6.133132390364595e-06,6.759877575955095e-07,4.076999789375558e-06,1.0886119937335663e-05,tokens/kWh,23516184.04662323,MB,5062.709248,7643.987968,0.0,7226.785792,6917.4016,s,10,21.80282080078125,2.180282080078125,0.00542946884505395,2.1797763671875,2.186784912109375,2.1880472412109375,2.1890571044921874,"[2.1766376953125, 2.170473388671875, 2.17936181640625, 2.180831298828125, 2.17493505859375, 2.178594482421875, 2.1893095703125, 2.185982177734375, 2.18019091796875, 2.18650439453125]",tokens/s,28.895343669357935,kWh,6.368841650463267e-05,7.0251264313698245e-06,4.2065176013220944e-05,0.00011277871894922341,tokens/kWh,558616.0278018818,,s,630,21.800411769866923,0.034603828206138,0.0003936771880204999,0.03450404739379882,0.034978083038330075,0.035205082893371584,0.03625874851226807,"[0.03558873748779297, 0.0347210578918457, 0.0344106559753418, 0.03439673614501953, 0.03434396743774414, 0.034398273468017576, 0.034270111083984374, 0.034481822967529295, 0.03458636856079102, 0.03455855941772461, 0.035160064697265625, 0.03517030334472656, 0.03530960083007813, 0.03591574478149414, 0.03483852767944336, 0.0348037109375, 0.03478915023803711, 0.03465024185180664, 0.03444950485229492, 0.03500572967529297, 0.035119808197021485, 0.03470134353637695, 0.03450835037231445, 0.03424915313720703, 0.03425667190551758, 0.03419948959350586, 0.03428995132446289, 0.03451696014404297, 0.034408481597900394, 0.03417440032958984, 0.034164833068847655, 0.03424099349975586, 0.034409568786621096, 0.03444854354858398, 0.03444915390014648, 0.03439996719360352, 0.03433033752441406, 0.03449641418457031, 0.03468348693847656, 0.03448553466796875, 0.034501407623291014, 0.03442403030395508, 0.034345184326171875, 0.03442335891723633, 0.0347586555480957, 0.0348221435546875, 0.0349780158996582, 0.034469470977783204, 0.03449260711669922, 0.03438735961914063, 0.03439676666259766, 0.03430179214477539, 0.034275264739990235, 0.03427107238769531, 0.03433305740356445, 0.0343551025390625, 0.034307552337646485, 0.034423423767089845, 0.03449398422241211, 0.03434096145629883, 0.034280960083007815, 0.0342841911315918, 0.03435951995849609, 0.035917728424072266, 0.0347770881652832, 0.034648063659667966, 0.034203647613525394, 0.03434086227416992, 0.03521535873413086, 0.034662593841552736, 0.03485676956176758, 0.03467036819458008, 0.03644380950927734, 0.03442335891723633, 0.03440435028076172, 0.034236415863037106, 0.03413622283935547, 0.034223583221435545, 0.03415488052368164, 0.03425484848022461, 0.03422003173828125, 0.0343633918762207, 0.03425414276123047, 0.03427113723754883, 0.034245407104492184, 0.03432400131225586, 0.03438582229614258, 0.03425513458251953, 0.03423875045776367, 0.034463584899902346, 0.03435708618164063, 0.03427094268798828, 0.034253406524658206, 0.034285568237304685, 
0.034557952880859374, 0.03437180709838867, 0.03431939315795898, 0.03451302337646484, 0.03463151931762695, 0.03438671875, 0.03434832000732422, 0.034596767425537106, 0.03438406372070312, 0.03436435317993164, 0.034576000213623045, 0.034436607360839845, 0.03444179153442383, 0.03439820861816406, 0.03436246490478516, 0.034449630737304685, 0.034351806640625, 0.034498817443847654, 0.03450790405273438, 0.03435171127319336, 0.03427331161499023, 0.03442073440551758, 0.034402305603027344, 0.03441459274291992, 0.034516990661621096, 0.03425689697265625, 0.03418092727661133, 0.03420121765136719, 0.03423904037475586, 0.0343633918762207, 0.03417494583129883, 0.03419055938720703, 0.03597260665893555, 0.03480438232421875, 0.03488479995727539, 0.03472019195556641, 0.03465977478027344, 0.03454576110839844, 0.034777599334716795, 0.034765151977539065, 0.034653278350830076, 0.03470537567138672, 0.034609153747558595, 0.03436435317993164, 0.034264095306396486, 0.03415548706054688, 0.03417702484130859, 0.03444121551513672, 0.034463455200195316, 0.03428524780273438, 0.03420630264282227, 0.03586899185180664, 0.036456127166748044, 0.03449651336669922, 0.03435446548461914, 0.03421571350097656, 0.03421689605712891, 0.03413372802734375, 0.03426332855224609, 0.034606239318847654, 0.03441267013549805, 0.03440627288818359, 0.03429782485961914, 0.03426387023925781, 0.03455347061157227, 0.03441043090820312, 0.03445129776000976, 0.03448899078369141, 0.03454889678955078, 0.0342639045715332, 0.03434905624389648, 0.034490367889404294, 0.034508800506591795, 0.034557952880859374, 0.03440639877319336, 0.03458867263793945, 0.034580032348632814, 0.034406848907470707, 0.034410015106201175, 0.03433420944213867, 0.03541910552978516, 0.03616057586669922, 0.0347309455871582, 0.034560001373291016, 0.03445529556274414, 0.034668384552001955, 0.034398624420166016, 0.034667552947998045, 0.03439715194702148, 0.034428382873535154, 0.034365985870361326, 0.034256256103515625, 0.0345483512878418, 0.03469833755493164, 0.0345711669921875, 0.03511289596557617, 0.034834495544433595, 0.03438617706298828, 0.03447747039794922, 0.03496755218505859, 0.03458902359008789, 0.0344637451171875, 0.034409534454345705, 0.034196414947509766, 0.0348359375, 0.03431887817382812, 0.03494319915771484, 0.03436326217651367, 0.03451894378662109, 0.03421593475341797, 0.034088958740234376, 0.034278816223144534, 0.03425263977050781, 0.03419990539550781, 0.03423680114746094, 0.03416617584228516, 0.03421043014526367, 0.03463091278076172, 0.03443484878540039, 0.034794273376464846, 0.03424620819091797, 0.03421187210083008, 0.03409366226196289, 0.034367488861083983, 0.03502854537963867, 0.03484716796875, 0.03477212905883789, 0.03476736068725586, 0.03468304061889648, 0.03444345474243164, 0.035305374145507815, 0.03629884719848633, 0.034840576171875, 0.035076095581054685, 0.034497760772705076, 0.03470931243896484, 0.034538463592529295, 0.03460457611083984, 0.03520905685424805, 0.035153888702392576, 0.03481782531738281, 0.03497868728637695, 0.03490544128417969, 0.03463644790649414, 0.0344719352722168, 0.03471763229370117, 0.03449657440185547, 0.03450668716430664, 0.034487743377685544, 0.03451968002319336, 0.03461324691772461, 0.03455088043212891, 0.03445427322387695, 0.034414623260498045, 0.034498687744140624, 0.03455385589599609, 0.03472300720214844, 0.034633567810058594, 0.035014591217041015, 0.034827041625976565, 0.03443503952026367, 0.03440643310546875, 0.03444736099243164, 0.03429779052734375, 0.03443072128295899, 0.03449273681640625, 0.03462758255004883, 0.034609153747558595, 
0.03474460983276367, 0.034602718353271486, 0.0347770881652832, 0.034759807586669925, 0.034680992126464846, 0.0345300178527832, 0.034473983764648435, 0.0343900146484375, 0.03441254425048828, 0.034461921691894534, 0.03452073669433594, 0.03481817626953125, 0.03490614318847656, 0.03478953552246094, 0.034660160064697264, 0.03447382354736328, 0.03451100921630859, 0.0345863037109375, 0.03442105484008789, 0.0344640007019043, 0.03456707382202148, 0.034349086761474606, 0.03444595336914062, 0.03446393585205078, 0.03441664123535156, 0.03473612976074219, 0.034544769287109374, 0.034519935607910154, 0.0344925765991211, 0.03432575988769531, 0.034398815155029294, 0.03435712051391602, 0.0345928955078125, 0.034793792724609376, 0.03495289611816406, 0.03460095977783203, 0.03444736099243164, 0.03434905624389648, 0.03444326400756836, 0.034366687774658206, 0.034470687866210936, 0.03492681503295898, 0.03440588760375977, 0.034296161651611326, 0.03431142425537109, 0.03429983901977539, 0.034361217498779295, 0.03441756820678711, 0.03425417709350586, 0.034437568664550784, 0.03430998229980469, 0.03424086380004883, 0.034236415863037106, 0.03502899169921875, 0.03464988708496094, 0.035471935272216794, 0.03436268615722656, 0.03441446304321289, 0.03447420883178711, 0.03437206268310547, 0.03436531066894531, 0.03432233428955078, 0.03440787124633789, 0.034542144775390624, 0.03468806457519531, 0.03459372711181641, 0.034410369873046874, 0.0343392333984375, 0.03476860809326172, 0.034396190643310544, 0.034418113708496095, 0.03427110290527344, 0.03570665740966797, 0.03431705474853516, 0.035104736328125, 0.034590206146240234, 0.03440707015991211, 0.03417702484130859, 0.03444736099243164, 0.03454534530639648, 0.03429369735717774, 0.034548446655273436, 0.034666145324707034, 0.034315582275390624, 0.034337120056152345, 0.03434902572631836, 0.0342694091796875, 0.03427139282226563, 0.034602848052978516, 0.0346069450378418, 0.034464065551757815, 0.03483004760742187, 0.03439235305786133, 0.034531326293945314, 0.034350177764892575, 0.03448748779296875, 0.0343590087890625, 0.03428966522216797, 0.03444326400756836, 0.034342910766601564, 0.03437071990966797, 0.03454006576538086, 0.034807552337646486, 0.0346363525390625, 0.0349224967956543, 0.03468265533447266, 0.03451107025146485, 0.03477913665771484, 0.03500339126586914, 0.035353599548339845, 0.03510067367553711, 0.03508019256591797, 0.03480166244506836, 0.03459411239624023, 0.03438662338256836, 0.03444652938842773, 0.03558396911621094, 0.03469311904907227, 0.03471952056884765, 0.03470358276367187, 0.034799617767333986, 0.03427532958984375, 0.03428511810302735, 0.034255294799804686, 0.0344637451171875, 0.034508800506591795, 0.034987297058105465, 0.034869247436523435, 0.03482697677612305, 0.03457843017578125, 0.034541568756103515, 0.034713600158691404, 0.03509036636352539, 0.03488924789428711, 0.03488191986083984, 0.03525785446166992, 0.03507267379760742, 0.03492454528808594, 0.03501795196533203, 0.034557857513427735, 0.03442572784423828, 0.03437353515625, 0.03446700668334961, 0.03455456161499024, 0.034817726135253906, 0.0347509765625, 0.03457231903076172, 0.034609153747558595, 0.03681280136108398, 0.03542784118652344, 0.03745363235473633, 0.03501964950561524, 0.035049278259277346, 0.03462144088745117, 0.03447395324707031, 0.034651905059814456, 0.03491254425048828, 0.03459267044067383, 0.03436528015136719, 0.034495777130126956, 0.03435139083862305, 0.03475116729736328, 0.034664447784423826, 0.03457622528076172, 0.03454291152954102, 0.03441532897949219, 0.03432201766967773, 0.03443084716796875, 
0.034431873321533205, 0.03431196975708008, 0.0343573112487793, 0.03426230239868164, 0.034362014770507814, 0.034394241333007815, 0.03420467376708984, 0.03410009765625, 0.034272640228271485, 0.03567884826660156, 0.03566582489013672, 0.035399681091308595, 0.034787105560302733, 0.03443491363525391, 0.03435472106933594, 0.03442720031738281, 0.03440079879760742, 0.03432243347167969, 0.03452928161621094, 0.03448627090454102, 0.03440582275390625, 0.034427455902099606, 0.03439820861816406, 0.03434086227416992, 0.034351104736328124, 0.034496318817138674, 0.03446726226806641, 0.03460540771484375, 0.03441292953491211, 0.034480159759521484, 0.03458636856079102, 0.03466889572143555, 0.0348056640625, 0.03486675262451172, 0.034681278228759764, 0.03474431991577148, 0.03463782501220703, 0.03452284622192383, 0.03722873687744141, 0.0345068473815918, 0.03434598541259765, 0.03444131088256836, 0.03451587295532227, 0.03457843017578125, 0.034756446838378904, 0.034646175384521485, 0.03438406372070312, 0.03443487930297852, 0.03458035278320312, 0.03446918487548828, 0.034509536743164065, 0.03434662246704102, 0.034633697509765624, 0.034961887359619144, 0.03475199890136719, 0.03483292770385742, 0.03484262466430664, 0.03505110549926758, 0.034636192321777344, 0.03721555328369141, 0.03485065460205078, 0.03475132751464844, 0.03447788619995117, 0.034406593322753906, 0.03473408126831055, 0.034707584381103516, 0.034603038787841794, 0.03557360076904297, 0.03492582321166992, 0.0347061767578125, 0.03465420913696289, 0.03462963104248047, 0.034481857299804686, 0.034533695220947264, 0.03553030395507813, 0.03493523025512695, 0.03447555160522461, 0.03437807846069336, 0.03437343978881836, 0.03448368072509766, 0.034466655731201175, 0.0342745590209961, 0.034582271575927734, 0.03431478500366211, 0.03415702438354492, 0.03425465774536133, 0.03422601699829102, 0.034410400390625, 0.0348389778137207, 0.03451289749145508, 0.03433785629272461, 0.03433772659301758, 0.03431628799438476, 0.03419340896606445, 0.034195457458496094, 0.03417292785644531, 0.0344637451171875, 0.03470131301879883, 0.034344959259033206, 0.034395614624023435, 0.03443561553955078, 0.03440639877319336, 0.034465694427490236, 0.034305793762207035, 0.03430665588378906, 0.03430342483520508, 0.034543327331542965, 0.0343353271484375, 0.03463711929321289, 0.03486495971679687, 0.03491884613037109, 0.03479939270019531, 0.034713470458984375, 0.03481884765625, 0.034944190979003906, 0.03510995101928711, 0.035925758361816405, 0.035200225830078126, 0.03497420883178711, 0.03515011215209961, 0.034692577362060543, 0.034465343475341796, 0.03432137680053711, 0.03457228851318359, 0.0342999038696289, 0.03461119842529297, 0.03467203140258789, 0.034680992126464846, 0.03455430221557617, 0.03452435302734375, 0.03485747146606445, 0.034691390991210935, 0.034670272827148435, 0.03480607986450195, 0.03493478393554687, 0.03497740936279297, 0.03477951812744141, 0.0354856948852539, 0.03499622344970703, 0.03487241744995117, 0.03477344131469726, 0.03478982543945312, 0.03498787307739258, 0.03466873550415039, 0.03448831939697266, 0.03436544036865234, 0.0351907844543457, 0.034452545166015626, 0.034606014251708984, 0.03452928161621094, 0.03466854476928711, 0.0349409294128418, 0.03452057647705078, 0.03471206283569336, 0.035127296447753906, 0.03501849746704101, 0.034902271270751954, 0.0349409294128418, 0.03474822235107422, 0.03489401626586914, 0.03448115158081055, 0.035084671020507815, 0.03484048080444336, 0.03461795043945313, 0.03442905426025391, 0.0348322868347168, 0.03465430450439453, 0.03461939239501953, 
0.03602793502807617, 0.03460758590698242, 0.034422592163085936, 0.03467488098144531, 0.03460300827026367, 0.034472000122070315, 0.03440224075317383, 0.034454879760742185, 0.0343661117553711, 0.03460710525512695, 0.034732032775878906, 0.03496691131591797, 0.03460179138183594, 0.0345098876953125, 0.034494655609130856, 0.034855487823486325, 0.03478073501586914, 0.03468080139160156, 0.034748897552490235, 0.03451084899902344, 0.03440332794189453, 0.034579456329345705, 0.03441049575805664, 0.034522239685058596, 0.03463257598876953, 0.03445468902587891, 0.03477315139770508, 0.03446787261962891, 0.034382495880126956, 0.03470336151123047, 0.034592159271240236, 0.03455231857299805]",tokens/s,28.898536718044994,, 4bit-gptq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,2,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-gptq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,8224.391168,11251.089408,0.0,10848.567296,10616.027648,s,1,14.707181640625,14.707181640625,0.0,14.707181640625,14.707181640625,14.707181640625,14.707181640625,[14.707181640625],,kWh,0.0002217485047749809,2.4453100640080384e-05,6.92994998839902e-05,0.0003155011052990515,,MB,3927.830528,11683.10272,0.0,11265.900544,11070.470656,s,10,3.869327514648438,0.38693275146484374,0.0015723757777633528,0.38700979614257813,0.38842025756835935,0.38910264587402343,0.3896485565185547,"[0.3837105712890625, 0.38558758544921873, 0.38826861572265625, 0.38587115478515627, 0.3866356506347656, 0.38675909423828125, 0.387260498046875, 0.3877727966308594, 0.3897850341796875, 0.387676513671875]",tokens/s,661.6136758411877,kWh,1.135294146875087e-05,1.2516110310114135e-06,7.526939782230889e-06,2.0131492281993172e-05,tokens/kWh,12716394.612682633,MB,3927.830528,11685.199872,0.0,11267.997696,11070.473216,s,10,29.395304443359375,2.9395304443359374,0.0063517143109843195,2.938293823242187,2.9485116455078124,2.9486101928710937,2.948689030761719,"[2.931767822265625, 2.9330380859375, 2.93488330078125, 2.93354296875, 2.934613037109375, 2.941704345703125, 2.945873291015625, 2.948708740234375, 2.94848974609375, 2.94268310546875]",tokens/s,21.43199439263919,kWh,8.604304739916469e-05,9.491161779877193e-06,5.7227056465370925e-05,0.0001527612656444128,tokens/kWh,412408.20920302585,,s,630,29.392640850067146,0.046654985476297046,0.0006162115645370318,0.04658526420593262,0.047064745330810544,0.04731296463012695,0.049377217025756835,"[0.04797983932495117, 0.04630521774291992, 0.04594895935058594, 0.04579388809204102, 0.04568880081176758, 0.04603100967407227, 0.046243839263916016, 0.04603289413452148, 0.046176254272460936, 0.046209022521972655, 0.04637081527709961, 0.04669635009765625, 0.04658287811279297, 0.046418495178222656, 0.04607804870605469, 0.04602207946777344, 0.04611779022216797, 0.04601824188232422, 0.046063934326171875, 0.046357601165771485, 0.046598880767822266, 0.04624345779418945, 0.04633184051513672, 0.04660492706298828, 0.0461495361328125, 0.04623942565917969, 0.046680225372314456, 0.04628096008300781, 0.04749702453613281, 0.04642425537109375, 0.04654489517211914, 0.04646297454833984, 0.04700902557373047, 0.0467217903137207, 0.046321376800537106, 0.04655542373657227, 0.04631548690795898, 0.04627609634399414, 0.04634064102172852, 0.046569313049316406, 0.046825630187988285, 0.046655487060546875, 0.04655923080444336, 0.04676809692382813, 0.046397472381591795, 0.0465849609375, 0.04664559936523437, 0.04633411026000977, 0.04637712097167969, 0.04660041427612305, 0.04674560165405273, 0.04674969482421875, 0.046954113006591795, 0.04691392135620117, 0.04697903823852539, 0.047030017852783206, 0.04669865417480469, 
0.04683555221557617, 0.04690761566162109, 0.04667587280273437, 0.04870364761352539, 0.046571422576904296, 0.046679935455322265, 0.04938137435913086, 0.046523998260498044, 0.04575324630737305, 0.04593993759155274, 0.04607056045532226, 0.04611008071899414, 0.04607859039306641, 0.046130302429199216, 0.04627241516113281, 0.046126049041748045, 0.04621660614013672, 0.04611667251586914, 0.04593718338012695, 0.04595702362060547, 0.046276161193847656, 0.04629993438720703, 0.04605132675170898, 0.04605747222900391, 0.0466328010559082, 0.04623580932617188, 0.0463721923828125, 0.046643169403076175, 0.04645878219604492, 0.046230304718017576, 0.046653438568115234, 0.04677017593383789, 0.04649267196655273, 0.046265342712402346, 0.04653859329223633, 0.04653276824951172, 0.0465016975402832, 0.04626655960083008, 0.04612496185302734, 0.046194080352783204, 0.04633670425415039, 0.04639539337158203, 0.04632166290283203, 0.04657766342163086, 0.04686188888549805, 0.046653694152832034, 0.04692768096923828, 0.0469587516784668, 0.0467437744140625, 0.04641321563720703, 0.04668188858032227, 0.046838367462158206, 0.04648681640625, 0.04647417449951172, 0.047426910400390626, 0.04834108734130859, 0.046717342376708985, 0.046784671783447265, 0.046456832885742184, 0.04658367919921875, 0.04663488006591797, 0.046524097442626956, 0.04653113555908203, 0.04680294418334961, 0.04670054244995117, 0.04679065704345703, 0.04718796920776367, 0.047306049346923826, 0.047079135894775394, 0.04832972717285156, 0.04652134323120117, 0.04625612640380859, 0.04604089736938476, 0.04566239929199219, 0.04596736145019531, 0.045995105743408204, 0.04585123062133789, 0.045891902923583985, 0.0462081298828125, 0.04607254409790039, 0.046116321563720704, 0.0460164794921875, 0.04633673477172852, 0.04651212692260742, 0.04614115142822266, 0.04639081573486328, 0.04683852767944336, 0.046404830932617186, 0.04665628814697265, 0.04696031951904297, 0.04682572937011719, 0.04666988754272461, 0.04666716766357422, 0.046475486755371095, 0.04642784118652344, 0.04636947250366211, 0.04637900924682617, 0.04598988723754883, 0.04640563201904297, 0.04655923080444336, 0.04624998474121094, 0.046208446502685546, 0.046666046142578126, 0.046607681274414066, 0.046314430236816403, 0.04679065704345703, 0.04851251220703125, 0.04634019088745117, 0.04655878448486328, 0.047125217437744144, 0.046801025390625, 0.04692582321166992, 0.04714700698852539, 0.04657561492919922, 0.046430335998535154, 0.046873950958251955, 0.04662531280517578, 0.04653670501708984, 0.04651152038574219, 0.04641417694091797, 0.046424320220947266, 0.04664115142822266, 0.04658995056152344, 0.04639644622802734, 0.04664828872680664, 0.04659404754638672, 0.04701388931274414, 0.04770611190795898, 0.047529022216796876, 0.046678974151611326, 0.047150943756103514, 0.04708367919921875, 0.04881203079223633, 0.04652851104736328, 0.04602470397949219, 0.045864959716796876, 0.04610639953613281, 0.04599772644042969, 0.04603267288208008, 0.04612156677246094, 0.04638937759399414, 0.04648556900024414, 0.04658790588378906, 0.046730785369873046, 0.0463744010925293, 0.04593139266967773, 0.04627024078369141, 0.04605148696899414, 0.046200702667236325, 0.0462400016784668, 0.04650156784057617, 0.046387359619140624, 0.046600383758544923, 0.046659072875976565, 0.04678847885131836, 0.04646361541748047, 0.04639744186401367, 0.04631347274780274, 0.04610867309570312, 0.046204544067382815, 0.04642649459838867, 0.04691763305664062, 0.04652563095092774, 0.04666003036499024, 0.04678876876831055, 0.046260448455810545, 0.046690048217773436, 
0.04649795150756836, 0.04618035125732422, 0.046600288391113284, 0.04679270553588867, 0.04648492813110352, 0.04646937561035156, 0.04672748947143555, 0.046931774139404296, 0.04684163284301758, 0.04687913513183594, 0.046671871185302735, 0.046358528137207033, 0.04652592086791992, 0.04664969635009766, 0.04621126556396484, 0.046606464385986326, 0.0467372817993164, 0.04667795181274414, 0.046649406433105466, 0.04697020721435547, 0.04705913543701172, 0.04688304138183594, 0.04676992034912109, 0.04678688049316406, 0.04671903991699219, 0.0467191047668457, 0.04727580642700195, 0.04715068817138672, 0.049161727905273435, 0.04652048110961914, 0.04620102310180664, 0.04595507049560547, 0.04593664169311523, 0.04594278335571289, 0.04614963150024414, 0.04620492935180664, 0.04588479995727539, 0.046225345611572266, 0.04632236862182617, 0.04602880096435547, 0.046159870147705076, 0.046525951385498046, 0.04628483200073242, 0.04626889419555664, 0.046519905090332034, 0.046397857666015625, 0.046063617706298826, 0.04628636932373047, 0.04691737747192383, 0.04696752166748047, 0.04676348876953125, 0.04685878372192383, 0.04640563201904297, 0.046204193115234375, 0.0463135986328125, 0.04650249481201172, 0.04623155212402344, 0.04642611312866211, 0.04650188827514649, 0.04620697784423828, 0.046343711853027346, 0.0467542724609375, 0.046440223693847656, 0.046465248107910154, 0.04677836990356445, 0.04695449447631836, 0.04639744186401367, 0.046476448059082034, 0.046742366790771483, 0.046604286193847655, 0.04685398483276367, 0.0469587516784668, 0.04691273498535156, 0.0469532470703125, 0.04673331069946289, 0.04653670501708984, 0.04635443115234375, 0.047005630493164065, 0.046700607299804686, 0.046738494873046876, 0.0465601921081543, 0.047099903106689454, 0.046941184997558595, 0.04682854461669922, 0.04663868713378906, 0.046583297729492185, 0.046741729736328126, 0.04706374359130859, 0.04679673767089844, 0.04715116882324219, 0.046908702850341794, 0.04932799911499024, 0.046895233154296875, 0.04595711898803711, 0.04588729476928711, 0.0462174072265625, 0.04599193572998047, 0.04581577682495117, 0.04610796737670898, 0.04642684936523438, 0.04646627044677734, 0.046443294525146485, 0.04630527877807617, 0.04626227188110352, 0.04623952102661133, 0.046491870880126955, 0.04607148742675781, 0.04576224136352539, 0.046240383148193356, 0.046548606872558594, 0.04628313446044922, 0.04637900924682617, 0.047091712951660154, 0.04676198577880859, 0.04630646514892578, 0.04660924911499023, 0.046430206298828124, 0.04595507049560547, 0.04621311950683594, 0.046781726837158207, 0.04673199844360352, 0.04677632141113281, 0.046643169403076175, 0.04658560180664063, 0.04700124740600586, 0.04680742263793945, 0.04657980728149414, 0.046491809844970707, 0.04664460754394531, 0.047264385223388675, 0.046243839263916016, 0.04653055953979492, 0.04711423873901367, 0.04688649749755859, 0.04687299346923828, 0.04694630432128906, 0.046752960205078124, 0.04667679977416992, 0.04657049560546875, 0.04697763061523438, 0.04689347076416016, 0.04672512054443359, 0.0465428466796875, 0.04668127822875977, 0.04684006500244141, 0.04677280044555664, 0.04658950424194336, 0.04681772613525391, 0.04692582321166992, 0.04683145523071289, 0.05233270263671875, 0.04627180862426758, 0.04685251235961914, 0.04698863983154297, 0.049285823822021485, 0.04665900802612305, 0.04614995193481446, 0.04593875122070312, 0.04614163208007813, 0.04609228897094726, 0.04602675247192383, 0.04659321594238281, 0.04658585739135742, 0.04613536071777344, 0.04841052627563477, 0.046295520782470706, 0.04623193740844726, 
0.04613711929321289, 0.046532833099365234, 0.047156448364257815, 0.04576131057739258, 0.04635347366333008, 0.046597057342529294, 0.0465428466796875, 0.046677120208740236, 0.04687673568725586, 0.046656318664550785, 0.046585567474365236, 0.04651651382446289, 0.04690700912475586, 0.04625993728637695, 0.04652671813964844, 0.046709152221679685, 0.04648550415039063, 0.04686988830566406, 0.0471701774597168, 0.04930879974365234, 0.04648796844482422, 0.04656745529174805, 0.04648185729980469, 0.04639750289916992, 0.046604286193847655, 0.046319137573242186, 0.046437950134277345, 0.046771041870117186, 0.04698316955566406, 0.04684799957275391, 0.0469381103515625, 0.0469703369140625, 0.046957088470458985, 0.046951839447021484, 0.04674176025390625, 0.04664559936523437, 0.04694009780883789, 0.04669036865234375, 0.046622718811035156, 0.04689715194702149, 0.04680499267578125, 0.04913971328735352, 0.046456512451171876, 0.04655251312255859, 0.046448734283447264, 0.0467496337890625, 0.047061279296875, 0.04677027130126953, 0.04707376098632812, 0.04712607955932617, 0.05009328079223633, 0.04690409469604492, 0.04597350311279297, 0.04605952072143555, 0.046441982269287106, 0.04790937423706055, 0.04580966567993164, 0.046516223907470705, 0.04610047912597656, 0.04603673553466797, 0.04641817474365234, 0.046516223907470705, 0.04626979064941406, 0.0461965446472168, 0.046158016204833986, 0.04650870513916015, 0.04652646255493164, 0.04664303970336914, 0.04635049438476563, 0.046233024597167965, 0.04709228897094726, 0.04722073745727539, 0.046639102935791016, 0.046548416137695316, 0.046661792755126955, 0.04663132858276367, 0.04667391967773438, 0.049582080841064455, 0.04601174545288086, 0.04639401626586914, 0.046534656524658206, 0.04619673538208008, 0.04638467025756836, 0.04631804656982422, 0.04636822509765625, 0.046493663787841796, 0.046756416320800784, 0.046811134338378906, 0.04689920043945312, 0.04681932830810547, 0.04708761596679688, 0.04711324691772461, 0.04712076950073242, 0.04689958572387695, 0.046763553619384765, 0.046921886444091794, 0.047120929718017575, 0.046843902587890625, 0.046835712432861325, 0.050036670684814454, 0.04608620834350586, 0.0466632308959961, 0.046755550384521484, 0.04642425537109375, 0.046438945770263675, 0.04665280151367188, 0.046803585052490236, 0.047001598358154296, 0.0470200309753418, 0.04698934555053711, 0.04772415924072266, 0.04728854370117187, 0.04714713668823242, 0.04981488037109375, 0.04610934448242188, 0.046048545837402345, 0.04620486450195312, 0.04616649627685547, 0.046129470825195314, 0.045956478118896485, 0.0462276496887207, 0.04594019317626953, 0.04648649597167969, 0.04668201446533203, 0.046273792266845706, 0.046293472290039064, 0.04648783874511719, 0.04648764801025391, 0.046309375762939455, 0.046516223907470705, 0.04681523132324219, 0.04662051010131836, 0.04667407989501953, 0.04699955368041992, 0.04670873641967774, 0.04936703872680664, 0.04613324737548828, 0.046327808380126956, 0.04645382308959961, 0.046185375213623044, 0.046852127075195316, 0.046682113647460936, 0.04656857681274414, 0.046881664276123045, 0.046685855865478514, 0.04652796936035156, 0.04692233657836914, 0.046491168975830076, 0.046777088165283205, 0.04659318542480469, 0.046588768005371095, 0.04722012710571289, 0.04705545425415039, 0.04686643218994141, 0.0474337272644043, 0.0470384635925293, 0.04680294418334961, 0.04962303924560547, 0.046300830841064455, 0.046421920776367184, 0.04693423843383789, 0.04661884689331055, 0.04658377456665039, 0.04701187133789062, 0.04677171325683594, 0.04675532913208008, 0.04684492874145508, 
0.04674560165405273, 0.046698497772216796, 0.04699135971069336, 0.04708761596679688, 0.04701984024047851, 0.047417537689208984, 0.0472022705078125, 0.04731862258911133, 0.04748128128051758, 0.04869132614135742, 0.04668758392333985, 0.04635923385620117, 0.04601241683959961, 0.04594851303100586, 0.04639692687988281, 0.046271392822265625, 0.046235649108886716, 0.046622718811035156, 0.046539806365966795, 0.04662371063232422, 0.04647727966308594, 0.04633980941772461, 0.04650608062744141, 0.04624563217163086, 0.04616444778442383, 0.04648291015625, 0.04609487915039062, 0.046266014099121094, 0.04662511825561524, 0.046845951080322266, 0.04702816009521484, 0.046823486328125, 0.046357921600341793, 0.04650985717773438, 0.04681203079223633, 0.04693385696411133, 0.046423969268798826, 0.04642425537109375, 0.04758095932006836, 0.04645296096801758, 0.04673292922973633, 0.04671267318725586, 0.047022624969482424, 0.046473217010498044, 0.046495105743408205, 0.04676774215698242, 0.04632463836669922, 0.04674979019165039, 0.046744991302490234, 0.04655984115600586, 0.046851425170898436, 0.04712287902832031, 0.0471693115234375, 0.04685184097290039, 0.04680099105834961, 0.047053409576416017, 0.04687257766723633, 0.04688083267211914, 0.046636993408203126, 0.04692940902709961, 0.046970752716064455, 0.047043201446533206, 0.047042560577392575, 0.046835678100585934, 0.046745376586914064, 0.04696271896362305, 0.04686870574951172, 0.04676723098754883, 0.046711681365966796, 0.047034366607666016, 0.04692326354980469, 0.04697708892822266]",tokens/s,21.433936583434317,, 4bit-gptq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,gptq,4,True,1,256,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,3163.377664,4431.151104,0.0,4028.628992,3944.723968,s,1,10.3440693359375,10.3440693359375,0.0,10.3440693359375,10.3440693359375,10.3440693359375,10.3440693359375,[10.3440693359375],,kWh,9.552016602917016e-05,1.0529324165995e-05,3.056530223002896e-05,0.0001366147924251941,,MB,3218.006016,4770.889728,0.0,4353.687552,4305.05728,s,10,1.0984847106933593,0.10984847106933593,0.0002605002368488148,0.10989107131958008,0.11014441680908203,0.1101818733215332,0.11021183853149413,"[0.1100123519897461, 0.10947843170166016, 0.109482177734375, 0.10986265563964843, 0.11021932983398437, 0.10954656219482421, 0.10991948699951172, 0.11013609313964844, 0.10974729919433594, 0.110080322265625]",tokens/s,2330.4830509512853,kWh,3.2821042248123672e-06,3.617759558636077e-07,2.177841942022553e-06,5.821722122698528e-06,tokens/kWh,43973242.728620134,MB,3220.365312,4770.889728,0.0,4353.687552,4305.05984,s,10,21.5583271484375,2.1558327148437497,0.007466216733338763,2.1534361572265626,2.1672942138671876,2.1687994506835935,2.1700036401367186,"[2.152592041015625, 2.159719482421875, 2.157880859375, 2.166959716796875, 2.148681396484375, 2.15079931640625, 2.1542802734375, 2.1703046875, 2.15074169921875, 
2.14636767578125]",tokens/s,29.223046652098937,kWh,6.291325999268587e-05,6.939341831054376e-06,3.525141022357967e-05,0.00010510401204731992,tokens/kWh,599406.2336234715,,s,630,21.555751449584964,0.0342154784914047,0.0005858940224112002,0.03409353637695313,0.034640785598754886,0.034891537284851074,0.0361589853668213,"[0.03465465545654297, 0.03443548965454102, 0.03428044891357422, 0.034067104339599606, 0.03439411163330078, 0.03402060699462891, 0.033968894958496094, 0.03413577651977539, 0.034183456420898435, 0.03390214538574219, 0.03392147064208984, 0.03384060668945312, 0.033786399841308594, 0.03391692733764649, 0.03381043243408203, 0.03407430267333984, 0.033803966522216795, 0.03396876907348633, 0.03393644714355469, 0.035597248077392576, 0.03631718444824219, 0.034714817047119144, 0.03456476974487305, 0.0342501106262207, 0.03434921646118164, 0.03402352142333984, 0.034199649810791016, 0.03420409774780273, 0.03439206314086914, 0.033980415344238284, 0.03411286544799805, 0.03375785446166992, 0.03400447845458984, 0.03402092742919922, 0.03391494369506836, 0.03391139221191406, 0.03383148956298828, 0.033844959259033205, 0.033933311462402346, 0.033928768157958984, 0.03397795104980469, 0.0339505615234375, 0.033989696502685546, 0.033887359619140626, 0.034001792907714844, 0.034177982330322265, 0.03404800033569336, 0.033982463836669925, 0.03432243347167969, 0.03410927963256836, 0.03413148880004883, 0.034089599609375, 0.034080768585205076, 0.03405209732055664, 0.03450406265258789, 0.03558259201049805, 0.03409475326538086, 0.034006656646728514, 0.03414499282836914, 0.03433881759643555, 0.033882110595703126, 0.03377151870727539, 0.03425468826293945, 0.03514064025878906, 0.03446697616577148, 0.03458614349365234, 0.034652446746826174, 0.034988033294677735, 0.03487900924682617, 0.035828193664550784, 0.03468697738647461, 0.03420889663696289, 0.03409539031982422, 0.03454966354370117, 0.03454022216796875, 0.03462758255004883, 0.03447824096679687, 0.03562176132202149, 0.03437871932983398, 0.03413967895507813, 0.03414473724365234, 0.03466035079956055, 0.03448627090454102, 0.0348135986328125, 0.034302303314208984, 0.03415244674682617, 0.03398195266723633, 0.03391743850708008, 0.03387564849853516, 0.03386838531494141, 0.033877727508544925, 0.033826560974121095, 0.03379571151733399, 0.033716865539550785, 0.033855392456054685, 0.03393759918212891, 0.03377449417114258, 0.03403059387207031, 0.03400076675415039, 0.03412732696533203, 0.03420947265625, 0.03428451156616211, 0.03419340896606445, 0.03436544036865234, 0.03432620620727539, 0.034141792297363284, 0.034004768371582034, 0.03394041442871094, 0.03413401412963867, 0.03405619049072266, 0.03396940612792969, 0.033780479431152345, 0.03424051284790039, 0.034092769622802735, 0.034073982238769535, 0.0345179214477539, 0.034325599670410156, 0.03455414581298828, 0.0344438705444336, 0.034275360107421875, 0.03410124969482422, 0.03414947128295898, 0.03392835235595703, 0.034031360626220704, 0.03404390335083008, 0.034266849517822266, 0.035157440185546875, 0.03461289596557617, 0.034392990112304685, 0.034539039611816404, 0.034501087188720705, 0.03458867263793945, 0.03465625762939453, 0.034402305603027344, 0.03583180618286133, 0.034665760040283204, 0.034802398681640624, 0.03464128112792969, 0.03452934265136719, 0.034562686920166015, 0.03411552047729492, 0.03421184158325195, 0.0339947509765625, 0.03411737442016602, 0.034422462463378906, 0.034501121520996096, 0.03422598266601563, 0.034493694305419924, 0.03408588790893555, 0.03399606323242187, 0.03392729568481445, 0.03392940902709961, 
0.03394806289672852, 0.03386899185180664, 0.03397305679321289, 0.033920703887939455, 0.03387209701538086, 0.03373027038574219, 0.034120063781738284, 0.03376073455810547, 0.0341099853515625, 0.034088958740234376, 0.034170879364013675, 0.03401318359375, 0.033849086761474606, 0.033759422302246093, 0.033943614959716796, 0.033971649169921875, 0.03403424072265625, 0.034487648010253905, 0.034511520385742185, 0.0342421760559082, 0.03439606475830078, 0.03451132965087891, 0.03442617416381836, 0.03423302459716797, 0.03398774337768555, 0.03396489715576172, 0.03383091354370117, 0.03385696029663086, 0.034087486267089843, 0.03402137756347656, 0.034338176727294924, 0.035867103576660155, 0.03410550308227539, 0.033974273681640625, 0.033936737060546875, 0.033823455810546875, 0.03400902557373047, 0.035160545349121095, 0.03466976165771484, 0.03421676635742187, 0.03421334457397461, 0.03453094482421875, 0.03391990280151367, 0.03364364624023437, 0.034159358978271485, 0.03379987335205078, 0.03365727996826172, 0.034082656860351564, 0.03372208023071289, 0.03372281646728516, 0.03442800140380859, 0.03449654388427734, 0.03453222274780274, 0.03474982452392578, 0.03462617492675781, 0.03452419281005859, 0.034742462158203126, 0.034478622436523436, 0.0342154541015625, 0.03407843017578125, 0.033905025482177734, 0.03376927947998047, 0.03381683349609375, 0.03388844680786133, 0.039704063415527346, 0.03464076614379883, 0.03423551940917969, 0.034358142852783206, 0.034326080322265626, 0.03414265441894531, 0.034202720642089846, 0.034312286376953126, 0.034218814849853514, 0.035192832946777344, 0.03424051284790039, 0.03411289596557617, 0.033767295837402345, 0.033979137420654296, 0.03379516983032226, 0.03369052886962891, 0.03367663955688477, 0.03364694213867187, 0.03377388763427734, 0.03433868789672852, 0.034501953125, 0.0342597770690918, 0.03445555114746094, 0.03410124969482422, 0.03395174407958984, 0.03388809585571289, 0.03384905624389648, 0.033951713562011716, 0.03403996658325195, 0.03907411193847656, 0.03578265762329102, 0.034592769622802735, 0.034852863311767575, 0.03440591812133789, 0.03446217727661133, 0.03436742401123047, 0.03442192077636719, 0.03438800048828125, 0.034081600189208985, 0.03397017669677734, 0.03396198272705078, 0.034105342864990236, 0.0343059196472168, 0.034103424072265624, 0.0340882568359375, 0.033882816314697264, 0.03383910369873047, 0.03374460983276367, 0.03391222381591797, 0.033987457275390626, 0.03386368179321289, 0.033809696197509766, 0.03382550430297852, 0.0339865608215332, 0.03413119888305664, 0.03416726303100586, 0.03434067153930664, 0.03423075103759766, 0.03466649627685547, 0.03443478393554687, 0.034248992919921874, 0.034266719818115236, 0.034019744873046875, 0.035588096618652344, 0.03417264175415039, 0.03385772705078125, 0.033835105895996094, 0.03384873580932617, 0.03400150299072266, 0.03385139083862305, 0.033883518218994144, 0.034202239990234376, 0.0338671989440918, 0.0338458251953125, 0.03380806350708008, 0.03364255905151367, 0.03383475112915039, 0.03376153564453125, 0.033568286895751955, 0.03360636901855469, 0.03383910369873047, 0.03375823974609375, 0.03436982345581055, 0.03696915054321289, 0.034210975646972654, 0.034275360107421875, 0.03409183883666992, 0.03413151931762695, 0.03417337417602539, 0.03407257461547852, 0.034318336486816405, 0.03433881759643555, 0.03402048110961914, 0.034003841400146485, 0.03381155014038086, 0.033893280029296875, 0.03387798309326172, 0.03401302337646484, 0.03428700637817383, 0.03429532623291016, 0.03415315246582031, 0.03384239959716797, 0.034271808624267576, 
0.034252799987792966, 0.03408486557006836, 0.03407462310791016, 0.03422208023071289, 0.03435520172119141, 0.034121726989746096, 0.034179073333740234, 0.033982463836669925, 0.03409100723266602, 0.034325790405273435, 0.03459878540039062, 0.03441910552978516, 0.034509246826171874, 0.034049758911132814, 0.03399708938598633, 0.0339568977355957, 0.03384624099731445, 0.034146305084228515, 0.03439168167114258, 0.034299488067626956, 0.03425564956665039, 0.03528908920288086, 0.034353153228759765, 0.03470940780639648, 0.033984607696533206, 0.03389164733886719, 0.034091678619384766, 0.033798175811767577, 0.0337256965637207, 0.03394620895385742, 0.03373788833618164, 0.033905281066894534, 0.033888286590576175, 0.03413654327392578, 0.034326400756835934, 0.034076416015625, 0.034177150726318356, 0.034049983978271484, 0.03406867218017578, 0.03391823959350586, 0.03400719833374023, 0.03392291259765625, 0.03382153701782226, 0.03448409652709961, 0.034152000427246094, 0.03400748825073242, 0.034139167785644534, 0.03415289688110352, 0.03426067352294922, 0.03432124710083008, 0.034423904418945314, 0.03393360137939453, 0.03419375991821289, 0.03418342590332031, 0.03406671905517578, 0.03398015975952148, 0.033990657806396485, 0.03389344024658203, 0.033813152313232425, 0.03453628921508789, 0.0337520637512207, 0.03390771102905273, 0.033801502227783206, 0.033917022705078126, 0.03383135986328125, 0.03423001480102539, 0.034183616638183596, 0.03398860931396484, 0.03439206314086914, 0.033987808227539065, 0.033653182983398436, 0.03364694213867187, 0.033732608795166014, 0.033698974609375, 0.03454038238525391, 0.033890304565429685, 0.034092479705810544, 0.03407689666748047, 0.03397052764892578, 0.03401929473876953, 0.03403369522094726, 0.03402035140991211, 0.033958206176757814, 0.03392134475708008, 0.03403014373779297, 0.033979198455810544, 0.03412201690673828, 0.033980224609375, 0.03433564758300781, 0.03442620849609375, 0.034585247039794924, 0.034236415863037106, 0.03401859283447266, 0.034013919830322266, 0.03421120071411133, 0.03548838424682617, 0.03443046569824219, 0.03492406463623047, 0.03447702407836914, 0.03507593536376953, 0.03447574234008789, 0.0340832633972168, 0.03419955062866211, 0.03389187240600586, 0.03413804626464844, 0.03400742340087891, 0.03436495971679687, 0.034119678497314454, 0.034076351165771485, 0.034323070526123045, 0.03431391906738281, 0.03459116744995117, 0.034214080810546874, 0.03434035110473633, 0.03457260894775391, 0.03418259048461914, 0.033946369171142576, 0.03431331253051758, 0.03403807830810547, 0.03467734527587891, 0.03471331024169922, 0.0343201904296875, 0.03506585693359375, 0.03468492889404297, 0.03446137619018555, 0.03532160186767578, 0.034304576873779295, 0.03399411010742188, 0.034611839294433594, 0.03465625762939453, 0.03457024002075195, 0.0341234245300293, 0.034029918670654295, 0.03400908660888672, 0.03390054321289063, 0.03404912185668945, 0.03421481704711914, 0.034129920959472655, 0.03396768188476563, 0.034009536743164065, 0.03421388626098633, 0.03626163101196289, 0.04180115127563477, 0.034040382385253906, 0.03383695983886719, 0.03378006362915039, 0.034136062622070314, 0.03509657669067383, 0.03397017669677734, 0.03452012634277344, 0.034374015808105465, 0.03441875076293945, 0.03469977569580078, 0.03411075210571289, 0.03429008102416992, 0.03417055892944336, 0.03404825592041016, 0.03393775939941406, 0.03398249435424805, 0.03423436737060547, 0.03402547073364258, 0.033775615692138675, 0.03450374221801758, 0.03441945648193359, 0.03446777725219727, 0.034885887145996095, 0.03464191818237305, 
0.034640960693359375, 0.03462649536132813, 0.034564350128173826, 0.034205440521240235, 0.03402342224121094, 0.03378716659545898, 0.03385212707519531, 0.033947425842285155, 0.03384137725830078, 0.036345855712890625, 0.0343243522644043, 0.03408089447021485, 0.034050048828125, 0.03411763381958008, 0.0346844482421875, 0.033964511871337894, 0.03399270248413086, 0.03424812698364258, 0.034227649688720704, 0.03423068618774414, 0.03431219100952149, 0.03437148666381836, 0.03438396835327148, 0.03422342300415039, 0.0340200309753418, 0.03457404708862305, 0.03489616012573242, 0.03484467315673828, 0.0349488639831543, 0.034613502502441405, 0.034178848266601565, 0.03399087905883789, 0.03393523025512695, 0.03404355239868164, 0.034253280639648435, 0.03590768051147461, 0.03493264007568359, 0.03455385589599609, 0.034318336486816405, 0.034293056488037106, 0.03469382476806641, 0.03405811309814453, 0.03472937774658203, 0.03403952026367187, 0.03388518524169922, 0.033880062103271484, 0.033873920440673826, 0.03401113510131836, 0.03378790283203125, 0.03375718307495117, 0.03381836700439453, 0.034070175170898435, 0.034161247253417966, 0.03423027038574219, 0.03395711898803711, 0.03392982482910156, 0.034127296447753905, 0.03410812759399414, 0.03386163330078125, 0.033683456420898435, 0.03367731094360352, 0.03366022491455078, 0.03376403045654297, 0.03408486557006836, 0.033667072296142575, 0.0339002571105957, 0.03371587371826172, 0.03384998321533203, 0.034236415863037106, 0.03431219100952149, 0.03423436737060547, 0.03384463882446289, 0.03375369644165039, 0.033791999816894534, 0.03366912078857422, 0.03372003173828125, 0.03394384002685547, 0.03389440155029297, 0.03401728057861328, 0.03412947082519531, 0.03391718292236328, 0.03469043350219726, 0.03432723236083984, 0.03414220809936523, 0.03407462310791016, 0.03382886505126953, 0.03371753692626953, 0.03353059387207031, 0.034508800506591795, 0.035234848022460935, 0.034337696075439454, 0.03447814559936523, 0.03442422485351562, 0.03425545501708984, 0.03450470352172851, 0.03457843017578125, 0.03478112030029297, 0.03459251022338867, 0.03404345703125, 0.03398118209838867, 0.033699649810791016, 0.0337327995300293, 0.03400089645385742, 0.03397017669677734, 0.03385094451904297, 0.03373712158203125, 0.0337039680480957, 0.03375465774536133, 0.03382729721069336, 0.03392448043823242, 0.034050239562988284, 0.0341794548034668, 0.034003009796142576, 0.033809600830078126, 0.034127998352050784, 0.03416339111328125, 0.034105342864990236, 0.03402547073364258, 0.03395993423461914, 0.033890304565429685, 0.033923072814941405, 0.0339947509765625, 0.034094303131103516, 0.03396803283691406, 0.03417974472045898, 0.03402774429321289, 0.03401311874389648, 0.03383465576171875, 0.03384156799316406, 0.033804096221923825, 0.0342663688659668, 0.033962272644042966, 0.03392374420166016, 0.03384035110473633, 0.03391158294677735, 0.03386912155151367, 0.03392803192138672, 0.034598560333251954, 0.033925312042236325, 0.03383865737915039, 0.03383660888671875, 0.03389734268188477, 0.03399679946899414, 0.03409676742553711]",tokens/s,29.226538516806386,,