diff --git "a/perf-df-unquantized-1xT4.csv" "b/perf-df-unquantized-1xT4.csv" --- "a/perf-df-unquantized-1xT4.csv" +++ "b/perf-df-unquantized-1xT4.csv" @@ -41,7 +41,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 30.12 MiB is free. Process 82559 has 14.71 GiB memory in use. Of the allocated memory 14.37 GiB is allocated by PyTorch, and 229.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 30.12 MiB is free. Process 76139 has 14.71 GiB memory in use. Of the allocated memory 14.37 GiB is allocated by PyTorch, and 229.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -273,7 +273,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 258.12 MiB is free. Process 85522 has 14.49 GiB memory in use. Of the allocated memory 14.32 GiB is allocated by PyTorch, and 53.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 258.12 MiB is free. Process 79196 has 14.49 GiB memory in use. Of the allocated memory 14.32 GiB is allocated by PyTorch, and 53.40 MiB is reserved by PyTorch but unallocated. 
If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -369,7 +369,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpfhrqspbs/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpyi_u82d8/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,810.139648,14639.104,0.0,14243.856384,14221.3376,s,1,7.5382119140625,7.5382119140625,0.0,7.5382119140625,7.5382119140625,7.5382119140625,7.5382119140625,[7.5382119140625],,kWh,1.5008527887503457e-05,1.6481913225325745e-06,8.933062701999006e-06,2.558978191203504e-05,,MB,1123.958784,14735.572992,0.0,14329.839616,14290.688,s,10,14.040768432617188,1.4040768432617188,0.0048848589672792955,1.405280517578125,1.4091292724609374,1.4101729736328126,1.4110079345703126,"[1.3953970947265626, 1.399153076171875, 1.3992520751953126, 1.400876953125, 1.404310302734375, 1.4079962158203125, 1.40741796875, 1.4112166748046875, 1.4088973388671875, 
1.406250732421875]",tokens/s,182.32620331897448,kWh,4.105210496041688e-05,4.5276012484695975e-06,2.716368839760005e-05,7.274339460648653e-05,tokens/kWh,3519219.873981142,MB,1152.958464,14750.253056,0.0,14344.51968,14290.69056,s,10,39.413060791015624,3.9413060791015617,0.003247624827919594,3.9413275146484374,3.943664404296875,3.946299365234375,3.948407333984375,"[3.9380703125, 3.93835595703125, 3.936633544921875, 3.941100830078125, 3.94066552734375, 3.94183056640625, 3.94155419921875, 3.942836669921875, 3.943078857421875, 3.948934326171875]",tokens/s,15.984548963109491,kWh,0.00011535337135291625,1.2724405187126664e-05,7.679317254560004e-05,0.00020487094908564293,tokens/kWh,307510.65625055454,,s,630,39.40928758239751,0.0625544247339642,0.0002290329818859967,0.06253827095031739,0.0628556354522705,0.06295140991210937,0.06317164352416993,"[0.06301465606689453, 0.062229217529296874, 0.062402591705322266, 0.062215137481689456, 0.06197545623779297, 0.06248233413696289, 0.062167102813720704, 0.06237936019897461, 0.06253241729736328, 0.06231763076782226, 0.062069793701171876, 0.062396289825439454, 0.06242675018310547, 0.06287807846069336, 0.06252073669433594, 0.06271958541870117, 0.062311424255371096, 0.06230534362792969, 0.06220233535766601, 0.062204383850097654, 0.061972415924072266, 0.0622490234375, 0.06250291061401367, 0.062382080078125, 0.06239846420288086, 0.06239641571044922, 0.062407936096191406, 0.062497535705566404, 0.06253321456909179, 0.0627347526550293, 0.06249676895141602, 0.06255820846557616, 0.06289801788330078, 0.06245977783203125, 0.062368030548095706, 0.06234294509887695, 0.062488800048828126, 0.06255193710327149, 0.062400192260742185, 0.06242566299438477, 0.06238422393798828, 0.06246377563476563, 0.06233283233642578, 0.06340208053588867, 0.0627231674194336, 0.06289497756958008, 0.06259478378295899, 0.06267526245117187, 0.06269132614135742, 0.06253772735595703, 0.06240256118774414, 0.06249628829956055, 0.062396030426025394, 0.0631099853515625, 0.06281798553466797, 0.06265478515625, 0.06256028747558594, 0.0625909423828125, 0.06259423828125, 0.06275360107421875, 0.0624189453125, 0.06265862274169921, 0.06274041748046875, 0.06332227325439453, 0.06269110488891602, 0.0620689582824707, 0.062261280059814454, 0.062285247802734374, 0.06233961486816406, 0.061960193634033205, 0.06228124618530274, 0.06250294494628907, 0.0627242546081543, 0.06233660888671875, 0.06266336059570313, 0.06259219360351563, 0.06235638427734375, 0.06241059112548828, 0.06237571334838867, 0.062286113739013674, 0.06242502212524414, 0.062356990814208986, 0.0627116470336914, 0.06240943908691406, 0.06232835388183594, 0.06231497573852539, 0.062282783508300785, 0.06221676635742188, 0.06266307067871094, 0.062443519592285154, 0.0624947509765625, 0.06279727935791016, 0.06278505706787109, 0.06251824188232422, 0.062410751342773435, 0.06253500747680664, 0.06250358581542968, 0.06235033416748047, 0.06241094589233399, 0.062329662322998046, 0.06229756927490234, 0.062376480102539066, 0.062328865051269534, 0.06246329498291016, 0.06270383834838868, 0.0628023681640625, 0.06274662399291993, 0.06262764739990234, 0.06250617599487304, 0.06235855865478516, 0.06241494369506836, 0.06248767852783203, 0.06250777435302735, 0.062912353515625, 0.06270582580566406, 0.06268313598632813, 0.06253263854980469, 0.06242201614379883, 0.06253769683837891, 0.06255599975585938, 0.06254784011840821, 0.06275305557250976, 0.0627856330871582, 0.0629466552734375, 0.06270601654052735, 0.0625539207458496, 0.06293852615356445, 0.0622372817993164, 0.06213014221191406, 
0.062183521270751954, 0.06217932891845703, 0.0624202880859375, 0.06216099166870117, 0.06233967971801758, 0.06250086212158203, 0.06250627136230469, 0.0625650863647461, 0.062306304931640626, 0.06214041519165039, 0.06239231872558594, 0.06254991912841797, 0.06258899307250977, 0.06262543869018555, 0.06276729583740234, 0.06234857559204102, 0.06225603103637695, 0.06215238571166992, 0.06246368026733398, 0.06224140930175781, 0.06246604919433594, 0.062273536682128906, 0.06258892822265626, 0.06288544082641602, 0.062472640991210936, 0.06227305603027344, 0.06248291015625, 0.06254959869384766, 0.06262214279174805, 0.06241891098022461, 0.06261759948730469, 0.06264774322509765, 0.06248883056640625, 0.06232633590698242, 0.062416831970214845, 0.06231865692138672, 0.06256921768188477, 0.062316574096679685, 0.06247222518920898, 0.0625458869934082, 0.06247529602050781, 0.06251820755004883, 0.0627094383239746, 0.06255791854858399, 0.06266684722900391, 0.06259107208251953, 0.06271590423583985, 0.0625115852355957, 0.06242720031738281, 0.062319583892822265, 0.06252022552490234, 0.06263001632690429, 0.06289395141601563, 0.06253158569335937, 0.06262707138061524, 0.06262246322631836, 0.06255324935913086, 0.062499679565429685, 0.06261350250244141, 0.06258483123779297, 0.0634106559753418, 0.06242652893066406, 0.06216953659057617, 0.062212032318115236, 0.062185630798339844, 0.06234511947631836, 0.062220287322998044, 0.06227507019042969, 0.06251718521118164, 0.062281982421875, 0.06216633605957031, 0.06251417541503906, 0.06242230224609375, 0.06253846359252929, 0.062394367218017575, 0.06223388671875, 0.062405441284179686, 0.06263596725463867, 0.06250086212158203, 0.06238819122314453, 0.06207078552246094, 0.062227680206298826, 0.06280047988891602, 0.06242700958251953, 0.06267939376831054, 0.06247366333007812, 0.06235087966918945, 0.06262790298461914, 0.06261238479614258, 0.06269286346435547, 0.06244972610473633, 0.062419296264648434, 0.0627276496887207, 0.06273500823974609, 0.06277072143554688, 0.06293532943725585, 0.06271811294555664, 0.062312095642089844, 0.062371711730957034, 0.06251772689819336, 0.06245587158203125, 0.06246118545532227, 0.06244966506958008, 0.0627207374572754, 0.06277228927612305, 0.06295641708374024, 0.062644287109375, 0.06251484680175781, 0.06247663879394531, 0.06255516815185547, 0.06256940841674805, 0.06266652679443359, 0.062486785888671875, 0.06263391876220703, 0.06278729629516601, 0.06276335906982422, 0.06284297561645508, 0.06266979217529296, 0.06253577423095703, 0.06285523223876953, 0.06299523162841797, 0.06285286331176758, 0.06290422439575195, 0.06305772781372071, 0.06225526428222656, 0.062169281005859375, 0.06220800018310547, 0.062195358276367185, 0.062406368255615234, 0.06218937683105469, 0.06230307388305664, 0.0623595199584961, 0.062455329895019535, 0.062519775390625, 0.06225823974609375, 0.062292831420898434, 0.06233715057373047, 0.062488544464111326, 0.06287062454223633, 0.06252803039550782, 0.06259955215454102, 0.06234112167358399, 0.062304031372070315, 0.062333152770996096, 0.06255820846557616, 0.062216159820556644, 0.06237392044067383, 0.06270083236694336, 0.06277548980712891, 0.06260377502441407, 0.06266643142700196, 0.06256880187988281, 0.06241689682006836, 0.062470142364501956, 0.06276300811767578, 0.06253673553466797, 0.062388225555419924, 0.06237216186523437, 0.06257251358032226, 0.06256438446044922, 0.06264284896850586, 0.06272735977172851, 0.0626102409362793, 0.0626684799194336, 0.06266502380371093, 0.06284672164916992, 0.06243318557739258, 0.06276108932495117, 0.062619873046875, 
0.06270115280151367, 0.06265078353881835, 0.062437374114990236, 0.06260265731811523, 0.062473857879638675, 0.06240262222290039, 0.06235023880004883, 0.06258470535278321, 0.06271603012084961, 0.06295142364501953, 0.06271753692626954, 0.06297027206420898, 0.06266060638427734, 0.06264012908935547, 0.06292835235595703, 0.0626849594116211, 0.06275513458251954, 0.0631978874206543, 0.062252639770507816, 0.06214083099365234, 0.06196364974975586, 0.06255007934570313, 0.062578369140625, 0.062434177398681644, 0.06225715255737305, 0.06245580673217774, 0.06233695983886719, 0.062390335083007814, 0.062473342895507815, 0.06230499267578125, 0.06231260681152344, 0.06221619033813477, 0.06243260955810547, 0.06287635040283203, 0.06271996688842774, 0.06230220794677734, 0.06239004898071289, 0.062288127899169925, 0.06266249465942383, 0.06271603012084961, 0.06234521484375, 0.062416927337646484, 0.0625656967163086, 0.06255683135986329, 0.06266470336914062, 0.06252044677734375, 0.06263897705078125, 0.06251849746704101, 0.06256105422973633, 0.06273843383789063, 0.06262086486816407, 0.06225132751464844, 0.06250342559814454, 0.06252748870849609, 0.06261356735229492, 0.06278953552246094, 0.0625316162109375, 0.06254182434082031, 0.06255785751342774, 0.06260976028442383, 0.0626319351196289, 0.06247219085693359, 0.06250495910644531, 0.06271491241455078, 0.06287980651855468, 0.06284281539916992, 0.06256534576416016, 0.06244895935058594, 0.062576416015625, 0.06257977676391602, 0.06276889419555665, 0.06302278518676757, 0.0627729606628418, 0.0628037109375, 0.06280239868164063, 0.06281036758422852, 0.06300227355957032, 0.06278204727172852, 0.0626769905090332, 0.06248239898681641, 0.06316787338256837, 0.06242889785766602, 0.06221030426025391, 0.062058368682861326, 0.06238063812255859, 0.06222652816772461, 0.062117889404296876, 0.06243673706054687, 0.0625814094543457, 0.06250492858886719, 0.0627732810974121, 0.062349281311035155, 0.06234454345703125, 0.06228044891357422, 0.06233216094970703, 0.06267337417602539, 0.06269318389892578, 0.06286374282836914, 0.06248448181152344, 0.062339038848876954, 0.062426944732666016, 0.06212944030761719, 0.06226220703125, 0.062386207580566407, 0.06247011184692383, 0.06273833465576172, 0.06254131317138673, 0.06270550537109375, 0.06250576019287109, 0.06247625732421875, 0.06254771041870118, 0.06278374481201172, 0.0625860481262207, 0.06286214447021485, 0.06241628646850586, 0.06263868713378906, 0.06245775985717773, 0.06260528182983398, 0.0625830078125, 0.06261955261230469, 0.06253363037109375, 0.06256633758544922, 0.062475841522216795, 0.0625558090209961, 0.0625590705871582, 0.0626770896911621, 0.06266668701171875, 0.06276006317138672, 0.06269369506835938, 0.06278355026245117, 0.06251375961303711, 0.06251660919189453, 0.06242969512939453, 0.06260943984985351, 0.0630123519897461, 0.0627163848876953, 0.06255759811401367, 0.06290697479248047, 0.06272819137573242, 0.0628592643737793, 0.062740478515625, 0.06270361709594727, 0.06262979125976563, 0.06318255996704102, 0.062269599914550784, 0.06216511917114258, 0.062209022521972655, 0.06236656188964844, 0.0624901123046875, 0.06224137496948242, 0.06230227279663086, 0.0625541114807129, 0.0627276496887207, 0.062277568817138675, 0.06256646347045898, 0.06241535949707031, 0.06251087951660156, 0.06259875106811523, 0.06269161605834961, 0.06253948974609375, 0.06244572830200195, 0.06253807830810547, 0.06231670379638672, 0.062205951690673826, 0.06230809783935547, 0.06222441482543945, 0.06222396850585937, 0.06266329574584961, 0.0627691535949707, 0.06263603210449219, 
0.0628326416015625, 0.06250675201416016, 0.06260268783569337, 0.06282118225097656, 0.06252953720092773, 0.06229196929931641, 0.06249679946899414, 0.06246806335449219, 0.06257664108276367, 0.062349342346191404, 0.062487583160400394, 0.062446529388427735, 0.06250230407714844, 0.06258544158935547, 0.06250291061401367, 0.06247622299194336, 0.06269343948364257, 0.06265651321411132, 0.06283065414428711, 0.06296979141235351, 0.06273027038574219, 0.06263804626464843, 0.0625885124206543, 0.06251356887817383, 0.06255401611328125, 0.062304031372070315, 0.06287308883666992, 0.06284371185302734, 0.06287360000610352, 0.06291254425048828, 0.06288927841186523, 0.06267878341674804, 0.06300969696044922, 0.06287926483154296, 0.06291215896606445, 0.06317318344116211, 0.06300243377685547, 0.0621956787109375, 0.062082942962646485, 0.062284286499023435, 0.062397823333740235, 0.06235529708862304, 0.06255081558227539, 0.06257459259033203, 0.062467838287353514, 0.06260966491699219, 0.06244147109985351, 0.062281726837158206, 0.06223427200317383, 0.062494686126708984, 0.06245974349975586, 0.06258848190307617, 0.06235030364990234, 0.06236735916137695, 0.06239401626586914, 0.062362335205078126, 0.062255104064941405, 0.06236511993408203, 0.06247436904907227, 0.06276959991455078, 0.06257254409790039, 0.06281126403808594, 0.0627534065246582, 0.0625973777770996, 0.06246745681762695, 0.06252108764648437, 0.062473087310791015, 0.06248432159423828, 0.06224297714233398, 0.06265856170654296, 0.06258480072021484, 0.06259238433837891, 0.06244419097900391, 0.06264012908935547, 0.06264403152465821, 0.06293318557739258, 0.06257600021362304, 0.06259779357910156, 0.06295139312744141, 0.06298009490966797, 0.06270361709594727, 0.062814208984375, 0.06276451110839844, 0.06250345611572265, 0.06233705520629883, 0.0624964485168457, 0.06267516708374024, 0.06274208068847656, 0.06265292739868164, 0.06295062255859375, 0.06288668823242187, 0.06278963088989258, 0.06269337463378906, 0.0628056640625, 0.06295695877075196, 0.06284384155273437, 0.06281795120239257, 0.06277155303955079, 0.06258483123779297, 0.06310518264770508, 0.06265372848510742, 0.06231711959838867, 0.06251929473876953, 0.062287200927734376, 0.06242355346679687, 0.062416702270507815, 0.06248483276367187, 0.062740478515625, 0.06252953720092773, 0.06231449508666992, 0.06223180770874023, 0.06230435180664062, 0.0627344970703125, 0.06274054336547852, 0.06266518402099609, 0.0627421760559082, 0.06252988815307617, 0.06260451126098633, 0.062488414764404296, 0.06236662292480469, 0.062422271728515624, 0.06280476760864258, 0.06304111862182617, 0.06304764938354492, 0.06302102279663085, 0.0627204475402832, 0.062461952209472656, 0.06250086212158203, 0.06234483337402344, 0.06247257614135742, 0.06258428955078126, 0.06279363250732421, 0.06295971298217773, 0.0624686393737793, 0.06277088165283203, 0.06257430267333984, 0.0625547218322754, 0.0627476806640625, 0.06265472030639649, 0.06250979232788086, 0.06255567932128907, 0.06261759948730469, 0.06296134567260742, 0.06260406494140625, 0.06279116821289063, 0.06278927993774414, 0.06263040161132813, 0.0630071029663086, 0.06263123321533202, 0.06268175888061524, 0.06276265716552734, 0.06282070541381836, 0.06276857757568359, 0.06290899276733398, 0.06280944061279296, 0.06292752075195313, 0.06318694305419922, 0.06268928146362304, 0.06277260971069336, 0.06304412841796875, 0.06288790512084962, 0.06301507186889649]",tokens/s,15.986079390112998,, @@ -480,7 +480,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, 
in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 132426 has 14.73 GiB memory in use. Of the allocated memory 14.61 GiB is allocated by PyTorch, and 3.97 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 125905 has 14.73 GiB memory in use. Of the allocated memory 14.61 GiB is allocated by PyTorch, and 3.97 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -576,7 +576,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpphsfdhr2/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpazz3b2nd/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -816,7 +816,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 150.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 132.12 MiB is free. Process 50110 has 14.61 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 21.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 150.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 132.12 MiB is free. Process 43993 has 14.61 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 21.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -988,7 +988,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 42.12 MiB is free. Process 30618 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 1.64 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 42.12 MiB is free. Process 25086 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 1.64 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -1104,7 +1104,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 98.00 MiB. 
GPU 0 has a total capacity of 14.74 GiB of which 44.12 MiB is free. Process 135311 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 3.80 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 98.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 44.12 MiB is free. Process 128932 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 3.80 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -1149,7 +1149,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 12.12 MiB is free. Process 91603 has 14.73 GiB memory in use. Of the allocated memory 12.32 GiB is allocated by PyTorch, and 2.30 GiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 12.12 MiB is free. Process 85134 has 14.73 GiB memory in use. Of the allocated memory 12.32 GiB is allocated by PyTorch, and 2.30 GiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -1257,7 +1257,7 @@ ChildProcessError: Traceback (most recent call last): self.w1 = nn.Parameter(torch.empty(moe_num_experts * ffn_hidden_size, hidden_size)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.17 GiB is free. Process 120548 has 13.57 GiB memory in use. Of the allocated memory 13.45 GiB is allocated by PyTorch, and 1.36 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.17 GiB is free. Process 114373 has 13.57 GiB memory in use. Of the allocated memory 13.45 GiB is allocated by PyTorch, and 1.36 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -1292,7 +1292,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.16 GiB. 
GPU 0 has a total capacity of 14.74 GiB of which 774.12 MiB is free. Process 76651 has 13.98 GiB memory in use. Of the allocated memory 13.72 GiB is allocated by PyTorch, and 148.73 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.16 GiB. GPU 0 has a total capacity of 14.74 GiB of which 774.12 MiB is free. Process 69950 has 13.98 GiB memory in use. Of the allocated memory 13.72 GiB is allocated by PyTorch, and 148.73 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -1335,7 +1335,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 52.12 MiB is free. Process 94770 has 14.69 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 108.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 52.12 MiB is free. Process 88202 has 14.69 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 108.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -1441,7 +1441,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 182.12 MiB is free. Process 47164 has 14.56 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 13.08 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 182.12 MiB is free. Process 41084 has 14.56 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 13.08 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -1484,7 +1484,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 200.00 MiB. 
GPU 0 has a total capacity of 14.74 GiB of which 164.12 MiB is free. Process 55146 has 14.58 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 4.94 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 200.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 164.12 MiB is free. Process 48842 has 14.58 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 4.94 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -1590,7 +1590,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 52.12 MiB is free. Process 79566 has 14.69 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 108.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 52.12 MiB is free. Process 73128 has 14.69 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 108.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -1633,7 +1633,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 144.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 34.12 MiB is free. Process 150251 has 14.71 GiB memory in use. Of the allocated memory 14.59 GiB is allocated by PyTorch, and 1.69 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 144.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 34.12 MiB is free. Process 143734 has 14.71 GiB memory in use. Of the allocated memory 14.59 GiB is allocated by PyTorch, and 1.69 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -1739,7 +1739,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 258.12 MiB is free. 
Process 97928 has 14.49 GiB memory in use. Of the allocated memory 14.32 GiB is allocated by PyTorch, and 53.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 258.12 MiB is free. Process 91313 has 14.49 GiB memory in use. Of the allocated memory 14.32 GiB is allocated by PyTorch, and 53.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -1813,7 +1813,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: RecurrentGemmaForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpznfzwc3v/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: RecurrentGemmaForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp_8tdbcf1/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -2088,7 +2088,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 162.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 120.12 MiB is free. Process 138273 has 14.62 GiB memory in use. Of the allocated memory 14.51 GiB is allocated by PyTorch, and 2.29 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 162.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 120.12 MiB is free. Process 131841 has 14.62 GiB memory in use. Of the allocated memory 14.51 GiB is allocated by PyTorch, and 2.29 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -2121,7 +2121,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpqlxyfu8b/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpteyr3i8b/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -2166,7 +2166,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 24.12 MiB is free. Process 117608 has 14.71 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 141.44 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. 
Tried to allocate 32.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 24.12 MiB is free. Process 111345 has 14.71 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 141.44 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -2199,7 +2199,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpejnw5bzo/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpa5g_bi46/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -2305,7 +2305,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 768.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 286.12 MiB is free. Process 88550 has 14.46 GiB memory in use. 
Of the allocated memory 14.30 GiB is allocated by PyTorch, and 41.77 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 768.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 286.12 MiB is free. Process 82188 has 14.46 GiB memory in use. Of the allocated memory 14.30 GiB is allocated by PyTorch, and 41.77 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -2338,7 +2338,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpehcfxjfo/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpwi4o81b_/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -2444,7 +2444,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 30.12 MiB is free. Process 80331 has 14.71 GiB memory in use. Of the allocated memory 14.37 GiB is allocated by PyTorch, and 229.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 30.12 MiB is free. Process 73883 has 14.71 GiB memory in use. Of the allocated memory 14.37 GiB is allocated by PyTorch, and 229.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,838.209536,9637.39648,0.0,9242.148864,8603.568128,s,1,7.56006689453125,7.56006689453125,0.0,7.56006689453125,7.56006689453125,7.56006689453125,7.56006689453125,[7.56006689453125],,kWh,1.2805606391702895e-05,1.4049791025938898e-06,6.6475053179942956e-06,2.085809081229108e-05,,MB,1194.815488,9889.05472,0.0,9481.224192,8972.090368,s,10,1.128035583496094,0.11280355834960938,0.0011751249572338504,0.11267300796508789,0.11414554901123046,0.11428141555786132,0.11439010879516602,"[0.11201679992675781, 0.11153091430664062, 0.11100089263916016, 0.1131654052734375, 0.11218061065673827, 0.11388262176513672, 0.11441728210449219, 0.11393654632568359, 0.11178915405273437, 0.1141153564453125]",tokens/s,2269.431955387305,kWh,3.5066526572917507e-06,3.867173765259819e-07,2.328391412976124e-06,6.221761446793857e-06,tokens/kWh,41145904.12847147,MB,1223.151616,9893.249024,0.0,9485.418496,8972.092928,s,10,25.124321044921878,2.512432104492188,0.01365794814109764,2.508675903320312,2.5302686279296878,2.5304315551757814,2.530561896972656,"[2.523666748046875, 2.509734130859375, 2.530594482421875, 2.527587890625, 2.530232421875, 2.502421875, 2.49313671875, 2.495555908203125, 2.503773193359375, 2.50761767578125]",tokens/s,25.07530447782331,kWh,7.272253918687309e-05,8.02116478447727e-06,4.820976012862395e-05,0.0001289534640999743,tokens/kWh,488548.333615588,,s,630,25.12142214202883,0.03987527324131557,0.0008178128978666541,0.03974924850463867,0.04038665008544922,0.04076124038696289,0.044132319068908694,"[0.04416851043701172, 0.04000019073486328, 0.03940556716918946, 0.03972051239013672, 0.039725440979003906, 0.039647296905517576, 0.03943219375610352, 0.04027801513671875, 0.039779903411865235, 0.039735744476318356, 0.04018399810791016, 0.039707744598388675, 0.03994112014770508, 0.03994384002685547, 0.039516448974609375, 0.040199966430664064, 0.039792640686035156, 0.03975167846679688, 0.03993600082397461, 0.040013824462890625, 0.040013824462890625, 0.03999667358398438, 0.03985897445678711, 0.04008752059936523, 0.039755775451660154, 0.03988889694213867, 0.0397918701171875, 0.03990399932861328, 0.03994236755371094, 0.03968115234375, 0.039760543823242185, 0.039757694244384766, 0.040193214416503906, 0.039801151275634765, 0.039828033447265626, 0.040419391632080075, 0.04000153732299805, 0.039726112365722654, 0.03989193725585938, 0.03954457473754883, 0.039815425872802734, 0.040129791259765624, 0.03973936080932617, 0.039678752899169924, 0.04025324630737305, 0.03996895980834961, 0.039814334869384765, 0.03992659378051758, 0.039997665405273435, 0.043370079040527344, 0.0397457275390625, 0.03973494338989258, 0.039780033111572265, 0.04000425720214844, 0.039890209197998044, 
0.03996131134033203, 0.03978854370117187, 0.04299980926513672, 0.040011489868164066, 0.04002025604248047, 0.040199649810791015, 0.039909919738769534, 0.03989871978759765, 0.04362803268432617, 0.039938526153564455, 0.03957350540161133, 0.039777313232421875, 0.04047766494750977, 0.04005062484741211, 0.03976134490966797, 0.040256126403808594, 0.0399441909790039, 0.03975065612792969, 0.039638015747070314, 0.03959366226196289, 0.0398191032409668, 0.03998972702026367, 0.03973484802246094, 0.03990367889404297, 0.04003180694580078, 0.039500064849853515, 0.03930944061279297, 0.039690273284912106, 0.04067132949829102, 0.039699615478515624, 0.04114246368408203, 0.039737503051757814, 0.04002012634277344, 0.03959436798095703, 0.03970220947265625, 0.0393238410949707, 0.03935388946533203, 0.03992009735107422, 0.039215103149414066, 0.03931340789794922, 0.039476577758789065, 0.039860897064208985, 0.03951366424560547, 0.0393322868347168, 0.03911679840087891, 0.039360321044921875, 0.039183616638183594, 0.03932841491699219, 0.03906588745117188, 0.03933388900756836, 0.039585792541503906, 0.03956326293945313, 0.03936595153808594, 0.040080062866210936, 0.04001331329345703, 0.040178176879882815, 0.04095795059204101, 0.03959177780151367, 0.03958297729492188, 0.0397334098815918, 0.03973606491088867, 0.03968819046020508, 0.040269824981689455, 0.0397127685546875, 0.039538272857666014, 0.039542911529541015, 0.039417537689208984, 0.04095651245117188, 0.04067532730102539, 0.03991551971435547, 0.03972614288330078, 0.044502368927001955, 0.04037311935424805, 0.04230348968505859, 0.03965692901611328, 0.039975456237792965, 0.03926607894897461, 0.03982521438598633, 0.03956089782714844, 0.03954147338867187, 0.03987017440795899, 0.039893280029296874, 0.039809024810791016, 0.03949756622314453, 0.03942211151123047, 0.03929087829589844, 0.039122943878173826, 0.03937068939208985, 0.03959609603881836, 0.03932160186767578, 0.039790592193603515, 0.039686145782470705, 0.040296192169189456, 0.04003036880493164, 0.04019619369506836, 0.04005478286743164, 0.040038047790527345, 0.03989132690429688, 0.04014182281494141, 0.040133598327636716, 0.0404370231628418, 0.04017635345458984, 0.039981056213378906, 0.04025276947021485, 0.04007183837890625, 0.040153087615966795, 0.040099071502685546, 0.040174335479736326, 0.040293697357177735, 0.03999609756469726, 0.040237056732177735, 0.04029644775390625, 0.04022995376586914, 0.04005574417114258, 0.040118270874023435, 0.040002750396728515, 0.040003616333007815, 0.04000611114501953, 0.040255809783935545, 0.04002191925048828, 0.04069136047363281, 0.040188350677490235, 0.04013868713378906, 0.04013388824462891, 0.04007404708862305, 0.04065280151367188, 0.04011212921142578, 0.04064051055908203, 0.04130815887451172, 0.04233027267456055, 0.0404826545715332, 0.040065025329589846, 0.03983769607543945, 0.040325023651123046, 0.04454598236083984, 0.04073878479003906, 0.03982432174682617, 0.03990867233276367, 0.039755550384521485, 0.03968912124633789, 0.03947708892822266, 0.03940572738647461, 0.039686145782470705, 0.04053919982910156, 0.039947200775146484, 0.039876609802246096, 0.0398289909362793, 0.04188415908813477, 0.039866336822509764, 0.040192031860351564, 0.040310081481933595, 0.04068239974975586, 0.04062547302246094, 0.039874561309814455, 0.04004092788696289, 0.040021854400634764, 0.03982966232299805, 0.03983564758300781, 0.03964723205566406, 0.04021247863769531, 0.04009769439697266, 0.03981286239624023, 0.04001827239990234, 0.0397946891784668, 0.039583744049072264, 0.03993190383911133, 0.039774208068847655, 
0.040564735412597655, 0.039880702972412106, 0.03983564758300781, 0.040054336547851566, 0.03994854354858399, 0.040683712005615234, 0.03969228744506836, 0.040185855865478515, 0.04021583938598633, 0.04042211151123047, 0.04015465545654297, 0.03976611328125, 0.04021491241455078, 0.03987043380737305, 0.039779552459716795, 0.03981110382080078, 0.03974943923950195, 0.04038339233398437, 0.04019401550292969, 0.039888065338134764, 0.040308734893798825, 0.039697246551513674, 0.03986188888549805, 0.0402334098815918, 0.039749057769775394, 0.040632896423339844, 0.04041638565063477, 0.03982368087768555, 0.04014547348022461, 0.039853279113769534, 0.04419638442993164, 0.040647262573242186, 0.04017289733886719, 0.039680416107177735, 0.03966988754272461, 0.03995647811889649, 0.039876094818115236, 0.03988460922241211, 0.03977462387084961, 0.040083999633789065, 0.04032396697998047, 0.0397334098815918, 0.0398394889831543, 0.04041596984863281, 0.03963452911376953, 0.039674495697021486, 0.03987254333496094, 0.03968172836303711, 0.04043193435668945, 0.0401860466003418, 0.03982460784912109, 0.040020481109619144, 0.040244895935058596, 0.039877056121826175, 0.040994430541992186, 0.03979507064819336, 0.040101886749267575, 0.04036403274536133, 0.040013824462890625, 0.04011196899414062, 0.040183967590332034, 0.03992521667480469, 0.040024608612060544, 0.0398047981262207, 0.04005625534057617, 0.04019587326049805, 0.04310927963256836, 0.03971686553955078, 0.03985190582275391, 0.03957078552246094, 0.039772415161132814, 0.039893535614013674, 0.03971651077270508, 0.04048112106323242, 0.04131148910522461, 0.04000214385986328, 0.039860641479492184, 0.04032460784912109, 0.04018320083618164, 0.04069462585449219, 0.039874561309814455, 0.040310176849365234, 0.03986697769165039, 0.03973500823974609, 0.040054656982421874, 0.03984016036987305, 0.040597503662109374, 0.04007872009277344, 0.03984022521972656, 0.03994844818115234, 0.04029439926147461, 0.03989654541015625, 0.03979727935791016, 0.044315807342529295, 0.04021337509155273, 0.039763904571533205, 0.03956121444702149, 0.039376895904541014, 0.039636863708496096, 0.039701759338378904, 0.039457439422607425, 0.039912990570068356, 0.039701183319091796, 0.0393438720703125, 0.03923993682861328, 0.03911654281616211, 0.03942015838623047, 0.03958784103393555, 0.03967795181274414, 0.039964672088623046, 0.039901153564453125, 0.039487232208251954, 0.039534881591796876, 0.039232990264892575, 0.03936105728149414, 0.03945062255859375, 0.039583744049072264, 0.03944607925415039, 0.039637439727783205, 0.03950175857543945, 0.03947731018066406, 0.03930112075805664, 0.03944156646728516, 0.03965574264526367, 0.03964339065551758, 0.03953692626953125, 0.04002627182006836, 0.04156947326660156, 0.0396151351928711, 0.039196670532226564, 0.03935539245605469, 0.039152641296386716, 0.03936460876464844, 0.03943219375610352, 0.03949977493286133, 0.03947865676879883, 0.03946470260620117, 0.0392487678527832, 0.04017110443115234, 0.043108768463134765, 0.03973324966430664, 0.039403518676757815, 0.03924780654907226, 0.03909145736694336, 0.039322433471679685, 0.03950796890258789, 0.03980287933349609, 0.03946905517578125, 0.03958335876464844, 0.039518592834472656, 0.04061974334716797, 0.03964137649536133, 0.03956121444702149, 0.04031488037109375, 0.03924979019165039, 0.03921702575683594, 0.046403553009033205, 0.04075110244750976, 0.039499839782714846, 0.03943529510498047, 0.039175071716308595, 0.0390184326171875, 0.040470592498779295, 0.03879731369018555, 0.039651329040527344, 0.03889926528930664, 0.038986270904541015, 
0.03884431838989258, 0.03925785446166992, 0.03955532836914062, 0.039172096252441405, 0.038997791290283204, 0.039230846405029295, 0.039050048828125, 0.03960335922241211, 0.0401396484375, 0.03922083282470703, 0.03927081680297852, 0.039115806579589844, 0.03900310516357422, 0.03983564758300781, 0.03937279891967774, 0.03908185577392578, 0.03906572723388672, 0.039093505859375, 0.039185150146484375, 0.039066879272460935, 0.039472991943359376, 0.039142433166503905, 0.03929651260375976, 0.04003478240966797, 0.039954334259033206, 0.039480705261230466, 0.039244415283203125, 0.039218433380126955, 0.03912726211547852, 0.03949350357055664, 0.03929155349731445, 0.0395338249206543, 0.04009651184082031, 0.040769535064697264, 0.041603073120117184, 0.04005411148071289, 0.039471073150634764, 0.03953129577636719, 0.03940752029418945, 0.03930646514892578, 0.039581985473632814, 0.0392545280456543, 0.039403263092041015, 0.03940991973876953, 0.039272449493408204, 0.03945808029174805, 0.03982566452026367, 0.03963913726806641, 0.039672191619873044, 0.03934620666503906, 0.0390491828918457, 0.03919664001464844, 0.04495942306518555, 0.040583839416503904, 0.03956115341186523, 0.03994585418701172, 0.039266529083251955, 0.03923510360717773, 0.03886137771606445, 0.03885391998291016, 0.038742881774902344, 0.03907583999633789, 0.039065601348876954, 0.03985203170776367, 0.0394886703491211, 0.03958208084106445, 0.0388325424194336, 0.03878713607788086, 0.03895065689086914, 0.03881804656982422, 0.038825984954833984, 0.03919257736206055, 0.0394886703491211, 0.039656288146972654, 0.03964271926879883, 0.03931584167480469, 0.03894889450073242, 0.039060993194580076, 0.03941017532348633, 0.03950140762329102, 0.0398803825378418, 0.03954537582397461, 0.0392869758605957, 0.03935027313232422, 0.03939689636230469, 0.039379425048828125, 0.03932956695556641, 0.039485088348388674, 0.03970073699951172, 0.03971097564697266, 0.03979388809204101, 0.03952316665649414, 0.039479297637939455, 0.039569408416748046, 0.039577598571777346, 0.039652862548828126, 0.03975628662109375, 0.03971072006225586, 0.03957468795776367, 0.03950646209716797, 0.0420560302734375, 0.039530464172363285, 0.03953039932250976, 0.040149089813232425, 0.039907329559326174, 0.040314208984375, 0.03966195297241211, 0.03973926544189453, 0.039666046142578126, 0.0394486083984375, 0.0398616943359375, 0.039561790466308595, 0.03949382400512695, 0.03981497573852539, 0.0398131217956543, 0.04395113754272461, 0.04020611190795898, 0.03978374481201172, 0.039664222717285154, 0.03952588653564453, 0.039413761138916016, 0.03952620697021485, 0.0396973762512207, 0.039757823944091795, 0.03965340805053711, 0.03953865432739258, 0.03948134231567383, 0.039559295654296875, 0.03969216156005859, 0.03980489730834961, 0.039830944061279294, 0.03978099060058594, 0.03984998321533203, 0.039642688751220706, 0.03972121429443359, 0.039819454193115236, 0.03972403335571289, 0.03970560073852539, 0.03982953643798828, 0.039514080047607425, 0.04192051315307617, 0.04002220916748047, 0.039862079620361326, 0.03935846328735351, 0.03936665725708008, 0.03949772644042969, 0.0397844467163086, 0.04230348968505859, 0.039981056213378906, 0.039775390625, 0.03953286361694336, 0.0399318733215332, 0.03956793594360351, 0.04051116943359375, 0.03909049606323242, 0.03952560043334961, 0.03932140731811523, 0.03964617538452148, 0.03977011108398437, 0.03956934356689453, 0.039849952697753904, 0.0393155517578125, 0.03943958282470703, 0.03950048065185547, 0.03920294570922851, 0.03937071990966797, 0.03948134231567383, 0.03990854263305664, 
0.039586624145507815, 0.03920896148681641, 0.03927603149414063, 0.039284801483154295, 0.03922784042358399, 0.03924582290649414, 0.03894476699829102, 0.03919177627563476, 0.039348896026611326, 0.03906777572631836, 0.0440437126159668, 0.04017593765258789, 0.039144832611083986, 0.03923238372802734, 0.039236961364746095, 0.0405654067993164, 0.040202239990234374, 0.04010598373413086, 0.0396943359375, 0.039696063995361325, 0.03956972885131836, 0.03943833541870117, 0.03944607925415039, 0.03920665740966797, 0.03946566390991211, 0.039865665435791016, 0.039497665405273434, 0.039578369140625, 0.039043041229248045, 0.03991059112548828, 0.0393155517578125, 0.039556926727294925, 0.039772319793701175, 0.04103408050537109, 0.03951270294189453, 0.03949926376342774, 0.039133502960205076, 0.03932364654541016, 0.03923286437988281, 0.03954140853881836, 0.03996464157104492, 0.03998275375366211, 0.039741825103759766, 0.039790592193603515, 0.041825439453125, 0.04246409606933594, 0.039567359924316405, 0.03976396942138672, 0.039642528533935545, 0.040071041107177734, 0.039570335388183595, 0.03959174346923828, 0.0391511344909668, 0.03928931045532227, 0.03916799926757813, 0.03944985580444336, 0.03984870529174805, 0.04031049728393555, 0.039741569519042966, 0.03960438537597656, 0.039365665435791015, 0.03961318588256836, 0.03951638412475586, 0.03954278564453125, 0.039288833618164064, 0.039292606353759765, 0.04049270248413086, 0.03993814468383789, 0.03994812774658203, 0.04008009719848633, 0.039653377532958986, 0.03951577758789063, 0.03950627136230469]",tokens/s,25.07819805893844,, @@ -2490,7 +2490,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 258.12 MiB is free. Process 83291 has 14.49 GiB memory in use. Of the allocated memory 14.32 GiB is allocated by PyTorch, and 53.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 258.12 MiB is free. Process 76952 has 14.49 GiB memory in use. Of the allocated memory 14.32 GiB is allocated by PyTorch, and 53.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.153152,3354.329088,0.0,2959.081472,2942.567424,s,1,7.5465380859375,7.5465380859375,0.0,7.5465380859375,7.5465380859375,7.5465380859375,7.5465380859375,[7.5465380859375],,kWh,1.0555499091666812e-05,1.1571333836274744e-06,4.908337259995621e-06,1.6620969735289905e-05,,MB,1184.321536,3547.267072,0.0,3139.436544,3105.830912,s,10,0.35119199752807617,0.03511919975280762,0.0013828462358450484,0.03462977600097656,0.03707234077453613,0.037395865821838375,0.037654685859680175,"[0.037719390869140626, 0.03700044631958008, 0.03378755187988281, 0.03475555038452149, 0.03391804885864258, 0.033634750366210935, 0.03446249771118164, 0.034675006866455076, 0.034584545135498045, 0.03665420913696289]",tokens/s,7289.459947888875,kWh,1.2789389064228766e-06,1.4104498809024023e-07,8.523442044366632e-07,2.27232809894978e-06,tokens/kWh,112659787.16643849,MB,1217.888256,3589.210112,0.0,3181.379584,3162.0096,s,10,13.4394306640625,1.3439430664062502,0.00897286271096474,1.3480936889648438,1.3514184814453125,1.3525001953125,1.35336556640625,"[1.3511781005859376, 1.3503336181640626, 1.346485595703125, 1.35000439453125, 1.324740966796875, 1.3322947998046875, 1.3377889404296874, 1.3497017822265625, 1.3535819091796875, 1.343320556640625]",tokens/s,46.87698577028576,kWh,3.866066860732683e-05,4.263842917494485e-06,2.491350974156387e-05,6.783802126638519e-05,tokens/kWh,928682.7478739787,,s,630,13.436337451934827,0.021327519764975898,0.00045949658176490496,0.02128881549835205,0.02156562919616699,0.021789492511749265,0.02252170030593872,"[0.025484832763671875, 0.02147990417480469, 0.021353759765625, 0.021735328674316406, 0.021275455474853516, 0.02188467216491699, 0.021528831481933595, 0.021968896865844727, 0.02162892723083496, 0.021597440719604493, 0.020981536865234376, 0.02120137596130371, 0.02117683219909668, 0.020975360870361327, 0.021235456466674806, 0.021336576461791993, 0.021432319641113282, 0.021293279647827148, 0.021233728408813476, 0.021144575119018554, 0.02168191909790039, 0.022072288513183595, 0.021098495483398438, 0.021356544494628905, 0.021727231979370116, 0.021579776763916016, 0.021536415100097656, 0.021212959289550783, 0.021146175384521484, 0.021219551086425783, 0.02208745574951172, 0.02122956848144531, 0.02129715156555176, 0.021393760681152344, 0.02120035171508789, 0.021419872283935548, 0.02137481689453125, 0.021351167678833008, 0.02127027130126953, 0.02118560028076172, 0.02111788749694824, 0.021141504287719725, 0.02103500747680664, 0.021503904342651366, 0.022295808792114256, 0.02170128059387207, 0.02116559982299805, 0.021156511306762694, 0.021403200149536134, 0.021295520782470705, 0.02132988739013672, 0.02112828826904297, 0.021343008041381836, 
0.021262432098388673, 0.02148467254638672, 0.021503040313720703, 0.02147292709350586, 0.02125644874572754, 0.021250175476074218, 0.02117955207824707, 0.02097545623779297, 0.02098636817932129, 0.021465311050415038, 0.021350112915039063, 0.021180767059326172, 0.021174272537231444, 0.022502527236938477, 0.021385408401489257, 0.021258943557739256, 0.021153791427612305, 0.021301504135131835, 0.021169919967651368, 0.021202943801879884, 0.021227519989013673, 0.021001247406005858, 0.021526655197143554, 0.021387935638427735, 0.02149836730957031, 0.021564960479736328, 0.021033119201660157, 0.02125004768371582, 0.021086208343505858, 0.02129305648803711, 0.021259584426879884, 0.021373632431030274, 0.02117148780822754, 0.021404287338256837, 0.021133407592773438, 0.021807104110717773, 0.027393056869506837, 0.021236703872680663, 0.02124185562133789, 0.021465087890625, 0.021356544494628905, 0.021358591079711914, 0.021346303939819337, 0.021223424911499023, 0.021172224044799806, 0.021239295959472656, 0.02128771209716797, 0.02181011199951172, 0.021377824783325197, 0.021215263366699218, 0.021365760803222656, 0.02114364814758301, 0.02119465637207031, 0.021420160293579103, 0.021414688110351562, 0.02130073547363281, 0.021100191116333007, 0.02103798484802246, 0.02104934310913086, 0.021387264251708983, 0.022521631240844726, 0.021280704498291017, 0.02119708824157715, 0.021329919815063478, 0.02126028823852539, 0.021352479934692383, 0.021065376281738282, 0.021118911743164062, 0.021660032272338866, 0.02143539237976074, 0.021240192413330077, 0.021571647644042968, 0.021176607131958007, 0.021271551132202148, 0.02129180717468262, 0.02112735939025879, 0.021313535690307618, 0.021171680450439455, 0.021158432006835936, 0.021149696350097655, 0.0212108154296875, 0.021391679763793945, 0.021616159439086916, 0.021493728637695313, 0.021182975769042968, 0.021037376403808594, 0.021093503952026367, 0.02125881576538086, 0.021180416107177736, 0.021427711486816405, 0.021248512268066407, 0.021382783889770506, 0.0212392635345459, 0.02144963264465332, 0.021194911956787108, 0.021143392562866212, 0.021522432327270507, 0.02131702423095703, 0.021534303665161132, 0.021300575256347657, 0.021288192749023438, 0.021221792221069336, 0.021159040451049806, 0.02122617530822754, 0.021413408279418945, 0.022235008239746095, 0.02174457550048828, 0.021364223480224608, 0.021289312362670898, 0.021411968231201173, 0.022521728515625, 0.021401599884033205, 0.021296319961547853, 0.02138217544555664, 0.021684160232543947, 0.02135264015197754, 0.021427871704101563, 0.021486879348754883, 0.021301599502563478, 0.021461343765258788, 0.021286943435668945, 0.02123695945739746, 0.021474143981933595, 0.02146268844604492, 0.021434080123901366, 0.021563968658447265, 0.02125971221923828, 0.02124038314819336, 0.02115782356262207, 0.021493824005126953, 0.021478975296020508, 0.021491743087768553, 0.02124355125427246, 0.02125708770751953, 0.021444255828857423, 0.021243360519409178, 0.021372991561889647, 0.021696672439575196, 0.021475488662719727, 0.02132918357849121, 0.021269216537475585, 0.021438304901123046, 0.021353952407836913, 0.02124991989135742, 0.021574464797973633, 0.021442880630493166, 0.021492927551269532, 0.021164287567138673, 0.021096704483032226, 0.021342496871948242, 0.021960416793823243, 0.022095296859741213, 0.021350976943969726, 0.021279104232788087, 0.021372255325317384, 0.021215360641479494, 0.0212010555267334, 0.020927743911743166, 0.021086208343505858, 0.02243168067932129, 0.024996671676635742, 0.02149772834777832, 0.021477504730224608, 0.021243423461914063, 
0.02118489646911621, 0.02129007911682129, 0.021154815673828126, 0.02124575996398926, 0.021246143341064453, 0.021217376708984374, 0.02122742462158203, 0.02125619125366211, 0.021130271911621094, 0.021259040832519532, 0.021081279754638672, 0.020943616867065428, 0.02140595245361328, 0.021392576217651366, 0.021216064453125, 0.021114879608154297, 0.021207136154174806, 0.021168031692504884, 0.021233152389526368, 0.021552928924560545, 0.0212891845703125, 0.021281055450439453, 0.021358367919921874, 0.021203392028808592, 0.021184511184692383, 0.021207040786743164, 0.021108896255493163, 0.021249151229858397, 0.021680864334106445, 0.021292160034179688, 0.021727615356445313, 0.023144128799438477, 0.02140652847290039, 0.02125555229187012, 0.021351039886474608, 0.02146268844604492, 0.021379648208618166, 0.02125619125366211, 0.02111724853515625, 0.021103424072265627, 0.02107436752319336, 0.021096895217895508, 0.021220544815063476, 0.021479583740234374, 0.02125686454772949, 0.02109644889831543, 0.021125152587890626, 0.021059551239013673, 0.021129215240478515, 0.021130239486694336, 0.021334943771362306, 0.02104528045654297, 0.021063039779663086, 0.020996799468994142, 0.020996095657348633, 0.021231103897094726, 0.020893440246582032, 0.020781280517578125, 0.021151519775390624, 0.021311616897583006, 0.020980352401733397, 0.020711423873901368, 0.0204902400970459, 0.02066815948486328, 0.02150601577758789, 0.021673824310302733, 0.0210948486328125, 0.02098771286010742, 0.02088159942626953, 0.02082745552062988, 0.021103071212768554, 0.020856224060058593, 0.02084681510925293, 0.021043424606323243, 0.02115519905090332, 0.02082697677612305, 0.020680864334106444, 0.020709375381469726, 0.020779008865356444, 0.020967424392700194, 0.020954944610595702, 0.020994239807128907, 0.020934656143188478, 0.021114879608154297, 0.02090937614440918, 0.020779712677001953, 0.020747711181640625, 0.020931135177612303, 0.02091007995605469, 0.020915456771850586, 0.020951808929443358, 0.020773887634277344, 0.020715551376342775, 0.020848608016967772, 0.020935775756835938, 0.0212042236328125, 0.02102899169921875, 0.021209632873535156, 0.021141504287719725, 0.021004287719726563, 0.02110643196105957, 0.021180416107177736, 0.020915935516357422, 0.020816415786743165, 0.020875423431396485, 0.020854623794555664, 0.021098495483398438, 0.021014528274536134, 0.020785152435302736, 0.020653055191040038, 0.02087424087524414, 0.02104319953918457, 0.020917823791503906, 0.02381430435180664, 0.02199283218383789, 0.021167007446289063, 0.0211844482421875, 0.021076032638549805, 0.020888927459716797, 0.02103932762145996, 0.020947391510009766, 0.021123071670532227, 0.020875263214111327, 0.020959232330322267, 0.021251583099365236, 0.021089887619018553, 0.02106883239746094, 0.021215103149414063, 0.021171455383300782, 0.02113817596435547, 0.021303295135498047, 0.021056640625, 0.021151744842529296, 0.021128063201904298, 0.021137407302856445, 0.021041280746459962, 0.02108403205871582, 0.021168127059936523, 0.021198368072509764, 0.021061279296875, 0.021111007690429687, 0.02100822448730469, 0.021144128799438475, 0.02135264015197754, 0.021245599746704102, 0.02160470390319824, 0.021250207901000975, 0.02115990447998047, 0.021174079895019533, 0.021094463348388673, 0.020938880920410158, 0.020995967864990233, 0.021078079223632813, 0.021150976181030273, 0.021535423278808592, 0.021118431091308593, 0.021120960235595704, 0.02106368064880371, 0.02113539123535156, 0.021119552612304686, 0.02101043128967285, 0.021078655242919922, 0.021012224197387696, 0.021068063735961914, 
0.020960607528686524, 0.021207391738891603, 0.02115001678466797, 0.02103500747680664, 0.021021728515625, 0.02104412841796875, 0.021184608459472655, 0.021085792541503907, 0.021053247451782227, 0.021000768661499022, 0.020944896697998046, 0.021090303421020508, 0.020946592330932618, 0.020748640060424806, 0.020672704696655272, 0.020896799087524415, 0.020832895278930664, 0.020897951126098633, 0.02079539108276367, 0.020709632873535156, 0.02071731185913086, 0.02093257522583008, 0.020831935882568358, 0.020732255935668947, 0.020905248641967772, 0.021353183746337892, 0.02127984046936035, 0.02163599967956543, 0.021307392120361326, 0.021403039932250977, 0.02133452796936035, 0.02126857566833496, 0.021310560226440428, 0.021337152481079102, 0.02130518341064453, 0.021485376358032226, 0.021440704345703124, 0.02203228759765625, 0.022298976898193358, 0.02212620735168457, 0.02140787124633789, 0.021329919815063478, 0.02137615966796875, 0.021537696838378906, 0.021384767532348633, 0.021494144439697264, 0.021445791244506837, 0.02131622314453125, 0.021353887557983398, 0.02131974411010742, 0.021448448181152345, 0.021441535949707033, 0.021452863693237303, 0.021415552139282226, 0.0213703670501709, 0.021306175231933594, 0.021349536895751954, 0.021469343185424806, 0.021350080490112305, 0.021399744033813478, 0.021516223907470704, 0.021407615661621093, 0.021469375610351563, 0.02136479949951172, 0.021417919158935546, 0.021399551391601563, 0.021366783142089844, 0.021419136047363282, 0.0212488956451416, 0.021303295135498047, 0.021518047332763673, 0.021420032501220702, 0.021899744033813475, 0.02123347282409668, 0.021344255447387696, 0.021286720275878905, 0.021428415298461914, 0.02128895950317383, 0.02143619155883789, 0.021327232360839842, 0.02189731216430664, 0.021391424179077148, 0.021418527603149416, 0.021340320587158203, 0.02155673599243164, 0.02152931213378906, 0.021307167053222657, 0.021301248550415038, 0.02150809669494629, 0.021329439163208008, 0.021453279495239258, 0.021379072189331053, 0.021465087890625, 0.021364736557006835, 0.02141798400878906, 0.02140563201904297, 0.02131059265136719, 0.021433280944824218, 0.021427967071533202, 0.02122572708129883, 0.021431520462036134, 0.021362720489501955, 0.021348127365112303, 0.02151148796081543, 0.021347999572753906, 0.021342208862304687, 0.021361791610717773, 0.02144879913330078, 0.021672735214233397, 0.02126438331604004, 0.02143951988220215, 0.021262367248535155, 0.02124812889099121, 0.021885408401489257, 0.021545312881469728, 0.021303232192993165, 0.021235103607177733, 0.02154969596862793, 0.02138115119934082, 0.021767967224121092, 0.021264608383178712, 0.021370880126953123, 0.02147532844543457, 0.021577823638916017, 0.02126710319519043, 0.021437984466552734, 0.02164156723022461, 0.021338111877441408, 0.021344160079956053, 0.021549152374267577, 0.021660703659057617, 0.02158896064758301, 0.021397504806518555, 0.021362592697143554, 0.021389408111572264, 0.021489887237548827, 0.02146665573120117, 0.021364992141723632, 0.021227519989013673, 0.021381120681762695, 0.02125619125366211, 0.021521856307983398, 0.021424896240234376, 0.02160416030883789, 0.021456895828247072, 0.021522432327270507, 0.021553152084350585, 0.021831680297851562, 0.021384767532348633, 0.021338560104370116, 0.021425535202026367, 0.021424768447875976, 0.021420032501220702, 0.02146713638305664, 0.02143027114868164, 0.021283136367797852, 0.021311168670654298, 0.021376991271972658, 0.021125152587890626, 0.02144451141357422, 0.02154640007019043, 0.02155926322937012, 0.02156342315673828, 0.02134448051452637, 
0.021342079162597657, 0.0216124153137207, 0.02437411117553711, 0.021701536178588866, 0.02130633544921875, 0.02133407974243164, 0.021476703643798827, 0.021541439056396484, 0.021331167221069335, 0.021224159240722656, 0.021302623748779295, 0.0213243522644043, 0.02122083282470703, 0.02181328010559082, 0.02165216064453125, 0.02149580764770508, 0.021630048751831055, 0.02128374481201172, 0.021303295135498047, 0.021321407318115236, 0.02129155158996582, 0.02130668830871582, 0.021362688064575194, 0.02120012855529785, 0.021385728836059572, 0.02110207939147949, 0.021240575790405274, 0.021257663726806642, 0.02102889633178711, 0.021306175231933594, 0.0222696647644043, 0.02151580810546875, 0.021314016342163088, 0.021379072189331053, 0.02126028823852539, 0.02109187126159668, 0.021354719161987303, 0.02109609603881836, 0.021299232482910158, 0.021162559509277343, 0.02127052879333496, 0.021125247955322266, 0.021180288314819336, 0.021178688049316406, 0.021083999633789062, 0.021163040161132813, 0.021089088439941405, 0.02146236801147461, 0.021338783264160156, 0.021349727630615236, 0.021275104522705077, 0.021110847473144533, 0.02122460746765137, 0.021175552368164062, 0.021288671493530274, 0.021370880126953123, 0.02125004768371582, 0.02225107192993164, 0.021290431976318358, 0.02128998374938965, 0.021207040786743164, 0.021319679260253906, 0.021325824737548828, 0.021034112930297853, 0.021149696350097655, 0.02134127998352051, 0.02156224060058594, 0.021418912887573242, 0.021336063385009766, 0.02123366355895996, 0.021237312316894533, 0.021162784576416016, 0.021115776062011718, 0.021231712341308592, 0.02129318428039551, 0.02127145576477051, 0.02251897621154785, 0.021289440155029298, 0.021272319793701172, 0.02138751983642578, 0.021206783294677733, 0.021550432205200195, 0.0216278076171875, 0.02134988784790039, 0.02116441535949707]",tokens/s,46.88777743590243,, @@ -2526,7 +2526,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.96 GiB. GPU 0 has a total capacity of 14.74 GiB of which 662.12 MiB is free. Process 143639 has 14.09 GiB memory in use. Of the allocated memory 13.97 GiB is allocated by PyTorch, and 6.66 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.96 GiB. GPU 0 has a total capacity of 14.74 GiB of which 662.12 MiB is free. Process 137019 has 14.09 GiB memory in use. Of the allocated memory 13.97 GiB is allocated by PyTorch, and 6.66 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,810.344448,14639.104,0.0,14243.856384,14221.3376,s,1,7.5134609375,7.5134609375,0.0,7.5134609375,7.5134609375,7.5134609375,7.5134609375,[7.5134609375],,kWh,1.4797649766668522e-05,1.591941333074149e-06,6.8258387939997694e-06,2.321542989374244e-05,,MB,1185.431552,14737.670144,0.0,14329.839616,14290.688,s,10,2.1331998596191406,0.21331998596191407,0.00621579085342007,0.21396086120605468,0.21881604003906252,0.21890145874023437,0.21896979370117187,"[0.19669474792480468, 0.21166371154785157, 0.21879705810546876, 0.2109894714355469, 0.21761846923828124, 0.2128002166748047, 0.2177275848388672, 0.21898687744140624, 0.21454396057128905, 0.2133777618408203]",tokens/s,1200.075083661903,kWh,6.355655424999946e-06,7.009066060996178e-07,4.22104202417391e-06,1.1277604055273475e-05,tokens/kWh,22699857.056986574,MB,1206.10816,14752.350208,0.0,14344.51968,14290.69056,s,10,38.713044921874996,3.8713044921874995,0.006590696282903404,3.87208447265625,3.8785936767578124,3.8798875366210934,3.8809226245117188,"[3.859248046875, 3.86244873046875, 3.86632080078125, 3.871842529296875, 3.87074365234375, 3.872326416015625, 3.873673095703125, 3.8769541015625, 3.87830615234375, 3.881181396484375]",tokens/s,16.273584298816427,kWh,0.00011356573131749991,1.2526579352459377e-05,7.55003828640261e-05,0.00020159269353398537,tokens/kWh,312511.32615765755,,s,630,38.70906281661986,0.06144295685177758,0.0005424991378040025,0.06134395027160645,0.06177381134033203,0.06188752746582031,0.06480036430358888,"[0.06405974578857422, 0.062161758422851564, 0.061213825225830076, 0.061046974182128906, 0.06102067184448242, 0.06102588653564453, 0.06105859375, 0.061370399475097655, 0.060830398559570314, 0.06098457717895508, 0.061190654754638675, 0.06119014358520508, 0.06118239974975586, 0.061128158569335934, 0.060923839569091795, 0.06110879898071289, 0.0611409912109375, 0.061515777587890626, 0.06115024185180664, 0.061141983032226566, 0.061265918731689455, 0.06106524658203125, 0.061020160675048826, 0.06111142349243164, 0.060992351531982424, 0.061216766357421876, 0.06101196670532227, 0.06129641723632812, 0.061335777282714846, 0.06107340621948242, 0.061034015655517575, 0.06101449584960938, 0.06148294448852539, 0.061482494354248046, 0.06132297515869141, 0.06133436965942383, 0.061079425811767576, 0.06098956680297852, 0.06104883193969726, 0.061042686462402344, 0.06105702209472656, 0.061110271453857425, 0.06110819244384766, 0.06100380706787109, 0.061159423828125, 0.06123846435546875, 0.061217601776123044, 0.06126182556152344, 0.06138380813598633, 0.061367198944091796, 0.06120240020751953, 0.06129244613647461, 0.06142985534667969, 0.061459583282470705, 0.061295486450195315, 0.061402240753173826, 
0.06144438552856445, 0.06120684814453125, 0.06145817565917969, 0.06136819076538086, 0.06121539306640625, 0.061302143096923827, 0.06120054244995117, 0.06465602874755859, 0.06275273513793946, 0.06161552047729492, 0.061132480621337894, 0.06117264175415039, 0.060909599304199216, 0.061389888763427734, 0.0613139533996582, 0.06109779357910156, 0.06121039962768555, 0.061082015991210936, 0.06079484939575195, 0.061273887634277345, 0.06093644714355469, 0.061143039703369144, 0.06141241455078125, 0.06156793594360352, 0.06172390365600586, 0.06135270309448242, 0.06141747283935547, 0.06125139236450195, 0.06113670349121094, 0.06125606536865234, 0.06111203384399414, 0.06100409698486328, 0.06098873519897461, 0.061203102111816406, 0.06111161422729492, 0.061023937225341794, 0.06088806533813477, 0.06140620803833008, 0.0612259521484375, 0.06145753479003906, 0.06153696060180664, 0.06148323059082031, 0.06149264144897461, 0.0613361930847168, 0.0610629768371582, 0.061033824920654296, 0.061020992279052735, 0.060911296844482425, 0.06089324951171875, 0.06098479843139649, 0.06093011093139648, 0.06114905548095703, 0.06103740692138672, 0.06117788696289062, 0.06132940673828125, 0.06159356689453125, 0.0613642578125, 0.06163644790649414, 0.06142755126953125, 0.06164620971679687, 0.061475391387939456, 0.06109961700439453, 0.06142355346679688, 0.061069408416748044, 0.06094716644287109, 0.061197662353515626, 0.06124816131591797, 0.06109731292724609, 0.06121129608154297, 0.061208576202392576, 0.06485542297363281, 0.06265711975097656, 0.06143939208984375, 0.06112515258789063, 0.06097708892822266, 0.06115686416625977, 0.061233726501464844, 0.061061214447021485, 0.06128201675415039, 0.06125324630737305, 0.06087478256225586, 0.06093827056884766, 0.061078113555908205, 0.061085697174072265, 0.06107340621948242, 0.061419551849365234, 0.061773792266845706, 0.061714305877685546, 0.061689823150634766, 0.06156623840332031, 0.061286945343017575, 0.06143830490112305, 0.06100937652587891, 0.061139198303222654, 0.06111577606201172, 0.06101417541503906, 0.06101295852661133, 0.0612534065246582, 0.06137235260009766, 0.06130233764648438, 0.06127872085571289, 0.06145228958129883, 0.061548030853271485, 0.06157158279418945, 0.06163148880004883, 0.061395263671875, 0.061344158172607424, 0.061329696655273436, 0.0611247673034668, 0.06118182373046875, 0.06091772842407227, 0.06098912048339844, 0.061028705596923825, 0.06106662368774414, 0.06148492813110352, 0.061117023468017576, 0.06129411315917969, 0.061630207061767577, 0.061518688201904294, 0.06158502578735352, 0.06167385482788086, 0.061818878173828126, 0.061603839874267576, 0.061284351348876956, 0.061290496826171874, 0.061297760009765626, 0.06114985656738281, 0.06131305694580078, 0.061212512969970705, 0.0610873908996582, 0.06108438491821289, 0.06104012680053711, 0.06132371139526367, 0.06549359893798828, 0.06340780639648437, 0.06182675170898438, 0.06153043365478516, 0.06103481674194336, 0.0612044792175293, 0.06116966247558594, 0.06127740859985351, 0.06131180953979492, 0.061102046966552734, 0.061292030334472655, 0.0610302734375, 0.06105145645141601, 0.06128582382202148, 0.061446784973144535, 0.0619310417175293, 0.06177142333984375, 0.06179235076904297, 0.06195609664916992, 0.06155747222900391, 0.061504798889160155, 0.06134473419189453, 0.061091167449951175, 0.06111891174316406, 0.06104256057739258, 0.06120803070068359, 0.061134624481201175, 0.060991424560546875, 0.061037471771240234, 0.061134273529052735, 0.06114572906494141, 0.06125155258178711, 0.06130217742919922, 0.06152969741821289, 
0.06139136123657227, 0.06145894241333008, 0.061573089599609374, 0.06163455963134765, 0.061663230895996096, 0.061386302947998045, 0.061129150390625, 0.06150147247314453, 0.06124540710449219, 0.06134550476074219, 0.061205982208251956, 0.06107769775390625, 0.0612083854675293, 0.061449024200439455, 0.06146262359619141, 0.06161801528930664, 0.06183260726928711, 0.06138745498657226, 0.06167548751831055, 0.061464576721191405, 0.06129257583618164, 0.06130252838134766, 0.06130691146850586, 0.061220447540283204, 0.061176414489746096, 0.06134579086303711, 0.061394622802734375, 0.06124585723876953, 0.0612020149230957, 0.06507174682617188, 0.06283673477172852, 0.06168899154663086, 0.06141219329833984, 0.06101916885375976, 0.061273056030273436, 0.06110617446899414, 0.061040481567382815, 0.06103673553466797, 0.06110822296142578, 0.06123721694946289, 0.06088911819458008, 0.061052894592285155, 0.06095449447631836, 0.06125743865966797, 0.061843841552734376, 0.06178118515014648, 0.06166409683227539, 0.06170355224609375, 0.061309024810791014, 0.06105667114257812, 0.06101900863647461, 0.06122905731201172, 0.06129663848876953, 0.0611545295715332, 0.061481536865234374, 0.06128051376342773, 0.061205951690673825, 0.06129052734375, 0.06101046371459961, 0.06132035064697266, 0.06150444793701172, 0.06157913589477539, 0.06192127990722656, 0.06160588836669922, 0.06149529647827148, 0.06140313720703125, 0.06115433502197266, 0.06117270278930664, 0.061451519012451175, 0.061069278717041015, 0.06116854476928711, 0.06120230484008789, 0.06117113494873047, 0.06129248046875, 0.060985504150390626, 0.06140156936645508, 0.06138016128540039, 0.061305248260498046, 0.061755008697509765, 0.06182454299926758, 0.06186809539794922, 0.06161491012573242, 0.06155673599243164, 0.061615455627441404, 0.06146323013305664, 0.06136201477050781, 0.06124291229248047, 0.061305438995361325, 0.06146358489990234, 0.06120732879638672, 0.061507774353027345, 0.06168169784545898, 0.06466556549072265, 0.06253107070922852, 0.061438465118408205, 0.061255233764648434, 0.06116582489013672, 0.06119142532348633, 0.06115423965454102, 0.06128572845458984, 0.06117238235473633, 0.061142433166503904, 0.06126208114624023, 0.06081571197509766, 0.061050880432128904, 0.0611748161315918, 0.061510623931884764, 0.0617775993347168, 0.06179257583618164, 0.061795520782470706, 0.06164896011352539, 0.06141414260864258, 0.06140723037719727, 0.06124550247192383, 0.06114297485351562, 0.06145788955688476, 0.061262367248535156, 0.061240638732910156, 0.061120609283447265, 0.061131393432617184, 0.061357601165771485, 0.06121516799926758, 0.06135948944091797, 0.06160857772827148, 0.061779712677001955, 0.06189494323730469, 0.06155043029785156, 0.06169817733764649, 0.0617341423034668, 0.06122371292114258, 0.061570625305175784, 0.061399486541748045, 0.06124291229248047, 0.061345600128173826, 0.06150822448730469, 0.06172256088256836, 0.06127817535400391, 0.06114009475708008, 0.06144099044799805, 0.06143952178955078, 0.06145395278930664, 0.06174601745605469, 0.061586624145507814, 0.061315902709960936, 0.06138851165771484, 0.06130847930908203, 0.061207263946533204, 0.06127382278442383, 0.06144419097900391, 0.06122304153442383, 0.06159097671508789, 0.06143606567382812, 0.061253566741943356, 0.06135657501220703, 0.06160761642456054, 0.06552387237548828, 0.06310531234741211, 0.06176358413696289, 0.06137011337280274, 0.061208831787109376, 0.06110614395141602, 0.06117529678344726, 0.06137702560424805, 0.061128799438476565, 0.061093505859375, 0.06117814254760742, 0.06114896011352539, 
0.061085952758789065, 0.06111433410644531, 0.061222911834716794, 0.06164070510864258, 0.06162432098388672, 0.0617960319519043, 0.06167279815673828, 0.06167820739746094, 0.0615181770324707, 0.06148473739624023, 0.06120393753051758, 0.06121353530883789, 0.06115900802612305, 0.06126019287109375, 0.06116761779785156, 0.061216766357421876, 0.061400577545166014, 0.06131353759765625, 0.061315071105957034, 0.06141033554077149, 0.06149014282226563, 0.06166540908813477, 0.061505409240722654, 0.061505535125732425, 0.061547584533691406, 0.06129350280761719, 0.061357471466064455, 0.061212417602539065, 0.0615629768371582, 0.061743358612060546, 0.061288032531738285, 0.06129462432861328, 0.061270912170410155, 0.061314720153808594, 0.061404960632324215, 0.06149289703369141, 0.061657279968261716, 0.061811424255371096, 0.061765216827392576, 0.06176607894897461, 0.06161814498901367, 0.06157699203491211, 0.06162579345703125, 0.0613507194519043, 0.06119366455078125, 0.061118080139160154, 0.06132815933227539, 0.06107968139648438, 0.061192192077636716, 0.06131011199951172, 0.06128271865844727, 0.06490729522705078, 0.06290224075317383, 0.06169935989379883, 0.061251808166503906, 0.06106291198730469, 0.06126438522338867, 0.061257984161376955, 0.061337631225585935, 0.061112289428710935, 0.06115523147583008, 0.06118550491333008, 0.06122150421142578, 0.061282302856445314, 0.06110396957397461, 0.061593536376953126, 0.061829345703125, 0.06195199966430664, 0.06179635238647461, 0.06191241455078125, 0.06165724945068359, 0.06142617416381836, 0.061252769470214845, 0.06119715118408203, 0.06113644790649414, 0.06141996765136719, 0.061484897613525394, 0.0612138557434082, 0.06158848190307617, 0.061292545318603515, 0.06124748611450195, 0.06173081588745117, 0.061598846435546875, 0.06177471923828125, 0.061683521270751954, 0.0616833610534668, 0.06150191879272461, 0.061472801208496096, 0.061304862976074216, 0.06131670379638672, 0.061357952117919924, 0.06108009719848633, 0.06115532684326172, 0.061259777069091796, 0.06173286437988281, 0.06170169448852539, 0.061558334350585935, 0.06179449462890625, 0.06170800018310547, 0.061812961578369144, 0.06168857574462891, 0.061951648712158205, 0.06179232025146485, 0.061642559051513675, 0.06140137481689453, 0.061327232360839846, 0.06134201431274414, 0.06127558517456055, 0.06108127975463867, 0.06125657653808594, 0.06148668670654297, 0.06120223999023437, 0.061424320220947265, 0.06167337417602539, 0.06490252685546875, 0.0627410545349121, 0.061370433807373045, 0.061381729125976565, 0.061295520782470705, 0.06139884948730469, 0.06143814468383789, 0.06172819137573242, 0.06134841537475586, 0.06134172821044922, 0.06159312057495117, 0.06148051071166992, 0.061610881805419924, 0.06125686264038086, 0.06164771270751953, 0.06206991958618164, 0.06199980926513672, 0.061773983001708985, 0.06157267379760742, 0.06140777587890625, 0.06142556762695312, 0.06117375946044922, 0.06101308822631836, 0.06097958374023438, 0.061122718811035155, 0.06119417572021484, 0.06146297454833984, 0.06147174453735352, 0.06133248138427734, 0.0614799690246582, 0.061510623931884764, 0.06154764938354492, 0.06175836944580078, 0.06185964965820313, 0.06160604858398438, 0.06156492614746094, 0.0615731201171875, 0.06144371032714844, 0.06142102432250977, 0.061295486450195315, 0.06126515197753906, 0.06117574310302734, 0.06120534515380859, 0.0612426872253418, 0.061216545104980466, 0.06138070297241211, 0.06146131134033203, 0.06125158309936524, 0.06174460983276367, 0.061858177185058594, 0.061878463745117185, 0.06186188888549805, 0.06161539077758789, 
0.061731521606445315, 0.061505569458007815, 0.06158137512207031, 0.06166281509399414, 0.06156934356689453, 0.061413406372070316, 0.06146656036376953, 0.061319137573242186, 0.06157708740234375, 0.06130291366577149, 0.06526566314697266, 0.06307129669189453, 0.06162454223632813, 0.061301441192626954, 0.061367454528808596, 0.061354881286621095, 0.06135539245605469, 0.06131110382080078, 0.06135424041748047, 0.06147865676879883, 0.06132374572753906, 0.06146047973632812, 0.061704193115234375, 0.06123865509033203, 0.06153263854980469, 0.06226755142211914, 0.0620687370300293, 0.06199071884155274, 0.061742431640625, 0.06163337707519531, 0.06154415893554688, 0.061792545318603516, 0.06119571304321289, 0.06167577743530273, 0.06123689651489258, 0.06145500946044922, 0.061095294952392576, 0.06138937759399414, 0.06120864105224609, 0.0614420166015625, 0.0613458251953125, 0.0615096321105957, 0.061677120208740235, 0.06167596817016602, 0.06187539291381836, 0.061778751373291016, 0.061679359436035155, 0.06149321746826172, 0.061411231994628904, 0.06134374237060547, 0.06133388900756836, 0.06134991836547852, 0.06119222259521485, 0.061450302124023436, 0.06161328125, 0.06117609786987305, 0.06134777450561523, 0.06139731216430664, 0.06151910400390625, 0.0614365119934082, 0.06169411087036133, 0.06185324859619141, 0.06180720138549805, 0.06185881423950195, 0.06153014373779297, 0.0614835205078125, 0.06164665603637695, 0.06137305450439453, 0.061421600341796875, 0.06141334533691406, 0.06154214477539063, 0.06155427169799805, 0.06148735809326172]",tokens/s,16.275258406140157,, @@ -2573,7 +2573,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 130228 has 14.73 GiB memory in use. Of the allocated memory 14.61 GiB is allocated by PyTorch, and 3.97 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 123738 has 14.73 GiB memory in use. Of the allocated memory 14.61 GiB is allocated by PyTorch, and 3.97 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.337472,569.311232,0.0,174.063616,172.57984,s,1,7.0809599609375,7.0809599609375,0.0,7.0809599609375,7.0809599609375,7.0809599609375,7.0809599609375,[7.0809599609375],,kWh,4.605726533350207e-06,4.989117345350963e-07,1.0019452460069411e-06,6.106583513892244e-06,,MB,1164.67712,642.711552,0.0,234.881024,215.589888,s,25,0.18156364774703979,0.0072625459098815915,0.0001934262272951333,0.007157440185546875,0.007528467178344727,0.007562796592712402,0.0076320192527771,"[0.007651296138763428, 0.007126143932342529, 0.007109407901763916, 0.007078271865844726, 0.0070833277702331545, 0.007101984024047852, 0.0070431680679321285, 0.007100096225738525, 0.007518655776977539, 0.007526048183441162, 0.007385503768920898, 0.007530079841613769, 0.007375487804412842, 0.007451680183410645, 0.00757097578048706, 0.007209311962127685, 0.0073846721649169925, 0.0074481601715087895, 0.007241727828979493, 0.007157440185546875, 0.0070797119140625, 0.007059904098510742, 0.007146719932556152, 0.007109344005584717, 0.007074528217315674]",tokens/s,35249.34687871375,kWh,2.154723453840459e-07,2.3762854741778743e-08,1.1348072429837874e-07,3.5271592442420335e-07,tokens/kWh,725796546.946133,MB,1198.40768,644.808704,0.0,236.978176,215.592448,s,25,9.855248168945309,0.3942099267578125,0.0258072195663266,0.3893913879394531,0.4026221801757813,0.4114611938476562,0.4888181787109373,"[0.38299411010742185, 0.5126096801757812, 0.38021795654296875, 0.38084127807617185, 0.38131674194335935, 0.3791335144042969, 0.38811654663085937, 0.401467041015625, 0.40339227294921876, 0.41347842407226565, 0.40133578491210936, 0.3942486572265625, 0.3913739013671875, 0.3932178955078125, 0.39696780395507814, 0.3935213012695313, 0.3916291198730469, 0.3952633972167969, 0.3893913879394531, 0.38041622924804686, 0.37945755004882814, 0.3835711669921875, 0.37981634521484375, 0.379656494140625, 0.38181356811523437]",tokens/s,159.81332717353104,kWh,1.1285612854936907e-05,1.2445976165048161e-06,4.527772988701155e-06,1.7057983460142875e-05,tokens/kWh,3693285.325736406,,s,1575,9.8430794839859,0.006249574275546604,0.0032463893249899843,0.006076767921447754,0.006460102462768555,0.006543299198150635,0.007097601194381713,"[0.006322175979614258, 0.00658841609954834, 0.007006207942962647, 0.006119423866271972, 0.006081759929656982, 0.006062304019927979, 0.00606060791015625, 0.006148255825042724, 0.0060761280059814455, 0.006119359970092773, 0.006111231803894043, 0.006586527824401856, 0.0061411519050598145, 0.006069056034088135, 0.006111231803894043, 0.006054111957550049, 0.006077439785003662, 0.006072959899902344, 0.006053088188171386, 0.0060215678215026856, 0.00608512020111084, 0.00611030387878418, 
0.006019455909729004, 0.006023903846740722, 0.006032256126403809, 0.006007167816162109, 0.006017600059509277, 0.0060661759376525876, 0.006036767959594727, 0.00603004789352417, 0.0060207037925720215, 0.006043807983398437, 0.006027520179748535, 0.006008800029754639, 0.005991648197174073, 0.006021599769592285, 0.005986464023590088, 0.006003039836883545, 0.005990240097045898, 0.005997600078582764, 0.00598956823348999, 0.00597327995300293, 0.0059920639991760255, 0.005986176013946533, 0.006007904052734375, 0.006014880180358887, 0.006014527797698975, 0.006025248050689698, 0.0059697279930114745, 0.006002912044525147, 0.005995903968811035, 0.006015999794006348, 0.006016064167022705, 0.006040512084960937, 0.005994495868682862, 0.006023231983184814, 0.006006720066070556, 0.006010079860687256, 0.006001440048217774, 0.005992447853088379, 0.00601087999343872, 0.006035520076751709, 0.0060210561752319335, 0.005936031818389893, 0.006013984203338623, 0.006022016048431397, 0.006033408164978027, 0.006006847858428955, 0.006000768184661865, 0.006018879890441894, 0.006021503925323486, 0.006026815891265869, 0.006024735927581787, 0.006010848045349121, 0.006025055885314942, 0.006031392097473144, 0.006015679836273193, 0.13460809326171874, 0.006400352001190186, 0.006250815868377686, 0.006111487865447998, 0.006227359771728515, 0.006592991828918457, 0.006094175815582276, 0.0060934720039367675, 0.0060293121337890625, 0.006074368000030517, 0.006043039798736573, 0.0060680961608886716, 0.006044447898864746, 0.006106912136077881, 0.006094687938690185, 0.006062623977661133, 0.006073567867279053, 0.006206111907958984, 0.006085536003112793, 0.006056896209716797, 0.006002751827239991, 0.0066468482017517086, 0.0060835199356079105, 0.006047743797302246, 0.006070591926574707, 0.006024479866027832, 0.006057983875274659, 0.006029439926147461, 0.006025504112243652, 0.0060308480262756346, 0.0060380158424377444, 0.006164480209350586, 0.006458847999572754, 0.006049439907073974, 0.006046656131744385, 0.006047647953033447, 0.006104479789733887, 0.006039999961853028, 0.006076416015625, 0.006025119781494141, 0.006102399826049805, 0.006040736198425293, 0.00606169605255127, 0.006070400238037109, 0.006063712120056152, 0.0060700798034667965, 0.006072319984436035, 0.006050687789916992, 0.006040480136871338, 0.0064778242111206055, 0.0060044159889221195, 0.006025536060333252, 0.006008992195129395, 0.006017216205596924, 0.005998047828674316, 0.006008224010467529, 0.006004608154296875, 0.006003967761993408, 0.006053055763244629, 0.006037983894348144, 0.005990399837493897, 0.006000639915466309, 0.005988351821899414, 0.0059978880882263184, 0.006010623931884765, 0.006190271854400635, 0.006034463882446289, 0.0060013761520385745, 0.006008831977844238, 0.0060026879310607914, 0.006000448226928711, 0.005994847774505615, 0.006032735824584961, 0.006033088207244873, 0.006031487941741943, 0.006056863784790039, 0.006039199829101563, 0.0060499200820922855, 0.006039391994476318, 0.006051360130310058, 0.006115647792816162, 0.00608460807800293, 0.006047423839569092, 0.006105728149414062, 0.006060031890869141, 0.006072319984436035, 0.006000639915466309, 0.006039552211761475, 0.005994495868682862, 0.0060273919105529785, 0.005991775989532471, 0.0060217599868774415, 0.005942495822906494, 0.006015679836273193, 0.0059714560508728025, 0.006035967826843262, 0.005968224048614502, 0.006024864196777344, 0.005974016189575195, 0.006024191856384278, 0.005983232021331787, 0.006025023937225342, 0.005984799861907959, 0.006027967929840088, 0.005964928150177002, 0.0060191359519958494, 
0.005975840091705323, 0.006018335819244385, 0.0059788479804992675, 0.0060356159210205075, 0.006006303787231445, 0.006035488128662109, 0.005922016143798828, 0.0060284161567687984, 0.005995808124542236, 0.006059967994689942, 0.006006591796875, 0.006032351970672607, 0.005997920036315918, 0.006032832145690918, 0.00597654390335083, 0.006039968013763428, 0.006023168087005615, 0.006066304206848144, 0.005996575832366943, 0.006047584056854248, 0.00601087999343872, 0.006059072017669678, 0.006016128063201905, 0.006059840202331543, 0.0059978880882263184, 0.006027679920196533, 0.005976480007171631, 0.006112192153930664, 0.005982463836669922, 0.006077119827270508, 0.0060028800964355465, 0.006074463844299317, 0.006029024124145508, 0.0060457921028137206, 0.0060024957656860355, 0.006057568073272705, 0.0059725441932678225, 0.006039487838745117, 0.006002079963684082, 0.006025536060333252, 0.006041855812072754, 0.0060330238342285155, 0.006031775951385498, 0.006018496036529541, 0.006019775867462158, 0.00602950382232666, 0.006023039817810058, 0.006023231983184814, 0.00602239990234375, 0.006009344100952148, 0.006025216102600098, 0.006011903762817383, 0.006032383918762207, 0.0060247998237609865, 0.006019680023193359, 0.006044703960418701, 0.006041920185089112, 0.006080128192901611, 0.006142144203186035, 0.006082911968231201, 0.006215519905090332, 0.006085375785827637, 0.006138912200927734, 0.0060720000267028805, 0.006120128154754639, 0.0060381760597229, 0.006090400218963623, 0.006025152206420898, 0.006047840118408203, 0.005937344074249268, 0.006043488025665283, 0.006442624092102051, 0.006046207904815673, 0.005977439880371094, 0.0063515520095825196, 0.006012928009033203, 0.006061344146728515, 0.00602185583114624, 0.006025407791137695, 0.006037407875061035, 0.006045599937438965, 0.006060287952423095, 0.006029056072235107, 0.006027455806732178, 0.005993855953216553, 0.006053343772888183, 0.005995584011077881, 0.00602239990234375, 0.005988831996917725, 0.0060356478691101076, 0.006141952037811279, 0.006049791812896729, 0.0060395197868347164, 0.006074399948120117, 0.00600867223739624, 0.006012671947479248, 0.00600435209274292, 0.006038303852081299, 0.005993696212768554, 0.0060191359519958494, 0.006013984203338623, 0.00601087999343872, 0.006078144073486328, 0.006019392013549805, 0.005990079879760742, 0.006015168190002441, 0.006061888217926025, 0.006080512046813965, 0.006014976024627685, 0.006063648223876953, 0.006015456199645996, 0.006060031890869141, 0.006034815788269043, 0.00604966402053833, 0.006008992195129395, 0.006079071998596191, 0.006018335819244385, 0.006060768127441407, 0.00603545618057251, 0.006051839828491211, 0.0060067839622497555, 0.00603545618057251, 0.006014431953430176, 0.006359583854675293, 0.006002431869506836, 0.006048255920410156, 0.005998335838317871, 0.006025216102600098, 0.006014976024627685, 0.006016672134399414, 0.005994175910949707, 0.006001247882843018, 0.005904575824737549, 0.0060293121337890625, 0.006008863925933838, 0.006044832229614258, 0.006040095806121826, 0.006037439823150635, 0.005996863842010498, 0.006060192108154297, 0.005989823818206787, 0.006066944122314453, 0.005993919849395752, 0.0060433921813964845, 0.005974656105041504, 0.0060293121337890625, 0.0059999680519104, 0.006023839950561523, 0.005990399837493897, 0.0060293121337890625, 0.0060000958442687985, 0.006054431915283203, 0.005988351821899414, 0.006021024227142334, 0.005972064018249511, 0.0060284481048583985, 0.005981023788452148, 0.006015007972717285, 0.0059550080299377445, 0.006013023853302002, 0.005982399940490723, 
0.006025472164154053, 0.005988416194915772, 0.0060349440574645995, 0.005988800048828125, 0.006039552211761475, 0.005995872020721435, 0.006025184154510498, 0.005984127998352051, 0.006009376049041748, 0.005976352214813232, 0.006002592086791992, 0.006006879806518555, 0.006030367851257324, 0.005970143795013428, 0.006011680126190185, 0.0059818878173828124, 0.006203135967254639, 0.006019872188568115, 0.006004479885101319, 0.0060067839622497555, 0.005994495868682862, 0.0059944639205932616, 0.0059818878173828124, 0.0060070080757141115, 0.005986591815948486, 0.006050911903381348, 0.006017183780670166, 0.006035935878753662, 0.0060087041854858395, 0.006002943992614746, 0.0059901118278503414, 0.006006080150604248, 0.0060037441253662106, 0.006030623912811279, 0.005932576179504394, 0.005972576141357422, 0.006006720066070556, 0.0059903359413146975, 0.006014976024627685, 0.005973152160644531, 0.0060096001625061036, 0.00662883186340332, 0.0065953278541564945, 0.006104063987731933, 0.005990911960601806, 0.006007487773895264, 0.00599616003036499, 0.006006624221801758, 0.006029056072235107, 0.006010848045349121, 0.0059686717987060545, 0.0060020160675048825, 0.006012671947479248, 0.006039840221405029, 0.005984127998352051, 0.006046080112457276, 0.005994592189788818, 0.006205440044403076, 0.006103040218353272, 0.006238560199737549, 0.006147712230682373, 0.0060989117622375485, 0.006002336025238037, 0.006039904117584228, 0.006029727935791015, 0.0060423359870910645, 0.006619840145111084, 0.006107391834259034, 0.008075519561767578, 0.008646080017089844, 0.00631331205368042, 0.006026400089263916, 0.0064299840927124026, 0.00609500789642334, 0.006093183994293213, 0.006107103824615478, 0.006045663833618164, 0.006072639942169189, 0.0061847038269042965, 0.006046783924102783, 0.006035935878753662, 0.006050240039825439, 0.006008863925933838, 0.0060356159210205075, 0.006020959854125977, 0.006031360149383545, 0.006008831977844238, 0.006029600143432617, 0.005993343830108642, 0.006011040210723877, 0.0059987521171569825, 0.006154784202575684, 0.006100992202758789, 0.006059967994689942, 0.006008895874023437, 0.006053855895996094, 0.005974368095397949, 0.005967904090881348, 0.005986271858215332, 0.006020448207855225, 0.006032032012939453, 0.006039552211761475, 0.006078464031219482, 0.006050848007202149, 0.0059913921356201175, 0.006037248134613037, 0.006078559875488282, 0.006023071765899658, 0.005988736152648926, 0.006027135848999023, 0.006000639915466309, 0.006131711959838867, 0.0061430401802062985, 0.0061511039733886715, 0.0060999999046325686, 0.006128608226776123, 0.006176767826080322, 0.0062111678123474125, 0.006267136096954346, 0.006277279853820801, 0.0062873601913452145, 0.0062353601455688475, 0.006318367958068847, 0.006363647937774658, 0.006342656135559082, 0.006432703971862793, 0.006369408130645752, 0.00640121603012085, 0.006419199943542481, 0.006496255874633789, 0.006500351905822754, 0.006360576152801513, 0.006324160099029541, 0.006280992031097412, 0.006351808071136475, 0.006434271812438965, 0.007294496059417724, 0.006849376201629639, 0.006461120128631591, 0.006289728164672851, 0.006336480140686035, 0.006237504005432129, 0.006426464080810547, 0.006410816192626953, 0.006308032035827637, 0.006418560028076172, 0.00640556812286377, 0.006435743808746338, 0.006366879940032959, 0.006272128105163574, 0.0072549118995666505, 0.006603040218353271, 0.008226495742797851, 0.0064553279876708985, 0.006469567775726318, 0.007096735954284668, 0.006987391948699951, 0.006436863899230957, 0.006545472145080567, 0.006453216075897217, 
0.006474976062774658, 0.006400800228118896, 0.006377471923828125, 0.006461440086364746, 0.00642252779006958, 0.007579616069793701, 0.007379295825958252, 0.006391488075256348, 0.006297152042388916, 0.006455904006958008, 0.006498144149780273, 0.006442719936370849, 0.006467264175415039, 0.0064637441635131835, 0.0065623679161071775, 0.006588191986083985, 0.00653107213973999, 0.006551551818847656, 0.0064163517951965334, 0.006336703777313233, 0.006471519947052002, 0.006496223926544189, 0.006520864009857178, 0.006589568138122558, 0.006447999954223633, 0.006536799907684326, 0.006601471900939942, 0.006477344036102295, 0.006523231983184815, 0.006542367935180664, 0.0064167361259460445, 0.006314720153808594, 0.006270143985748291, 0.006226431846618652, 0.006168575763702393, 0.006268256187438965, 0.0062707839012146, 0.00616048002243042, 0.006199456214904785, 0.006255199909210205, 0.0062568001747131344, 0.0061699519157409665, 0.006173183917999267, 0.006117216110229492, 0.006146399974822998, 0.006111040115356446, 0.006223584175109863, 0.006322336196899414, 0.006295775890350342, 0.006180223941802979, 0.006134304046630859, 0.0061584959030151366, 0.006131135940551758, 0.0061485118865966795, 0.006176032066345215, 0.006343423843383789, 0.006506624221801757, 0.006375264167785644, 0.006520832061767578, 0.00638976001739502, 0.0063975038528442385, 0.006340320110321045, 0.0063656320571899416, 0.006494080066680908, 0.006326272010803223, 0.006305471897125244, 0.006267199993133545, 0.007161664009094238, 0.006427840232849121, 0.006776095867156983, 0.006501503944396973, 0.0063719358444213866, 0.006277120113372803, 0.006276288032531738, 0.006294400215148926, 0.006477759838104248, 0.006414463996887207, 0.006272064208984375, 0.006374176025390625, 0.007100063800811768, 0.007599775791168213, 0.00793673610687256, 0.01093222427368164, 0.009020895957946777, 0.006556191921234131, 0.006602335929870606, 0.006707615852355957, 0.006596960067749023, 0.006588064193725586, 0.006441088199615479, 0.006330463886260986, 0.006405983924865723, 0.00644704008102417, 0.006452415943145752, 0.0064039998054504395, 0.006321023941040039, 0.00630790376663208, 0.006237823963165284, 0.006210048198699952, 0.0062503361701965335, 0.006232063770294189, 0.006152095794677734, 0.006262176036834717, 0.0061385598182678225, 0.00611737585067749, 0.006264832019805908, 0.0064488000869750975, 0.006392159938812256, 0.006303743839263916, 0.006300064086914062, 0.006239840030670166, 0.006170623779296875, 0.006254208087921142, 0.0062102718353271484, 0.006489759922027588, 0.006326272010803223, 0.00642790412902832, 0.006408959865570068, 0.006462656021118164, 0.006482751846313477, 0.006286911964416504, 0.006293504238128662, 0.00637500810623169, 0.006529695987701416, 0.006488255977630615, 0.006388927936553955, 0.006350944042205811, 0.006406367778778076, 0.006240992069244384, 0.006216671943664551, 0.0061931519508361815, 0.006274687767028809, 0.006259071826934815, 0.006401023864746094, 0.006532447814941406, 0.006480576038360596, 0.0065913920402526855, 0.006496384143829346, 0.006456736087799072, 0.0063554878234863285, 0.006415775775909424, 0.006471776008605957, 0.006382080078125, 0.006378975868225098, 0.006418015956878662, 0.006501311779022217, 0.00659449577331543, 0.006463232040405273, 0.0063851518630981445, 0.006421311855316162, 0.006507967948913574, 0.006504320144653321, 0.0065504322052001955, 0.006468992233276367, 0.006517151832580567, 0.006496384143829346, 0.006555808067321777, 0.0064572482109069825, 0.006335455894470215, 0.006280288219451904, 0.00623529577255249, 
0.006418528079986573, 0.0064824318885803225, 0.006710527896881103, 0.0065157442092895505, 0.006525728225708008, 0.00649721622467041, 0.006434879779815674, 0.006305727958679199, 0.006327648162841797, 0.0065194878578186035, 0.006331552028656006, 0.00624505615234375, 0.0061147198677062985, 0.006123648166656494, 0.006083072185516358, 0.00605017614364624, 0.006072288036346436, 0.006197311878204346, 0.006218688011169433, 0.0062657279968261715, 0.006328192234039307, 0.006159808158874512, 0.006150784015655518, 0.006104544162750244, 0.00616534423828125, 0.006151936054229736, 0.006253983974456787, 0.006404575824737549, 0.0065577921867370605, 0.00641923189163208, 0.0064143681526184085, 0.00633190393447876, 0.006176064014434815, 0.00610371208190918, 0.006107327938079834, 0.006082496166229248, 0.006076767921447754, 0.0060495038032531735, 0.006057983875274659, 0.006039552211761475, 0.006154240131378174, 0.006176608085632324, 0.0064208641052246095, 0.0064691839218139644, 0.00645904016494751, 0.006558015823364258, 0.006529280185699463, 0.006420063972473145, 0.006259391784667969, 0.0061129918098449705, 0.00609881591796875, 0.006109312057495118, 0.006121664047241211, 0.0060700798034667965, 0.006078464031219482, 0.006076416015625, 0.006154047966003418, 0.006082272052764893, 0.006113088130950928, 0.0060975680351257324, 0.006436960220336914, 0.006082687854766846, 0.006231135845184326, 0.006275040149688721, 0.00662172794342041, 0.00616809606552124, 0.0061200962066650395, 0.006137792110443115, 0.006067359924316406, 0.006123904228210449, 0.006271520137786865, 0.006175839900970459, 0.0062137598991394045, 0.006326784133911132, 0.006500127792358398, 0.006298111915588379, 0.0062211198806762695, 0.0061057920455932614, 0.006073760032653809, 0.006043327808380127, 0.006093728065490723, 0.006182911872863769, 0.006434815883636475, 0.00660646390914917, 0.006616864204406738, 0.006517343997955323, 0.006453120231628418, 0.006424704074859619, 0.0064767999649047855, 0.006257184028625488, 0.006203872203826904, 0.006132351875305176, 0.0061354880332946775, 0.006139711856842041, 0.006107647895812988, 0.006177792072296143, 0.006200352191925049, 0.006168928146362305, 0.00610265588760376, 0.006260735988616943, 0.006349055767059326, 0.006247392177581787, 0.006154208183288574, 0.006170656204223633, 0.006183135986328125, 0.0061198720932006834, 0.006094816207885742, 0.0061294717788696286, 0.006054240226745606, 0.005984255790710449, 0.006059296131134033, 0.006068960189819336, 0.0062828478813171384, 0.006393311977386474, 0.006202303886413575, 0.006137951850891114, 0.006093791961669922, 0.006112192153930664, 0.006067840099334717, 0.006068672180175781, 0.006039487838745117, 0.006049791812896729, 0.006109183788299561, 0.006158336162567139, 0.006112448215484619, 0.006148928165435791, 0.006060031890869141, 0.00615334415435791, 0.006036352157592773, 0.006076416015625, 0.0060067839622497555, 0.006168575763702393, 0.006336351871490478, 0.006443424224853516, 0.006674176216125489, 0.006612448215484619, 0.006526815891265869, 0.006574336051940918, 0.006379615783691406, 0.006813632011413574, 0.006381984233856201, 0.006333759784698486, 0.006334720134735107, 0.006195648193359375, 0.006176576137542724, 0.0061298561096191405, 0.006167935848236084, 0.0061682558059692385, 0.006112448215484619, 0.006381311893463135, 0.006100800037384033, 0.006135072231292725, 0.006058752059936523, 0.006279327869415283, 0.006194431781768799, 0.006255008220672607, 0.006265408039093018, 0.006239744186401367, 0.006199935913085938, 0.0062278399467468265, 0.006672383785247803, 
0.006293087959289551, 0.006207935810089112, 0.006103199958801269, 0.006076223850250244, 0.006059904098510743, 0.006072447776794433, 0.006088223934173584, 0.006074848175048828, 0.006135072231292725, 0.0062696638107299805, 0.006098048210144043, 0.006072703838348389, 0.006300159931182861, 0.006272223949432373, 0.006267871856689453, 0.006112095832824707, 0.006123583793640136, 0.006109792232513428, 0.006084256172180176, 0.006135744094848633, 0.006310624122619629, 0.006227968215942382, 0.00610211181640625, 0.0061244478225708, 0.0060677118301391605, 0.0062490878105163575, 0.006113152027130127, 0.006108352184295654, 0.006087488174438477, 0.006205344200134277, 0.0061641278266906735, 0.006077951908111572, 0.006089024066925049, 0.006186912059783936, 0.006509119987487793, 0.006520959854125976, 0.006706367969512939, 0.006514848232269287, 0.0065133762359619145, 0.0065001602172851565, 0.006525023937225342, 0.006398015975952148, 0.006459296226501465, 0.006460639953613281, 0.006357888221740723, 0.0061296639442443845, 0.007016448020935059, 0.006104415893554688, 0.006049600124359131, 0.0061528959274292, 0.006176928043365479, 0.006127744197845459, 0.006080383777618408, 0.006154240131378174, 0.006057119846343994, 0.006086880207061767, 0.006482687950134278, 0.006608575820922852, 0.006342336177825928, 0.006291423797607422, 0.006192512035369873, 0.0061380801200866695, 0.00609878396987915, 0.006097824096679688, 0.006465536117553711, 0.006184447765350342, 0.006642176151275635, 0.006643712043762207, 0.00702784013748169, 0.006234655857086182, 0.006237728118896484, 0.006212416172027588, 0.006115039825439453, 0.0061073598861694335, 0.006238272190093994, 0.006359039783477783, 0.006252863883972168, 0.0062399358749389644, 0.006213632106781006, 0.006135807991027832, 0.006031360149383545, 0.006083775997161865, 0.006013855934143066, 0.006070432186126709, 0.006104832172393799, 0.0061272640228271485, 0.006277599811553955, 0.006473760128021241, 0.006649280071258545, 0.006566304206848145, 0.0065474557876586915, 0.006662144184112549, 0.0064880638122558594, 0.006492159843444824, 0.006387360095977783, 0.006408544063568116, 0.006401663780212403, 0.00625497579574585, 0.006241312026977539, 0.006215936183929444, 0.006148223876953125, 0.006130271911621093, 0.00615334415435791, 0.006221824169158936, 0.0061829757690429685, 0.006468607902526856, 0.006379327774047852, 0.006371103763580323, 0.006285344123840332, 0.006443359851837158, 0.00645308780670166, 0.006191103935241699, 0.006174176216125488, 0.0062280001640319825, 0.006210048198699952, 0.006131455898284912, 0.0060941438674926756, 0.006087520122528076, 0.006091040134429931, 0.006388895988464356, 0.006177216053009033, 0.0061485118865966795, 0.006147488117218018, 0.0061077442169189455, 0.006045951843261719, 0.006102208137512207, 0.006050047874450684, 0.006144320011138916, 0.006065279960632324, 0.006391776084899902, 0.006463520050048828, 0.006179168224334717, 0.0061038718223571774, 0.006057695865631103, 0.007448575973510742, 0.006615039825439453, 0.006524928092956543, 0.006469791889190674, 0.006346047878265381, 0.006359583854675293, 0.006360415935516357, 0.006226208209991455, 0.006223231792449951, 0.006218751907348633, 0.006231647968292236, 0.006201759815216064, 0.006200448036193847, 0.0062984957695007325, 0.006501408100128174, 0.006487008094787598, 0.006365280151367187, 0.00637440013885498, 0.006375967979431152, 0.0062317438125610355, 0.006161215782165527, 0.006304992198944092, 0.006300672054290772, 0.006076064109802246, 0.006080351829528809, 0.006205088138580323, 0.006193664073944092, 
0.006127520084381104, 0.0060416641235351565, 0.006061728000640869, 0.006063680171966553, 0.0060629119873046875, 0.006087935924530029, 0.006111775875091553, 0.006072735786437989, 0.006075424194335937, 0.0060731201171875, 0.006123104095458984, 0.006285727977752686, 0.006279200077056885, 0.006221759796142578, 0.00616860818862915, 0.00613321590423584, 0.00610368013381958, 0.0061439042091369625, 0.00611030387878418, 0.006330848217010498, 0.006373824119567871, 0.006097343921661377, 0.006153791904449463, 0.006136032104492187, 0.006080927848815918, 0.00617033576965332, 0.006146336078643799, 0.006146048069000244, 0.006212800025939941, 0.00638047981262207, 0.00659065580368042, 0.006678207874298096, 0.0064973440170288084, 0.006542272090911865, 0.006432767868041992, 0.006524831771850586, 0.006401631832122803, 0.006229663848876953, 0.006212448120117188, 0.0061972479820251464, 0.006210944175720215, 0.006104991912841797, 0.006191711902618408, 0.006058303833007812, 0.006109119892120362, 0.0060778560638427735, 0.006087135791778564, 0.0060661759376525876, 0.006053120136260986, 0.006087264060974121, 0.0061413440704345705, 0.0062841281890869145, 0.006690271854400635, 0.006207968235015869, 0.0060867519378662105, 0.006112224102020264, 0.006665120124816895, 0.006275231838226318, 0.006131552219390869, 0.006283008098602295, 0.0064486398696899415, 0.006271743774414062, 0.006205664157867431, 0.006102431774139405, 0.006053664207458496, 0.006090784072875976, 0.006127647876739502, 0.006102911949157715, 0.006084512233734131, 0.006092576026916504, 0.006070240020751953, 0.006095104217529297, 0.006076863765716553, 0.006162752151489257, 0.006151999950408936, 0.006135200023651123, 0.00617139196395874, 0.006156320095062256, 0.006209760189056396, 0.006161824226379394, 0.006101376056671143, 0.006082560062408447, 0.006031167984008789, 0.006154431819915772, 0.006596511840820313, 0.006645631790161133, 0.00661897611618042, 0.006531455993652344, 0.006479872226715088, 0.006502399921417237, 0.006409952163696289, 0.006206751823425293, 0.006187615871429443, 0.006135647773742676, 0.006125376224517822, 0.006087488174438477, 0.0060787200927734375, 0.006067903995513916, 0.006123839855194092, 0.006231584072113037, 0.006135968208312989, 0.0061972479820251464, 0.006081984043121338, 0.006275455951690673, 0.006317376136779785, 0.006249216079711914, 0.006205567836761475, 0.006313151836395263, 0.006208320140838623, 0.006252543926239014, 0.006211008071899414, 0.006193727970123291, 0.006162687778472901, 0.006251584053039551, 0.006346687793731689, 0.006273983955383301, 0.006160607814788818, 0.006113887786865235, 0.0061262078285217285, 0.006095488071441651, 0.006094207763671875, 0.006105247974395752, 0.006105311870574951, 0.006050848007202149, 0.006054848194122314, 0.006176832199096679, 0.006213215827941895, 0.006156447887420654, 0.006074111938476562, 0.006124000072479248, 0.0060928001403808595, 0.006365375995635986, 0.006374944210052491, 0.006408864021301269, 0.006452320098876953, 0.006420608043670654, 0.006445343971252441, 0.006368735790252686, 0.006482592105865478, 0.006516287803649903, 0.006552031993865967, 0.006465184211730957, 0.006398176193237305, 0.006324319839477539, 0.006303391933441162, 0.006209887981414795, 0.006223872184753418, 0.006067903995513916, 0.006144224166870117, 0.006108767986297607, 0.006095456123352051, 0.006031328201293945, 0.006125311851501464, 0.00611568021774292, 0.00609503984451294, 0.0060680961608886716, 0.006067999839782715, 0.006047455787658691, 0.0060472960472106934, 0.0062490878105163575, 0.0063816637992858884, 
0.006437215805053711, 0.006772384166717529, 0.006270815849304199, 0.006250847816467285, 0.006183743953704834, 0.006149472236633301, 0.006065408229827881, 0.006071872234344482, 0.006055871963500977, 0.006099584102630615, 0.006093088150024414, 0.006077919960021973, 0.006112927913665771, 0.00625651216506958, 0.006165472030639649, 0.006129695892333985, 0.006229472160339355, 0.006430463790893555, 0.006228352069854736, 0.006097311973571777, 0.00613753604888916, 0.006028895854949951, 0.006044095993041993, 0.006213151931762696, 0.006350719928741455, 0.006577023983001709, 0.006512639999389648, 0.006475776195526123, 0.006382847785949707, 0.0064048638343811035, 0.0063777599334716795, 0.0062665920257568355, 0.006198272228240966, 0.006156544208526612, 0.0061010241508483885, 0.006074719905853272, 0.0060993280410766605, 0.00603545618057251, 0.00606822395324707, 0.006012864112854004, 0.006061888217926025, 0.006170783996582031, 0.00615231990814209, 0.006137311935424805, 0.006031871795654297, 0.005994495868682862, 0.006024352073669434, 0.005987167835235596, 0.006041600227355957, 0.005903744220733643, 0.0061348161697387694, 0.006002816200256347, 0.006050848007202149, 0.006035232067108155, 0.006019680023193359, 0.0060022082328796385, 0.006032095909118652, 0.006002943992614746, 0.00606547212600708, 0.006032127857208252, 0.006024960041046142, 0.005975615978240967, 0.0060850558280944825, 0.005996543884277344, 0.006037248134613037, 0.0060265278816223145, 0.006044640064239502, 0.0060992960929870605, 0.006086592197418213, 0.0060804481506347655, 0.0060486397743225095, 0.00604694414138794, 0.006032224178314209, 0.006052095890045166, 0.006120031833648682, 0.006045695781707764, 0.006078464031219482, 0.006091104030609131, 0.006117343902587891, 0.0060778560638427735, 0.006018496036529541, 0.006077280044555664, 0.006033696174621582, 0.006072256088256836, 0.006001728057861328, 0.006049568176269531, 0.005992640018463135, 0.006036384105682373, 0.005982048034667968, 0.006033440113067627, 0.005969791889190674, 0.006023136138916016, 0.005961855888366699, 0.006044703960418701, 0.005959904193878174, 0.006022240161895752, 0.00596451187133789, 0.006032639980316162, 0.006049471855163574, 0.006015103816986084, 0.005971903800964355, 0.006170464038848877, 0.005976160049438476, 0.006021120071411133, 0.0059658241271972655, 0.0060059518814086915, 0.006050496101379394, 0.006008959770202636, 0.005986303806304932, 0.006027200222015381, 0.005959551811218261, 0.006033696174621582, 0.005905888080596924, 0.006017568111419677, 0.005994495868682862, 0.006045536041259765, 0.006000800132751465, 0.006096288204193116, 0.006001183986663819, 0.0060338878631591795, 0.005995840072631836, 0.0060234560966491695, 0.005973440170288086, 0.006050303936004638, 0.005967936038970947, 0.005995935916900634, 0.00598195219039917, 0.006017951965332031, 0.005972064018249511, 0.006114655971527099, 0.0060236802101135255, 0.006030655860900879, 0.005995200157165527, 0.006014463901519776, 0.0059704318046569825, 0.006041600227355957, 0.005998015880584717, 0.006042175769805909, 0.005992288112640381, 0.0061133761405944825, 0.005988480091094971, 0.006043583869934082, 0.0059985918998718265, 0.006031199932098389, 0.006002399921417236, 0.006049248218536377, 0.005995488166809082, 0.006051199913024902, 0.005988480091094971, 0.0060375680923461916, 0.005986688137054444, 0.0060249919891357424, 0.005988639831542969, 0.006033311843872071, 0.005996032238006592, 0.006048351764678955, 0.005982207775115967, 0.006012928009033203, 0.0060061440467834475, 0.006029952049255371, 
0.006018784046173096, 0.0060289278030395506, 0.005990816116333008, 0.0060011520385742185, 0.005996640205383301, 0.0061231679916381836, 0.006024223804473877, 0.0060152320861816405, 0.005994527816772461, 0.006003647804260254, 0.006039360046386719, 0.006018815994262695, 0.006011072158813476, 0.0060026879310607914, 0.006031007766723633, 0.005943295955657959, 0.005970240116119385, 0.006029088020324707, 0.0060239357948303224, 0.006035391807556152, 0.006115520000457763, 0.0071840319633483885, 0.006700928211212158, 0.0065998082160949705, 0.0060999679565429685, 0.006330368041992188, 0.006032639980316162, 0.006044415950775146, 0.0060293121337890625, 0.006172671794891358, 0.00601087999343872, 0.00601907205581665, 0.006028575897216797, 0.006023903846740722, 0.0060067839622497555, 0.006038943767547608, 0.005997151851654053, 0.006131711959838867, 0.006080512046813965, 0.006074560165405273, 0.00600816011428833, 0.006243135929107666, 0.006094207763671875, 0.006076704025268555, 0.006145855903625488, 0.006097055912017822, 0.006002719879150391, 0.005992288112640381, 0.0059987521171569825, 0.006045567989349365, 0.006017151832580566, 0.006012800216674804, 0.006021183967590332, 0.006014592170715332, 0.006024672031402588, 0.006140895843505859, 0.006111231803894043, 0.006024608135223389, 0.0060442562103271485, 0.0059985918998718265, 0.0060293121337890625, 0.006002304077148438, 0.006037888050079346, 0.006023104190826416, 0.006027455806732178, 0.005992320060729981, 0.006012928009033203, 0.0059881601333618166, 0.006004928112030029, 0.006031072139739991, 0.006007071971893311, 0.006027647972106934, 0.006020768165588379, 0.006010655879974365, 0.006033599853515625, 0.006000671863555908, 0.006027232170104981, 0.0060028800964355465, 0.005905471801757813, 0.006014976024627685, 0.006006752014160156, 0.0060152320861816405, 0.006010848045349121, 0.005979231834411621, 0.006002655982971191, 0.0060068159103393555, 0.006006432056427002, 0.005971968173980713, 0.005990399837493897, 0.0059699201583862304, 0.005994336128234863, 0.0059967041015625, 0.005990399837493897, 0.005987328052520752, 0.00598905611038208, 0.006008384227752686, 0.00603007984161377, 0.005997920036315918, 0.006051743984222412, 0.006001408100128174, 0.0060160961151123045, 0.006025983810424805, 0.006017183780670166, 0.0062722558975219726, 0.006034175872802735, 0.006045951843261719, 0.0060433921813964845, 0.006062079906463623, 0.006017024040222168, 0.006012928009033203, 0.006036736011505127, 0.0060136961936950685, 0.006039552211761475, 0.0060026879310607914, 0.0060538239479064945, 0.0060226240158081056, 0.0060730881690979005, 0.005990464210510254, 0.00603113603591919, 0.005992447853088379, 0.006066239833831787, 0.005987967967987061, 0.00602675199508667, 0.00598745584487915, 0.006049471855163574, 0.00599622392654419, 0.006043968200683594, 0.005975103855133056, 0.006052800178527832, 0.006008255958557129, 0.006063007831573487, 0.006012576103210449, 0.006053760051727295, 0.006025152206420898, 0.006047935962677002, 0.00598137617111206, 0.006074495792388916, 0.006021984100341797, 0.0060412797927856445, 0.0059712638854980465, 0.006149055957794189, 0.005900191783905029, 0.006160223960876465, 0.006052095890045166, 0.0062399678230285645, 0.00597760009765625, 0.0060050878524780275, 0.005982656002044678, 0.006023519992828369, 0.005987679958343506, 0.006024576187133789, 0.005983168125152588, 0.006008959770202636, 0.0060026879310607914, 0.006006400108337403, 0.006016543865203857, 0.0060095682144165035, 0.0060022401809692386, 0.00601087999343872, 0.006011360168457032, 
0.006052127838134766, 0.006020800113677978, 0.0060536317825317385, 0.005986559867858887, 0.006050111770629883, 0.006053120136260986, 0.006058688163757324, 0.0060208640098571775, 0.006021120071411133, 0.006045919895172119, 0.006027040004730225, 0.006078080177307129, 0.006035295963287354, 0.00604963207244873, 0.006009535789489746, 0.0059985918998718265, 0.006000639915466309, 0.006006015777587891, 0.00598419189453125, 0.005994688034057618, 0.005989183902740478, 0.006010687828063964, 0.005974016189575195, 0.0060026879310607914, 0.006020736217498779, 0.0059920639991760255, 0.006000448226928711, 0.006005631923675537, 0.006025023937225342, 0.006014431953430176, 0.0060280637741088865, 0.006014976024627685, 0.006000639915466309, 0.006008575916290284, 0.006023295879364014, 0.006002592086791992, 0.005980703830718994, 0.006016640186309814, 0.005986368179321289, 0.006043295860290527, 0.006008895874023437, 0.006035744190216064, 0.006047743797302246, 0.006021247863769531, 0.005968448162078858, 0.005998655796051026, 0.006021120071411133, 0.00604310417175293, 0.006029856204986572, 0.006037312030792236, 0.006013440132141113, 0.006028704166412354, 0.006029600143432617, 0.006012928009033203, 0.005988351821899414, 0.006281216144561768, 0.006037504196166992, 0.006060256004333496, 0.00616425609588623, 0.006041600227355957, 0.006042655944824219, 0.006013919830322265, 0.00601087999343872, 0.005994495868682862, 0.006010240077972412, 0.006013152122497559, 0.0060360321998596195, 0.006053728103637695, 0.00606547212600708, 0.006066880226135254, 0.006039872169494629, 0.006051743984222412, 0.006024064064025879, 0.006064159870147705, 0.005995039939880371, 0.0060698561668396, 0.005993216037750244, 0.006078464031219482, 0.006000448226928711, 0.00604588794708252, 0.0059983677864074705, 0.006064288139343262, 0.005977536201477051, 0.006081151962280274, 0.006021120071411133, 0.006055007934570313, 0.006003615856170655, 0.006047743797302246, 0.006013023853302002, 0.00602668809890747, 0.005974495887756347, 0.00602623987197876, 0.005962751865386963, 0.006041696071624756, 0.006028768062591553, 0.00608464002609253, 0.006036223888397217, 0.00601039981842041, 0.006041728019714356, 0.0060067839622497555, 0.006062079906463623, 0.006061279773712158, 0.006136608123779297, 0.006170623779296875, 0.006188672065734863, 0.006123807907104492, 0.006647903919219971]",tokens/s,160.0108992884219,, @@ -2621,7 +2621,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 150.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 132.12 MiB is free. Process 47909 has 14.61 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 21.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 150.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 132.12 MiB is free. Process 41777 has 14.61 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 21.89 MiB is reserved by PyTorch but unallocated. 
If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -2731,7 +2731,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 42.12 MiB is free. Process 28365 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 1.64 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 42.12 MiB is free. Process 22756 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 1.64 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,738.668544,3450.79808,0.0,3055.550464,2937.680896,s,1,7.21254931640625,7.21254931640625,0.0,7.21254931640625,7.21254931640625,7.21254931640625,7.21254931640625,[7.21254931640625],,kWh,7.65544858750123e-06,8.368047426373106e-07,2.206668432001846e-06,1.0698921762140386e-05,,MB,1104.44544,3522.101248,0.0,3114.27072,2817.473024,s,10,0.44898410034179687,0.04489841003417969,0.0006442349365678308,0.04467651176452637,0.045460602951049804,0.045966013526916503,0.046370341987609864,"[0.0464714241027832, 0.04416009521484375, 0.04534828948974609, 0.04467161560058594, 0.044681407928466796, 0.044641857147216794, 0.04422576141357422, 0.04450051116943359, 0.045274112701416014, 0.04500902557373047]",tokens/s,5701.760926614453,kWh,1.4554342901533722e-06,1.6050690768969044e-07,9.612100005870587e-07,2.577151198430121e-06,tokens/kWh,99334490.01981068,MB,1137.303552,3522.101248,0.0,3114.27072,2877.80864,s,10,11.548456054687499,1.15484560546875,0.004692471931810019,1.1568258666992188,1.159875244140625,1.1607574462890626,1.1614632080078124,"[1.15967919921875, 1.1498546142578125, 1.1500992431640624, 1.1482830810546876, 1.158281494140625, 1.1577117919921875, 1.1616396484375, 1.157550537109375, 1.1561011962890626, 1.1492552490234376]",tokens/s,54.55274687946568,kWh,3.3499842479430185e-05,3.6945752013604073e-06,2.2181919625012554e-05,5.937633730580316e-05,tokens/kWh,1061028.7339809134,,s,630,11.545618431091308,0.018326378462049698,0.00035041837722673565,0.018253119468688964,0.01856883583068848,0.018941068649291994,0.02004849069595337,"[0.019441120147705077, 0.018897504806518556, 0.018577791213989257, 0.018319360733032225, 0.01818009567260742, 0.01811395263671875, 0.0181376953125, 0.01805516815185547, 0.018792287826538086, 0.01797881507873535, 0.018034912109375, 0.018000383377075196, 0.01800707244873047, 0.018150367736816407, 0.018104032516479494, 0.018001855850219725, 0.018032703399658203, 0.017952640533447264, 0.0186841926574707, 0.019297632217407226, 0.018570016860961915, 0.018333023071289062, 0.018301567077636718, 0.018230464935302733, 0.018131231307983397, 0.018145856857299806, 0.01803059196472168, 0.018101503372192383, 0.018078208923339844, 0.018112768173217775, 0.018370559692382812, 0.018124671936035158, 0.018210079193115233, 0.01824166488647461, 0.018274112701416014, 0.018315168380737306, 0.01822003173828125, 0.018127967834472656, 0.018076576232910157, 0.02145484733581543, 0.02059833526611328, 0.01829692840576172, 0.018153823852539063, 0.018128896713256838, 0.018214879989624025, 0.018157535552978516, 0.018324800491333008, 0.018264223098754882, 0.018227071762084962, 0.01828883171081543, 0.018386720657348633, 0.01861299133300781, 
0.018579200744628908, 0.018620512008666993, 0.01860009574890137, 0.018563072204589845, 0.018582847595214842, 0.01859654426574707, 0.018521247863769533, 0.018420576095581054, 0.01840127944946289, 0.018359935760498047, 0.018319744110107422, 0.02004256057739258, 0.019277215957641602, 0.0187807674407959, 0.018503679275512695, 0.01824358367919922, 0.018319360733032225, 0.01814240074157715, 0.0180948486328125, 0.018171968460083007, 0.018112512588500978, 0.01799692726135254, 0.017949567794799805, 0.01803379249572754, 0.01827315139770508, 0.01845043182373047, 0.018017696380615233, 0.018100223541259765, 0.01859849548339844, 0.0181942081451416, 0.0182458553314209, 0.018163711547851562, 0.01802614402770996, 0.017993471145629884, 0.018073728561401367, 0.018119136810302736, 0.018112512588500978, 0.018167808532714845, 0.018083839416503905, 0.01804287910461426, 0.01805891227722168, 0.01837910461425781, 0.018259967803955078, 0.01809769630432129, 0.018040895462036133, 0.018178464889526368, 0.01814303970336914, 0.018370752334594728, 0.018167327880859375, 0.018139392852783202, 0.018094303131103516, 0.018212160110473632, 0.018203136444091796, 0.018085376739501953, 0.018096832275390624, 0.018192127227783204, 0.01819878387451172, 0.018238880157470702, 0.018201183319091797, 0.01816966438293457, 0.018155712127685547, 0.018137088775634767, 0.01826742362976074, 0.018295520782470702, 0.018316287994384766, 0.0182609920501709, 0.018274303436279296, 0.018302047729492187, 0.018383743286132813, 0.01833782386779785, 0.01823744010925293, 0.01825382423400879, 0.018253568649291993, 0.018254079818725587, 0.020050912857055663, 0.01904665565490723, 0.01877577590942383, 0.01840947151184082, 0.01841289520263672, 0.018205343246459962, 0.018056224822998047, 0.01803539276123047, 0.018009824752807616, 0.0179836483001709, 0.017973535537719725, 0.01800124740600586, 0.018036735534667968, 0.01798838424682617, 0.018008064270019532, 0.01805232048034668, 0.018002431869506837, 0.01808332824707031, 0.018016223907470704, 0.018058048248291016, 0.018325504302978517, 0.018069503784179687, 0.01802649688720703, 0.018067455291748045, 0.018092031478881835, 0.018182144165039063, 0.018155519485473632, 0.0182108154296875, 0.0195665283203125, 0.018267263412475587, 0.01817900848388672, 0.01825564765930176, 0.018129119873046873, 0.01823744010925293, 0.01798534393310547, 0.01826416015625, 0.018155168533325196, 0.018157855987548828, 0.018122880935668946, 0.01809187126159668, 0.018155647277832032, 0.018132095336914063, 0.01811756706237793, 0.018147552490234375, 0.01820444869995117, 0.018202207565307618, 0.018227615356445313, 0.01823904037475586, 0.0181907844543457, 0.018153472900390624, 0.018343551635742188, 0.018272287368774415, 0.01823094367980957, 0.018311872482299804, 0.018300928115844727, 0.018350080490112306, 0.01846067237854004, 0.018495487213134765, 0.01833683204650879, 0.018367424011230468, 0.018282495498657226, 0.018328704833984376, 0.01823798370361328, 0.019194976806640625, 0.018704416275024414, 0.01843494415283203, 0.018249727249145507, 0.01810207939147949, 0.01808118438720703, 0.01797961616516113, 0.017997983932495118, 0.018024864196777343, 0.01824470329284668, 0.018082719802856445, 0.01804697608947754, 0.018231231689453124, 0.018101343154907225, 0.018097120285034178, 0.01817190361022949, 0.01803664016723633, 0.01806959915161133, 0.018118656158447266, 0.018094079971313477, 0.01803264045715332, 0.018084991455078126, 0.018082687377929688, 0.01823744010925293, 0.01827596855163574, 0.018207103729248046, 0.01818623924255371, 0.018226207733154295, 
0.018109407424926758, 0.018171104431152343, 0.018326303482055665, 0.018145280838012694, 0.01807548713684082, 0.018100383758544922, 0.018056928634643556, 0.018112127304077148, 0.01824835205078125, 0.018120704650878908, 0.01811155128479004, 0.018160575866699218, 0.018122047424316407, 0.018115264892578125, 0.01810963249206543, 0.01811689567565918, 0.018221311569213867, 0.01827577590942383, 0.018258623123168945, 0.018294464111328124, 0.01824611282348633, 0.018186176300048828, 0.018133056640625, 0.01846451187133789, 0.018353567123413086, 0.018408287048339845, 0.01844793510437012, 0.018360767364501953, 0.01836182403564453, 0.018527776718139648, 0.018380992889404296, 0.018408256530761717, 0.018350080490112306, 0.018410816192626953, 0.018313087463378907, 0.019470464706420897, 0.01901523208618164, 0.018704832077026366, 0.02062335968017578, 0.018351903915405275, 0.018208255767822267, 0.01820537567138672, 0.018087711334228516, 0.01817011260986328, 0.01832476806640625, 0.018268735885620117, 0.018169824600219726, 0.01845471954345703, 0.018206720352172853, 0.018241535186767577, 0.018317312240600587, 0.01820582389831543, 0.018191232681274414, 0.01820057678222656, 0.01825779151916504, 0.018161792755126954, 0.018228736877441407, 0.018332160949707032, 0.018159616470336915, 0.018210432052612305, 0.01832979202270508, 0.018228607177734377, 0.018266944885253905, 0.018124351501464842, 0.018182207107543945, 0.018248064041137695, 0.01827840042114258, 0.018147327423095702, 0.01826576042175293, 0.018175872802734375, 0.01826243209838867, 0.018264127731323243, 0.018159616470336915, 0.018232416152954102, 0.018289567947387696, 0.018331615447998047, 0.018345279693603514, 0.018403711318969725, 0.018319520950317383, 0.018521791458129884, 0.018456064224243163, 0.018375680923461913, 0.018341888427734376, 0.01837401580810547, 0.018310848236083983, 0.018323999404907226, 0.018416128158569335, 0.018479007720947266, 0.018542591094970702, 0.018486751556396484, 0.01855855941772461, 0.018458719253540038, 0.01855574417114258, 0.018458303451538087, 0.018567487716674803, 0.01834716796875, 0.01833660888671875, 0.018450048446655272, 0.019408895492553712, 0.01905254364013672, 0.018589696884155273, 0.01841766357421875, 0.018388063430786132, 0.01826883125305176, 0.018311424255371092, 0.018264064788818358, 0.018297855377197265, 0.018291711807250977, 0.018176000595092775, 0.018231296539306642, 0.018192384719848635, 0.018265792846679688, 0.018349632263183594, 0.018975488662719725, 0.01824563217163086, 0.0182959041595459, 0.018356672286987306, 0.01840176010131836, 0.018222272872924803, 0.01831808090209961, 0.018214975357055664, 0.018284543991088868, 0.01830076789855957, 0.018393184661865233, 0.018222368240356446, 0.01823209571838379, 0.01825584030151367, 0.01816988754272461, 0.018259967803955078, 0.018140512466430662, 0.018146976470947266, 0.01819340705871582, 0.018075424194335936, 0.018150848388671877, 0.01821776008605957, 0.018198528289794923, 0.018229248046875, 0.01823539161682129, 0.01824358367919922, 0.01840643119812012, 0.018381919860839844, 0.018252927780151366, 0.018303712844848632, 0.01835830307006836, 0.01846067237854004, 0.018499584197998048, 0.018276159286499023, 0.01839676856994629, 0.018418272018432616, 0.01855897521972656, 0.018593599319458008, 0.018501663208007814, 0.018515647888183592, 0.018444255828857423, 0.018643455505371095, 0.018642112731933592, 0.018447168350219728, 0.018384735107421876, 0.018410816192626953, 0.018413408279418945, 0.0184901123046875, 0.02029657554626465, 0.019533632278442382, 0.018962623596191407, 
0.018689504623413088, 0.018543039321899414, 0.018348127365112304, 0.018325504302978517, 0.018363967895507812, 0.018264511108398437, 0.01820159912109375, 0.0182794246673584, 0.018206016540527344, 0.018141887664794923, 0.018247007369995117, 0.01821558380126953, 0.01817724800109863, 0.018201375961303713, 0.018096128463745118, 0.018161184310913087, 0.01811020851135254, 0.018088672637939455, 0.018096063613891603, 0.018208831787109376, 0.01841971206665039, 0.01942639923095703, 0.018357152938842772, 0.018197919845581053, 0.0181847038269043, 0.018232608795166017, 0.018635583877563477, 0.01835759925842285, 0.018571680068969726, 0.018249504089355467, 0.018267871856689454, 0.018494207382202147, 0.018386528015136717, 0.018779808044433594, 0.018420480728149415, 0.01831260871887207, 0.018351871490478514, 0.01845737648010254, 0.018376319885253907, 0.018330047607421875, 0.018373695373535157, 0.01832441520690918, 0.01861631965637207, 0.018990816116333006, 0.018379039764404297, 0.018372608184814454, 0.018347583770751952, 0.018295232772827148, 0.01832111930847168, 0.018352415084838865, 0.01828236770629883, 0.01837273597717285, 0.01843404769897461, 0.01836636734008789, 0.018614368438720705, 0.01856870460510254, 0.01849395179748535, 0.01849475288391113, 0.018399967193603515, 0.018394975662231444, 0.02001456069946289, 0.019399200439453125, 0.018933536529541016, 0.018665439605712892, 0.018368543624877928, 0.01822537612915039, 0.018158815383911134, 0.018148128509521484, 0.01816192054748535, 0.018150400161743165, 0.018252159118652345, 0.018279903411865233, 0.018332351684570314, 0.018387168884277345, 0.01839891242980957, 0.01830940818786621, 0.018393119812011718, 0.018561023712158203, 0.01826201629638672, 0.01844223976135254, 0.01820876884460449, 0.018141183853149414, 0.018108287811279298, 0.018130943298339842, 0.019060863494873046, 0.018253311157226563, 0.018081663131713867, 0.01808857536315918, 0.018181888580322266, 0.018157184600830076, 0.018258432388305663, 0.018249120712280274, 0.018106592178344726, 0.019118080139160155, 0.01835372734069824, 0.018269119262695314, 0.018288639068603514, 0.01816294479370117, 0.018143999099731446, 0.0181341438293457, 0.01825267219543457, 0.018364416122436524, 0.01829033660888672, 0.018319711685180665, 0.01847279930114746, 0.01838038444519043, 0.018395263671875, 0.018411968231201174, 0.018206720352172853, 0.018206464767456056, 0.018265535354614258, 0.01829555130004883, 0.018421823501586915, 0.01836796760559082, 0.018405920028686525, 0.01834297561645508, 0.01830393600463867, 0.018485248565673826, 0.018291807174682616, 0.01829318428039551, 0.018301408767700197, 0.018431999206542968, 0.01842790412902832, 0.019533344268798828, 0.01897318458557129, 0.01865727996826172, 0.018380800247192384, 0.018247360229492186, 0.01813478469848633, 0.01809056091308594, 0.018039072036743164, 0.018177440643310547, 0.01807097625732422, 0.018834304809570313, 0.020147552490234377, 0.01827702331542969, 0.018348031997680665, 0.0184682559967041, 0.01815932846069336, 0.018144128799438476, 0.018235328674316407, 0.01825388717651367, 0.01824563217163086, 0.018151424407958985, 0.018040256500244142, 0.01806710433959961, 0.018103200912475585, 0.01822105598449707, 0.01827596855163574, 0.01814156723022461, 0.0181343994140625, 0.01813350486755371, 0.018144800186157228, 0.018174560546875, 0.018233343124389647, 0.018116607666015624, 0.018184192657470705, 0.018323392868041993, 0.018362432479858398, 0.01835212707519531, 0.018298879623413086, 0.018182144165039063, 0.018431999206542968, 0.01894723129272461, 
0.01914147186279297, 0.01836851119995117, 0.01830431938171387, 0.018262815475463868, 0.018249536514282228, 0.01823321533203125, 0.01825200080871582, 0.018153472900390624, 0.0182108154296875, 0.01823744010925293, 0.018288639068603514, 0.018298784255981446, 0.018393184661865233, 0.018378400802612306, 0.01832940864562988, 0.018415136337280272, 0.018397344589233398, 0.018363231658935546, 0.01826201629638672, 0.018290687561035156, 0.018284543991088868, 0.018293792724609376, 0.02017750358581543, 0.01919308853149414, 0.018897632598876953, 0.018549983978271484, 0.018272735595703124, 0.018253536224365235, 0.01811721611022949, 0.018184032440185547, 0.01820022392272949, 0.018203136444091796, 0.018100223541259765, 0.01820364761352539, 0.018240575790405274, 0.018229183197021485, 0.018308927536010742, 0.018093568801879883, 0.018043519973754883, 0.018061376571655272, 0.01800720024108887, 0.018025344848632812, 0.018081504821777342, 0.018102527618408203, 0.01818009567260742, 0.01814851188659668, 0.01812156867980957, 0.018233343124389647, 0.018110464096069336, 0.018069503784179687, 0.018046464920043945, 0.018092544555664062, 0.018114559173583983, 0.018149375915527344, 0.018077695846557617, 0.018104320526123048, 0.01807155227661133, 0.018167808532714845, 0.01809328079223633, 0.01814159965515137, 0.0180731201171875, 0.018094560623168946, 0.018055551528930663, 0.01819161605834961, 0.018098943710327147, 0.01811625671386719, 0.018104671478271484, 0.018173215866088867, 0.018162399291992187, 0.018185792922973634, 0.018135488510131834, 0.018181568145751954, 0.018158143997192382, 0.018241535186767577, 0.018251775741577148, 0.018284543991088868, 0.018279455184936524, 0.01827939224243164, 0.018284479141235353, 0.018391008377075194, 0.018400415420532227, 0.01830121612548828, 0.0182524471282959, 0.01845583915710449, 0.018370399475097655]",tokens/s,54.56615457717421,, @@ -2777,7 +2777,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 98.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 44.12 MiB is free. Process 133139 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 3.80 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 98.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 44.12 MiB is free. Process 126743 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 3.80 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -2820,7 +2820,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 12.12 MiB is free. Process 89331 has 14.73 GiB memory in use. Of the allocated memory 12.32 GiB is allocated by PyTorch, and 2.30 GiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 12.12 MiB is free. Process 82916 has 14.73 GiB memory in use. Of the allocated memory 12.32 GiB is allocated by PyTorch, and 2.30 GiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.181824,1326.383104,0.0,931.135488,917.648384,s,1,7.267576171875,7.267576171875,0.0,7.267576171875,7.267576171875,7.267576171875,7.267576171875,[7.267576171875],,kWh,9.731543208325394e-06,1.0588403921696833e-06,4.187781127998336e-06,1.4978164728493414e-05,,MB,1164.394496,1458.50368,0.0,1050.673152,1018.330112,s,10,0.2378742084503174,0.023787420845031736,0.00025684186075807625,0.023689487457275393,0.02418208408355713,0.024227458477020264,0.02426375799179077,"[0.024172000885009766, 0.023683807373046876, 0.02393600082397461, 0.023694143295288086, 0.023820640563964844, 0.023541439056396486, 0.023403263092041014, 0.0242728328704834, 0.023684831619262697, 0.02366524887084961]",tokens/s,10761.99061965427,kWh,6.842642028688759e-07,7.546128079848815e-08,4.5220111637936463e-07,1.2119266000467288e-06,tokens/kWh,211233914.65302378,MB,1198.395392,1475.280896,0.0,1067.450368,1032.767488,s,10,13.457886840820311,1.3457886840820312,0.017177029152594957,1.3504623413085937,1.359035498046875,1.360448779296875,1.361579404296875,"[1.35027099609375, 1.34962744140625, 1.358721435546875, 1.3506536865234375, 1.3226339111328125, 1.3043353271484375, 1.3483697509765624, 1.361862060546875, 1.3536444091796875, 1.357767822265625]",tokens/s,46.812698564910725,kWh,3.787052177546552e-05,4.176658680802899e-06,1.7827452396220437e-05,5.987463285248884e-05,tokens/kWh,1052198.5187819193,,s,630,13.451185438156136,0.021351087997073217,0.00045205151538563394,0.02138086414337158,0.021686100006103517,0.02196893768310547,0.022835756683349608,"[0.02106595230102539, 0.021346879959106446, 0.02127872085571289, 0.02129100799560547, 0.021303295135498047, 0.021448991775512696, 0.021298912048339842, 0.021391008377075197, 0.02154489517211914, 0.021416351318359374, 0.02126185607910156, 0.021357023239135742, 0.02118572807312012, 0.021412128448486327, 0.021381664276123046, 0.021553279876708985, 0.021544704437255858, 0.021479551315307616, 0.021571104049682616, 0.021453279495239258, 0.021335487365722657, 0.02129158401489258, 0.021364288330078127, 0.0213623046875, 0.021316415786743165, 0.021583871841430666, 0.021978784561157226, 0.024228288650512696, 0.021284576416015624, 0.0214420166015625, 0.021383007049560546, 0.021314111709594727, 0.021451072692871095, 0.021391199111938475, 0.021065536499023436, 0.021246015548706056, 0.02138947105407715, 0.021878751754760742, 0.022534303665161133, 0.02166988754272461, 0.02131385612487793, 0.021712575912475586, 0.021139455795288087, 0.021212736129760743, 0.021174623489379884, 0.021401472091674804, 0.021395679473876952, 0.021402751922607422, 0.021304191589355467, 0.021182464599609374, 0.02115488052368164, 0.021255104064941407, 
0.02123980712890625, 0.02138051223754883, 0.021162080764770507, 0.021642847061157225, 0.021600288391113283, 0.021261184692382813, 0.020927999496459963, 0.021076480865478517, 0.021190656661987304, 0.021192607879638673, 0.021180511474609375, 0.021114208221435546, 0.02127052879333496, 0.021258079528808593, 0.021300031661987306, 0.021374975204467773, 0.02131113624572754, 0.021439903259277342, 0.02142201614379883, 0.02123673629760742, 0.0211661434173584, 0.021147136688232423, 0.021078208923339843, 0.021301504135131835, 0.021383167266845703, 0.02197248077392578, 0.021680992126464845, 0.021490495681762697, 0.021133344650268556, 0.02122960090637207, 0.02188934326171875, 0.021295040130615235, 0.022047264099121094, 0.021900320053100587, 0.021379903793334962, 0.021665824890136718, 0.02129462432861328, 0.02153926467895508, 0.021493120193481444, 0.021322719573974608, 0.021364255905151366, 0.021317920684814452, 0.021214496612548827, 0.021318368911743164, 0.021243904113769533, 0.021315584182739256, 0.02121660804748535, 0.021426847457885742, 0.021370880126953123, 0.02154297637939453, 0.021323423385620117, 0.02129897689819336, 0.021254623413085937, 0.021262208938598634, 0.021155168533325195, 0.02148137664794922, 0.021607648849487304, 0.02148137664794922, 0.021534143447875978, 0.021324127197265626, 0.022230976104736327, 0.021281919479370116, 0.021187519073486327, 0.021656736373901368, 0.02139411163330078, 0.021382495880126952, 0.021541919708251953, 0.02118771171569824, 0.021481472015380858, 0.02149443244934082, 0.021429759979248047, 0.02148953628540039, 0.021413759231567384, 0.02155388832092285, 0.021386079788208008, 0.021405023574829103, 0.02153251266479492, 0.021594816207885743, 0.022244735717773436, 0.02132044792175293, 0.021739168167114256, 0.021184864044189452, 0.021433759689331054, 0.02137763214111328, 0.02141606330871582, 0.021448575973510742, 0.021360992431640625, 0.021468832015991212, 0.021401599884033205, 0.02156870460510254, 0.02152876853942871, 0.02126630401611328, 0.021346368789672852, 0.02144291114807129, 0.021506399154663087, 0.02145894432067871, 0.021460384368896485, 0.021430879592895507, 0.022981760025024413, 0.022840192794799805, 0.021941471099853515, 0.021535295486450196, 0.021229440689086915, 0.02148918342590332, 0.021540800094604493, 0.02157043266296387, 0.021462400436401366, 0.021494047164916992, 0.021485919952392577, 0.02143846321105957, 0.02142617607116699, 0.02138710403442383, 0.0214880313873291, 0.02145635223388672, 0.02131177520751953, 0.021591903686523438, 0.021612287521362305, 0.021540256500244142, 0.021728256225585937, 0.022007104873657226, 0.021329631805419923, 0.021362752914428712, 0.021719968795776368, 0.021622047424316407, 0.0223874568939209, 0.02134422492980957, 0.021354496002197267, 0.021574687957763673, 0.021354496002197267, 0.02143491172790527, 0.021227392196655273, 0.02135856056213379, 0.022089567184448242, 0.02211408042907715, 0.021369823455810545, 0.021227519989013673, 0.021321216583251954, 0.020748287200927733, 0.021513599395751953, 0.021365503311157226, 0.02151849555969238, 0.021288671493530274, 0.021310848236083986, 0.02124982452392578, 0.02133625602722168, 0.021600927352905273, 0.021356544494628905, 0.02130534362792969, 0.02168627166748047, 0.021559295654296876, 0.02152239990234375, 0.021201183319091797, 0.021141248703002928, 0.021384672164916994, 0.02137280082702637, 0.02154697608947754, 0.02151807975769043, 0.021324735641479492, 0.0215285758972168, 0.02145715141296387, 0.02119059181213379, 0.021663040161132813, 0.02120863914489746, 0.021301984786987305, 
0.021282560348510744, 0.0214552001953125, 0.021605855941772462, 0.021410655975341798, 0.021880640029907226, 0.02121436882019043, 0.022067583084106446, 0.021426624298095703, 0.021388511657714843, 0.021468255996704103, 0.021379968643188477, 0.021386016845703126, 0.02149718475341797, 0.02154159927368164, 0.021467552185058594, 0.021270111083984376, 0.021350400924682617, 0.021441919326782227, 0.021404287338256837, 0.021623968124389648, 0.021328832626342772, 0.021454751968383787, 0.021506048202514647, 0.021476736068725587, 0.02133465576171875, 0.021522335052490234, 0.021220735549926758, 0.021457696914672853, 0.02118547248840332, 0.021478431701660156, 0.02141302490234375, 0.021302080154418944, 0.021368671417236328, 0.022006111145019533, 0.021515903472900392, 0.021624319076538084, 0.020828351974487305, 0.021493343353271483, 0.021075679779052735, 0.021252031326293944, 0.021082944869995117, 0.021491680145263672, 0.021733152389526368, 0.021462944030761717, 0.02137273597717285, 0.0213275203704834, 0.021441152572631836, 0.021456895828247072, 0.02124310493469238, 0.021551616668701173, 0.021335424423217772, 0.021697471618652344, 0.023775199890136718, 0.02130454444885254, 0.021468992233276366, 0.02093507194519043, 0.020655712127685546, 0.020620256423950194, 0.021012479782104493, 0.021261600494384764, 0.02134931182861328, 0.020764448165893554, 0.02077471923828125, 0.020713727951049806, 0.020520383834838868, 0.020586751937866212, 0.020471807479858398, 0.020424959182739257, 0.020696224212646483, 0.02059766387939453, 0.020545055389404297, 0.02031407928466797, 0.020310464859008788, 0.02068889617919922, 0.020719615936279297, 0.020942848205566408, 0.020472991943359376, 0.020658016204833984, 0.020974592208862306, 0.020745471954345705, 0.020716287612915038, 0.020602880477905275, 0.02065017509460449, 0.020565824508666994, 0.02090188789367676, 0.020809024810791017, 0.02078611183166504, 0.02086444854736328, 0.020752288818359374, 0.02066473579406738, 0.020699136734008788, 0.02066761589050293, 0.020910879135131837, 0.020783103942871094, 0.021818368911743165, 0.020677631378173827, 0.02065999984741211, 0.021082624435424805, 0.021257951736450197, 0.02172492790222168, 0.020821247100830078, 0.020880384445190428, 0.020645631790161132, 0.020590848922729492, 0.020590591430664062, 0.020568031311035156, 0.02059267234802246, 0.020527423858642577, 0.020491968154907225, 0.020426496505737305, 0.020510944366455078, 0.02045737648010254, 0.020605056762695313, 0.020516864776611327, 0.020414464950561522, 0.020477535247802735, 0.020420703887939453, 0.02044960021972656, 0.02029974365234375, 0.020534719467163086, 0.02074435234069824, 0.020668352127075195, 0.02059110450744629, 0.02031820869445801, 0.02043903923034668, 0.020508447647094728, 0.020527135848999022, 0.020744384765625, 0.020473791122436524, 0.020556928634643555, 0.0208035831451416, 0.02079840087890625, 0.020965599060058595, 0.021144832611083984, 0.02111747169494629, 0.021149696350097655, 0.020932928085327148, 0.020670143127441407, 0.02060310363769531, 0.02060652732849121, 0.020713695526123045, 0.020610624313354493, 0.02048988723754883, 0.02049056053161621, 0.020848960876464845, 0.020555936813354492, 0.020686784744262696, 0.021489728927612306, 0.020854207992553712, 0.02084876823425293, 0.020559968948364257, 0.020574560165405275, 0.02048409652709961, 0.020622528076171875, 0.02087436866760254, 0.02090166473388672, 0.02086697578430176, 0.020725759506225586, 0.02073798370361328, 0.020895807266235352, 0.02106502342224121, 0.020947647094726563, 0.02070944023132324, 0.02120649528503418, 
0.021178335189819337, 0.020889408111572267, 0.021060447692871093, 0.02107792091369629, 0.020899456024169923, 0.021325824737548828, 0.020973760604858397, 0.02100764846801758, 0.02105027198791504, 0.020940799713134766, 0.021175615310668944, 0.02108220863342285, 0.021234272003173828, 0.02112870407104492, 0.021393632888793944, 0.021290943145751952, 0.021424480438232422, 0.021429311752319335, 0.021461952209472657, 0.02155897521972656, 0.02169068717956543, 0.022055103302001954, 0.02168608093261719, 0.021540864944458008, 0.02150099182128906, 0.021605152130126953, 0.021299455642700197, 0.021950111389160157, 0.021512447357177736, 0.021747711181640626, 0.021994943618774413, 0.0215695686340332, 0.021791263580322264, 0.021604352951049805, 0.021482688903808594, 0.021674816131591796, 0.02161164855957031, 0.021610847473144533, 0.021563135147094726, 0.021501728057861328, 0.02143699264526367, 0.0212393913269043, 0.02121353530883789, 0.021434879302978514, 0.021405696868896484, 0.021284063339233397, 0.0213590087890625, 0.021309823989868165, 0.02123980712890625, 0.021471456527709962, 0.02123139190673828, 0.02129871940612793, 0.021580255508422852, 0.02162086486816406, 0.021403871536254882, 0.021153343200683593, 0.021407007217407226, 0.021504831314086915, 0.021606399536132814, 0.021475263595581055, 0.021507551193237304, 0.021364992141723632, 0.0214835205078125, 0.02177801513671875, 0.021333951950073243, 0.02133964729309082, 0.021389280319213867, 0.021443071365356444, 0.021328384399414063, 0.021381120681762695, 0.021364255905151366, 0.02137750434875488, 0.021313119888305664, 0.021379615783691405, 0.021594207763671876, 0.022573856353759764, 0.02155392074584961, 0.021964607238769532, 0.02141788864135742, 0.021598751068115235, 0.024227840423583984, 0.02147532844543457, 0.02142963218688965, 0.021602943420410158, 0.0213668155670166, 0.021561311721801757, 0.02130963134765625, 0.02147884750366211, 0.021606624603271483, 0.02142223930358887, 0.021512351989746093, 0.021364736557006835, 0.021245792388916017, 0.021326143264770506, 0.021402336120605468, 0.02168726348876953, 0.021461151123046876, 0.021763391494750976, 0.02152707290649414, 0.021442720413208008, 0.021391199111938475, 0.02149504089355469, 0.02154572868347168, 0.021428224563598632, 0.021444671630859374, 0.02137833595275879, 0.021508447647094725, 0.02149600028991699, 0.02219343948364258, 0.022824895858764647, 0.02147011184692383, 0.021545024871826173, 0.021876672744750976, 0.021566976547241212, 0.021443071365356444, 0.021389280319213867, 0.021638240814208985, 0.021348831176757812, 0.02166819190979004, 0.021849920272827148, 0.02139686393737793, 0.0225098876953125, 0.021695104598999024, 0.02182150459289551, 0.02127257537841797, 0.021372575759887696, 0.02145961570739746, 0.021460607528686525, 0.021435680389404296, 0.021480031967163086, 0.02154105567932129, 0.021358591079711914, 0.02142790412902832, 0.02150982475280762, 0.021633663177490235, 0.021597408294677736, 0.02147212791442871, 0.02136662483215332, 0.021671743392944337, 0.021250303268432618, 0.02146918487548828, 0.021140512466430665, 0.02132476806640625, 0.02144256019592285, 0.021383167266845703, 0.021319583892822267, 0.021460639953613282, 0.021373376846313477, 0.02156342315673828, 0.021513824462890626, 0.021205568313598634, 0.021806848526000976, 0.02136684799194336, 0.02162073516845703, 0.021503583908081055, 0.021327999114990233, 0.021514623641967774, 0.0216407356262207, 0.021850496292114257, 0.02136579132080078, 0.021385759353637696, 0.021430559158325195, 0.021307552337646484, 0.021348352432250976, 
0.02207257652282715, 0.02133865547180176, 0.0216856632232666, 0.022333631515502928, 0.02155174446105957, 0.021573631286621094, 0.021394527435302735, 0.0214715518951416, 0.02161520004272461, 0.021547008514404296, 0.02161267280578613, 0.021473215103149413, 0.02154694366455078, 0.021431743621826174, 0.02127097511291504, 0.021207168579101564, 0.02140355110168457, 0.021423711776733398, 0.021343936920166017, 0.02140665626525879, 0.021526432037353514, 0.021302848815917968, 0.021459199905395507, 0.021001216888427734, 0.021385215759277345, 0.02149171257019043, 0.021695520401000975, 0.02148387145996094, 0.022984895706176758, 0.021383392333984376, 0.021241472244262694, 0.02143270492553711, 0.021274368286132814, 0.021237279891967772, 0.021242111206054688, 0.02128950309753418, 0.02125446319580078, 0.021380607604980468, 0.021410144805908204, 0.021505088806152345, 0.02137343978881836, 0.021258176803588866, 0.021352960586547853, 0.02156105613708496, 0.02148944091796875, 0.021532447814941406, 0.021850847244262697, 0.021372671127319335, 0.021332223892211913, 0.022409503936767577, 0.021591487884521483, 0.02123196792602539, 0.021325183868408204, 0.02129158401489258, 0.02146713638305664, 0.02127257537841797, 0.02138252830505371, 0.02126464080810547, 0.021284543991088867, 0.021994176864624022, 0.02148316764831543, 0.021433919906616212, 0.02138995170593262, 0.02146905517578125, 0.02152272033691406, 0.021456703186035157, 0.02143244743347168, 0.02259974479675293, 0.022409183502197266, 0.022222496032714843, 0.021505760192871093, 0.02146784019470215, 0.021335744857788087, 0.021192991256713867, 0.021305376052856446, 0.021759040832519533, 0.021418912887573242, 0.021444608688354492, 0.0214936637878418, 0.021321119308471678, 0.02130400085449219, 0.021073919296264648, 0.021362464904785158, 0.021215679168701172, 0.02462211227416992, 0.021505023956298826]",tokens/s,46.836020728174546,, @@ -2866,7 +2866,7 @@ ChildProcessError: Traceback (most recent call last): self.w1 = nn.Parameter(torch.empty(moe_num_experts * ffn_hidden_size, hidden_size)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.17 GiB is free. Process 118363 has 13.57 GiB memory in use. Of the allocated memory 13.45 GiB is allocated by PyTorch, and 1.36 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.17 GiB is free. Process 112104 has 13.57 GiB memory in use. Of the allocated memory 13.45 GiB is allocated by PyTorch, and 1.36 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -2901,7 +2901,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.16 GiB. GPU 0 has a total capacity of 14.74 GiB of which 774.12 MiB is free. Process 74403 has 13.98 GiB memory in use. Of the allocated memory 13.72 GiB is allocated by PyTorch, and 148.73 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.16 GiB. GPU 0 has a total capacity of 14.74 GiB of which 774.12 MiB is free. Process 67563 has 13.98 GiB memory in use. Of the allocated memory 13.72 GiB is allocated by PyTorch, and 148.73 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -2944,7 +2944,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 52.12 MiB is free. Process 92354 has 14.69 GiB memory in use. 
Of the allocated memory 14.47 GiB is allocated by PyTorch, and 108.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 52.12 MiB is free. Process 85933 has 14.69 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 108.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,781.959168,1133.44512,0.0,738.197504,715.772928,s,1,7.1385732421875,7.1385732421875,0.0,7.1385732421875,7.1385732421875,7.1385732421875,7.1385732421875,[7.1385732421875],,kWh,3.0771318083149403e-06,3.3236041878850374e-07,9.727785559923707e-07,4.382270783095815e-06,,MB,1231.417344,1175.38816,0.0,767.557632,723.637248,s,11,0.1881715850830078,0.017106507734818897,0.00021070095363948937,0.01708678436279297,0.017403936386108397,0.01743947219848633,0.017467900848388673,"[0.01709846305847168, 0.017403936386108397, 0.01708678436279297, 0.01703126335144043, 0.017475008010864258, 0.017203168869018556, 0.01681648063659668, 0.016903776168823242, 0.01689788818359375, 0.017326175689697267, 0.016928640365600586]",tokens/s,14965.064989795255,kWh,5.02287555421702e-07,5.539292336322952e-08,3.3192462966265027e-07,8.896051084475817e-07,tokens/kWh,287768131.6901794,MB,1264.017408,1215.234048,0.0,807.40352,735.775744,s,11,10.490889038085937,0.95371718528054,0.003998828024129619,0.9527057495117187,0.9571828002929688,0.9605534057617188,0.9632498901367188,"[0.954315185546875, 0.952645751953125, 0.9509779052734375, 0.9478515625, 0.9639240112304688, 0.9527057495117187, 0.954770751953125, 0.9539635009765625, 0.9502321166992187, 0.9523197021484375, 0.9571828002929688]",tokens/s,66.0573186394542,kWh,2.773411667791219e-05,3.058641454160931e-06,1.3192076088519415e-05,4.398483422059255e-05,tokens/kWh,1432311.8664956805,,s,693,10.485052336692819,0.015129945651793377,0.00030938632956023314,0.015063072204589844,0.015295583724975586,0.015474534225463866,0.016191924057006837,"[0.015158143997192383, 0.015160767555236817, 0.01509228801727295, 0.015135040283203125, 0.014992447853088378, 0.014920543670654297, 0.015034144401550292, 0.015200160026550292, 0.015022496223449706, 0.015022111892700194, 0.015036288261413575, 0.01500870418548584, 0.014977888107299804, 0.015134719848632813, 0.015341504096984863, 
0.01516988754272461, 0.01514367961883545, 0.015064224243164063, 0.015179583549499512, 0.016235584259033202, 0.01530735969543457, 0.015391072273254395, 0.015140064239501953, 0.015057696342468261, 0.01496678352355957, 0.014981120109558106, 0.015073280334472656, 0.015036479949951172, 0.015378368377685548, 0.015126527786254883, 0.015291872024536133, 0.01531334400177002, 0.015284543991088867, 0.015159071922302246, 0.015224800109863282, 0.015071264266967773, 0.015159232139587402, 0.015118399620056152, 0.01498646354675293, 0.015084256172180175, 0.015111455917358399, 0.015245183944702149, 0.015029151916503907, 0.014937631607055664, 0.015008128166198731, 0.014991135597229003, 0.015051263809204102, 0.015073087692260742, 0.015049823760986328, 0.01505743980407715, 0.015050559997558595, 0.015046879768371582, 0.015024191856384277, 0.0150797119140625, 0.015725631713867188, 0.015634655952453613, 0.015102047920227051, 0.015006015777587891, 0.015083328247070312, 0.01526364803314209, 0.015073439598083497, 0.014975520133972168, 0.015009440422058106, 0.014962528228759766, 0.015093791961669921, 0.015105664253234863, 0.015136287689208985, 0.014959424018859864, 0.014966303825378419, 0.015112832069396972, 0.01507532787322998, 0.014938112258911132, 0.015006879806518554, 0.015076160430908203, 0.014997823715209961, 0.01490937614440918, 0.015044735908508302, 0.015056703567504883, 0.015103839874267578, 0.015089664459228515, 0.014984736442565918, 0.0151146240234375, 0.01510211181640625, 0.015132063865661622, 0.01497993564605713, 0.015122112274169921, 0.015433728218078613, 0.015218688011169433, 0.015245087623596191, 0.01515503978729248, 0.015607551574707031, 0.015252096176147461, 0.015094079971313477, 0.015288224220275879, 0.014999327659606933, 0.01501968002319336, 0.01507545566558838, 0.015075839996337891, 0.015001312255859375, 0.01502723217010498, 0.015166432380676269, 0.015186240196228027, 0.015152383804321289, 0.015102016448974609, 0.015140895843505859, 0.015071583747863769, 0.015187935829162598, 0.015126720428466797, 0.01509552001953125, 0.015155136108398437, 0.015072511672973634, 0.015124896049499511, 0.015076895713806153, 0.01502883243560791, 0.01503446388244629, 0.01502239990234375, 0.015048447608947755, 0.015021568298339843, 0.015082240104675293, 0.015122112274169921, 0.015811967849731445, 0.015090847969055176, 0.01508899211883545, 0.015129023551940918, 0.01518950366973877, 0.015226752281188964, 0.014960320472717285, 0.015089759826660156, 0.015531935691833497, 0.015046751976013184, 0.015132672309875488, 0.014978912353515625, 0.015029888153076171, 0.01502467155456543, 0.015062432289123535, 0.015083168029785156, 0.015211008071899413, 0.015057503700256348, 0.015066975593566895, 0.015089344024658204, 0.015026495933532715, 0.015007200241088868, 0.01496448040008545, 0.015014687538146972, 0.015034687995910645, 0.015158975601196289, 0.0150217924118042, 0.01512224006652832, 0.015038368225097656, 0.014950336456298828, 0.014967488288879395, 0.0150447998046875, 0.015021951675415039, 0.015038335800170898, 0.014992959976196289, 0.015290719985961914, 0.01522697639465332, 0.015116607666015625, 0.01514463996887207, 0.014979328155517578, 0.01504640007019043, 0.01498691177368164, 0.014930144309997558, 0.0151778564453125, 0.015038463592529297, 0.015075008392333984, 0.015065535545349121, 0.015020159721374512, 0.015164992332458497, 0.015216863632202149, 0.014995264053344727, 0.015009440422058106, 0.0149900484085083, 0.015177408218383788, 0.015132767677307129, 0.014956864356994629, 0.014988672256469727, 0.015058303833007812, 
0.015107328414916993, 0.014926527976989746, 0.015174655914306641, 0.01507532787322998, 0.015087936401367187, 0.014959903717041015, 0.015038880348205566, 0.015396160125732422, 0.015750176429748537, 0.015191328048706055, 0.015200608253479004, 0.01505401611328125, 0.015035200119018554, 0.014995327949523926, 0.015188096046447754, 0.015133760452270507, 0.015010656356811524, 0.014949760437011718, 0.014971424102783203, 0.014989503860473633, 0.015011839866638184, 0.014919679641723632, 0.014944576263427734, 0.015118111610412597, 0.014960543632507324, 0.014980480194091797, 0.015100543975830077, 0.014975263595581054, 0.015017696380615234, 0.014954496383666992, 0.01500879955291748, 0.015092703819274902, 0.015019968032836914, 0.014981184005737305, 0.015007743835449219, 0.01500175952911377, 0.015008607864379882, 0.014920703887939453, 0.014931072235107421, 0.015004544258117676, 0.015037983894348144, 0.015071136474609375, 0.015029952049255372, 0.015057791709899902, 0.01517520046234131, 0.015235136032104492, 0.01502239990234375, 0.015151071548461913, 0.015108160018920899, 0.01506719970703125, 0.014941760063171387, 0.015056960105895996, 0.015033727645874023, 0.0150632963180542, 0.014949119567871094, 0.014994879722595215, 0.015018464088439942, 0.015048800468444824, 0.014954208374023437, 0.014987551689147949, 0.01499135971069336, 0.015134079933166504, 0.01496777629852295, 0.015124159812927246, 0.015187935829162598, 0.015341567993164062, 0.01505292797088623, 0.01504038429260254, 0.015024127960205079, 0.015031488418579101, 0.014987168312072753, 0.014924351692199706, 0.015072832107543946, 0.015156000137329102, 0.015111552238464356, 0.015336192131042481, 0.018846527099609375, 0.01590771198272705, 0.015322912216186523, 0.01526416015625, 0.015532992362976073, 0.015196319580078124, 0.015106783866882325, 0.015070655822753905, 0.014969568252563476, 0.015183775901794434, 0.015121439933776855, 0.015270751953125, 0.01498528003692627, 0.015074624061584472, 0.01505292797088623, 0.015036160469055176, 0.015031295776367188, 0.014999263763427735, 0.01508672046661377, 0.015059935569763183, 0.015126208305358886, 0.014973440170288087, 0.015047616004943847, 0.015268735885620117, 0.015235072135925292, 0.015028223991394044, 0.015026176452636719, 0.014979071617126465, 0.015036064147949219, 0.01498470401763916, 0.015106911659240723, 0.015032447814941407, 0.015034367561340332, 0.015125568389892579, 0.015033151626586914, 0.015166687965393066, 0.015235872268676757, 0.015298879623413086, 0.015065055847167969, 0.015210528373718262, 0.0152194242477417, 0.015260767936706544, 0.015280223846435547, 0.015265215873718261, 0.0151309757232666, 0.015201279640197754, 0.01510268783569336, 0.016183584213256837, 0.017989248275756837, 0.015349311828613281, 0.016188127517700195, 0.015286368370056152, 0.015290495872497558, 0.015273983955383302, 0.015079296112060547, 0.015240351676940917, 0.015108256340026856, 0.01512441635131836, 0.015109888076782227, 0.015053824424743652, 0.015056639671325684, 0.015355903625488282, 0.015106047630310059, 0.015050751686096191, 0.015042559623718262, 0.015039648056030273, 0.0149617280960083, 0.015052576065063477, 0.015336864471435547, 0.015056480407714843, 0.014959199905395508, 0.014969183921813964, 0.015028096199035645, 0.014983360290527343, 0.015001919746398926, 0.01495740795135498, 0.015024991989135742, 0.01502012825012207, 0.014964544296264648, 0.01609942436218262, 0.017424671173095704, 0.015140576362609863, 0.015079744338989258, 0.015117759704589845, 0.01500595188140869, 0.015021727561950684, 0.014897279739379883, 
0.014952896118164062, 0.015015711784362793, 0.014997504234313964, 0.01502444839477539, 0.015017151832580566, 0.015024160385131836, 0.01498793601989746, 0.015074655532836915, 0.015004128456115722, 0.014982624053955078, 0.01498483180999756, 0.015077343940734863, 0.014970047950744629, 0.015113823890686036, 0.015030431747436523, 0.015230303764343261, 0.015073951721191406, 0.015059167861938477, 0.015106111526489259, 0.015062751770019532, 0.014985535621643066, 0.01507868766784668, 0.015122879981994629, 0.01504252815246582, 0.015091584205627442, 0.014992959976196289, 0.015177760124206543, 0.015064736366271973, 0.01505743980407715, 0.015019935607910156, 0.014987551689147949, 0.015114463806152344, 0.01512831974029541, 0.015167424201965332, 0.015070976257324219, 0.01564022445678711, 0.0149717435836792, 0.014871968269348144, 0.015302751541137695, 0.014946368217468262, 0.015386719703674317, 0.015258079528808593, 0.015245311737060547, 0.01581593608856201, 0.01515392017364502, 0.0156428804397583, 0.015529696464538573, 0.015111904144287109, 0.015018272399902343, 0.014999584197998047, 0.01589241600036621, 0.017346176147460937, 0.015237567901611328, 0.015093759536743164, 0.015070719718933106, 0.015086079597473144, 0.015087615966796876, 0.014990495681762696, 0.014895456314086914, 0.01496348762512207, 0.015092896461486817, 0.0150513277053833, 0.014929920196533204, 0.014997759819030761, 0.015157183647155762, 0.015621567726135254, 0.01497270393371582, 0.015337375640869141, 0.01507744026184082, 0.015164159774780273, 0.015091584205627442, 0.014905344009399414, 0.014995455741882324, 0.015071231842041016, 0.014993408203125, 0.014974559783935547, 0.01495248031616211, 0.015015616416931152, 0.015062911987304688, 0.014969951629638672, 0.015029984474182129, 0.015023776054382324, 0.015036767959594726, 0.015193568229675294, 0.015089823722839355, 0.015018367767333984, 0.01510755157470703, 0.014996352195739747, 0.014943903923034667, 0.01504252815246582, 0.015036447525024414, 0.015050592422485352, 0.014999711990356445, 0.015107392311096191, 0.015049440383911133, 0.01500598430633545, 0.014998527526855468, 0.01503711986541748, 0.015071231842041016, 0.015017951965332031, 0.014860159873962402, 0.014988544464111329, 0.015012351989746094, 0.014995840072631836, 0.015050784111022949, 0.014884927749633788, 0.01545206356048584, 0.015120160102844239, 0.01510217571258545, 0.014987039566040038, 0.015136159896850587, 0.015187935829162598, 0.01518671989440918, 0.015072511672973634, 0.014933055877685546, 0.014982912063598633, 0.01503446388244629, 0.01504041576385498, 0.014971263885498047, 0.015017087936401368, 0.018297119140625, 0.016107744216918945, 0.015261568069458008, 0.015062496185302735, 0.015348383903503418, 0.015134559631347657, 0.014968735694885254, 0.014964991569519043, 0.01491744041442871, 0.014974176406860351, 0.015064352035522461, 0.014953280448913574, 0.015034527778625488, 0.015057279586791992, 0.015142751693725585, 0.015040800094604493, 0.014948575973510743, 0.015054847717285156, 0.015031968116760254, 0.015012191772460938, 0.015034367561340332, 0.015007743835449219, 0.014980287551879882, 0.014967616081237794, 0.015019488334655762, 0.014973471641540528, 0.014945792198181153, 0.014966303825378419, 0.014969504356384278, 0.016465248107910155, 0.01526028823852539, 0.015140192031860351, 0.015043807983398437, 0.015116064071655274, 0.015012191772460938, 0.015058272361755371, 0.015126848220825195, 0.014986751556396484, 0.014960576057434082, 0.014989184379577638, 0.015018783569335937, 0.01502400016784668, 0.014950400352478027, 
0.01490614414215088, 0.01502780818939209, 0.014961055755615234, 0.014944255828857422, 0.014925824165344239, 0.01512054443359375, 0.015136544227600097, 0.015052864074707031, 0.015073280334472656, 0.01524499225616455, 0.015365983963012695, 0.015182080268859864, 0.01537660789489746, 0.015980607986450197, 0.015167424201965332, 0.015011775970458984, 0.015079584121704102, 0.015127679824829102, 0.015067808151245117, 0.015021247863769531, 0.01500051212310791, 0.015017151832580566, 0.01496121597290039, 0.014946559906005859, 0.014946240425109864, 0.014993151664733887, 0.014938431739807129, 0.014942079544067384, 0.014958751678466798, 0.01495043182373047, 0.01584067153930664, 0.015036416053771973, 0.015024288177490235, 0.015097984313964844, 0.014974720001220703, 0.01496729564666748, 0.014927871704101562, 0.015056351661682128, 0.014959136009216308, 0.015060223579406739, 0.014977567672729493, 0.015091936111450195, 0.015051967620849609, 0.015002431869506836, 0.01490124797821045, 0.0151244478225708, 0.01507753562927246, 0.015044192314147949, 0.014940447807312011, 0.014970687866210938, 0.014979488372802734, 0.01499283218383789, 0.015190431594848633, 0.014960672378540039, 0.01506492805480957, 0.015106111526489259, 0.015054207801818847, 0.015010175704956054, 0.015075008392333984, 0.015091551780700683, 0.015540960311889649, 0.015063039779663086, 0.015032511711120605, 0.014864640235900879, 0.015216287612915038, 0.015099295616149902, 0.015143872261047363, 0.014981120109558106, 0.014970879554748535, 0.01502950382232666, 0.015453184127807617, 0.015087488174438477, 0.014909631729125976, 0.015046336174011231, 0.015370240211486816, 0.015325311660766602, 0.01508243179321289, 0.015219679832458496, 0.015545503616333008, 0.015127615928649902, 0.015063072204589844, 0.015267552375793457, 0.01499123191833496, 0.015025664329528808, 0.01515715217590332, 0.015071231842041016, 0.014971615791320801, 0.015034175872802735, 0.015526080131530761, 0.01506067180633545, 0.014993791580200195, 0.015001631736755372, 0.015046719551086427, 0.0150218563079834, 0.014996576309204101, 0.014922719955444336, 0.01501625633239746, 0.015104703903198242, 0.015238143920898438, 0.01552995204925537, 0.015030207633972168, 0.01503651237487793, 0.015067135810852051, 0.014987263679504394, 0.015202303886413575, 0.015114591598510743, 0.015004480361938476, 0.014936896324157714, 0.0148439359664917, 0.014987199783325196, 0.015220543861389161, 0.015188575744628906, 0.015169183731079101, 0.015122528076171876, 0.015046784400939942, 0.015072832107543946, 0.014919903755187989, 0.014984319686889649, 0.015230015754699707, 0.01514249610900879, 0.015036640167236328, 0.014948543548583984, 0.015249216079711914, 0.015224831581115723, 0.015085536003112793, 0.015410367965698241, 0.014960639953613282, 0.01502780818939209, 0.014942624092102052, 0.014995552062988282, 0.015161312103271485, 0.014961600303649902, 0.015021120071411133, 0.015071167945861816, 0.015091520309448242, 0.015522239685058593, 0.015174752235412597, 0.015205023765563964, 0.01501363182067871, 0.01512399959564209, 0.015503135681152344, 0.015286239624023437, 0.01520259189605713, 0.015810367584228515, 0.015500127792358398, 0.015138848304748535, 0.015119872093200683, 0.014989279747009277, 0.01505452823638916, 0.015160096168518067, 0.015396927833557129, 0.015189632415771485, 0.015327615737915039, 0.01502940845489502, 0.015215456008911133, 0.01518182373046875, 0.015362048149108886, 0.015303775787353516, 0.015137791633605957, 0.015156479835510253, 0.015063455581665039, 0.014944031715393066, 0.014954943656921387, 
0.015364095687866211, 0.015216511726379394, 0.015173855781555176, 0.015066559791564942, 0.01515283203125, 0.015141695976257323, 0.01529651165008545, 0.015349760055541992, 0.015170720100402832, 0.015184736251831055, 0.015280256271362305, 0.015310720443725586, 0.015138112068176269, 0.015362751960754395, 0.01528831958770752, 0.015168800354003906, 0.01513327980041504, 0.015122400283813477, 0.015044095993041993, 0.015026847839355468, 0.015164735794067383, 0.015180480003356934, 0.015066816329956054, 0.01514521598815918, 0.015288384437561035, 0.01545747184753418]",tokens/s,66.09409068706525,, @@ -2988,7 +2988,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 182.12 MiB is free. Process 44925 has 14.56 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 13.08 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 182.12 MiB is free. Process 38906 has 14.56 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 13.08 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -3031,7 +3031,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 200.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 164.12 MiB is free. Process 52887 has 14.58 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 4.94 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 200.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 164.12 MiB is free. Process 46655 has 14.58 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 4.94 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,814.374912,806.289408,0.0,411.041792,391.374848,s,1,7.21324560546875,7.21324560546875,0.0,7.21324560546875,7.21324560546875,7.21324560546875,7.21324560546875,[7.21324560546875],,kWh,5.089960633351135e-06,5.542499003759846e-07,9.938896839908895e-07,6.63810021771801e-06,,MB,1164.005376,881.78688,0.0,473.956352,454.832128,s,15,0.18734287929534915,0.01248952528635661,0.000214234636375093,0.012487648010253906,0.012725933074951172,0.012832464122772218,0.012959427242279054,"[0.012574463844299317, 0.012199999809265137, 0.012470656394958497, 0.012207615852355956, 0.012267423629760741, 0.012245023727416992, 0.012525504112243652, 0.012668160438537598, 0.01249782371520996, 0.012487648010253906, 0.012633376121520996, 0.012485119819641113, 0.012991168022155762, 0.012324447631835938, 0.012764448165893555]",tokens/s,20497.176164065335,kWh,3.6605550480445295e-07,4.0369153673917844e-08,2.2922400883563672e-07,6.356486673140075e-07,tokens/kWh,402738199.8325455,MB,1197.735936,909.049856,0.0,501.219328,454.834688,s,15,10.493724060058597,0.6995816040039063,0.01224672635514582,0.7041112670898437,0.7111708374023438,0.7148606750488281,0.7194729162597656,"[0.6981555786132813, 0.6862251586914062, 0.6848902587890625, 0.6789227294921875, 0.6823243408203125, 0.687145263671875, 0.7206259765625, 0.7078164672851562, 0.7072764892578125, 0.7093423461914062, 0.7089010620117188, 0.7041112670898437, 0.6998896484375, 0.7123898315429688, 0.7057076416015625]",tokens/s,90.05382594315361,kWh,2.0434243422139875e-05,2.2535672322146246e-06,8.911857378097216e-06,3.1599668032451724e-05,tokens/kWh,1993691.830411043,,s,945,10.485675329208373,0.01109595272932103,0.00030792222763291303,0.011096672058105468,0.011409036827087403,0.011485177421569823,0.011998213195800777,"[0.01104911994934082, 0.011024031639099121, 0.011106495857238769, 0.011065312385559082, 0.01126137638092041, 0.011096672058105468, 0.010999808311462403, 0.010935615539550782, 0.010844544410705566, 0.010780351638793945, 0.010936927795410157, 0.010760383605957031, 0.010741888046264648, 0.010798815727233887, 0.01100595188140869, 0.010903552055358886, 0.010816960334777833, 
0.010865216255187988, 0.010797408103942871, 0.010756928443908692, 0.010740575790405273, 0.01074995231628418, 0.011136544227600098, 0.011620832443237304, 0.011495424270629882, 0.01131935977935791, 0.011285568237304687, 0.01123136043548584, 0.011076064109802245, 0.011066944122314452, 0.010973119735717773, 0.01094320011138916, 0.010902912139892577, 0.010864800453186035, 0.010868351936340332, 0.010746784210205078, 0.011253824234008789, 0.011358112335205077, 0.011898271560668945, 0.011962623596191406, 0.011379039764404297, 0.011049087524414063, 0.010927712440490723, 0.01104313564300537, 0.011003904342651367, 0.01093126392364502, 0.011457695960998535, 0.010942432403564453, 0.010966848373413086, 0.011017215728759766, 0.010966015815734862, 0.011309023857116698, 0.011564736366271972, 0.01138268756866455, 0.011343615531921387, 0.011266559600830077, 0.011426015853881836, 0.011126144409179688, 0.011018943786621094, 0.010956831932067871, 0.010886048316955567, 0.010931039810180664, 0.010751168251037597, 0.01044863986968994, 0.010719488143920899, 0.010649056434631347, 0.01068841552734375, 0.010758463859558105, 0.010770751953125, 0.010919936180114746, 0.010789024353027343, 0.01073523235321045, 0.010826016426086426, 0.010846143722534179, 0.010852352142333984, 0.01083801555633545, 0.010760191917419434, 0.0107741117477417, 0.01074022388458252, 0.01088092803955078, 0.011402912139892578, 0.011421759605407715, 0.011421919822692871, 0.011208767890930177, 0.0110447998046875, 0.01108572769165039, 0.011053183555603027, 0.011057184219360352, 0.01083407974243164, 0.010747743606567382, 0.010690560340881347, 0.010719231605529785, 0.01075814437866211, 0.01062502384185791, 0.010637151718139648, 0.010708831787109374, 0.010704256057739258, 0.010912704467773437, 0.010979328155517578, 0.01099078369140625, 0.010918047904968262, 0.010865632057189942, 0.010946368217468262, 0.010929216384887695, 0.010896479606628418, 0.010808256149291992, 0.01098198413848877, 0.010878944396972656, 0.010889439582824708, 0.011057151794433593, 0.011282431602478027, 0.011354111671447753, 0.01139737606048584, 0.011128576278686523, 0.011085215568542481, 0.011079968452453614, 0.010791007995605468, 0.010821663856506347, 0.010729280471801758, 0.010681056022644042, 0.010876031875610351, 0.01062332820892334, 0.010674367904663086, 0.010680319786071778, 0.010654879570007323, 0.010726143836975097, 0.0106397123336792, 0.010995231628417968, 0.010972960472106934, 0.011233632087707519, 0.011010272026062012, 0.011095968246459961, 0.010992863655090331, 0.011216927528381347, 0.011000672340393066, 0.010983424186706543, 0.010877951622009278, 0.010894335746765137, 0.011149312019348144, 0.011224191665649414, 0.011041407585144042, 0.010964703559875489, 0.010746399879455566, 0.010637663841247558, 0.01067155170440674, 0.010698975563049317, 0.010717023849487304, 0.010756256103515625, 0.0108373441696167, 0.010782431602478028, 0.010775487899780273, 0.010851840019226074, 0.010862688064575195, 0.010758560180664062, 0.01086019229888916, 0.010879327774047851, 0.010836095809936523, 0.010879039764404297, 0.010874752044677735, 0.010817472457885741, 0.01081884765625, 0.010820544242858887, 0.010900351524353027, 0.01079798412322998, 0.01074790382385254, 0.01074176025390625, 0.010674176216125488, 0.010721280097961425, 0.010739392280578613, 0.010749759674072266, 0.010780672073364257, 0.01095030403137207, 0.011154272079467774, 0.011109631538391113, 0.011065792083740235, 0.0111560640335083, 0.011046976089477538, 0.010964768409729004, 0.010825152397155762, 0.010770879745483398, 
0.010755071640014649, 0.010667008399963379, 0.010804479598999023, 0.010699520111083985, 0.010690048217773437, 0.010725312232971192, 0.010682720184326172, 0.010695199966430664, 0.01063043212890625, 0.010497920036315918, 0.010758272171020508, 0.01084812831878662, 0.01076643180847168, 0.01075334358215332, 0.01064793586730957, 0.010676575660705566, 0.01073516845703125, 0.010676671981811523, 0.010854432106018067, 0.010921952247619628, 0.010878975868225099, 0.010799263954162598, 0.010745311737060547, 0.010667936325073242, 0.010664416313171386, 0.01071718406677246, 0.010697728157043456, 0.01064633560180664, 0.010694016456604004, 0.010651488304138184, 0.010693568229675293, 0.010694208145141602, 0.01067363166809082, 0.010705951690673829, 0.010690655708312988, 0.010759903907775878, 0.010771807670593261, 0.010814271926879882, 0.010762335777282715, 0.010835871696472169, 0.010841664314270019, 0.011089344024658204, 0.011036767959594726, 0.0108307523727417, 0.010821632385253906, 0.010809632301330566, 0.011261055946350098, 0.01085910415649414, 0.010768383979797362, 0.010727295875549317, 0.01073964786529541, 0.01066966438293457, 0.010711711883544922, 0.010669631958007813, 0.010686847686767578, 0.010788864135742187, 0.010780672073364257, 0.010767904281616211, 0.010731904029846192, 0.010722720146179199, 0.010708767890930176, 0.010791999816894531, 0.010847519874572754, 0.010791296005249024, 0.01079097557067871, 0.010859904289245605, 0.010831968307495117, 0.010840736389160156, 0.010746111869812012, 0.010733311653137206, 0.01075814437866211, 0.010729663848876953, 0.010457759857177734, 0.010731391906738282, 0.010743136405944825, 0.010785375595092773, 0.010763456344604492, 0.010677056312561035, 0.010666048049926758, 0.010682111740112304, 0.010790271759033203, 0.01087168025970459, 0.010755423545837403, 0.010717087745666504, 0.010778719902038575, 0.011233951568603516, 0.010901535987854004, 0.010813407897949219, 0.010947872161865234, 0.010881759643554688, 0.010932448387145997, 0.010948479652404785, 0.010923359870910644, 0.010750240325927734, 0.010725024223327637, 0.011152000427246093, 0.011138272285461426, 0.010686592102050781, 0.01065231990814209, 0.01067024040222168, 0.010700639724731445, 0.011630592346191406, 0.010881024360656738, 0.010842111587524414, 0.010786751747131347, 0.010938431739807129, 0.010811391830444337, 0.010931296348571777, 0.010865887641906739, 0.010866463661193848, 0.01082096004486084, 0.010855072021484375, 0.010894880294799805, 0.010890656471252442, 0.010869728088378907, 0.01070899200439453, 0.010757599830627441, 0.010746208190917969, 0.010696160316467284, 0.010686207771301269, 0.010886207580566406, 0.010743935585021973, 0.010710111618041992, 0.010701567649841308, 0.010696640014648437, 0.01066105556488037, 0.010761024475097657, 0.010789055824279786, 0.011100192070007325, 0.010786591529846191, 0.010820639610290528, 0.010715519905090332, 0.01102460765838623, 0.010737407684326171, 0.010736255645751954, 0.010813952445983887, 0.011146623611450196, 0.010843232154846191, 0.010769824028015136, 0.010809727668762207, 0.010800224304199219, 0.010845151901245118, 0.010896672248840332, 0.010940735816955566, 0.011014495849609375, 0.010987520217895508, 0.01094976043701172, 0.01087993621826172, 0.010794943809509277, 0.010741503715515136, 0.010705151557922364, 0.010786815643310547, 0.01075820827484131, 0.010828736305236817, 0.010698080062866211, 0.010687520027160644, 0.010813632011413575, 0.010749600410461425, 0.01073027229309082, 0.010702848434448242, 0.010732959747314454, 0.010756799697875977, 
0.010694239616394043, 0.01068889617919922, 0.011093952178955079, 0.010784640312194824, 0.010803327560424805, 0.010737088203430176, 0.010715904235839844, 0.010644831657409669, 0.010717663764953613, 0.010730719566345215, 0.01083471965789795, 0.010810720443725585, 0.010859487533569336, 0.01081107234954834, 0.010799103736877442, 0.010862144470214843, 0.010758591651916503, 0.010692831993103027, 0.010692543983459472, 0.010872672080993653, 0.011042240142822266, 0.011163871765136718, 0.011182784080505371, 0.011209792137145996, 0.011177727699279785, 0.011141695976257325, 0.0110862398147583, 0.01116966438293457, 0.011372096061706543, 0.011176416397094727, 0.01115062427520752, 0.01117689609527588, 0.011197312355041504, 0.011204671859741212, 0.011212896347045899, 0.011098719596862794, 0.011572319984436035, 0.011252927780151366, 0.011436511993408203, 0.011271167755126953, 0.011287551879882812, 0.01120076847076416, 0.011609919548034668, 0.01349180793762207, 0.01287987232208252, 0.01152239990234375, 0.01136451244354248, 0.011394911766052247, 0.011286527633666991, 0.011564959526062011, 0.011374688148498536, 0.011431936264038087, 0.011382975578308106, 0.011333279609680176, 0.011485152244567871, 0.011306976318359375, 0.011460096359252929, 0.011399200439453125, 0.011693984031677247, 0.011389951705932617, 0.011428895950317383, 0.011506752014160156, 0.011342559814453125, 0.01187119960784912, 0.011423135757446289, 0.011309599876403809, 0.011347840309143067, 0.011371999740600587, 0.011391488075256348, 0.011335904121398926, 0.01136025619506836, 0.011308320045471191, 0.011335807800292969, 0.011286656379699707, 0.011336095809936523, 0.011331583976745606, 0.011313535690307618, 0.011157183647155762, 0.011183903694152833, 0.011530495643615723, 0.01118553638458252, 0.011270208358764649, 0.011323936462402344, 0.011409279823303222, 0.01146236801147461, 0.011372960090637207, 0.01141875171661377, 0.011410304069519043, 0.011312543869018555, 0.011222880363464355, 0.011207712173461915, 0.011255552291870117, 0.011171520233154297, 0.011385408401489258, 0.011320256233215331, 0.01142249584197998, 0.01143712043762207, 0.011305536270141602, 0.011267840385437012, 0.01099129581451416, 0.011155776023864746, 0.011012096405029297, 0.01113868808746338, 0.011151488304138184, 0.0111843843460083, 0.011106304168701172, 0.011167648315429688, 0.011253376007080079, 0.011305439949035644, 0.011089792251586914, 0.011315327644348144, 0.011237407684326172, 0.011183263778686524, 0.011149312019348144, 0.011125503540039062, 0.011338111877441406, 0.011343296051025391, 0.011342240333557128, 0.011418623924255371, 0.011331775665283202, 0.011313823699951172, 0.011190048217773437, 0.011138463973999023, 0.011123519897460937, 0.011363679885864258, 0.011389599800109864, 0.011231231689453124, 0.011895968437194824, 0.010958815574645996, 0.011047807693481446, 0.011148896217346192, 0.011245984077453614, 0.01125376033782959, 0.011280703544616699, 0.011279552459716798, 0.011327103614807129, 0.011361472129821778, 0.011388031959533692, 0.011409983634948731, 0.011214847564697266, 0.011165151596069336, 0.011143327713012696, 0.011190303802490234, 0.011106559753417969, 0.01111248016357422, 0.011040160179138184, 0.011199135780334473, 0.011200511932373047, 0.011243519783020019, 0.011611328125, 0.011419520378112794, 0.011431039810180664, 0.011313055992126465, 0.011175359725952148, 0.010944992065429688, 0.010928095817565918, 0.010939935684204101, 0.011167679786682129, 0.011417759895324706, 0.011245823860168458, 0.01115561580657959, 0.011192319869995117, 0.010995712280273438, 
0.01122441577911377, 0.011407072067260743, 0.011357119560241699, 0.011495743751525879, 0.011495008468627929, 0.011298912048339844, 0.011280384063720703, 0.011234848022460938, 0.011164128303527832, 0.011122943878173829, 0.011105088233947754, 0.011102656364440917, 0.01113548755645752, 0.011032575607299805, 0.011053248405456543, 0.010995519638061524, 0.010946559906005859, 0.010966560363769532, 0.010967519760131836, 0.010989567756652833, 0.010992799758911133, 0.011024928092956543, 0.01202617645263672, 0.011493375778198242, 0.011376064300537109, 0.011268671989440917, 0.011485183715820312, 0.011239423751831054, 0.0123985595703125, 0.011307040214538574, 0.011469056129455566, 0.011386624336242676, 0.011368608474731446, 0.011378496170043946, 0.011263456344604492, 0.01121951961517334, 0.011192319869995117, 0.011122688293457032, 0.01103667163848877, 0.010975232124328613, 0.010950528144836425, 0.010840448379516601, 0.01084832000732422, 0.010852031707763672, 0.010981023788452149, 0.011083647727966309, 0.011108192443847656, 0.011068032264709473, 0.01114521598815918, 0.011182080268859864, 0.011204607963562012, 0.01136025619506836, 0.011312416076660156, 0.011197152137756348, 0.011326815605163574, 0.011356831550598145, 0.01133561611175537, 0.011237343788146973, 0.011327232360839844, 0.01129916763305664, 0.011276288032531738, 0.011059200286865235, 0.010835712432861327, 0.011186431884765625, 0.011399359703063964, 0.011280192375183105, 0.011257439613342286, 0.011248031616210937, 0.01137664031982422, 0.01139673614501953, 0.011561344146728516, 0.011340031623840332, 0.011351807594299317, 0.011415552139282227, 0.011659263610839844, 0.011695679664611816, 0.011420096397399902, 0.01130726432800293, 0.011189408302307129, 0.011092063903808593, 0.01117852783203125, 0.011042783737182617, 0.01098464012145996, 0.011218879699707031, 0.011351167678833008, 0.011310848236083984, 0.011214847564697266, 0.011167743682861327, 0.011070879936218261, 0.01111017608642578, 0.011307840347290038, 0.01125532817840576, 0.011067872047424316, 0.011081248283386231, 0.011223520278930664, 0.011192319869995117, 0.01101414394378662, 0.010973279953002929, 0.011024288177490234, 0.01115135955810547, 0.011157088279724121, 0.011008416175842285, 0.010968928337097167, 0.010987648010253906, 0.011042847633361816, 0.011169695854187011, 0.011120896339416503, 0.011048640251159668, 0.011135328292846679, 0.01110313606262207, 0.011030559539794921, 0.01101296043395996, 0.010987551689147949, 0.010962944030761718, 0.011157504081726074, 0.011356287956237793, 0.011736031532287597, 0.012051360130310058, 0.011530367851257325, 0.01259712028503418, 0.011542528152465821, 0.011416895866394043, 0.011295424461364747, 0.01118723201751709, 0.011197728157043457, 0.011091551780700683, 0.011229696273803711, 0.011439871788024903, 0.01127609634399414, 0.011262463569641114, 0.011289759635925293, 0.011218751907348633, 0.01128275203704834, 0.011302656173706054, 0.011234399795532226, 0.011259200096130371, 0.011311840057373047, 0.01154201602935791, 0.01136415958404541, 0.011208255767822266, 0.011117600440979003, 0.011123616218566895, 0.011203392028808594, 0.011112000465393066, 0.01105174446105957, 0.011013919830322266, 0.010962271690368652, 0.011043359756469726, 0.010995424270629883, 0.010889856338500977, 0.010909152030944825, 0.011052895545959472, 0.010978015899658203, 0.011380479812622071, 0.011441920280456543, 0.011305343627929688, 0.011347200393676758, 0.011344160079956055, 0.011542880058288574, 0.011118656158447265, 0.011388863563537598, 0.011596927642822266, 
0.011412575721740722, 0.011286304473876954, 0.011293760299682617, 0.011409503936767578, 0.011481792449951171, 0.011387040138244629, 0.011245023727416993, 0.011227328300476075, 0.011364704132080078, 0.01145036792755127, 0.01133568000793457, 0.01123737621307373, 0.011194368362426758, 0.011214847564697266, 0.011233247756958008, 0.011237407684326172, 0.011367456436157226, 0.011307392120361328, 0.011221887588500977, 0.011260992050170898, 0.011263808250427247, 0.011189087867736817, 0.011233280181884766, 0.011116095542907715, 0.011063424110412598, 0.011079104423522949, 0.010767999649047852, 0.010945440292358399, 0.010964223861694336, 0.010966879844665527, 0.01101318359375, 0.011089759826660156, 0.01132953643798828, 0.011138751983642578, 0.011221088409423829, 0.011124959945678711, 0.01103872013092041, 0.01121008014678955, 0.011263903617858886, 0.011186016082763673, 0.011266176223754882, 0.011132960319519042, 0.011192288398742676, 0.011242431640625, 0.011206239700317382, 0.011083935737609863, 0.011008319854736328, 0.01102729606628418, 0.010988479614257812, 0.0109366397857666, 0.011249343872070312, 0.011360128402709961, 0.01129257583618164, 0.011498784065246582, 0.011520959854125977, 0.011290623664855956, 0.01121452808380127, 0.011243807792663573, 0.011202591896057129, 0.011096351623535157, 0.01106710433959961, 0.01099135971069336, 0.010990847587585449, 0.011031104087829589, 0.011129152297973633, 0.01108176040649414, 0.011298912048339844, 0.011208703994750976, 0.011146528244018555, 0.011084863662719727, 0.011103903770446778, 0.011155776023864746, 0.011342975616455078, 0.011397695541381836, 0.011343968391418458, 0.011261856079101563, 0.011204607963562012, 0.011218655586242675, 0.011323360443115235, 0.011333056449890137, 0.011299615859985351, 0.011040575981140137, 0.010975520133972167, 0.01091811180114746, 0.010937503814697266, 0.011178912162780762, 0.011408160209655762, 0.011390111923217773, 0.011337727546691894, 0.011051072120666503, 0.011112031936645507, 0.011070112228393554, 0.011136704444885254, 0.010982720375061036, 0.010898112297058105, 0.010849504470825196, 0.01094643211364746, 0.011166496276855469, 0.01102451229095459, 0.010778431892395019, 0.010739904403686523, 0.01118019199371338, 0.011407199859619141, 0.01139129638671875, 0.01145030403137207, 0.011411199569702149, 0.011441472053527833, 0.011251423835754394, 0.011129823684692383, 0.011181983947753906, 0.011085439682006835, 0.011002559661865235, 0.011032352447509766, 0.010959872245788574, 0.01076863956451416, 0.010812159538269042, 0.010827072143554687, 0.011067808151245117, 0.011442560195922852, 0.01150592041015625, 0.011335328102111816, 0.011306943893432618, 0.011147071838378906, 0.011100159645080567, 0.010934528350830077, 0.010952704429626465, 0.011068639755249023, 0.010965375900268555, 0.010866751670837402, 0.011052415847778321, 0.011061759948730468, 0.010787296295166015, 0.010821632385253906, 0.01073305606842041, 0.010627584457397461, 0.010682368278503418, 0.01123472023010254, 0.011379487991333008, 0.011349823951721192, 0.011260064125061035, 0.011258943557739258, 0.01130780792236328, 0.011544575691223144, 0.011243776321411133, 0.011101951599121094, 0.011242783546447754, 0.01134665584564209, 0.01136844825744629, 0.011286175727844239, 0.01112508773803711, 0.0108721923828125, 0.010924448013305664, 0.010645824432373047, 0.010887104034423828, 0.010950655937194824, 0.011018207550048828, 0.011208736419677734, 0.011189663887023926, 0.011245632171630859, 0.01142630386352539, 0.011228927612304688, 0.011171104431152344, 0.011117568016052246, 
0.011016192436218262, 0.011216287612915038, 0.011459168434143066, 0.011472000122070312, 0.011723135948181152, 0.01140287971496582, 0.011362624168395997, 0.011343487739562988, 0.011295680046081544, 0.011384736061096192, 0.011240703582763672, 0.011266912460327148, 0.011290016174316407, 0.01105731201171875, 0.011125280380249023, 0.011456352233886719, 0.012119359970092773, 0.013842623710632324, 0.011282560348510742, 0.010936448097229004, 0.010832192420959473, 0.010807295799255372, 0.010979104042053222, 0.011178208351135253, 0.011069439888000488, 0.010944512367248535, 0.01096291160583496, 0.01108137607574463, 0.011225024223327636, 0.01112723159790039, 0.01107148838043213, 0.011108448028564453, 0.01094883155822754, 0.010983424186706543, 0.011296544075012207, 0.011419551849365234, 0.011460576057434082, 0.011364383697509766, 0.011425248146057129, 0.011336511611938476, 0.011235039710998535, 0.011241472244262696, 0.011183648109436036, 0.011020959854125977, 0.011261247634887695, 0.011479552268981934, 0.011475008010864257, 0.012947392463684083, 0.012400959968566895, 0.01123078441619873, 0.011155232429504395, 0.011171199798583985, 0.011152480125427246, 0.011137951850891113, 0.011108223915100097, 0.011312416076660156, 0.011361056327819825, 0.011227519989013671, 0.011253439903259277, 0.011395071983337402, 0.011228896141052247, 0.010987808227539062, 0.0109683837890625, 0.010974911689758301, 0.011014431953430175, 0.011070176124572754, 0.011122688293457032, 0.011214912414550781, 0.011094112396240235, 0.011091103553771972, 0.01101484775543213, 0.010966272354125976, 0.011159584045410157, 0.01137235164642334, 0.011465023994445801, 0.011530879974365235, 0.01153657627105713, 0.011378111839294434, 0.011411808013916015, 0.011200480461120606, 0.011111712455749512, 0.011121472358703614, 0.010887200355529786, 0.010950559616088868, 0.011131168365478516, 0.011090911865234376, 0.011148032188415527, 0.01117199993133545, 0.011201696395874023, 0.011233983993530273, 0.011408672332763672, 0.011634464263916016, 0.011401408195495606, 0.01137331199645996, 0.011298208236694337, 0.011332159996032714, 0.011511839866638184, 0.011273568153381347, 0.011416223526000977, 0.011294719696044921, 0.011350048065185548, 0.011157343864440918, 0.011272319793701172, 0.011065119743347169, 0.010928352355957032, 0.010923295974731446, 0.010938431739807129, 0.01119324779510498, 0.011082880020141602, 0.011051872253417968, 0.01114038372039795, 0.011309568405151366, 0.010903136253356933, 0.010908063888549804, 0.01115116786956787]",tokens/s,90.1229506284307,, @@ -3075,7 +3075,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 52.12 MiB is free. Process 77394 has 14.69 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 108.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 52.12 MiB is free. 
Process 70777 has 14.69 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 108.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -3118,7 +3118,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 144.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 34.12 MiB is free. Process 147883 has 14.71 GiB memory in use. Of the allocated memory 14.59 GiB is allocated by PyTorch, and 1.69 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 144.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 34.12 MiB is free. Process 141174 has 14.71 GiB memory in use. Of the allocated memory 14.59 GiB is allocated by PyTorch, and 1.69 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,835.735552,4675.534848,0.0,4280.287232,4115.121152,s,1,7.37269970703125,7.37269970703125,0.0,7.37269970703125,7.37269970703125,7.37269970703125,7.37269970703125,[7.37269970703125],,kWh,9.522534087530706e-06,1.0429521619540363e-06,3.299169305998051e-06,1.3864655555482794e-05,,MB,1202.020352,4983.816192,0.0,4575.985664,4408.408064,s,10,0.42754406356811525,0.04275440635681152,0.0014334045633080338,0.042253551483154295,0.04336371269226074,0.04515391941070556,0.046586084785461425,"[0.04694412612915039, 0.04253737640380859, 0.042242782592773434, 0.042584064483642575, 0.04222710418701172, 0.04203776168823242, 0.04195721435546875, 0.041783424377441404, 0.042264320373535155, 0.04296588897705078]",tokens/s,5987.686926664923,kWh,1.5306460620200298e-06,1.6880301024038254e-07,1.0141325739580902e-06,2.713581646218502e-06,tokens/kWh,94340260.72395776,MB,1230.35648,4983.816192,0.0,4575.985664,4408.410624,s,10,15.602204467773436,1.5602204467773437,0.006893250126550453,1.5605767211914063,1.56579580078125,1.5694571044921877,1.5723861474609375,"[1.573118408203125, 1.564982177734375, 1.558853759765625, 1.5473900146484374, 1.5649759521484374, 1.5629482421875, 1.551452880859375, 1.5591427001953124, 1.5620107421875, 1.55732958984375]",tokens/s,40.37890935869181,kWh,4.494949023089878e-05,4.957548495471422e-06,2.977266983324302e-05,7.967970855961321e-05,tokens/kWh,790665.542568669,,s,630,15.599725788116476,0.024761469504946756,0.000482530303829509,0.024702032089233397,0.0250308967590332,0.02532572202682495,0.02608155612945557,"[0.025272319793701172, 0.02474393653869629, 0.024856576919555663, 0.024817663192749022, 0.024887615203857422, 0.024907455444335938, 0.02490572738647461, 0.024807424545288087, 0.024922111511230468, 0.024801279067993166, 0.025231359481811523, 0.029007648468017577, 0.02487318420410156, 0.025145408630371093, 0.02533328056335449, 0.02497372817993164, 0.02569625663757324, 0.024817663192749022, 0.02489139175415039, 0.02480735969543457, 0.024780096054077147, 0.024849151611328123, 0.024706304550170897, 0.024804096221923828, 0.025481056213378907, 0.02496227264404297, 0.02486777687072754, 0.024742944717407227, 0.02535523223876953, 0.02493017578125, 0.02490108871459961, 0.025690559387207032, 0.024985824584960938, 0.025132192611694335, 0.024863584518432617, 0.024937568664550783, 0.024531871795654296, 0.024639488220214844, 0.02459561538696289, 0.02467024040222168, 0.024514591217041016, 0.024591136932373046, 0.02474332809448242, 0.02485513687133789, 0.024702688217163087, 0.0246080322265625, 0.024656671524047852, 0.024721632003784178, 0.02487295913696289, 0.025556991577148438, 0.024922111511230468, 0.024682048797607423, 0.025390687942504882, 
0.025489599227905273, 0.025444576263427734, 0.024637887954711914, 0.02473756790161133, 0.024766687393188477, 0.024268800735473633, 0.024357152938842774, 0.024272607803344726, 0.024923391342163086, 0.0250263671875, 0.02491769599914551, 0.02536288070678711, 0.024864639282226562, 0.024590560913085938, 0.024579776763916015, 0.024620864868164064, 0.024539424896240235, 0.02451456069946289, 0.02614476776123047, 0.025933792114257812, 0.02469385528564453, 0.024879968643188477, 0.024748128890991213, 0.024713216781616212, 0.02456928062438965, 0.02465439987182617, 0.024530208587646485, 0.025078176498413086, 0.02460268783569336, 0.02439344024658203, 0.024437280654907228, 0.02471219253540039, 0.03300022506713867, 0.024514463424682616, 0.024929983139038086, 0.024777215957641603, 0.024754304885864258, 0.0247193603515625, 0.02462067222595215, 0.024899967193603517, 0.02451241683959961, 0.024567903518676756, 0.024858015060424805, 0.02486537551879883, 0.024532127380371093, 0.02492630386352539, 0.024453887939453123, 0.024649311065673828, 0.024895519256591798, 0.024696575164794923, 0.024507007598876952, 0.024532800674438478, 0.02455571174621582, 0.024593696594238282, 0.024584928512573243, 0.024635391235351564, 0.02460371208190918, 0.024564672470092773, 0.024436128616333007, 0.024719808578491213, 0.02444044876098633, 0.02446611213684082, 0.025082752227783202, 0.024757055282592772, 0.024538591384887697, 0.024580799102783202, 0.024399551391601562, 0.024586559295654297, 0.02441766357421875, 0.02461065673828125, 0.02457859230041504, 0.02485273551940918, 0.0244421443939209, 0.025602975845336915, 0.02497529602050781, 0.02480953598022461, 0.025163103103637695, 0.024743776321411132, 0.02509292793273926, 0.024543231964111328, 0.024841407775878906, 0.024580928802490236, 0.024549375534057616, 0.024526847839355468, 0.024708736419677736, 0.024904064178466797, 0.024614912033081054, 0.024544832229614257, 0.024533439636230468, 0.02451456069946289, 0.024677568435668946, 0.02478323173522949, 0.024795583724975586, 0.02460633659362793, 0.02478323173522949, 0.024467456817626954, 0.024507616043090822, 0.024566560745239257, 0.024886335372924805, 0.024641536712646486, 0.02457491111755371, 0.024448127746582032, 0.024580768585205078, 0.02518448066711426, 0.024700511932373048, 0.02453094482421875, 0.02488265609741211, 0.024746944427490234, 0.0247193603515625, 0.024554975509643556, 0.024615455627441406, 0.024614240646362303, 0.02446790313720703, 0.024783071517944337, 0.024573951721191405, 0.0248603515625, 0.024653823852539062, 0.024557535171508788, 0.02500556755065918, 0.024653696060180665, 0.025676223754882814, 0.026434080123901367, 0.024757471084594727, 0.024647968292236328, 0.024586271286010743, 0.024666175842285157, 0.024594751358032227, 0.024585344314575194, 0.024669151306152343, 0.024431903839111327, 0.024516511917114257, 0.02465670394897461, 0.02469638442993164, 0.024792991638183593, 0.02461955261230469, 0.024585504531860352, 0.024832000732421877, 0.02459769630432129, 0.024451904296875, 0.024227840423583984, 0.024405727386474608, 0.024289247512817382, 0.024198783874511718, 0.024142559051513673, 0.024088544845581053, 0.02425200080871582, 0.024268287658691406, 0.024404895782470702, 0.024434688568115235, 0.024333471298217772, 0.024345439910888673, 0.024423551559448243, 0.02434752082824707, 0.02480892753601074, 0.026811935424804687, 0.025015327453613283, 0.024469472885131835, 0.024649503707885743, 0.02492367935180664, 0.024615615844726563, 0.024620351791381837, 0.02466476821899414, 0.02467430305480957, 0.024493215560913086, 
0.024657760620117188, 0.024515584945678712, 0.02426470375061035, 0.024551424026489257, 0.02428646469116211, 0.024304351806640624, 0.024170080184936524, 0.02418451118469238, 0.024103679656982423, 0.02416758346557617, 0.024298336029052733, 0.0242093448638916, 0.024143936157226563, 0.02408608055114746, 0.024123775482177735, 0.024483680725097656, 0.024641279220581055, 0.02471164894104004, 0.02470035171508789, 0.02472198486328125, 0.024827903747558593, 0.02473574447631836, 0.02485865592956543, 0.02478220748901367, 0.02465020751953125, 0.024610944747924805, 0.024681535720825196, 0.024895999908447267, 0.02466655921936035, 0.024723392486572265, 0.0249815673828125, 0.024846336364746095, 0.02497439956665039, 0.02482387161254883, 0.024947071075439455, 0.02610588836669922, 0.025106399536132813, 0.02497443199157715, 0.024756479263305663, 0.02475484848022461, 0.024829952239990235, 0.024745119094848632, 0.024761184692382813, 0.024720928192138673, 0.024820192337036133, 0.024700319290161133, 0.024731327056884765, 0.02461788749694824, 0.02500819206237793, 0.024852415084838868, 0.024624160766601563, 0.024710111618041992, 0.024849567413330078, 0.024724319458007814, 0.024635391235351564, 0.024796735763549804, 0.02469728088378906, 0.024691743850708006, 0.025012704849243166, 0.02474153518676758, 0.024616832733154296, 0.0247359676361084, 0.024616832733154296, 0.02456787109375, 0.02633401679992676, 0.02523868751525879, 0.024807552337646484, 0.02491663932800293, 0.024694847106933593, 0.024645631790161132, 0.024642879486083985, 0.02481385612487793, 0.024657695770263673, 0.024715904235839845, 0.02449171257019043, 0.024631616592407226, 0.025821184158325194, 0.02486662483215332, 0.024670400619506837, 0.024935455322265626, 0.02493539237976074, 0.024750080108642578, 0.02473574447631836, 0.025233407974243165, 0.024542783737182616, 0.024662464141845704, 0.02469593620300293, 0.02470387268066406, 0.02476406478881836, 0.024768352508544922, 0.024764928817749023, 0.024729120254516603, 0.02485910415649414, 0.025046432495117187, 0.02491763114929199, 0.024765024185180663, 0.02472313690185547, 0.024742591857910157, 0.025319456100463867, 0.024702943801879883, 0.02461676788330078, 0.02463968086242676, 0.024616960525512696, 0.025050239562988283, 0.024968063354492188, 0.024662208557128907, 0.024801088333129884, 0.024993791580200195, 0.02466716766357422, 0.024990688323974608, 0.024672256469726563, 0.025190208435058595, 0.0247891845703125, 0.02486403274536133, 0.02470710372924805, 0.02472755241394043, 0.024746688842773437, 0.02475142478942871, 0.024791391372680664, 0.02470044708251953, 0.02466076850891113, 0.024814783096313478, 0.024734176635742188, 0.02509164810180664, 0.02500227165222168, 0.024835968017578126, 0.024631839752197265, 0.02492560005187988, 0.024689376831054686, 0.024764415740966796, 0.024584192276000977, 0.0247459831237793, 0.02465171241760254, 0.025001440048217773, 0.02465827178955078, 0.02463327980041504, 0.024686208724975588, 0.024512256622314453, 0.024421024322509765, 0.02459676742553711, 0.024604671478271483, 0.024608768463134766, 0.025795936584472656, 0.025664159774780274, 0.02465177536010742, 0.024661855697631838, 0.02466217613220215, 0.024917024612426758, 0.024894432067871095, 0.024559520721435548, 0.02472764778137207, 0.025212928771972655, 0.025330848693847656, 0.024607391357421876, 0.024455263137817384, 0.0245883846282959, 0.024647680282592774, 0.024620864868164064, 0.024698400497436525, 0.025385087966918945, 0.024795679092407225, 0.02514508819580078, 0.024684480667114258, 0.024807519912719726, 
0.024715520858764647, 0.024623680114746093, 0.024798688888549806, 0.024625696182250977, 0.024610815048217775, 0.02453708839416504, 0.024573951721191405, 0.024721408843994142, 0.024525920867919923, 0.024488544464111327, 0.02493881607055664, 0.024805376052856445, 0.024649728775024415, 0.02454528045654297, 0.024569664001464844, 0.02446713638305664, 0.024689151763916017, 0.02468454360961914, 0.02464508819580078, 0.024719903945922852, 0.024638784408569335, 0.024507072448730467, 0.024792255401611327, 0.024557600021362303, 0.024648479461669922, 0.024803327560424804, 0.02511257553100586, 0.025217023849487305, 0.024840192794799806, 0.024591808319091798, 0.024615455627441406, 0.024731679916381834, 0.024760351181030274, 0.024707040786743163, 0.02472755241394043, 0.02481052780151367, 0.024572032928466797, 0.024531808853149414, 0.02470297622680664, 0.024647071838378908, 0.0246112003326416, 0.024611040115356444, 0.024827264785766603, 0.025027200698852538, 0.024589471817016602, 0.024389984130859375, 0.024281600952148437, 0.024061279296875, 0.024044191360473633, 0.024197120666503907, 0.024216800689697265, 0.02432694435119629, 0.0245166072845459, 0.024381439208984376, 0.0246824951171875, 0.02453651237487793, 0.02468307113647461, 0.024319999694824217, 0.02450432014465332, 0.024344575881958007, 0.025718591690063478, 0.02471993637084961, 0.024743904113769533, 0.024411264419555663, 0.026021984100341795, 0.024978271484375, 0.024502208709716797, 0.024256128311157227, 0.024314239501953126, 0.024503551483154296, 0.02456038475036621, 0.024473600387573242, 0.024403295516967773, 0.024300128936767577, 0.025299007415771485, 0.024163328170776367, 0.02412838363647461, 0.024195199966430665, 0.0242475528717041, 0.025168512344360353, 0.02425974464416504, 0.024277984619140627, 0.02426192092895508, 0.02426748847961426, 0.025450496673583983, 0.024823808670043947, 0.02454092788696289, 0.024540544509887696, 0.024410144805908203, 0.02461510467529297, 0.024488607406616212, 0.024993791580200195, 0.024683679580688477, 0.02484659194946289, 0.024809280395507814, 0.024992639541625977, 0.024711008071899413, 0.02493801689147949, 0.02501481628417969, 0.02482585525512695, 0.024838144302368165, 0.02469856071472168, 0.02495929527282715, 0.02496620750427246, 0.024903615951538085, 0.02498252868652344, 0.02503036880493164, 0.024916255950927734, 0.02500934410095215, 0.024713216781616212, 0.024761152267456055, 0.024806976318359375, 0.025087711334228515, 0.025053920745849608, 0.024661439895629883, 0.024684223175048828, 0.025404287338256837, 0.024809343338012695, 0.024625280380249023, 0.02474393653869629, 0.024688640594482423, 0.024786815643310547, 0.024897504806518554, 0.02546499252319336, 0.02475449562072754, 0.024713216781616212, 0.024727264404296876, 0.02485481643676758, 0.024899168014526366, 0.024710752487182616, 0.024687423706054687, 0.024557567596435546, 0.024681503295898438, 0.024599519729614258, 0.024713247299194337, 0.024698175430297852, 0.024619680404663086, 0.024653247833251953, 0.024637567520141603, 0.02470137596130371, 0.024969215393066405, 0.024596479415893553, 0.024845888137817383, 0.025100128173828125, 0.025717344284057617, 0.025024511337280272, 0.02481705665588379, 0.024760576248168947, 0.02465622329711914, 0.024853792190551758, 0.024879007339477538, 0.024832832336425782, 0.024901023864746095, 0.024694911956787108, 0.02466454315185547, 0.02482784080505371, 0.02481772804260254, 0.024863807678222657, 0.024662912368774412, 0.024751583099365235, 0.025021024703979492, 0.02489257621765137, 0.024705568313598634, 0.024571584701538085, 
0.024617599487304687, 0.024625152587890626, 0.024746208190917968, 0.0246474552154541, 0.024661632537841798, 0.024977792739868165, 0.02488444709777832, 0.024776832580566406, 0.024875680923461915, 0.0247459831237793, 0.02455865669250488, 0.02447455978393555, 0.02456166458129883, 0.0248668155670166, 0.024881120681762695, 0.02462713623046875, 0.0247193603515625, 0.02467030334472656, 0.02446950340270996, 0.024961023330688475, 0.025067520141601563, 0.02523494338989258, 0.025391807556152345, 0.02497443199157715, 0.02506012725830078, 0.024961151123046876, 0.024759584426879883, 0.024742591857910157, 0.024658079147338866, 0.024706560134887694, 0.024713600158691406, 0.024727359771728515, 0.024755903244018555, 0.024672544479370118, 0.024641759872436525, 0.024688575744628908, 0.024610559463500978, 0.0245980167388916, 0.024494911193847658, 0.02443878364562988, 0.024774816513061522, 0.025035648345947265, 0.024773120880126953, 0.02466864013671875, 0.024577951431274413, 0.02454252815246582, 0.024893375396728517, 0.024795904159545898, 0.024680543899536132, 0.024534400939941407, 0.024490304946899414, 0.024656192779541015, 0.024726560592651367, 0.02487196731567383, 0.02478220748901367, 0.024955455780029296, 0.024590560913085938, 0.024755775451660158, 0.0251680965423584, 0.02479046440124512, 0.024686304092407227, 0.024695648193359374, 0.024508256912231446, 0.024541215896606447, 0.024561792373657226, 0.024700927734375, 0.024832000732421877, 0.024702720642089844, 0.024535295486450195, 0.024532991409301756, 0.02471731185913086, 0.024936447143554686, 0.024793088912963866, 0.024571903228759767, 0.024648895263671877, 0.02465990447998047, 0.02468908882141113, 0.02468262481689453, 0.02474403190612793, 0.024569503784179686, 0.02456755256652832, 0.02465875244140625, 0.024586368560791015, 0.024676223754882813, 0.02461871910095215]",tokens/s,40.38532526513516,, @@ -3162,7 +3162,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 258.12 MiB is free. Process 95540 has 14.49 GiB memory in use. Of the allocated memory 14.32 GiB is allocated by PyTorch, and 53.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 258.12 MiB is free. Process 89024 has 14.49 GiB memory in use. Of the allocated memory 14.32 GiB is allocated by PyTorch, and 53.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -3285,7 +3285,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 162.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 120.12 MiB is free. Process 136048 has 14.62 GiB memory in use. Of the allocated memory 14.51 GiB is allocated by PyTorch, and 2.29 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 162.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 120.12 MiB is free. Process 129658 has 14.62 GiB memory in use. Of the allocated memory 14.51 GiB is allocated by PyTorch, and 2.29 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1034.825728,10975.379456,0.0,10580.13184,10162.029568,s,1,7.04631689453125,7.04631689453125,0.0,7.04631689453125,7.04631689453125,7.04631689453125,7.04631689453125,[7.04631689453125],,kWh,6.928084420822718e-06,7.568928127316525e-07,3.626947345999887e-06,1.1311924579554257e-05,,MB,1421.123584,11097.014272,0.0,10689.183744,9358.065152,s,10,1.2058576965332033,0.12058576965332032,0.0057852438894190296,0.12277670288085937,0.12617377014160158,0.12655339584350586,0.12685709640502932,"[0.10859286499023438, 0.12693302154541017, 0.12593856048583985, 0.12311558532714843, 0.11274288177490234, 0.12307100677490235, 0.11689859008789062, 0.11999337768554688, 0.12608940887451173, 0.12248239898681641]",tokens/s,2122.970237168039,kWh,3.523590479668807e-06,3.885736947396416e-07,2.3340480519758895e-06,6.246212226384339e-06,tokens/kWh,40984838.6064505,MB,1454.32576,11097.014272,0.0,10689.183744,9397.6704,s,10,31.172315673828127,3.1172315673828126,0.005840258230599865,3.116716918945312,3.1216128662109375,3.1267613891601562,3.130880207519531,"[3.117585693359375, 3.111726806640625, 3.111638671875, 3.12046875, 3.110628173828125, 3.119263427734375, 3.11584814453125, 3.11538671875, 3.117859375, 3.131909912109375]",tokens/s,20.21024060554282,kWh,9.067647783991333e-05,1.000147588033289e-05,6.018214084982452e-05,0.00016086009457007072,tokens/kWh,391644.6783671209,,s,630,31.16853237915038,0.049473860919286336,0.00038854894768234087,0.0494033432006836,0.049796352767944335,0.049995362854003904,0.05130727104187012,"[0.050460128784179686, 0.04980380630493164, 0.04919206237792969, 0.0491814079284668, 0.04902108764648438, 0.04915814590454102, 0.049065216064453125, 0.04898278427124023, 0.049199134826660156, 0.04906800079345703, 0.04930323028564453, 0.049395870208740235, 0.049067230224609376, 0.04914886474609375, 0.049059486389160155, 0.049500511169433596, 0.049356353759765624, 0.04949651336669922, 0.04918236923217773, 0.050678112030029296, 0.04948188781738281, 0.049555007934570315, 0.049341854095458985, 0.04965465545654297, 0.04933145523071289, 0.049207103729248046, 0.04929983901977539, 0.04924678421020508, 0.049141216278076175, 0.049293502807617184, 0.04922608184814453, 0.049220703125, 0.04938243103027344, 0.04938915252685547, 0.04924563217163086, 0.049423198699951175, 0.04964761734008789, 0.0498661117553711, 0.04938166427612305, 0.04931964874267578, 0.05000172805786133, 0.0502089614868164, 0.049652385711669925, 0.049543136596679686, 0.04940595245361328, 0.04939977645874023, 0.04940188980102539, 0.049280094146728515, 0.04941712188720703, 0.049495136260986325, 0.049683361053466796, 0.049598464965820314, 0.049772544860839846, 0.04962076950073242, 0.050151649475097655, 
0.04963670349121094, 0.04978915023803711, 0.04954294586181641, 0.049879711151123045, 0.04974147033691406, 0.04962543869018555, 0.04969244766235351, 0.04970108795166016, 0.051871742248535156, 0.050220863342285156, 0.04935286331176758, 0.049258529663085936, 0.04914575958251953, 0.04926473617553711, 0.049200191497802734, 0.0492545280456543, 0.04923376083374023, 0.04916118240356445, 0.04910492706298828, 0.04961276626586914, 0.04967833709716797, 0.049240062713623044, 0.04917555236816406, 0.04916121673583984, 0.04900233459472656, 0.049167552947998044, 0.0493864631652832, 0.04923798370361328, 0.04946102523803711, 0.049565216064453126, 0.049259231567382815, 0.049162174224853514, 0.049188926696777345, 0.049209342956542966, 0.04911513519287109, 0.04942438507080078, 0.049235969543457034, 0.0493007698059082, 0.04923043060302734, 0.049200321197509764, 0.049197952270507814, 0.04942444610595703, 0.049121280670166016, 0.04918067169189453, 0.04917657470703125, 0.049291263580322264, 0.04926668930053711, 0.049240062713623044, 0.049237598419189454, 0.04945315170288086, 0.049699134826660156, 0.04950991821289062, 0.04923209762573242, 0.049481983184814456, 0.04921753692626953, 0.0493199348449707, 0.04914176177978516, 0.049212928771972655, 0.04925900650024414, 0.049245567321777345, 0.049344959259033205, 0.04955353546142578, 0.04959196853637695, 0.0494159049987793, 0.04930963134765625, 0.04932275390625, 0.04945305633544922, 0.04950764846801758, 0.04951520156860351, 0.04958003234863281, 0.050753280639648436, 0.05113177490234375, 0.050012256622314455, 0.04933071899414063, 0.04909657669067383, 0.04908863830566406, 0.04917619323730469, 0.04918438339233398, 0.049150718688964846, 0.04933222579956055, 0.04908662414550781, 0.04926857757568359, 0.049014785766601565, 0.049152000427246094, 0.04923116683959961, 0.04904006576538086, 0.04916428756713867, 0.04906982421875, 0.04910617446899414, 0.04924262237548828, 0.049365505218505856, 0.04941955184936524, 0.04929404830932617, 0.049329185485839845, 0.04926784133911133, 0.049173599243164064, 0.04914662551879883, 0.049108097076416016, 0.049307937622070315, 0.04944569778442383, 0.04947903823852539, 0.049183135986328126, 0.049086463928222655, 0.04905984115600586, 0.04941209411621094, 0.04915139389038086, 0.04949660873413086, 0.04916844940185547, 0.04915609741210938, 0.04939980697631836, 0.04950630569458008, 0.04943462371826172, 0.049459201812744144, 0.04952678298950195, 0.04961280059814453, 0.049434593200683594, 0.04942851257324219, 0.04936880111694336, 0.0494431037902832, 0.04960617446899414, 0.04942076873779297, 0.049465343475341796, 0.04953395080566406, 0.049509376525878904, 0.049729534149169925, 0.04971724700927734, 0.04940390396118164, 0.04931923294067383, 0.04940047836303711, 0.049554622650146485, 0.04946211242675781, 0.049516159057617186, 0.04965801620483398, 0.05038463973999024, 0.051326526641845706, 0.04979609680175781, 0.04924588775634765, 0.04942038345336914, 0.049066047668457034, 0.04917270278930664, 0.049176513671875, 0.04911513519287109, 0.049168254852294924, 0.049145984649658206, 0.049176128387451175, 0.049242462158203125, 0.04915980911254883, 0.04930403137207031, 0.04931103897094727, 0.049342334747314455, 0.04958448028564453, 0.04954102325439453, 0.049281089782714844, 0.04965836715698242, 0.04938547134399414, 0.04953497695922852, 0.04945129776000977, 0.049479358673095705, 0.04980534362792969, 0.04986646270751953, 0.04959433746337891, 0.04951513671875, 0.04950803375244141, 0.04939974212646484, 0.049240127563476566, 0.04927078247070313, 0.04930508804321289, 
0.04938598251342773, 0.0496517105102539, 0.04944486236572266, 0.04923392105102539, 0.04985020828247071, 0.049551422119140626, 0.04969071960449219, 0.04947353744506836, 0.0497806396484375, 0.049500255584716796, 0.04998758316040039, 0.04956774520874024, 0.04955091094970703, 0.050573760986328126, 0.049649024963378904, 0.04947622299194336, 0.049584095001220706, 0.04924623870849609, 0.04974095916748047, 0.04933718490600586, 0.04946236801147461, 0.049623966217041016, 0.04965785598754883, 0.04947763061523437, 0.04976435089111328, 0.0494815673828125, 0.049653854370117184, 0.04990278244018555, 0.0498205451965332, 0.04941209411621094, 0.051748382568359376, 0.05027068710327148, 0.04927449417114258, 0.04914806365966797, 0.04905801773071289, 0.049110912322998045, 0.04903033447265625, 0.04923078536987305, 0.049254016876220705, 0.04920956802368164, 0.049275039672851566, 0.049145503997802736, 0.04908886337280274, 0.04905779266357422, 0.0491069450378418, 0.04900454330444336, 0.049031169891357425, 0.04912332916259766, 0.04919839859008789, 0.04915679931640625, 0.04934041595458984, 0.04946239852905274, 0.04921379089355469, 0.049164798736572264, 0.04940803146362305, 0.04926816177368164, 0.04905017471313477, 0.04913484954833985, 0.049105567932128905, 0.049172542572021485, 0.04917660903930664, 0.04921123123168945, 0.049100959777832034, 0.04922278213500977, 0.04905868911743164, 0.049285118103027346, 0.04927056121826172, 0.04916633605957031, 0.04921571350097656, 0.04936272048950195, 0.04933222579956055, 0.04961507034301758, 0.04945510482788086, 0.04987603378295898, 0.049511070251464846, 0.04947177505493164, 0.049498111724853515, 0.04925417709350586, 0.04935702514648437, 0.04951859283447266, 0.049436511993408205, 0.04968182373046875, 0.04961561584472656, 0.04945078277587891, 0.049574111938476564, 0.04947916793823242, 0.04960009765625, 0.04960255813598633, 0.0493917121887207, 0.04970927810668945, 0.04959020614624023, 0.04969036865234375, 0.049615776062011716, 0.051260128021240234, 0.05005219268798828, 0.04910787200927735, 0.0501739501953125, 0.049094432830810546, 0.049127647399902344, 0.04922345733642578, 0.04940108871459961, 0.04925040054321289, 0.04924095916748047, 0.049258495330810545, 0.04937862396240234, 0.049498817443847654, 0.049188865661621096, 0.04915785598754883, 0.049111328125, 0.04937286376953125, 0.04923337554931641, 0.04936486434936523, 0.04951897430419922, 0.04941382217407227, 0.049361824035644535, 0.049324031829833984, 0.04931379318237305, 0.049192958831787106, 0.0492786865234375, 0.049324321746826175, 0.0492564468383789, 0.04925235366821289, 0.04926668930053711, 0.04905574417114258, 0.04930559921264648, 0.049127422332763675, 0.04929724884033203, 0.049281185150146484, 0.049452064514160156, 0.04922617721557617, 0.04939193725585937, 0.04937750244140625, 0.04965776062011719, 0.04977993774414063, 0.049689247131347654, 0.04962736129760742, 0.04956159973144531, 0.04961075210571289, 0.04969043350219727, 0.04957408142089844, 0.04961248016357422, 0.049731201171875, 0.0496769905090332, 0.049565696716308595, 0.04950537490844727, 0.0497017936706543, 0.04966195297241211, 0.049551551818847656, 0.049514305114746096, 0.049559040069580076, 0.04970137786865234, 0.0510618896484375, 0.04994547271728516, 0.049838081359863284, 0.04979916763305664, 0.04973263931274414, 0.05154457473754883, 0.05004185485839844, 0.0499455680847168, 0.049858558654785154, 0.049202239990234375, 0.049363327026367185, 0.04911980819702148, 0.04913356781005859, 0.0492564468383789, 0.049375232696533204, 0.0490937614440918, 0.04926348876953125, 
0.049212543487548825, 0.04910579299926758, 0.0492564468383789, 0.04936198425292969, 0.04915091323852539, 0.04916595077514648, 0.049017215728759764, 0.04930915069580078, 0.04924265670776367, 0.04973283386230469, 0.049339168548583986, 0.04939571380615235, 0.04947558212280274, 0.04934656143188477, 0.049235969543457034, 0.04951638412475586, 0.04934672164916992, 0.04937900924682617, 0.049969470977783204, 0.049798656463623046, 0.04927542495727539, 0.04926665496826172, 0.049225727081298826, 0.049642879486083986, 0.04918540954589844, 0.04941164779663086, 0.04932163238525391, 0.049576736450195315, 0.04944806289672852, 0.049417247772216795, 0.04938313674926758, 0.049500160217285157, 0.04935059356689453, 0.049411937713623046, 0.04929769515991211, 0.04938067245483398, 0.049351009368896484, 0.049379745483398435, 0.049441856384277345, 0.049344993591308596, 0.05028847885131836, 0.04973590469360351, 0.04945888137817383, 0.04949059295654297, 0.04938143920898438, 0.04968236923217773, 0.04941164779663086, 0.04960496139526367, 0.04947574234008789, 0.04941209411621094, 0.04936816024780273, 0.05148672103881836, 0.04995072174072265, 0.049196990966796875, 0.04914182281494141, 0.04938726425170899, 0.04929951858520508, 0.04919929504394531, 0.049210559844970705, 0.049164833068847655, 0.04939158248901367, 0.04911545562744141, 0.04928102493286133, 0.04923542404174805, 0.04922832107543945, 0.04924540710449219, 0.049294239044189454, 0.04909564971923828, 0.049275135040283205, 0.04920182418823242, 0.04946739196777344, 0.04960665512084961, 0.049643199920654295, 0.049543487548828126, 0.04941625595092773, 0.049417377471923825, 0.049385791778564454, 0.04913401412963867, 0.04923600006103516, 0.049332000732421874, 0.0492606086730957, 0.04920131301879883, 0.049274398803710935, 0.04923235321044922, 0.0492308464050293, 0.04920409774780273, 0.04925766372680664, 0.04923897552490234, 0.049317760467529295, 0.04947350311279297, 0.04951830291748047, 0.04947398376464844, 0.049704959869384766, 0.05000806427001953, 0.049606304168701175, 0.04924777603149414, 0.049507137298583984, 0.049501792907714844, 0.04932767868041992, 0.04958089447021485, 0.049616222381591794, 0.04981987380981445, 0.04948854446411133, 0.049471263885498044, 0.049516544342041016, 0.04952217483520508, 0.04940812683105469, 0.04937356948852539, 0.04956076812744141, 0.049498943328857424, 0.04963273620605469, 0.04987516784667969, 0.04967388916015625, 0.04976873779296875, 0.05205401611328125, 0.05026406478881836, 0.04940185546875, 0.049436351776123044, 0.049508575439453126, 0.049401790618896484, 0.04928435134887695, 0.049170398712158205, 0.04925110244750976, 0.05035404968261719, 0.04985065460205078, 0.049186817169189455, 0.04924620819091797, 0.04968790435791016, 0.04928691101074219, 0.049436832427978514, 0.049257217407226564, 0.04926259231567383, 0.049188865661621096, 0.04949606323242187, 0.04938137435913086, 0.049565696716308595, 0.04939932632446289, 0.04929334259033203, 0.04952844619750976, 0.04924208068847656, 0.04934467315673828, 0.049373886108398435, 0.049153217315673826, 0.04925084686279297, 0.04940972900390625, 0.0492652473449707, 0.049344512939453126, 0.049620990753173826, 0.04926873779296875, 0.049282974243164065, 0.04915139389038086, 0.049490623474121094, 0.04931084823608398, 0.04978982543945312, 0.04963087844848633, 0.049610431671142576, 0.04945772933959961, 0.049549407958984375, 0.04954025650024414, 0.04956963348388672, 0.049296382904052735, 0.04934604644775391, 0.04923766326904297, 0.04933241653442383, 0.04927145767211914, 0.049686527252197264, 0.04948160171508789, 
0.04940732955932617, 0.04949414443969727, 0.0495513916015625, 0.049422462463378905, 0.04953107070922851, 0.04937350463867188, 0.04952259063720703, 0.0497501106262207, 0.04951244735717773, 0.04943167877197266, 0.052706592559814455, 0.05042403030395508, 0.049576446533203124, 0.04954111862182617, 0.049301502227783206, 0.04938924789428711, 0.04929977416992187, 0.0493704948425293, 0.050090625762939454, 0.04949769592285156, 0.04938742446899414, 0.049621505737304686, 0.04942233657836914, 0.04932825469970703, 0.04940278244018555, 0.04941683197021484, 0.049359199523925784, 0.049469280242919925, 0.04953104019165039, 0.049777759552001956, 0.04985948944091797, 0.04983603286743164, 0.04970905685424805, 0.04964556884765625, 0.04956108856201172, 0.049562110900878906, 0.04950960159301758, 0.04962284851074219, 0.049695713043212894, 0.049635326385498044, 0.04947148895263672, 0.04940185546875, 0.04976844787597656, 0.049917953491210934, 0.04956905746459961, 0.04984707260131836, 0.050421630859375, 0.04969881439208984, 0.04953251266479492, 0.049506782531738285, 0.04972505569458008, 0.04975040054321289, 0.049632640838623045, 0.04988582229614258, 0.04954876708984375, 0.04960720062255859, 0.04940595245361328, 0.049442817687988284, 0.04952473449707031, 0.04968835067749024, 0.049737247467041015, 0.04959507369995117, 0.049522335052490235, 0.04991827011108398, 0.04943628692626953, 0.04945961761474609, 0.04975523376464844, 0.04991244888305664, 0.051052833557128904, 0.049673633575439455, 0.04988988876342773, 0.049887233734130856, 0.04981350326538086]",tokens/s,20.212693762296833,, @@ -3329,7 +3329,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 24.12 MiB is free. Process 115487 has 14.71 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 141.44 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 24.12 MiB is free. Process 108970 has 14.71 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 141.44 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,837.287936,14689.435648,0.0,14294.188032,14284.158464,s,1,7.4896943359375,7.4896943359375,0.0,7.4896943359375,7.4896943359375,7.4896943359375,7.4896943359375,[7.4896943359375],,kWh,1.4267168679187611e-05,1.560477292521457e-06,7.197227980010323e-06,2.3024873951719392e-05,,MB,1159.585792,14995.61984,0.0,14587.789312,14512.892416,s,10,2.1156253356933594,0.21156253356933594,0.0047781127013005464,0.21339472198486328,0.21463478698730468,0.2157583038330078,0.2166571173095703,"[0.198820068359375, 0.21002482604980469, 0.20845228576660157, 0.21333978271484375, 0.21344966125488282, 0.21438511657714843, 0.2128421173095703, 0.21688182067871092, 0.2136397705078125, 0.21378988647460936]",tokens/s,1210.0441211444486,kWh,6.3592148394028245e-06,7.012992785076774e-07,4.21556496665201e-06,1.1276079084562512e-05,tokens/kWh,22702926.973124564,MB,1182.142464,15100.47744,0.0,14692.646912,14646.153216,s,10,43.51311914062501,4.351311914062501,0.006574783603022592,4.351512939453125,4.359264599609375,4.360290502929688,4.361111225585938,"[4.33798828125, 4.3467451171875, 4.3467138671875, 4.35394970703125, 4.35903662109375, 4.35664697265625, 4.36131640625, 4.350025390625, 4.3476962890625, 4.35300048828125]",tokens/s,14.47839209053196,kWh,0.00012701867365351295,1.4010532308286438e-05,8.433970032194869e-05,0.00022536890628374807,tokens/kWh,279541.6680980853,,s,630,43.509153587341366,0.0690621485513354,0.0005463833746282887,0.06900400161743164,0.06953564147949219,0.06970079917907715,0.07210129539489747,"[0.07258521270751953, 0.06911984252929687, 0.0683496322631836, 0.06819667053222657, 0.0684606704711914, 0.0682718734741211, 0.06827760314941406, 0.0684728012084961, 0.06824038696289063, 0.06830226898193359, 0.06852806091308594, 0.06832998657226562, 0.06824969482421875, 0.06847283172607421, 0.06864662170410156, 0.06875552368164063, 0.06874960327148437, 0.06874476623535156, 0.06857766723632812, 0.06848863983154296, 0.06853004455566407, 0.06851612854003906, 0.06860022735595703, 0.06840697479248047, 0.06888070678710938, 0.06847923278808593, 0.06859478759765625, 0.06850012969970704, 0.06882118225097657, 0.06887612915039062, 0.06887369537353516, 0.06878873443603516, 0.06865853118896484, 0.0689179229736328, 0.06872064208984376, 0.06883676910400391, 0.06883388519287109, 0.06876096343994141, 0.06901209259033203, 0.06890393829345703, 0.06879676818847656, 0.06892726135253906, 0.06908956909179688, 0.06896256256103515, 0.06917769622802734, 0.06915641784667968, 0.06898531341552734, 0.06902777862548828, 0.06900534057617187, 0.06899001312255859, 0.0690060806274414, 0.06899321746826172, 0.06906620788574219, 0.06912464141845703, 0.0690802230834961, 0.06911430358886719, 
0.06930409240722656, 0.06935616302490234, 0.069212158203125, 0.06920588684082031, 0.06909964752197266, 0.06915299224853516, 0.0694188461303711, 0.07203292846679688, 0.06919577789306641, 0.06860800170898437, 0.06844585418701171, 0.06844214630126953, 0.06841999816894531, 0.06843897247314454, 0.06843244934082031, 0.06820735931396485, 0.06861775970458985, 0.06832550048828125, 0.06852352142333984, 0.06844822692871094, 0.06868860626220703, 0.06915782165527344, 0.06906355285644532, 0.06898073577880859, 0.06910140991210938, 0.06886147308349609, 0.06884153747558594, 0.0686370849609375, 0.06852828979492187, 0.06879424285888672, 0.06846227264404296, 0.06878572845458984, 0.06905846405029296, 0.06874018859863282, 0.06907459259033204, 0.06926486206054687, 0.06904096221923828, 0.06891065979003906, 0.06924256134033203, 0.06876982116699219, 0.0689662094116211, 0.06892361450195313, 0.06891500854492187, 0.06879235076904297, 0.06863481903076171, 0.06878585815429687, 0.06879875183105469, 0.06882982635498047, 0.06886547088623046, 0.06926102447509766, 0.06913715362548828, 0.06922988891601563, 0.06908329772949219, 0.06908089447021484, 0.06925804901123046, 0.06916710662841796, 0.06939238739013671, 0.06890086364746094, 0.06916915130615234, 0.06901302337646484, 0.06894854736328125, 0.06946192169189454, 0.0690708465576172, 0.06947020721435547, 0.06950911712646485, 0.0691937255859375, 0.06998226928710938, 0.06947833251953126, 0.0695902099609375, 0.06928272247314453, 0.07193395233154297, 0.06917743682861328, 0.06852294158935547, 0.06883599853515625, 0.06840780639648437, 0.06867254638671876, 0.06846121978759766, 0.06855897521972656, 0.068523681640625, 0.06881260681152344, 0.06859961700439453, 0.06848313903808594, 0.06858812713623047, 0.06872998046875, 0.06949903869628907, 0.06934796905517578, 0.06880659484863282, 0.06860822296142578, 0.06840729522705079, 0.06855270385742188, 0.0685277099609375, 0.068470947265625, 0.06841545867919922, 0.06864530944824218, 0.06864185333251953, 0.06855500793457031, 0.06872930908203125, 0.06871363067626954, 0.068901123046875, 0.06947235107421874, 0.06902559661865235, 0.06902352142333984, 0.06885270690917969, 0.06883737945556641, 0.06871670532226562, 0.06896419525146484, 0.06873257446289062, 0.06912556457519531, 0.07008528137207032, 0.06873078155517579, 0.06881734466552734, 0.06898854064941407, 0.06909503936767578, 0.06944847869873047, 0.06961724853515625, 0.06898876953125, 0.06894992065429688, 0.06912873840332032, 0.06905606079101563, 0.069421630859375, 0.069119873046875, 0.06924057769775391, 0.06933324432373048, 0.06898489379882812, 0.0689697265625, 0.06883510589599609, 0.0690206069946289, 0.06953705596923829, 0.06995833587646484, 0.06930850982666016, 0.06919158172607422, 0.06910157012939454, 0.06945315551757812, 0.07210189056396485, 0.06947225952148438, 0.06875958251953125, 0.06831919860839844, 0.06831839752197266, 0.06841222381591797, 0.06851939392089844, 0.06883372497558594, 0.06869551849365234, 0.068880126953125, 0.06829740905761719, 0.06849350738525391, 0.0689392318725586, 0.06862681579589844, 0.06912220764160157, 0.06913433837890624, 0.06914252471923828, 0.06882099151611328, 0.06854783630371093, 0.06867775726318359, 0.06864768218994141, 0.06876338958740234, 0.06894127655029297, 0.06849932861328124, 0.06849520111083984, 0.06862726593017578, 0.06875337219238281, 0.06927110290527344, 0.06924931335449219, 0.06924697875976563, 0.06953794860839843, 0.06936914825439452, 0.06947235107421874, 0.06915312194824219, 0.06874940490722656, 0.06896044921875, 0.06890444946289062, 0.0693623046875, 
0.07014940643310547, 0.06887702178955078, 0.06887833404541016, 0.06893977355957032, 0.06923375701904297, 0.06997062683105469, 0.06942313385009766, 0.06927788543701172, 0.06965657806396484, 0.06939260864257812, 0.06901023864746093, 0.06932784271240235, 0.06913027191162109, 0.06928598022460937, 0.06939225769042968, 0.06895961761474609, 0.06912882995605468, 0.06899712371826172, 0.06923673248291015, 0.06984060668945312, 0.0696568603515625, 0.06935552215576171, 0.06938777923583984, 0.06957011413574218, 0.06936396789550782, 0.07218790435791016, 0.06934937286376953, 0.06856841278076171, 0.06848310089111329, 0.0684303970336914, 0.06856636810302734, 0.06867449951171875, 0.06885968017578124, 0.06856716918945313, 0.06872013092041016, 0.06890534210205078, 0.06892134094238281, 0.0688046417236328, 0.06892518615722656, 0.06926716613769532, 0.06948095703125, 0.06938361358642578, 0.06893753814697266, 0.06864498901367187, 0.06864959716796876, 0.06873017883300782, 0.06867574310302735, 0.06936547088623046, 0.06878899383544922, 0.0687022705078125, 0.06856729888916016, 0.06907469177246094, 0.06948047637939453, 0.06935343933105469, 0.06959017944335938, 0.06951760101318359, 0.06901529693603516, 0.06941104125976562, 0.0688213119506836, 0.068847900390625, 0.06878617858886718, 0.069010498046875, 0.06945465850830078, 0.06910979461669922, 0.06894831848144531, 0.06884281921386719, 0.06949078369140625, 0.06950252532958984, 0.06938873291015625, 0.0694029769897461, 0.0692384033203125, 0.06931251525878906, 0.06961190032958985, 0.06958080291748046, 0.06939238739013671, 0.06950508880615235, 0.06946364593505859, 0.06921619415283203, 0.06900777435302734, 0.06914646148681641, 0.06961138916015625, 0.06953548431396485, 0.07015888214111328, 0.06960332489013672, 0.06912614440917969, 0.06954105377197266, 0.06985779571533203, 0.06950739288330078, 0.07209983825683594, 0.06916896057128906, 0.06849513244628906, 0.06857068634033203, 0.0685184326171875, 0.06871059417724609, 0.06898665618896484, 0.06856694030761719, 0.06848966217041015, 0.06862643432617188, 0.06918962860107422, 0.06866102600097657, 0.068609375, 0.0689438705444336, 0.06970681762695312, 0.06940643310546875, 0.06928336334228516, 0.06889859008789062, 0.06880131530761718, 0.06851583862304687, 0.0686913604736328, 0.0691226577758789, 0.06872268676757813, 0.06880255889892578, 0.06857500457763673, 0.06872406768798828, 0.06894297790527344, 0.06898252868652344, 0.06934102630615234, 0.06986972808837891, 0.06929612731933593, 0.06933708953857422, 0.06877318572998047, 0.06916780853271484, 0.06877184295654297, 0.06877597045898437, 0.06901942443847656, 0.06880480194091797, 0.06905840301513672, 0.06901190185546875, 0.06912790679931641, 0.06932892608642578, 0.06959718322753906, 0.06979373168945313, 0.06948067474365234, 0.06926233673095702, 0.06967174530029296, 0.06935942077636718, 0.06891334533691407, 0.06893977355957032, 0.06942720031738281, 0.06931199645996093, 0.06901811218261719, 0.06908236694335937, 0.06926988983154297, 0.06952566528320313, 0.06967874908447266, 0.06972473907470703, 0.0694205093383789, 0.06924931335449219, 0.06936716461181641, 0.06951213073730468, 0.07014988708496094, 0.07210371398925781, 0.06939670562744141, 0.06869606781005859, 0.06883328247070312, 0.06846380615234375, 0.06862726593017578, 0.06890716552734374, 0.0687511978149414, 0.06866051483154297, 0.06872959899902344, 0.0688333740234375, 0.06909939575195312, 0.06861561584472656, 0.06872940826416016, 0.06956646728515625, 0.06904994964599609, 0.06926284790039063, 0.06876060485839844, 0.06875945281982422, 
0.06877696228027344, 0.06909836578369141, 0.06889686584472657, 0.06882713317871093, 0.06906674957275391, 0.06891725158691406, 0.06896422576904297, 0.06926051330566406, 0.0696278076171875, 0.0691435546875, 0.06939852905273437, 0.06967203521728516, 0.06924150085449218, 0.0690660171508789, 0.06904729461669921, 0.06904624176025391, 0.06887382507324219, 0.06931670379638671, 0.06922016143798829, 0.06904637145996094, 0.06950691223144531, 0.06945645141601563, 0.0693759994506836, 0.06954598236083985, 0.06931046295166016, 0.06948834991455079, 0.0696404800415039, 0.06928998565673829, 0.06938832092285156, 0.06949600219726562, 0.06947702026367188, 0.06942428588867187, 0.06933193969726563, 0.06949478149414062, 0.0694307861328125, 0.06919379425048829, 0.06925299072265625, 0.06945439910888672, 0.06962995147705078, 0.0695767059326172, 0.06966668701171876, 0.0696562271118164, 0.06934575653076172, 0.0691443862915039, 0.07213875579833984, 0.06927769470214844, 0.06873827362060547, 0.06869478607177734, 0.06858675384521484, 0.068552734375, 0.0691136932373047, 0.06899203491210938, 0.06880863952636719, 0.06870748901367188, 0.06850745391845703, 0.06854550170898438, 0.06864806365966797, 0.0687809295654297, 0.06926937866210937, 0.0690458526611328, 0.06880441284179688, 0.0688486099243164, 0.0687891845703125, 0.06859449768066406, 0.06857933044433594, 0.0688222427368164, 0.06890364837646484, 0.06865312194824219, 0.06865715026855469, 0.06855455780029297, 0.06863276672363282, 0.0689656982421875, 0.06932921600341797, 0.0691756820678711, 0.06864691162109375, 0.06900505828857421, 0.06882329559326172, 0.06893567657470703, 0.06884259033203124, 0.06910620880126953, 0.06886243438720703, 0.06883932495117187, 0.06887606048583984, 0.06911385345458984, 0.06906082916259766, 0.06924854278564453, 0.06930867004394531, 0.06958512115478516, 0.06919577789306641, 0.06897869110107421, 0.06917113494873046, 0.06912598419189453, 0.06891487884521484, 0.06890147399902344, 0.06912185668945313, 0.06906594848632812, 0.06913046264648437, 0.06896742248535156, 0.06918131256103516, 0.0694879379272461, 0.0697798080444336, 0.0697838363647461, 0.06913148498535156, 0.0690749740600586, 0.0689988784790039, 0.06952384185791016, 0.07013209533691406, 0.07213459014892579, 0.0691500473022461, 0.06855958557128906, 0.06860800170898437, 0.06835318756103516, 0.06856585693359375, 0.06845439910888672, 0.06848102569580078, 0.06839705657958985, 0.06851299285888672, 0.06839785766601562, 0.06854783630371093, 0.06849120330810547, 0.06880662536621093, 0.06949542236328125, 0.06931407928466797, 0.06893949127197266, 0.0685902099609375, 0.06840278625488282, 0.06834867095947265, 0.06881011199951172, 0.06868409729003906, 0.06859715270996093, 0.06859993743896485, 0.06864771270751953, 0.06851789093017578, 0.06871449279785156, 0.06875299072265625, 0.06898902130126953, 0.06978797149658203, 0.06911318206787109, 0.06900726318359375, 0.06903679656982421, 0.06877932739257812, 0.06884528350830078, 0.06915376281738281, 0.06966995239257813, 0.06906771087646485, 0.06887811279296875, 0.06893180847167969, 0.06900294494628906, 0.06915309143066406, 0.06917446136474609, 0.06947840118408204, 0.06940534210205078, 0.06937385559082031, 0.06910182189941406, 0.06899097442626953, 0.06900940704345702, 0.06920716857910156, 0.06940354919433593, 0.06946412658691406, 0.06892329406738282, 0.06885158538818359, 0.06882726287841796, 0.06902547454833985, 0.06933126068115235, 0.06947433471679687, 0.06938950347900391, 0.06922217559814453, 0.06967005157470703, 0.0694557113647461, 0.06923849487304687, 
0.07238857269287109, 0.06957615661621094, 0.06867407989501953, 0.06846876525878906, 0.06876156616210938, 0.06865446472167969, 0.06875523376464844, 0.06870425415039062, 0.06840013122558594, 0.06836128234863281, 0.06920829010009766, 0.06871711730957031, 0.06846463775634766, 0.06882508850097656, 0.0694939193725586, 0.06966480255126953, 0.06904710388183594, 0.0686612777709961, 0.0684849624633789, 0.06853644561767579, 0.06894127655029297, 0.06853817749023437, 0.06861692810058594, 0.0684933090209961, 0.06860514831542969, 0.06875398254394531, 0.06891248321533203, 0.06882601928710938, 0.06928173065185547, 0.06976675415039063, 0.06939894104003906, 0.06906473541259765, 0.06881056213378907, 0.06919574737548828, 0.06888169860839843, 0.06892845153808594, 0.06891519927978515, 0.06879567718505859, 0.06899942779541016, 0.06894544219970702, 0.06923769378662109, 0.069165283203125, 0.06920521545410156, 0.0697636489868164, 0.06970780944824219, 0.06938985443115234, 0.06917369842529297, 0.06975897979736329, 0.06891292572021485, 0.06925539398193359, 0.06910157012939454, 0.06902579498291016, 0.06904627227783203, 0.06911590576171875, 0.06919497680664062, 0.06912489318847656, 0.06933299255371093, 0.06981017303466797, 0.06969344329833985, 0.06923878479003906, 0.06908889770507813, 0.06931289672851562, 0.06946931457519531]",tokens/s,14.479711694122551,, @@ -3374,7 +3374,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 768.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 286.12 MiB is free. Process 86294 has 14.46 GiB memory in use. Of the allocated memory 14.30 GiB is allocated by PyTorch, and 41.77 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 768.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 286.12 MiB is free. Process 79941 has 14.46 GiB memory in use. Of the allocated memory 14.30 GiB is allocated by PyTorch, and 41.77 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -3417,7 +3417,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 172.12 MiB is free. Process 100973 has 14.57 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 14.15 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 172.12 MiB is free. Process 105921 has 14.57 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 14.15 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,783.03232,14129.496064,0.0,13734.248448,13728.777216,s,1,7.25836767578125,7.25836767578125,0.0,7.25836767578125,7.25836767578125,7.25836767578125,7.25836767578125,[7.25836767578125],,kWh,6.925197970815589e-06,7.436715488168721e-07,3.4991694660008177e-06,1.1168038985633278e-05,,MB,1233.65376,14144.176128,0.0,13736.3456,13487.53408,s,10,1.6703685150146486,0.16703685150146486,0.004848025152046232,0.166406494140625,0.1731939682006836,0.17395778579711915,0.17456883987426758,"[0.1601844482421875, 0.16687123107910157, 0.16592703247070312, 0.16594175720214843, 0.1653955841064453, 0.17062611389160157, 0.17302423095703126, 0.16906687927246095, 0.1747216033935547, 0.15860963439941406]",tokens/s,1532.5959373567034,kWh,4.842775066940069e-06,5.340705179180566e-07,3.2104898179343525e-06,8.587335402792478e-06,tokens/kWh,29811342.86623444,MB,1266.315264,14144.176128,0.0,13736.3456,13661.262848,s,10,38.00502197265625,3.800502197265625,0.0021377293039880367,3.8004615478515626,3.802883837890625,3.8029917236328123,3.8030780322265625,"[3.797611572265625, 3.799089599609375, 3.79715478515625, 3.800767578125, 3.7989755859375, 3.802486083984375, 3.80285986328125, 3.800155517578125, 3.803099609375, 3.80282177734375]",tokens/s,16.5767566310913,kWh,0.00011113897474181071,1.225885846887993e-05,7.404023864926565e-05,0.00019743807185995625,tokens/kWh,319087.3948803866,,s,630,38.00184012985231,0.06032038115849571,0.00030563377178528095,0.06028327941894531,0.060649496841430665,0.06078943614959717,0.06154451679229736,"[0.061326976776123046, 0.06069696044921875, 0.05997571182250976, 0.05996540832519531, 0.05980716705322266, 0.05988336181640625, 0.05981798553466797, 0.059936576843261716, 0.05995203018188477, 0.06001049423217773, 0.06030092620849609, 0.060076446533203126, 0.06005942535400391, 0.06000864028930664, 0.05992784118652344, 0.060072158813476564, 0.06029129409790039, 0.060499969482421874, 0.060373279571533205, 0.06006143951416015, 0.060006656646728516, 0.05996748733520508, 0.059934688568115235, 0.060106273651123046, 0.060013057708740235, 0.060053150177001954, 0.059961151123046875, 0.05996723175048828, 0.060147552490234374, 0.06008329772949219, 0.060262462615966794, 0.060464927673339844, 0.060391422271728515, 0.06104643249511719, 0.060432415008544925, 0.06025564956665039, 0.06032681655883789, 0.06093619155883789, 0.060047359466552735, 0.06011423873901367, 0.060121791839599606, 0.060200065612792966, 0.06007628631591797, 0.06010275268554687, 0.06027727890014648, 0.06037664031982422, 0.060262847900390625, 0.06050406265258789, 0.06056735992431641, 0.06038256072998047, 0.060424190521240234, 0.060897537231445316, 0.060754081726074216, 0.06042464065551758, 
0.060372577667236325, 0.0603939208984375, 0.060385246276855466, 0.06107353591918945, 0.060340255737304685, 0.060596065521240236, 0.06022553634643555, 0.060540351867675785, 0.0604218864440918, 0.061515777587890626, 0.060631038665771485, 0.06007807922363281, 0.06035865783691406, 0.0599818229675293, 0.05992451095581055, 0.05990979385375977, 0.05995100784301758, 0.06000051116943359, 0.05997395324707031, 0.059983711242675784, 0.060208480834960935, 0.06007875061035156, 0.06015180969238281, 0.06005347061157226, 0.060295230865478514, 0.06056547164916992, 0.06062694549560547, 0.06047110366821289, 0.06025558471679687, 0.060257118225097654, 0.06012527847290039, 0.05998729705810547, 0.059963424682617186, 0.06005574417114258, 0.06000060653686524, 0.05994249725341797, 0.06014812850952148, 0.06028076934814453, 0.06040582275390625, 0.06037116622924805, 0.06022121429443359, 0.06073747253417969, 0.060631168365478515, 0.060424129486083986, 0.060487873077392576, 0.06032729721069336, 0.06042259216308594, 0.06015375900268555, 0.06018182373046875, 0.06021200180053711, 0.060558719635009764, 0.06020156860351562, 0.06027676773071289, 0.06017228698730469, 0.06028054428100586, 0.06040518569946289, 0.0605030403137207, 0.060393310546875, 0.06059132766723633, 0.06043084716796875, 0.060631072998046875, 0.060323776245117186, 0.06040198516845703, 0.06034966278076172, 0.06037190246582031, 0.06047711944580078, 0.06032400131225586, 0.06036479949951172, 0.060298942565917966, 0.06029708862304688, 0.060413921356201175, 0.06036332702636719, 0.0618741455078125, 0.06078464126586914, 0.06020297622680664, 0.05995219039916992, 0.05990291213989258, 0.059876384735107424, 0.05986812973022461, 0.059939903259277345, 0.05986358261108399, 0.059967456817626955, 0.0599617919921875, 0.06001663970947266, 0.059908096313476565, 0.0599736328125, 0.06014976119995117, 0.06037641525268555, 0.06065430450439453, 0.06062278366088867, 0.060490848541259766, 0.06039440155029297, 0.06016022491455078, 0.060071712493896486, 0.05994316864013672, 0.06003244781494141, 0.0599513931274414, 0.060032863616943356, 0.059991233825683596, 0.060148735046386716, 0.060080127716064455, 0.05994035339355469, 0.05990655899047852, 0.05999411010742187, 0.06056675338745117, 0.060693023681640625, 0.06043423843383789, 0.06067868804931641, 0.060419361114501954, 0.060299903869628906, 0.06020505523681641, 0.06013558578491211, 0.06008816146850586, 0.060227489471435545, 0.06002495956420899, 0.060098529815673825, 0.060493824005126956, 0.06043033599853516, 0.06028083038330078, 0.06022553634643555, 0.06056537628173828, 0.06067007827758789, 0.06062080001831055, 0.060568737030029296, 0.060599136352539065, 0.06054502487182617, 0.060374942779541016, 0.06028294372558594, 0.060323871612548825, 0.06033772659301758, 0.06032223892211914, 0.060232929229736325, 0.060427040100097654, 0.06044569778442383, 0.06020764923095703, 0.06155625534057617, 0.061092353820800784, 0.06032179260253906, 0.06014710235595703, 0.059920543670654296, 0.05999660873413086, 0.05989785766601562, 0.060015743255615234, 0.05998665618896484, 0.060055038452148435, 0.05995289611816406, 0.060048286437988284, 0.05997286224365234, 0.05991913604736328, 0.06003859329223633, 0.0602542724609375, 0.06045708847045898, 0.060729694366455075, 0.060424190521240234, 0.060993438720703126, 0.06012527847290039, 0.06010879898071289, 0.06005350494384765, 0.05999788665771484, 0.0600162239074707, 0.06010902404785156, 0.06046160125732422, 0.06007804870605469, 0.060295169830322265, 0.06006784057617188, 0.06009833526611328, 0.060174560546875, 
0.0604417610168457, 0.060513118743896484, 0.060485633850097656, 0.06049782562255859, 0.06043247985839844, 0.060227615356445316, 0.060327552795410154, 0.06021696090698242, 0.060152542114257815, 0.06027814483642578, 0.06017254257202148, 0.060307838439941405, 0.06029305648803711, 0.06034233474731445, 0.060268543243408204, 0.06033926391601562, 0.060427200317382815, 0.0610447998046875, 0.06079654312133789, 0.06068563079833984, 0.060521472930908204, 0.06053273773193359, 0.060329216003417965, 0.060311649322509764, 0.060578464508056644, 0.06047244644165039, 0.06027523040771485, 0.06033964920043945, 0.060555614471435544, 0.06043910217285156, 0.060456031799316405, 0.06150627136230469, 0.06099763107299805, 0.060053665161132815, 0.06001571273803711, 0.059773696899414065, 0.05990195083618164, 0.05993881607055664, 0.06005759811401367, 0.06002687835693359, 0.0600203857421875, 0.05996121597290039, 0.06011747360229492, 0.05999526214599609, 0.06005583953857422, 0.05994124984741211, 0.060276065826416016, 0.060633983612060544, 0.060813312530517576, 0.06053241729736328, 0.06038060760498047, 0.06030185699462891, 0.06018476867675781, 0.060106239318847655, 0.060117504119873044, 0.06018473434448242, 0.06010060882568359, 0.06004073715209961, 0.06021356964111328, 0.06007814407348633, 0.06029321670532226, 0.059998046875, 0.060209312438964845, 0.06049286270141602, 0.06042031860351563, 0.06063955307006836, 0.06046556854248047, 0.060352512359619144, 0.060422080993652344, 0.06022355270385742, 0.060180225372314454, 0.06010889434814453, 0.060264606475830075, 0.06019891357421875, 0.06016409683227539, 0.06035625457763672, 0.06031600189208984, 0.060230911254882814, 0.0602426872253418, 0.06028902435302735, 0.060620288848876956, 0.06070867156982422, 0.06053897476196289, 0.06053481674194336, 0.06048416137695312, 0.06048166275024414, 0.06031961441040039, 0.060391422271728515, 0.06028902435302735, 0.06026444625854492, 0.06050559997558594, 0.06041446304321289, 0.060421695709228514, 0.06045251083374024, 0.06179651260375976, 0.06098108673095703, 0.060186752319335936, 0.0600761604309082, 0.05995916748046875, 0.05996892929077149, 0.05996384048461914, 0.06009190368652344, 0.05988828659057617, 0.06007731246948242, 0.06020377731323242, 0.0600002555847168, 0.060109088897705075, 0.060383041381835936, 0.060304286956787106, 0.06044569778442383, 0.06067609786987305, 0.06065347290039062, 0.06046432113647461, 0.060107681274414064, 0.05999980926513672, 0.06012895965576172, 0.059973983764648436, 0.06010416030883789, 0.06013174438476562, 0.060076576232910156, 0.06004326248168945, 0.060112895965576174, 0.060101856231689454, 0.06013555145263672, 0.06015046310424805, 0.06026031875610351, 0.06028902435302735, 0.06049587249755859, 0.06037299346923828, 0.06079257583618164, 0.060610752105712894, 0.06025632095336914, 0.06020710372924805, 0.06030950546264648, 0.06028214263916016, 0.060283615112304685, 0.06027171325683594, 0.06032681655883789, 0.06025625610351563, 0.06029919815063477, 0.06027833557128906, 0.0602874870300293, 0.060443809509277344, 0.060670814514160155, 0.06046105575561524, 0.06076620864868164, 0.06061676788330078, 0.06084396743774414, 0.060647422790527344, 0.06069465637207031, 0.06043648147583008, 0.06031756973266601, 0.060286975860595705, 0.06047334289550781, 0.060405086517333985, 0.06112435150146484, 0.06074857711791992, 0.0616860466003418, 0.06076870346069336, 0.06042313766479492, 0.06029747009277344, 0.06007270431518555, 0.06005750274658203, 0.05993203353881836, 0.060072288513183594, 0.0600682258605957, 0.060303359985351565, 
0.06018060684204102, 0.060237697601318356, 0.06009036636352539, 0.060194206237792966, 0.060190654754638674, 0.060502689361572264, 0.06069638442993164, 0.06068428802490235, 0.06088857650756836, 0.060452960968017576, 0.06030809783935547, 0.0602562255859375, 0.060203006744384766, 0.06045455932617187, 0.06017020797729492, 0.060302974700927735, 0.06015663909912109, 0.06016412734985352, 0.06018396759033203, 0.06010319900512695, 0.06012115097045898, 0.06032179260253906, 0.06041961669921875, 0.060631423950195315, 0.060490848541259766, 0.060619518280029296, 0.0605228157043457, 0.060516094207763674, 0.06028044891357422, 0.06021791839599609, 0.06017622375488281, 0.060264606475830075, 0.060139488220214844, 0.06015798568725586, 0.06014511871337891, 0.06025455856323242, 0.06033833694458008, 0.06055081558227539, 0.06061094284057617, 0.060639232635498044, 0.06047129440307617, 0.060636863708496094, 0.060427776336669924, 0.06058844757080078, 0.06041846466064453, 0.06029312133789062, 0.06035811233520508, 0.060381729125976565, 0.060284927368164064, 0.06031689453125, 0.060211360931396486, 0.0603570556640625, 0.06030329513549805, 0.061901023864746094, 0.06087286376953125, 0.060235774993896485, 0.060104705810546874, 0.05997532653808594, 0.0598900146484375, 0.05985798263549805, 0.05990700912475586, 0.05997568130493164, 0.05996966552734375, 0.06007596969604492, 0.05998140716552734, 0.05983881759643555, 0.05992668914794922, 0.06003884887695313, 0.060225662231445314, 0.06036896133422852, 0.06071292877197266, 0.06051839828491211, 0.06043843078613281, 0.06033939361572266, 0.060148639678955076, 0.06007398223876953, 0.06003507232666016, 0.06001996612548828, 0.060152576446533206, 0.060037025451660155, 0.060137214660644533, 0.060094497680664063, 0.06005702209472656, 0.06020761489868164, 0.06024435043334961, 0.060385280609130856, 0.06054899215698242, 0.06148896026611328, 0.06064972686767578, 0.06059014511108399, 0.06035359954833985, 0.060272575378417965, 0.06018361663818359, 0.060100543975830076, 0.060155742645263674, 0.06021075057983399, 0.060170623779296876, 0.060131553649902345, 0.060262401580810546, 0.060364864349365235, 0.060899040222167966, 0.06054115295410156, 0.060474655151367185, 0.06082857513427734, 0.06068204879760742, 0.06057347106933594, 0.0604420166015625, 0.060558143615722655, 0.060483585357666014, 0.06034451293945312, 0.060295135498046874, 0.06027251052856445, 0.06041759872436524, 0.060281246185302735, 0.06026342391967773, 0.06026953506469727, 0.061730911254882816, 0.06078559875488281, 0.06014575958251953, 0.060164192199707034, 0.06052534484863281, 0.06007398223876953, 0.05998096084594726, 0.06006256103515625, 0.059998207092285157, 0.06002483367919922, 0.05997568130493164, 0.060065406799316406, 0.05996582412719727, 0.0599920654296875, 0.06007523345947265, 0.06019971084594727, 0.06062195205688477, 0.06090243148803711, 0.060652801513671875, 0.06039206314086914, 0.06018435287475586, 0.060527870178222656, 0.060549598693847656, 0.06008009719848633, 0.06015830230712891, 0.0601723518371582, 0.0601396484375, 0.05996335983276367, 0.060086177825927733, 0.06059535980224609, 0.06118700790405274, 0.06032137680053711, 0.06042665481567383, 0.06059212875366211, 0.06062694549560547, 0.06052793502807617, 0.06049862289428711, 0.06041996765136719, 0.06046326446533203, 0.060247295379638674, 0.06031824111938477, 0.06018694305419922, 0.06018873596191406, 0.06020281600952149, 0.06018048095703125, 0.06031507110595703, 0.06025484848022461, 0.060327232360839846, 0.060488319396972655, 0.0606453742980957, 0.06057350540161133, 
0.06069676971435547, 0.060649471282958986, 0.06050611114501953, 0.06034431838989258, 0.060388481140136716, 0.060252254486083984, 0.060384033203125, 0.06021104049682617, 0.060536991119384764, 0.06037680053710937, 0.06032592010498047, 0.06034159851074219, 0.06159977722167969, 0.060993793487548825, 0.060229633331298826, 0.06056963348388672, 0.06005539321899414, 0.06010617446899414, 0.06025900650024414, 0.06004518508911133, 0.05993689727783203, 0.06004076766967773, 0.060033344268798826, 0.06003315353393555, 0.05996291351318359, 0.060087936401367184, 0.06008627319335937, 0.06023072052001953, 0.060630977630615236, 0.06065955352783203, 0.06061670303344727, 0.06045491027832031, 0.06027017593383789, 0.06003750228881836, 0.06003299331665039, 0.06012944030761719, 0.06013123321533203, 0.06016963195800781, 0.06015615844726562, 0.060133216857910156, 0.06022809600830078, 0.06009846496582031, 0.06006179046630859, 0.06028287887573242, 0.06047334289550781, 0.06074367904663086, 0.06057984161376953, 0.06040563201904297, 0.06051036834716797, 0.06040105438232422, 0.06025888061523438, 0.06030873489379883, 0.06034009552001953, 0.06023667144775391, 0.06029094314575195, 0.06034649658203125, 0.06029087829589844, 0.060168384552001956, 0.060210975646972656, 0.06057187271118164, 0.06056755065917969, 0.06049782562255859, 0.06068000030517578, 0.06066204833984375, 0.06074367904663086, 0.060626785278320314, 0.06035676956176758, 0.06060031890869141, 0.06055267333984375, 0.060509822845458985, 0.060420543670654296, 0.06047808074951172, 0.06060435104370117, 0.06032374572753906, 0.06038735961914062]",tokens/s,16.578144580559517,, @@ -3461,7 +3461,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 536.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 452.12 MiB is free. Process 79948 has 14.30 GiB memory in use. Of the allocated memory 14.18 GiB is allocated by PyTorch, and 1.57 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 536.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 452.12 MiB is free. Process 73522 has 14.30 GiB memory in use. Of the allocated memory 14.18 GiB is allocated by PyTorch, and 1.57 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -3504,7 +3504,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 26.12 MiB is free. Process 72135 has 14.71 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 47.00 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 26.12 MiB is free. Process 65357 has 14.71 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 47.00 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,839.00416,3354.329088,0.0,2959.081472,2957.493248,s,1,7.4474365234375,7.4474365234375,0.0,7.4474365234375,7.4474365234375,7.4474365234375,7.4474365234375,[7.4474365234375],,kWh,9.354730991644071e-06,1.02380170073714e-06,2.9752801579963073e-06,1.3353812850377518e-05,,MB,1148.882944,3423.535104,0.0,3017.801728,2552.885248,s,10,0.609890682220459,0.060989068222045896,0.003264355478831742,0.06088916778564453,0.06263210525512695,0.0660835391998291,0.06884468635559082,"[0.06953497314453125, 0.06046691131591797, 0.061311424255371096, 0.05996384048461914, 0.06186511993408203, 0.06173257446289063, 0.057616863250732425, 0.05804412841796875, 0.05776764678955078, 0.06158720016479492]",tokens/s,4197.473538503134,kWh,2.2473518942627797e-06,2.4783270569686913e-07,1.4945033323538555e-06,3.989687932313504e-06,tokens/kWh,64165419.537350394,MB,1158.61504,3423.535104,0.0,3017.801728,2552.887808,s,10,14.256443725585939,1.4256443725585934,0.0115770949596713,1.4310405883789064,1.4351067993164062,1.4353154846191407,1.4354824328613283,"[1.4318619384765625, 1.394968017578125, 1.4350604248046874, 1.43022119140625, 1.4318599853515626, 1.417967529296875, 1.42188232421875, 1.4320689697265625, 1.435524169921875, 1.4250291748046875]",tokens/s,44.190543737730586,kWh,4.106144405865308e-05,4.528718605831331e-06,2.2645821535445745e-05,6.823598419993017e-05,tokens/kWh,923266.5248208506,,s,630,14.253904968261713,0.022625245981367805,0.0005464244241510381,0.022606528282165528,0.02291669692993164,0.023132241344451902,0.024322863998413095,"[0.023183231353759767, 0.02291663932800293, 0.02278883171081543, 0.022808576583862306, 0.02265497589111328, 0.022640640258789063, 0.02253363227844238, 0.02261427116394043, 0.02275312042236328, 0.022503776550292967, 0.02271808052062988, 0.02297702407836914, 0.02273683166503906, 0.024712480545043946, 0.023758975982666016, 0.022856159210205076, 0.023077280044555663, 0.022898080825805665, 0.022765344619750976, 0.022575647354125976, 0.022521600723266602, 0.022378751754760742, 0.022550527572631835, 0.022593408584594726, 0.02283942413330078, 0.022543487548828126, 0.022626623153686524, 0.022569536209106445, 0.022713375091552735, 0.022590431213378906, 0.02249932861328125, 0.023838720321655273, 0.02262182426452637, 0.022714752197265625, 0.022550527572631835, 0.022581247329711913, 0.02278201675415039, 0.02258732795715332, 0.022398399353027343, 0.02254729652404785, 0.022716127395629882, 0.022478847503662108, 0.022501375198364256, 0.02249318313598633, 0.02269923210144043, 0.022661376953125, 0.02268623924255371, 0.022560768127441407, 0.022579200744628908, 0.02261008071899414, 0.02271241569519043, 0.022654720306396484, 0.022686847686767576, 
0.022614879608154295, 0.022592992782592775, 0.02287468719482422, 0.022529247283935547, 0.022560800552368164, 0.022756095886230468, 0.022668800354003905, 0.022426080703735352, 0.022487071990966796, 0.022548479080200197, 0.023119903564453124, 0.022724576950073242, 0.022409215927124023, 0.02220649528503418, 0.02205251121520996, 0.0221146240234375, 0.022192127227783204, 0.022208511352539064, 0.02207744026184082, 0.022214496612548828, 0.022020544052124023, 0.0223189754486084, 0.022269088745117186, 0.022164159774780274, 0.022280191421508787, 0.022148351669311523, 0.022150079727172853, 0.022097728729248048, 0.02221670341491699, 0.022181888580322266, 0.022177791595458983, 0.022071296691894532, 0.022013952255249023, 0.02191302490234375, 0.022032960891723633, 0.022022144317626953, 0.022079488754272462, 0.022066783905029298, 0.022063488006591796, 0.02219603157043457, 0.022224863052368163, 0.022044927597045898, 0.022024192810058595, 0.02200707244873047, 0.022225631713867187, 0.02215711975097656, 0.022026016235351564, 0.02234432029724121, 0.022028064727783202, 0.022296607971191405, 0.021916959762573244, 0.02265567970275879, 0.02254435157775879, 0.022071327209472656, 0.022071296691894532, 0.02188483238220215, 0.022011999130249024, 0.02260915184020996, 0.02194918441772461, 0.02205708885192871, 0.022245248794555663, 0.022113727569580077, 0.022206943511962892, 0.02203446388244629, 0.021958719253540038, 0.021907360076904296, 0.021928031921386718, 0.021923391342163086, 0.02188470458984375, 0.02195248031616211, 0.021940031051635743, 0.021855104446411134, 0.021863935470581054, 0.022661951065063475, 0.022147071838378905, 0.022763519287109374, 0.02294988822937012, 0.022845439910888672, 0.022405120849609376, 0.022355968475341798, 0.022202560424804688, 0.022124191284179688, 0.021919904708862306, 0.022120447158813478, 0.022197887420654296, 0.022339712142944335, 0.022354175567626953, 0.022471935272216796, 0.02253081512451172, 0.02295132827758789, 0.02255523109436035, 0.022994943618774414, 0.022799999237060546, 0.022623903274536134, 0.023065471649169923, 0.022785888671875, 0.022597280502319336, 0.02387798309326172, 0.0240863037109375, 0.02277596855163574, 0.0229039363861084, 0.022702592849731446, 0.022706623077392577, 0.022665216445922853, 0.022751232147216797, 0.023289503097534178, 0.022743392944335937, 0.022934560775756837, 0.02271536064147949, 0.022854719161987305, 0.022920127868652343, 0.02285478401184082, 0.023030656814575196, 0.02276927947998047, 0.022881887435913087, 0.022795040130615233, 0.022810047149658202, 0.022859807968139648, 0.023675424575805664, 0.02272217559814453, 0.022903167724609375, 0.022607200622558592, 0.022772319793701173, 0.022681663513183594, 0.022754848480224608, 0.023497184753417968, 0.022666976928710936, 0.02283535957336426, 0.02256230354309082, 0.022827648162841798, 0.022626304626464845, 0.022640640258789063, 0.022769664764404295, 0.023225664138793945, 0.023077568054199218, 0.023259136199951173, 0.023173280715942383, 0.022544384002685547, 0.02251158332824707, 0.022652959823608397, 0.022775808334350587, 0.02290892791748047, 0.02257449531555176, 0.02256752014160156, 0.02261561584472656, 0.022364288330078124, 0.02256108856201172, 0.022660736083984376, 0.022800575256347655, 0.022456512451171876, 0.022605823516845702, 0.02245631980895996, 0.022345024108886717, 0.022712095260620117, 0.022649759292602538, 0.022435039520263673, 0.022591327667236327, 0.022688703536987306, 0.022552448272705077, 0.022610048294067382, 0.022529119491577147, 0.02229916763305664, 0.02257475280761719, 
0.023018207550048828, 0.022540288925170897, 0.022452224731445314, 0.02272220802307129, 0.02288243293762207, 0.0227740478515625, 0.022446016311645507, 0.02257935905456543, 0.022486656188964844, 0.022621728897094726, 0.022645023345947264, 0.022602144241333007, 0.022900415420532227, 0.023608896255493166, 0.02275766372680664, 0.02283692741394043, 0.022950687408447266, 0.022642688751220705, 0.02249068832397461, 0.022573503494262695, 0.022697984695434572, 0.022753183364868163, 0.022863967895507813, 0.02411427116394043, 0.022774688720703123, 0.022840927124023438, 0.02272502326965332, 0.0228351993560791, 0.022693151473999022, 0.022702592849731446, 0.022713823318481444, 0.022968063354492186, 0.022815744400024415, 0.022587392807006838, 0.02269523239135742, 0.022446815490722655, 0.023310752868652345, 0.02254630470275879, 0.02267558479309082, 0.02251366424560547, 0.022603776931762694, 0.022486112594604493, 0.02218281555175781, 0.022349472045898437, 0.02285398483276367, 0.022614015579223632, 0.02247270393371582, 0.022338592529296875, 0.02245910453796387, 0.02238489532470703, 0.03100876808166504, 0.022345727920532226, 0.022437280654907226, 0.023004831314086913, 0.022756288528442383, 0.02308233642578125, 0.022631135940551758, 0.02263033676147461, 0.0225581111907959, 0.022931488037109374, 0.022914751052856445, 0.022610815048217773, 0.022796287536621093, 0.022519615173339842, 0.022605119705200197, 0.022644832611083986, 0.022493984222412108, 0.022519168853759767, 0.022454912185668946, 0.022534143447875975, 0.02243756866455078, 0.022460704803466798, 0.02245020866394043, 0.022250944137573243, 0.022656927108764647, 0.02285593605041504, 0.022728736877441407, 0.02263897514343262, 0.022519359588623045, 0.022444480895996093, 0.02263654327392578, 0.02262118339538574, 0.02248099136352539, 0.02244700813293457, 0.02270191955566406, 0.022631647109985352, 0.022609888076782228, 0.022504383087158204, 0.02238876724243164, 0.022629919052124022, 0.02243836784362793, 0.02244105529785156, 0.022649023056030275, 0.022704864501953127, 0.02275225639343262, 0.02248806381225586, 0.022542335510253905, 0.022619808197021484, 0.022589120864868164, 0.025994720458984374, 0.02296268844604492, 0.022968320846557616, 0.02265907287597656, 0.022701248168945313, 0.022462879180908203, 0.02231884765625, 0.022506143569946287, 0.02284726333618164, 0.022794336318969727, 0.022550655364990235, 0.022609920501708985, 0.022521087646484375, 0.022597984313964845, 0.02260121536254883, 0.022498207092285158, 0.02268342399597168, 0.02270185661315918, 0.022759359359741212, 0.022497791290283203, 0.02265907287597656, 0.02262015914916992, 0.02271177673339844, 0.022557216644287108, 0.022558719635009765, 0.022845632553100587, 0.022673215866088867, 0.02263859176635742, 0.022543807983398438, 0.022186559677124025, 0.021960704803466798, 0.022147071838378905, 0.02186979293823242, 0.02207823944091797, 0.021831680297851562, 0.02206915283203125, 0.02237654495239258, 0.022383615493774413, 0.022701055526733398, 0.022589439392089843, 0.024408063888549804, 0.023171072006225587, 0.022514879226684572, 0.0221909122467041, 0.023010368347167968, 0.02240630340576172, 0.0226296329498291, 0.022368799209594725, 0.02232524871826172, 0.022621952056884765, 0.02206924819946289, 0.021881088256835938, 0.022013952255249023, 0.021975135803222655, 0.022089887619018554, 0.022036224365234374, 0.022023551940917967, 0.0218590087890625, 0.02200476837158203, 0.02197212791442871, 0.022021888732910156, 0.02192915153503418, 0.02197587203979492, 0.022379135131835936, 0.022173919677734376, 
0.022226463317871092, 0.021964479446411132, 0.022118431091308594, 0.02220230484008789, 0.022063135147094726, 0.022002464294433595, 0.02198886489868164, 0.021981695175170898, 0.02195155143737793, 0.021860639572143556, 0.022002336502075195, 0.02205881690979004, 0.022085248947143556, 0.02190540885925293, 0.022053056716918946, 0.022276479721069335, 0.022228992462158204, 0.02286367988586426, 0.02215648078918457, 0.02227507209777832, 0.022317119598388672, 0.02263852882385254, 0.02293337631225586, 0.0223287353515625, 0.02227027130126953, 0.022438304901123047, 0.023226560592651366, 0.022771520614624022, 0.02260326385498047, 0.02274287986755371, 0.022722623825073243, 0.02278665542602539, 0.022824960708618162, 0.022734848022460938, 0.02272051239013672, 0.022755327224731444, 0.022731008529663085, 0.0229039363861084, 0.022833791732788086, 0.022837087631225585, 0.02263852882385254, 0.02268921661376953, 0.022838048934936524, 0.022664480209350586, 0.02273967933654785, 0.02291312026977539, 0.022699935913085938, 0.02267523193359375, 0.02268601608276367, 0.022827072143554686, 0.022847232818603517, 0.022917215347290038, 0.022722560882568358, 0.022648735046386717, 0.02264409637451172, 0.022553312301635743, 0.022644224166870116, 0.022808704376220703, 0.022563199996948242, 0.02332467269897461, 0.025610080718994142, 0.023443424224853515, 0.022732927322387696, 0.023035839080810548, 0.022710208892822267, 0.022505247116088867, 0.022814752578735352, 0.023142335891723632, 0.023289279937744142, 0.02302239990234375, 0.02292531204223633, 0.02270412826538086, 0.02264473533630371, 0.02268511962890625, 0.022563295364379884, 0.02272060775756836, 0.02319900894165039, 0.022688512802124024, 0.02257302474975586, 0.022744672775268555, 0.02250726318359375, 0.0226265926361084, 0.02274064064025879, 0.022556800842285157, 0.022534751892089845, 0.023566335678100587, 0.022595104217529298, 0.022617887496948243, 0.022663839340209962, 0.022497055053710937, 0.022749183654785156, 0.022453760147094725, 0.022385408401489258, 0.022571008682250978, 0.022766719818115233, 0.02268582344055176, 0.022639360427856445, 0.022816768646240236, 0.022585119247436523, 0.022714591979980468, 0.022579200744628908, 0.022603776931762694, 0.022605823516845702, 0.02292736053466797, 0.02288025665283203, 0.022551872253417968, 0.02256675148010254, 0.02298944091796875, 0.02272483253479004, 0.02275062370300293, 0.022651519775390625, 0.0227491512298584, 0.022577152252197266, 0.022429695129394533, 0.02254172706604004, 0.02267366409301758, 0.02273535919189453, 0.022665056228637695, 0.0225133113861084, 0.022769216537475587, 0.022749919891357422, 0.02280006408691406, 0.02266748809814453, 0.02268079948425293, 0.02297500801086426, 0.02262022399902344, 0.022608160018920898, 0.022691232681274414, 0.022590047836303712, 0.022584415435791014, 0.02245903968811035, 0.022585599899291993, 0.02260905647277832, 0.022614879608154295, 0.022439647674560546, 0.022442272186279297, 0.022374399185180666, 0.022431392669677735, 0.02278598403930664, 0.02276393508911133, 0.02267686462402344, 0.02257574462890625, 0.02258451271057129, 0.022764352798461913, 0.02270137596130371, 0.02261020851135254, 0.022700096130371095, 0.02249558448791504, 0.02410905647277832, 0.022759424209594727, 0.02275702476501465, 0.02283113670349121, 0.02253446388244629, 0.02285753631591797, 0.022589792251586915, 0.022695648193359376, 0.022603904724121094, 0.022747135162353514, 0.02246451187133789, 0.022598943710327148, 0.02260585594177246, 0.022542976379394532, 0.022816831588745118, 0.022783296585083008, 0.02279078483581543, 
0.0227205753326416, 0.022786048889160155, 0.022697984695434572, 0.02265907287597656, 0.02264473533630371, 0.022535999298095702, 0.022697439193725587, 0.02258208084106445, 0.023175071716308594, 0.026408960342407226, 0.02552217674255371, 0.02266726493835449, 0.02263033676147461, 0.02259564781188965, 0.022649856567382814, 0.022508544921875, 0.022542335510253905, 0.022421247482299806, 0.022503679275512695, 0.022815807342529297, 0.02278825569152832, 0.02292815971374512, 0.023346080780029296, 0.02264838409423828, 0.022837568283081054, 0.0225664005279541, 0.02256752014160156, 0.022618112564086915, 0.022576288223266603, 0.02247123146057129, 0.022712064743041993, 0.022574623107910155, 0.02245475196838379, 0.02256876754760742, 0.02247248077392578, 0.022536319732666017, 0.023003360748291016, 0.022938207626342775, 0.02254217529296875, 0.022457952499389647, 0.022590015411376955, 0.02239897537231445, 0.022582815170288085, 0.022581119537353516, 0.022594144821166992, 0.02254377555847168, 0.022599647521972657, 0.0223603515625, 0.022475103378295898, 0.022512704849243163, 0.022604736328125, 0.022504928588867188, 0.02260544013977051, 0.022481727600097656, 0.022407039642333985, 0.02289072036743164, 0.02266316795349121, 0.022679040908813477, 0.022471168518066405, 0.022425504684448243, 0.02259891128540039, 0.02252047920227051, 0.022554208755493164, 0.022524511337280274, 0.02270412826538086, 0.02275868797302246, 0.022628671646118165, 0.02253606414794922, 0.022464128494262697, 0.022829055786132812, 0.02246486473083496, 0.02262201690673828, 0.02249772834777832, 0.022668928146362306, 0.02251046371459961, 0.022877439498901368, 0.022794815063476564, 0.022838272094726563, 0.022537023544311523, 0.02256480026245117, 0.022585599899291993, 0.022648256301879884, 0.022693632125854492, 0.022816991806030272, 0.022650720596313477]",tokens/s,44.19841449783633,, @@ -3549,7 +3549,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 142.12 MiB is free. Process 82908 has 14.60 GiB memory in use. Of the allocated memory 14.48 GiB is allocated by PyTorch, and 1.53 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 142.12 MiB is free. Process 76542 has 14.60 GiB memory in use. Of the allocated memory 14.48 GiB is allocated by PyTorch, and 1.53 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,817.729536,6174.998528,0.0,5779.750912,5773.960192,s,1,7.57930224609375,7.57930224609375,0.0,7.57930224609375,7.57930224609375,7.57930224609375,7.57930224609375,[7.57930224609375],,kWh,1.031787790832368e-05,1.1308835693605839e-06,4.427225764006004e-06,1.587598724169027e-05,,MB,1109.594112,6491.66848,0.0,6085.935104,6038.345728,s,10,2.3674476165771483,0.23674476165771483,0.012145732333113422,0.24003164672851562,0.24437044525146484,0.2444778022766113,0.2445636878967285,"[0.20128944396972656, 0.23908291625976563, 0.2431251220703125, 0.24098037719726562, 0.24434658813476562, 0.2358023681640625, 0.23840220642089843, 0.23757420349121094, 0.2422592315673828, 0.2445851593017578]",tokens/s,1081.3333237342094,kWh,6.383036748641098e-06,7.038957026354253e-07,4.221482845782736e-06,1.130841529705926e-05,tokens/kWh,22638008.357065953,MB,1114.431488,6512.64,0.0,6106.906624,6086.544896,s,10,18.189046997070317,1.8189046997070313,0.002654217950322588,1.818224365234375,1.8220148559570313,1.8222690246582032,1.8224723596191408,"[1.8154215087890626, 1.818479248046875, 1.817969482421875, 1.815954345703125, 1.822523193359375, 1.8219407958984375, 1.8219583740234375, 1.8213411865234375, 1.81760107421875, 1.8158577880859375]",tokens/s,34.63622916041029,kWh,5.362022793344189e-05,5.914190468179385e-06,3.549668781761704e-05,9.503110621923831e-05,tokens/kWh,662940.8254457017,,s,630,18.185909311294548,0.028866522716340565,0.0002932959952372305,0.028806336402893068,0.029048799324035642,0.029152521705627443,0.030631099720001224,"[0.030439424514770507, 0.029577215194702147, 0.029061119079589845, 0.028778495788574218, 0.028741247177124025, 0.028761888504028322, 0.028666080474853514, 0.02862076759338379, 0.028575328826904296, 0.028547903060913087, 0.0285980167388916, 0.028608768463134766, 0.02877440071105957, 0.02878054428100586, 0.028684255599975585, 0.028745376586914062, 0.02866009521484375, 0.028655616760253907, 0.028535839080810546, 0.028687328338623048, 0.028677791595458985, 0.028649791717529297, 0.02860611152648926, 0.028621055603027343, 0.028801151275634766, 0.02874336051940918, 0.028725568771362304, 0.028696128845214844, 0.028682687759399413, 0.02873958396911621, 0.028685407638549806, 0.028771167755126954, 0.02884819221496582, 0.02881657600402832, 0.028826175689697267, 0.02907321548461914, 0.029012224197387696, 0.029159616470336915, 0.02888915252685547, 0.028945760726928713, 0.02879756736755371, 0.028757440567016603, 0.028709407806396484, 0.02875094413757324, 0.028721887588500975, 0.028811456680297852, 0.028807167053222657, 0.028862464904785157, 0.02883737564086914, 0.028744192123413087, 0.02875596809387207, 0.028755359649658203, 0.028785247802734375, 
0.028887359619140626, 0.028808544158935547, 0.028736991882324217, 0.02885264015197754, 0.028909536361694337, 0.028930559158325195, 0.028878400802612305, 0.028897727966308594, 0.02893414306640625, 0.028735231399536133, 0.030709760665893555, 0.029738239288330078, 0.029081663131713866, 0.02891436767578125, 0.02869862365722656, 0.02870681571960449, 0.02865344047546387, 0.028776575088500976, 0.028708864212036132, 0.02870681571960449, 0.028622848510742187, 0.028516351699829103, 0.028704767227172853, 0.028659711837768553, 0.028740863800048828, 0.02864566421508789, 0.028596704483032226, 0.02865337562561035, 0.028733631134033204, 0.02871500778198242, 0.02873107147216797, 0.028655935287475585, 0.028940288543701172, 0.028618080139160156, 0.028738016128540038, 0.028700544357299806, 0.028721471786499024, 0.028708192825317384, 0.028711584091186522, 0.028905536651611326, 0.02885215950012207, 0.028872480392456056, 0.02888630485534668, 0.028923072814941407, 0.028931615829467773, 0.028964160919189453, 0.0289719352722168, 0.029073408126831055, 0.028901056289672853, 0.028836160659790038, 0.029100032806396486, 0.02894643211364746, 0.028848127365112306, 0.028817407608032225, 0.028824800491333007, 0.02875276756286621, 0.028757312774658202, 0.028815967559814453, 0.02894643211364746, 0.028888479232788086, 0.029149599075317383, 0.0289334716796875, 0.02890982437133789, 0.028840192794799803, 0.028915327072143556, 0.028838367462158204, 0.02889753532409668, 0.028831584930419922, 0.028825664520263673, 0.028724319458007814, 0.028959743499755858, 0.02883500862121582, 0.028834432601928712, 0.03068511962890625, 0.029756959915161134, 0.029282848358154298, 0.028835840225219726, 0.02872025680541992, 0.028674079895019532, 0.02870467185974121, 0.02870377540588379, 0.028702463150024414, 0.028508031845092773, 0.028570112228393556, 0.028591903686523437, 0.02866489601135254, 0.028709823608398438, 0.028765663146972657, 0.028669567108154298, 0.02871776008605957, 0.02872038459777832, 0.028729343414306642, 0.028711904525756837, 0.02872319984436035, 0.028667104721069335, 0.02876032066345215, 0.02870662307739258, 0.028752031326293944, 0.02892857551574707, 0.0287457275390625, 0.028843551635742187, 0.02880988883972168, 0.02877129554748535, 0.028709344863891602, 0.0287379207611084, 0.028872735977172853, 0.028924928665161134, 0.02897977638244629, 0.029137504577636718, 0.029220672607421876, 0.02905824089050293, 0.028986175537109374, 0.028895360946655273, 0.028732959747314452, 0.028706592559814455, 0.028667455673217775, 0.028799808502197266, 0.02874387168884277, 0.028805343627929688, 0.02884940719604492, 0.028914207458496093, 0.029234399795532228, 0.028875551223754882, 0.02874387168884277, 0.028743488311767578, 0.028805152893066406, 0.028767391204833983, 0.02881990432739258, 0.028968448638916015, 0.02888172721862793, 0.028921920776367186, 0.028880447387695313, 0.028848575592041015, 0.028753183364868165, 0.028771360397338866, 0.028781728744506838, 0.030577407836914063, 0.029626367568969726, 0.02905023956298828, 0.028690240859985353, 0.028605247497558595, 0.028618335723876953, 0.02858345603942871, 0.02854092788696289, 0.028541120529174804, 0.028641408920288085, 0.02858451271057129, 0.028628992080688476, 0.028626623153686522, 0.028542911529541016, 0.028655391693115234, 0.028834400177001954, 0.029147136688232423, 0.028675743103027344, 0.028739936828613283, 0.02869171142578125, 0.02874448013305664, 0.028728736877441406, 0.02874220848083496, 0.02871839904785156, 0.028764928817749023, 0.028713983535766603, 0.028719968795776367, 0.028622943878173827, 
0.02855740737915039, 0.028655519485473634, 0.02880512046813965, 0.029430784225463868, 0.028821535110473633, 0.028967008590698243, 0.029035392761230468, 0.029057024002075195, 0.02898054313659668, 0.029007808685302734, 0.028895999908447264, 0.028915712356567383, 0.028823551177978517, 0.028794591903686523, 0.02874163246154785, 0.028737823486328126, 0.028807039260864257, 0.028784767150878906, 0.028716928482055665, 0.028749952316284178, 0.028794879913330077, 0.028692480087280273, 0.028769472122192382, 0.028774528503417968, 0.028954399108886718, 0.028743743896484375, 0.029012960433959963, 0.028806880950927736, 0.028739744186401368, 0.028835840225219726, 0.028784576416015624, 0.028813119888305663, 0.028815616607666017, 0.02879283142089844, 0.028725248336791992, 0.030593952178955077, 0.029666879653930663, 0.029003488540649415, 0.028742271423339842, 0.028512479782104493, 0.02851171112060547, 0.028549184799194337, 0.02873788833618164, 0.028784255981445312, 0.02875164794921875, 0.02887548828125, 0.0287903995513916, 0.028780799865722656, 0.02874982452392578, 0.02874982452392578, 0.028705888748168946, 0.0287425594329834, 0.028738847732543947, 0.028797664642333985, 0.028907520294189453, 0.028778495788574218, 0.02873139190673828, 0.02877961540222168, 0.02874435234069824, 0.028806720733642578, 0.02895270347595215, 0.028800575256347657, 0.02876927947998047, 0.028827360153198242, 0.028879295349121092, 0.028856351852416993, 0.028955968856811523, 0.028903776168823243, 0.029568960189819336, 0.02915760040283203, 0.02908723258972168, 0.029053440093994142, 0.029060096740722657, 0.029025407791137697, 0.02904051208496094, 0.02901580810546875, 0.02898150444030762, 0.028868160247802734, 0.0290263671875, 0.02889971160888672, 0.028953855514526367, 0.028871423721313478, 0.028932096481323243, 0.02893519973754883, 0.028953088760375976, 0.028901439666748047, 0.028901792526245116, 0.028929407119750977, 0.028918399810791015, 0.028999584197998047, 0.029114463806152343, 0.028911808013916015, 0.028935583114624023, 0.028879072189331053, 0.028952224731445313, 0.02897769546508789, 0.02884422492980957, 0.02901100730895996, 0.03085094451904297, 0.029772287368774415, 0.02911884880065918, 0.028860416412353516, 0.028786687850952147, 0.028706464767456055, 0.028651456832885742, 0.028719520568847655, 0.02874163246154785, 0.028817407608032225, 0.0287554874420166, 0.02874361610412598, 0.028776384353637694, 0.02867897605895996, 0.02874959945678711, 0.02868230438232422, 0.028721216201782227, 0.028815231323242187, 0.02895462417602539, 0.02900480079650879, 0.028824703216552734, 0.028760223388671874, 0.028751392364501954, 0.028686527252197266, 0.028844127655029295, 0.028983455657958984, 0.0288656005859375, 0.02892870330810547, 0.02881926345825195, 0.028952768325805664, 0.028825408935546876, 0.028833663940429688, 0.02903481674194336, 0.029097471237182617, 0.029097663879394532, 0.02904863929748535, 0.02896691131591797, 0.028903839111328124, 0.02891961669921875, 0.02899843215942383, 0.028893184661865235, 0.028821504592895508, 0.02890713691711426, 0.02888742446899414, 0.0287457275390625, 0.028825599670410155, 0.02891302490234375, 0.028805952072143554, 0.02890150451660156, 0.028868288040161134, 0.02880102348327637, 0.0287903995513916, 0.029024639129638673, 0.02887641525268555, 0.02891542434692383, 0.028961599349975584, 0.028888927459716798, 0.029097984313964844, 0.028868608474731446, 0.028903423309326173, 0.029041791915893556, 0.028922752380371095, 0.028901632308959962, 0.030714591979980468, 0.029648000717163087, 0.029075872421264647, 
0.028886655807495117, 0.028729440689086914, 0.028695552825927735, 0.028726911544799803, 0.02872902488708496, 0.02876153564453125, 0.028768543243408204, 0.028693376541137697, 0.028737152099609375, 0.028776351928710937, 0.028639167785644532, 0.028799327850341797, 0.028674079895019532, 0.028730911254882814, 0.02870524787902832, 0.028753440856933595, 0.028715551376342772, 0.028815391540527344, 0.02885843276977539, 0.028814912796020508, 0.02879747200012207, 0.029027999877929686, 0.028864608764648438, 0.02880886459350586, 0.028797279357910155, 0.028792032241821287, 0.02879280090332031, 0.02886079978942871, 0.028859935760498046, 0.028881824493408204, 0.028917760848999025, 0.02926313591003418, 0.029061855316162108, 0.029056768417358398, 0.028960639953613282, 0.029030784606933594, 0.02899715232849121, 0.028813791275024415, 0.028919807434082033, 0.028809215545654295, 0.028796096801757813, 0.02899795150756836, 0.028789247512817383, 0.028983327865600587, 0.028959808349609376, 0.02896784019470215, 0.028817407608032225, 0.028957984924316407, 0.02887343978881836, 0.028899328231811523, 0.028900928497314453, 0.029280832290649414, 0.028904800415039063, 0.028936256408691408, 0.02892608070373535, 0.028896799087524416, 0.02902931213378906, 0.028900447845458983, 0.028959520339965822, 0.029071104049682616, 0.030795743942260742, 0.02978019142150879, 0.029081375122070312, 0.028708864212036132, 0.02876438331604004, 0.028681024551391602, 0.028731679916381835, 0.028720928192138673, 0.028724128723144532, 0.028729055404663088, 0.028780799865722656, 0.028673152923583984, 0.028824480056762695, 0.02868751907348633, 0.02873619270324707, 0.02869660758972168, 0.028712671279907228, 0.02871548843383789, 0.028738943099975586, 0.028777023315429688, 0.028770303726196288, 0.028896480560302733, 0.028803871154785155, 0.02879897689819336, 0.028763551712036133, 0.0291549129486084, 0.028758495330810548, 0.02883404731750488, 0.02885865592956543, 0.02874982452392578, 0.028858367919921874, 0.028931615829467773, 0.029001792907714843, 0.029075872421264647, 0.029042688369750977, 0.02900105667114258, 0.02899193572998047, 0.029013919830322265, 0.029339967727661134, 0.02894220733642578, 0.02875200080871582, 0.028896896362304688, 0.0288439998626709, 0.028815263748168944, 0.02886092758178711, 0.028923904418945313, 0.028762111663818358, 0.028915552139282225, 0.02895894432067871, 0.028728256225585936, 0.028913951873779296, 0.028887775421142577, 0.028960639953613282, 0.028829471588134765, 0.028954656600952148, 0.028872415542602538, 0.028938720703125, 0.02891788864135742, 0.028932319641113282, 0.02882966423034668, 0.029035743713378907, 0.028971616744995116, 0.02885807991027832, 0.0308175048828125, 0.029647712707519532, 0.029142976760864258, 0.028788000106811523, 0.028797504425048828, 0.029034656524658205, 0.02894438362121582, 0.028645376205444335, 0.02871500778198242, 0.028604415893554686, 0.02869001579284668, 0.028695968627929686, 0.028744991302490235, 0.028663520812988282, 0.028657663345336915, 0.02855891227722168, 0.029085535049438477, 0.029118751525878905, 0.02866217613220215, 0.02873436737060547, 0.028682432174682616, 0.02910700798034668, 0.029087039947509767, 0.028707168579101563, 0.028788223266601562, 0.028746591567993165, 0.02869990348815918, 0.028713056564331055, 0.028719423294067382, 0.0287542724609375, 0.028735488891601563, 0.028747039794921873, 0.028809471130371092, 0.02895414352416992, 0.028978111267089844, 0.0289234561920166, 0.02882431983947754, 0.02889491271972656, 0.028823392868041992, 0.028789024353027343, 0.028915679931640625, 
0.028835744857788087, 0.028684160232543946, 0.02876198387145996, 0.028762367248535155, 0.028730880737304686, 0.028708736419677736, 0.028756607055664064, 0.028758079528808593, 0.028676031112670898, 0.02879283142089844, 0.02877440071105957, 0.02877449607849121, 0.02871900749206543, 0.02883516883850098, 0.02883407974243164, 0.028976543426513672, 0.02878767967224121, 0.028833791732788085, 0.028694271087646484, 0.028782880783081055, 0.028914688110351562, 0.02878563117980957, 0.030646272659301758, 0.029595008850097658, 0.02903654479980469, 0.028760704040527343, 0.028604415893554686, 0.028506111145019532, 0.028635168075561525, 0.028770559310913085, 0.028714719772338866, 0.02867967987060547, 0.028690944671630858, 0.028563232421875, 0.028610847473144532, 0.028565023422241213, 0.028729759216308593, 0.02867571258544922, 0.028573631286621094, 0.02864975929260254, 0.028586143493652343, 0.02857164764404297, 0.02872425651550293, 0.028675039291381835, 0.028907520294189453, 0.029618175506591796, 0.028811103820800783, 0.02866102409362793, 0.028671871185302733, 0.02863747215270996, 0.028848575592041015, 0.028739871978759764, 0.028704767227172853, 0.02880499267578125, 0.02879302406311035, 0.028821439743041993, 0.028876800537109375, 0.0289751033782959, 0.028887231826782225, 0.02889299201965332, 0.02892812728881836, 0.02879270362854004, 0.028729343414306642, 0.02876380729675293, 0.02867030334472656, 0.028686656951904296, 0.028714208602905272, 0.02873788833618164, 0.028745023727416993, 0.028723552703857423, 0.028799455642700197, 0.028688383102416993, 0.02877408027648926, 0.028733024597167967, 0.028727584838867188, 0.028706335067749025, 0.028859296798706056, 0.028938175201416016, 0.028977216720581053, 0.029085695266723634, 0.028866336822509765, 0.028893407821655274, 0.02896294403076172, 0.02900099182128906, 0.028803647994995116]",tokens/s,34.642205083951,, @@ -3593,7 +3593,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 64.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 56.12 MiB is free. Process 143168 has 14.68 GiB memory in use. Of the allocated memory 14.57 GiB is allocated by PyTorch, and 4.97 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 64.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 56.12 MiB is free. Process 136510 has 14.68 GiB memory in use. Of the allocated memory 14.57 GiB is allocated by PyTorch, and 4.97 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -3636,7 +3636,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 224.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 162.12 MiB is free. Process 30996 has 14.58 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 25.46 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 224.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 162.12 MiB is free. Process 25474 has 14.58 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 25.46 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,785.956864,11656.953856,0.0,11261.70624,11255.391232,s,1,7.18033935546875,7.18033935546875,0.0,7.18033935546875,7.18033935546875,7.18033935546875,7.18033935546875,[7.18033935546875],,kWh,5.039279437501893e-06,5.485672503577893e-07,1.397223339999476e-06,6.985070027859159e-06,,MB,1123.26656,11667.439616,0.0,11261.70624,10971.009024,s,10,3.609160095214844,0.3609160095214844,0.0057715397464658475,0.36168374633789063,0.36523611755371094,0.3675208755493164,0.3693486819458008,"[0.34562661743164064, 0.36130355834960937, 0.35997222900390624, 0.3647283935546875, 0.360566650390625, 0.3621558837890625, 0.3620639343261719, 0.36980563354492185, 0.3622278137207031, 0.36070938110351564]",tokens/s,709.3063018717683,kWh,1.0374761183620728e-05,1.1441585045193638e-06,6.926892514689321e-06,1.844581220282941e-05,tokens/kWh,13878488.90496305,MB,1129.078784,11669.536768,0.0,11263.803392,11168.310272,s,10,32.345559570312496,3.2345559570312497,0.0030313954029406553,3.23419287109375,3.238731909179687,3.239144885253906,3.2394752661132813,"[3.228908203125, 3.23709130859375, 3.234354248046875, 3.239557861328125, 3.23864013671875, 3.231833740234375, 3.232732177734375, 3.233671142578125, 3.234031494140625, 3.2347392578125]",tokens/s,19.477171159475894,kWh,9.4550488759296e-05,1.0429143709534005e-05,6.261869089951168e-05,0.00016759832336834168,tokens/kWh,375898.7484710144,,s,630,32.3424638595581,0.051337244221520806,0.0002730846994798606,0.05133265495300293,0.051640332794189454,0.051755985450744627,0.05218107563018799,"[0.05185184097290039, 0.05116694259643555, 0.051020065307617185, 0.05102796936035156, 0.05079040145874023, 0.05086412811279297, 0.05093948745727539, 0.05091164779663086, 0.05082726287841797, 0.051001182556152345, 0.0509455680847168, 0.05109526443481445, 0.05096131134033203, 0.05127126312255859, 0.05109990310668945, 0.05096457672119141, 0.05110784149169922, 0.05120006561279297, 0.05122048187255859, 0.0513875846862793, 0.05126841735839844, 0.051265281677246095, 0.05110111999511719, 0.05129817581176758, 0.05111084747314453, 0.05101350402832031, 0.05118150329589844, 0.05111347198486328, 0.05107360076904297, 0.05109142303466797, 0.05104451370239258, 0.05120204925537109, 0.051120128631591794, 0.051205665588378906, 0.05122851181030273, 0.05125593566894531, 0.0513966064453125, 0.05135974502563476, 0.0513719367980957, 0.051394657135009764, 0.05133660888671875, 0.05134710311889648, 0.05139756774902344, 0.051353118896484376, 0.05133315277099609, 0.05142950439453125, 0.0512957763671875, 0.0513666877746582, 0.051224193572998046, 0.051324478149414064, 0.05144249725341797, 0.05132809448242188, 0.051420063018798826, 0.05140889739990234, 0.05154611206054688, 
0.05159686279296875, 0.0514400634765625, 0.05144985580444336, 0.05153779220581055, 0.051560321807861326, 0.051517696380615235, 0.05165055847167969, 0.051557727813720707, 0.052185344696044925, 0.05153792190551758, 0.05099494552612305, 0.050864383697509764, 0.050991104125976565, 0.051146751403808595, 0.05095945739746094, 0.05092240142822266, 0.05091123199462891, 0.05104838562011719, 0.05114652633666992, 0.05100291061401367, 0.05100211334228515, 0.05116652679443359, 0.05118841552734375, 0.051087135314941405, 0.05124937438964844, 0.05129216003417969, 0.051910110473632816, 0.05162768173217774, 0.051323776245117185, 0.05125529479980469, 0.05158707046508789, 0.051156608581542966, 0.05110140609741211, 0.051098270416259764, 0.05119753646850586, 0.05113078308105469, 0.051118049621582035, 0.0510423355102539, 0.05114265441894531, 0.0510382080078125, 0.051138561248779295, 0.051317855834960936, 0.05135017776489258, 0.05146768188476562, 0.051442527770996095, 0.05140470504760742, 0.05155644989013672, 0.051523582458496094, 0.05148262405395508, 0.05155430221557617, 0.05166080093383789, 0.0514150390625, 0.05227718353271484, 0.05254502487182617, 0.05140947341918945, 0.05142015838623047, 0.05121331024169922, 0.05138604736328125, 0.05148908615112305, 0.05131468963623047, 0.05129817581176758, 0.05148236846923828, 0.0523573112487793, 0.05162601470947266, 0.05164588928222656, 0.051622528076171875, 0.05166604614257812, 0.05177158355712891, 0.05144451141357422, 0.05154816055297851, 0.05153123092651367, 0.052065982818603515, 0.05120902252197266, 0.05104422378540039, 0.050863422393798825, 0.050934593200683595, 0.05127372741699219, 0.05117497634887695, 0.050969024658203126, 0.05103731155395508, 0.05110188674926758, 0.05112697601318359, 0.05102796936035156, 0.05100940704345703, 0.05121855926513672, 0.05104230499267578, 0.05109145736694336, 0.051128318786621094, 0.051236862182617186, 0.05130643081665039, 0.05133318328857422, 0.051122177124023435, 0.05131235122680664, 0.05115523147583008, 0.05117715072631836, 0.0510379524230957, 0.05120467376708984, 0.05102092742919922, 0.05099977493286133, 0.05135721588134766, 0.05136883163452149, 0.05117337417602539, 0.051418689727783205, 0.05147068786621094, 0.051370079040527344, 0.051492641448974606, 0.05142265701293945, 0.05144246292114258, 0.05139046478271484, 0.05171814346313477, 0.05150624084472656, 0.0512685432434082, 0.05134912109375, 0.051423614501953124, 0.05139177703857422, 0.05130928039550781, 0.051490238189697266, 0.051272254943847656, 0.051500926971435546, 0.051514846801757816, 0.051388256072998045, 0.05150803375244141, 0.051410945892333984, 0.05156620788574219, 0.05153814315795899, 0.05151705551147461, 0.051646270751953126, 0.051501792907714845, 0.051689472198486325, 0.05170995330810547, 0.05175091171264649, 0.05171532821655273, 0.05170048141479492, 0.051502239227294924, 0.05227001571655274, 0.05150310516357422, 0.05107062530517578, 0.05112630462646484, 0.05085804748535156, 0.05107913589477539, 0.051093791961669924, 0.05113417434692383, 0.05096847915649414, 0.05105683135986328, 0.05099103927612305, 0.05121427154541015, 0.05112041473388672, 0.0511242561340332, 0.05108272171020508, 0.05123072052001953, 0.05118825531005859, 0.05149033737182617, 0.05149897766113281, 0.051491329193115234, 0.05133107376098633, 0.05146214294433594, 0.051253246307373046, 0.05124915313720703, 0.0511110725402832, 0.0511288948059082, 0.05133036804199219, 0.0512907829284668, 0.0512105598449707, 0.05145539093017578, 0.05127180862426758, 0.05123660659790039, 0.05125571060180664, 0.05132249450683594, 
0.05163692855834961, 0.05156614303588867, 0.05147488021850586, 0.05175910568237305, 0.051603424072265626, 0.05159529495239258, 0.05159526443481445, 0.05144166564941406, 0.05156560134887695, 0.051442176818847656, 0.051501537322998045, 0.051544063568115236, 0.05144521713256836, 0.0514312973022461, 0.05148944091796875, 0.05144780731201172, 0.0514436149597168, 0.051477664947509764, 0.05152646255493164, 0.05164044952392578, 0.05175296020507812, 0.05170175933837891, 0.05183820724487305, 0.05177215957641602, 0.05170336151123047, 0.05169107055664063, 0.051609790802001954, 0.05243532943725586, 0.05156486511230469, 0.05212601470947266, 0.05128799819946289, 0.05166207885742188, 0.05214700698852539, 0.05093116760253906, 0.05081455993652344, 0.05114275360107422, 0.05114147186279297, 0.050964481353759764, 0.05104844665527344, 0.05102105712890625, 0.05125724792480469, 0.0509202880859375, 0.05108224105834961, 0.05093478393554687, 0.05121795272827148, 0.05101206588745117, 0.0513493766784668, 0.05119929504394531, 0.05132505416870117, 0.051122081756591796, 0.051378753662109374, 0.05119558334350586, 0.051055137634277346, 0.050984798431396486, 0.05127388763427734, 0.05140009689331055, 0.05122259140014648, 0.05123126220703125, 0.05199462509155273, 0.051738304138183595, 0.05138463973999023, 0.05116668701171875, 0.05135619354248047, 0.05146739196777344, 0.05139104080200195, 0.05161743927001953, 0.05149967956542969, 0.05142297744750977, 0.05154227066040039, 0.05150848007202148, 0.05143824005126953, 0.05150729751586914, 0.05150515365600586, 0.0514334716796875, 0.05175686264038086, 0.051617088317871096, 0.05141955184936523, 0.051665374755859375, 0.051650177001953124, 0.05181683349609375, 0.05150051116943359, 0.051754913330078124, 0.05142512130737305, 0.051761920928955076, 0.05155219268798828, 0.05149705505371094, 0.05169356918334961, 0.051582977294921874, 0.05166262435913086, 0.05154019165039062, 0.051544063568115236, 0.05148246383666992, 0.052170623779296876, 0.051132606506347655, 0.05115887832641602, 0.05097283172607422, 0.05088665771484375, 0.05084569549560547, 0.05100896072387695, 0.050893375396728516, 0.050958335876464846, 0.05089641571044922, 0.05089308929443359, 0.050986686706542966, 0.0508851203918457, 0.05096857452392578, 0.05093939208984375, 0.05104281616210937, 0.051371391296386716, 0.05133785629272461, 0.05122662353515625, 0.051205631256103515, 0.051044769287109375, 0.051213470458984375, 0.05127468872070313, 0.0513309440612793, 0.05114204788208008, 0.05102870559692383, 0.05101363372802734, 0.05098863983154297, 0.05107497787475586, 0.050962944030761716, 0.05104844665527344, 0.050993152618408207, 0.05110374450683594, 0.05125529479980469, 0.052029441833496094, 0.051484672546386716, 0.051666465759277344, 0.05137593460083008, 0.05141161727905273, 0.05146419143676758, 0.051410945892333984, 0.05141299057006836, 0.0515968017578125, 0.05161625671386719, 0.05164031982421875, 0.05161369705200195, 0.05141299057006836, 0.05154934310913086, 0.051472225189208985, 0.05130752182006836, 0.051275070190429685, 0.05130889511108398, 0.051398719787597656, 0.051542015075683595, 0.05147244644165039, 0.05164054489135742, 0.05151129531860352, 0.051557727813720707, 0.05151811218261719, 0.051555774688720704, 0.05163065719604492, 0.051596382141113284, 0.05175388717651367, 0.05201375961303711, 0.05120841598510742, 0.05115084838867188, 0.05107036972045898, 0.05101833724975586, 0.05094153594970703, 0.0510153923034668, 0.05099923324584961, 0.050991584777832034, 0.05088214492797852, 0.050941951751708986, 0.05109215927124024, 
0.05103615951538086, 0.05110492706298828, 0.05115702438354492, 0.051163745880126954, 0.05103433609008789, 0.05115609741210937, 0.05127043151855469, 0.05153801727294922, 0.05113779067993164, 0.05124556732177735, 0.051189952850341794, 0.051118144989013674, 0.05130444717407227, 0.05112422561645508, 0.05120783996582031, 0.051087711334228514, 0.05102150344848633, 0.05118912124633789, 0.05098908615112305, 0.05129497528076172, 0.051140609741210936, 0.05128611373901367, 0.051273792266845704, 0.051367935180664064, 0.05140991973876953, 0.05138278579711914, 0.05142169570922851, 0.05158089447021484, 0.05134748840332031, 0.0514150390625, 0.051510337829589845, 0.05153273773193359, 0.051576831817626956, 0.05148246383666992, 0.051294368743896486, 0.051591167449951174, 0.0513875846862793, 0.05140118408203125, 0.05134339141845703, 0.05153164672851562, 0.05123894500732422, 0.05145568084716797, 0.05140550231933594, 0.05156252670288086, 0.051525409698486326, 0.05202057647705078, 0.05177350234985351, 0.051661121368408204, 0.05160806274414063, 0.05163827133178711, 0.05157068634033203, 0.052096382141113284, 0.05118835067749023, 0.050974720001220705, 0.050861183166503905, 0.050937793731689454, 0.05083404922485352, 0.05093404769897461, 0.05090435028076172, 0.05090719985961914, 0.051003326416015626, 0.05099801635742188, 0.05110988616943359, 0.05105459213256836, 0.05113651275634765, 0.0522874870300293, 0.05129344177246094, 0.051132736206054685, 0.05110419082641601, 0.051307743072509765, 0.051372833251953125, 0.05130854415893555, 0.05130035018920898, 0.051165184020996096, 0.05116108703613281, 0.051158718109130856, 0.05113478469848633, 0.05117526245117188, 0.05122883224487305, 0.05125734329223633, 0.05117647933959961, 0.05114569473266602, 0.05110988616943359, 0.05133107376098633, 0.05126758575439453, 0.05139564895629883, 0.05188703918457031, 0.05153318405151367, 0.0515467529296875, 0.05148591995239258, 0.051313438415527345, 0.05135769653320312, 0.05148057556152344, 0.051525630950927735, 0.05146623992919922, 0.05160953521728515, 0.05167520141601562, 0.051419136047363284, 0.05130035018920898, 0.05132287979125977, 0.05125734329223633, 0.051332191467285154, 0.05128467178344726, 0.05158115386962891, 0.0514150390625, 0.05144707107543945, 0.05169347381591797, 0.051591999053955076, 0.05157273483276367, 0.051507198333740234, 0.051607551574707033, 0.05143462371826172, 0.05160844802856445, 0.05136716842651367, 0.051907230377197265, 0.051843040466308596, 0.05120425415039063, 0.050949630737304685, 0.0509567985534668, 0.05085184097290039, 0.05095609664916992, 0.05094144058227539, 0.05097856140136719, 0.05113907241821289, 0.051044734954833984, 0.05099321746826172, 0.050817024230957034, 0.05112815856933594, 0.05116329574584961, 0.051253246307373046, 0.05100255966186523, 0.05127804946899414, 0.05135625457763672, 0.051568641662597656, 0.05122252655029297, 0.0514202880859375, 0.051200191497802736, 0.05117599868774414, 0.05118947219848633, 0.051257408142089844, 0.0511102409362793, 0.05108736038208008, 0.051195903778076174, 0.05124643325805664, 0.051178112030029296, 0.051300384521484374, 0.05137561416625976, 0.05129391860961914, 0.051346206665039064, 0.05143484878540039, 0.05147100830078125, 0.05144512176513672, 0.05160819244384766, 0.051451904296875, 0.05155833435058594, 0.051369281768798826, 0.051737342834472656, 0.05139865493774414, 0.05151129531860352, 0.05155977630615234, 0.051302398681640625, 0.05135222244262695, 0.051329025268554686, 0.05146419143676758, 0.05148672103881836, 0.05142323303222656, 0.05130179214477539, 
0.05141753768920899, 0.05161795043945312, 0.051533824920654295, 0.05148211288452149, 0.051622398376464845, 0.05159526443481445, 0.05156380844116211, 0.051778270721435544, 0.051580032348632815, 0.051409793853759767, 0.05198233413696289, 0.0513331184387207, 0.05110572814941406, 0.0509071044921875, 0.050954177856445314, 0.05108918380737305, 0.05096076965332031, 0.051042240142822264, 0.05087609481811523, 0.051385921478271486, 0.05099808120727539, 0.05110889434814453, 0.05101052856445312, 0.051109214782714844, 0.05110553741455078, 0.05117414474487305, 0.05110185623168945, 0.05140070343017578, 0.05142643356323242, 0.05148681640625, 0.05125814437866211, 0.05127135848999023, 0.05126176071166992, 0.05130035018920898, 0.05120819091796875, 0.05146988677978516, 0.05118563079833984, 0.05119753646850586, 0.05121484756469727, 0.05141337585449219, 0.0510832633972168, 0.05120819091796875, 0.051236862182617186, 0.05122457504272461, 0.051318687438964845, 0.051361888885498044, 0.05144908905029297, 0.05157759857177734, 0.05143328094482422, 0.0513947525024414, 0.05152767944335938, 0.05149491119384766, 0.051748863220214845, 0.05152972793579102, 0.052103168487548826, 0.051590240478515625, 0.05137420654296875, 0.05142403030395508, 0.05144780731201172, 0.051394561767578124, 0.05147238540649414, 0.05140889739990234, 0.05141708755493164, 0.05135475158691406, 0.05145484924316406, 0.05156454467773437, 0.05144915390014648, 0.05160825729370117, 0.05161939239501953, 0.05156047821044922, 0.051429790496826173, 0.05136588668823242, 0.05148057556152344]",tokens/s,19.479035448123945,, @@ -3680,7 +3680,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 400.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 348.12 MiB is free. Process 129816 has 14.40 GiB memory in use. Of the allocated memory 14.28 GiB is allocated by PyTorch, and 3.01 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 400.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 348.12 MiB is free. Process 123351 has 14.40 GiB memory in use. Of the allocated memory 14.28 GiB is allocated by PyTorch, and 3.01 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,816.96768,718.209024,0.0,322.961408,314.743808,s,1,7.3543603515625,7.3543603515625,0.0,7.3543603515625,7.3543603515625,7.3543603515625,7.3543603515625,[7.3543603515625],,kWh,4.528132787478019e-06,4.921199367924644e-07,1.0097230300071258e-06,6.02997575427761e-06,,MB,1109.393408,810.483712,0.0,404.750336,391.119872,s,28,0.23511241626739496,0.00839687200954982,5.0066419832193083e-05,0.008390496253967285,0.008446630477905273,0.008482257795333861,0.008542315378189086,"[0.008561599731445312, 0.008347359657287598, 0.008490176200866699, 0.008378623962402344, 0.00836246395111084, 0.008417216300964356, 0.008345791816711426, 0.008420000076293946, 0.008392512321472168, 0.008333632469177246, 0.00831372833251953, 0.008388480186462403, 0.008340831756591796, 0.00840828800201416, 0.008381471633911133, 0.008366623878479003, 0.008437088012695312, 0.008401503562927246, 0.008437664031982421, 0.008411968231201172, 0.008467552185058593, 0.008403200149536133, 0.008402400016784668, 0.008382783889770509, 0.008385055541992188, 0.008348480224609375, 0.008399552345275878, 0.008386367797851563]",tokens/s,30487.543421984927,kWh,2.677828497402986e-07,2.9515348800167903e-08,1.7613773129606e-07,4.734359298365265e-07,tokens/kWh,540727865.9403706,MB,1119.39584,823.066624,0.0,417.333248,391.122432,s,28,10.094631469726561,0.3605225524902344,0.02247068146145327,0.3560248718261719,0.3585250762939453,0.35991512908935547,0.4454740164184571,"[0.4770150146484375, 0.35675845336914064, 0.35692742919921877, 0.35595477294921873, 0.3581532897949219, 0.35647695922851563, 0.35771258544921875, 0.359392578125, 0.3544256591796875, 0.35476324462890624, 0.3531097717285156, 0.35399282836914064, 0.354876708984375, 0.3577764892578125, 0.3552135009765625, 0.35681475830078124, 0.3555978088378906, 0.3551192626953125, 0.3559044494628906, 0.35580642700195314, 0.3554816589355469, 0.356094970703125, 0.35668875122070315, 0.3554750671386719, 0.3562126159667969, 0.3550398864746094, 0.36019650268554687, 0.3576500244140625]",tokens/s,174.74634961069876,kWh,1.0572432539457617e-05,1.1659751416953781e-06,4.684432000777751e-06,1.6422839681930743e-05,tokens/kWh,3836120.9888272765,,s,1764,10.082759708881394,0.005715850175102823,0.0028396695228811914,0.0056310720443725586,0.005697811079025269,0.0057735600233078,0.006137980718612669,"[0.005451935768127442, 0.005694911956787109, 0.005716127872467041, 0.005833183765411377, 0.005695487976074219, 0.005738399982452393, 0.0056648640632629395, 0.005642240047454834, 0.005666783809661865, 0.005847072124481201, 0.005715583801269531, 0.005724544048309326, 0.005670911788940429, 0.005752831935882568, 0.00588972806930542, 0.0057432641983032225, 0.005648032188415527, 
0.12485836791992187, 0.005994336128234863, 0.005746848106384277, 0.005677055835723877, 0.005640096187591552, 0.005617760181427002, 0.005640416145324707, 0.005646111965179443, 0.00564134407043457, 0.005684095859527588, 0.005607423782348633, 0.0056258559226989744, 0.005586080074310303, 0.005639008045196533, 0.005646336078643799, 0.005631999969482422, 0.0056975998878479005, 0.005652416229248047, 0.005857279777526855, 0.005732160091400146, 0.0056724481582641605, 0.005646080017089844, 0.0056063361167907715, 0.005668863773345947, 0.005616991996765137, 0.005664735794067383, 0.005669472217559815, 0.005745759963989258, 0.005702239990234375, 0.005610208034515381, 0.005725120067596436, 0.005620192050933838, 0.00561900806427002, 0.005652607917785645, 0.005610655784606933, 0.00562553596496582, 0.005668320178985596, 0.005626399993896484, 0.005594336032867432, 0.0056096000671386715, 0.005651103973388672, 0.005603040218353271, 0.005607711791992188, 0.005594751834869385, 0.005601151943206787, 0.005672959804534912, 0.0053487358093261715, 0.00566048002243042, 0.005829823970794678, 0.00569920015335083, 0.005656479835510254, 0.00567084789276123, 0.00575107192993164, 0.005640192031860352, 0.005719808101654053, 0.005639776229858399, 0.005657023906707763, 0.005617887973785401, 0.005703680038452149, 0.005678336143493653, 0.005645055770874024, 0.005648672103881836, 0.005684959888458252, 0.0056295361518859865, 0.005683360099792481, 0.005615647792816162, 0.005832255840301514, 0.0056162881851196285, 0.0056258559226989744, 0.0056293120384216305, 0.005704319953918457, 0.0056258559226989744, 0.005584479808807373, 0.005620128154754639, 0.005591040134429932, 0.005576704025268555, 0.005662399768829346, 0.005558591842651367, 0.0056442880630493165, 0.005581151962280273, 0.006147456169128418, 0.0056118078231811525, 0.005596896171569824, 0.005625440120697022, 0.005673664093017578, 0.005819519996643066, 0.005682047843933106, 0.005592671871185303, 0.005717663764953613, 0.005618239879608154, 0.005618207931518555, 0.005633696079254151, 0.005638112068176269, 0.005595168113708496, 0.00559884786605835, 0.005642240047454834, 0.005654911994934082, 0.005627200126647949, 0.005609951972961426, 0.005610879898071289, 0.005659488201141358, 0.005621664047241211, 0.005635551929473877, 0.005616256237030029, 0.00561900806427002, 0.005775743961334228, 0.00571014404296875, 0.0056852478981018065, 0.005670911788940429, 0.005462143898010254, 0.005672351837158203, 0.0056938881874084475, 0.005642943859100342, 0.005635807991027832, 0.005670911788940429, 0.005609471797943116, 0.005649983882904053, 0.005628352165222168, 0.005670239925384521, 0.005642015933990479, 0.005604512214660644, 0.005652192115783691, 0.005644224166870117, 0.005637663841247559, 0.005656991958618164, 0.0056070399284362795, 0.005659135818481445, 0.005647359848022461, 0.005661695957183838, 0.005672639846801758, 0.005637728214263916, 0.005648575782775879, 0.00561411190032959, 0.0057712640762329105, 0.0056442880630493165, 0.005677055835723877, 0.005649856090545654, 0.005614143848419189, 0.005646207809448242, 0.005642303943634033, 0.005602719783782959, 0.005648672103881836, 0.005681280136108398, 0.0056302080154418946, 0.005625247955322266, 0.0056431999206542965, 0.005610559940338135, 0.005598048210144043, 0.005646016120910645, 0.005592864036560058, 0.005897503852844238, 0.005659647941589355, 0.00566921615600586, 0.005752480030059815, 0.005650207996368408, 0.006117599964141846, 0.00565772819519043, 0.005741439819335938, 0.005654079914093018, 0.005665215969085693, 0.0056483840942382815, 
0.005677055835723877, 0.005657599925994873, 0.005628960132598877, 0.005646304130554199, 0.005646336078643799, 0.005611072063446045, 0.005717472076416015, 0.005589983940124512, 0.005621503829956054, 0.005628159999847412, 0.005701632022857666, 0.005383647918701172, 0.005673503875732422, 0.005656576156616211, 0.005604351997375488, 0.005631328105926514, 0.005637440204620361, 0.005618239879608154, 0.006198400020599365, 0.005634751796722412, 0.005666656017303467, 0.0056475200653076174, 0.005649375915527344, 0.005646336078643799, 0.0056852478981018065, 0.0056483840942382815, 0.00566476821899414, 0.0056234879493713376, 0.00561359977722168, 0.005646431922912597, 0.0057112002372741695, 0.005601696014404297, 0.005632448196411133, 0.00561356782913208, 0.005638144016265869, 0.005622943878173828, 0.005605663776397705, 0.005655104160308838, 0.005598495960235596, 0.005700319766998291, 0.005609471797943116, 0.005598464012145996, 0.005632768154144287, 0.005642111778259277, 0.0056096000671386715, 0.0056135039329528805, 0.005603392124176026, 0.005705728054046631, 0.005608863830566406, 0.0056735677719116214, 0.005622911930084229, 0.0056265277862548825, 0.005627327919006347, 0.005628064155578614, 0.005742720127105713, 0.005609983921051025, 0.005632319927215576, 0.00563375997543335, 0.005650400161743164, 0.00563318395614624, 0.005595168113708496, 0.005622015953063965, 0.005632639884948731, 0.005698592185974121, 0.00565340805053711, 0.00562992000579834, 0.005658656120300293, 0.005629280090332031, 0.005730976104736328, 0.005600992202758789, 0.005593376159667969, 0.005635104179382324, 0.005600224018096924, 0.00563750410079956, 0.0053547840118408204, 0.005625919818878174, 0.005614336013793945, 0.005592127799987793, 0.005851583957672119, 0.0055996479988098145, 0.005638144016265869, 0.005631999969482422, 0.005609663963317871, 0.005643871784210205, 0.005616991996765137, 0.005630847930908203, 0.005634047985076904, 0.0056566400527954105, 0.005642399787902832, 0.005639071941375732, 0.005694047927856445, 0.005653120040893554, 0.005654176235198974, 0.005654528141021729, 0.0064386558532714844, 0.0060900158882141115, 0.005679232120513916, 0.0057721281051635745, 0.005627456188201904, 0.005675456047058105, 0.005650207996368408, 0.0056566400527954105, 0.005642399787902832, 0.0056236801147460935, 0.005717887878417969, 0.005633696079254151, 0.005647071838378906, 0.005633344173431396, 0.005812128067016601, 0.005694111824035644, 0.005626976013183594, 0.005659552097320557, 0.005713888168334961, 0.005675039768218994, 0.005768223762512207, 0.0057704000473022465, 0.00564192008972168, 0.005763199806213379, 0.005664063930511475, 0.005603744029998779, 0.005640480041503906, 0.00559660816192627, 0.005674848079681397, 0.005652224063873291, 0.005618336200714111, 0.005638463973999024, 0.0056258559226989744, 0.005609471797943116, 0.005650432109832764, 0.005611519813537597, 0.0056863040924072265, 0.005667808055877685, 0.005612576007843017, 0.00563647985458374, 0.00562992000579834, 0.0056408319473266605, 0.0060208959579467775, 0.005398528099060058, 0.005647903919219971, 0.005673439979553223, 0.005654528141021729, 0.0056436161994934085, 0.005665440082550049, 0.005638144016265869, 0.005645408153533936, 0.005747615814208984, 0.005658624172210694, 0.005678976058959961, 0.005658207893371582, 0.005701759815216065, 0.005628159999847412, 0.0056505918502807615, 0.00561356782913208, 0.005611839771270752, 0.005738175868988037, 0.00559830379486084, 0.00565340805053711, 0.005636159896850586, 0.005669919967651367, 0.005665760040283203, 0.005622911930084229, 
0.0056650562286376955, 0.005895711898803711, 0.00567193603515625, 0.005675007820129394, 0.005646560192108154, 0.005658400058746338, 0.005636096000671387, 0.005645823955535889, 0.005622208118438721, 0.005627552032470703, 0.005656991958618164, 0.005623807907104492, 0.005623807907104492, 0.0056258559226989744, 0.005620736122131348, 0.0056824002265930176, 0.005569568157196045, 0.005622879981994629, 0.005609119892120361, 0.005601280212402344, 0.005591040134429932, 0.005674975872039795, 0.00563375997543335, 0.00601529598236084, 0.005791744232177734, 0.005816319942474365, 0.005626976013183594, 0.005622655868530274, 0.0056945281028747555, 0.005658944129943847, 0.005583712100982666, 0.00562275218963623, 0.005596000194549561, 0.005590943813323974, 0.0056481599807739254, 0.005566976070404053, 0.005661664009094239, 0.005583072185516357, 0.005581727981567383, 0.005351871967315674, 0.0055927357673645015, 0.005861663818359375, 0.005599232196807862, 0.005593088150024414, 0.0055808000564575196, 0.005574656009674072, 0.005601280212402344, 0.0055668802261352535, 0.005585984230041504, 0.005609536170959472, 0.005597536087036133, 0.005701759815216065, 0.005819456100463867, 0.006220736026763916, 0.006209536075592041, 0.006112383842468262, 0.005630847930908203, 0.005658976078033447, 0.005942431926727295, 0.006500703811645508, 0.005635263919830322, 0.00566806411743164, 0.00565993595123291, 0.005824704170227051, 0.0056293439865112305, 0.005583744049072265, 0.005662432193756103, 0.005600736141204834, 0.005636991977691651, 0.00562716817855835, 0.005610144138336182, 0.00562716817855835, 0.005579487800598144, 0.005614943981170654, 0.005626656055450439, 0.005584767818450928, 0.00566476821899414, 0.005595136165618897, 0.005623807907104492, 0.005607423782348633, 0.005611519813537597, 0.0056274237632751465, 0.005595615863800049, 0.005617504119873047, 0.005621920108795166, 0.00561356782913208, 0.005627903938293457, 0.005601471900939942, 0.005625664234161377, 0.005611519813537597, 0.005642240047454834, 0.005631999969482422, 0.005606527805328369, 0.005675456047058105, 0.005648575782775879, 0.005651936054229736, 0.0056368961334228515, 0.00561356782913208, 0.005621632099151611, 0.005621664047241211, 0.005624032020568848, 0.005595136165618897, 0.0054579200744628905, 0.0056179518699646, 0.005590752124786377, 0.005627903938293457, 0.005619775772094726, 0.005631936073303223, 0.005582848072052002, 0.005594783782958984, 0.0055937919616699215, 0.0055968317985534664, 0.005603328227996827, 0.005595136165618897, 0.005574016094207763, 0.005615615844726562, 0.005590784072875977, 0.005890944004058838, 0.006743231773376465, 0.0072341117858886715, 0.007020607948303222, 0.005933248043060303, 0.005666816234588623, 0.005607264041900635, 0.005623968124389649, 0.005601280212402344, 0.005635488033294678, 0.005896800041198731, 0.005623839855194092, 0.00566048002243042, 0.005616896152496338, 0.005601439952850342, 0.00566326379776001, 0.005600575923919678, 0.0056143999099731445, 0.005635935783386231, 0.005613791942596436, 0.005615359783172608, 0.005619743824005127, 0.0056154241561889645, 0.005615327835083008, 0.005630688190460205, 0.005600895881652832, 0.005638688087463379, 0.005617504119873047, 0.005627903938293457, 0.005611519813537597, 0.005607168197631836, 0.005603583812713623, 0.00566048002243042, 0.005601471900939942, 0.005636096000671387, 0.005638144016265869, 0.005668863773345947, 0.005622111797332764, 0.005592639923095703, 0.005636191844940185, 0.005596288204193115, 0.005617695808410645, 0.0057923197746276855, 0.005624095916748047, 
0.005634047985076904, 0.005629024028778076, 0.005600160121917725, 0.005645311832427978, 0.0053821439743042, 0.005654528141021729, 0.005611199855804444, 0.005638463973999024, 0.0056863360404968265, 0.005675968170166016, 0.00562716817855835, 0.005627967834472656, 0.005765024185180664, 0.0056241598129272465, 0.005613984107971192, 0.0056217598915100095, 0.005694464206695556, 0.00564089584350586, 0.005593728065490722, 0.005627711772918701, 0.00559500789642334, 0.00557260799407959, 0.005636096000671387, 0.005580031871795654, 0.005630080223083496, 0.005605120182037354, 0.005630847930908203, 0.005646336078643799, 0.005609471797943116, 0.005633215904235839, 0.005614208221435547, 0.0056302080154418946, 0.00560697603225708, 0.0056910080909729, 0.005626207828521729, 0.005607840061187744, 0.0056356477737426755, 0.005591487884521485, 0.005600768089294434, 0.0056161279678344726, 0.005627520084381103, 0.005617536067962646, 0.005609983921051025, 0.005580671787261963, 0.005673056125640869, 0.005580192089080811, 0.005642784118652344, 0.005609151840209961, 0.005581215858459472, 0.005631999969482422, 0.005601280212402344, 0.0056200637817382815, 0.005588384151458741, 0.005654784202575683, 0.005650432109832764, 0.005602943897247315, 0.005619808197021485, 0.005597472190856934, 0.005605184078216553, 0.005611711978912353, 0.005601280212402344, 0.005654528141021729, 0.005615039825439453, 0.005571135997772217, 0.005619711875915527, 0.005582047939300537, 0.005603231906890869, 0.005328896045684814, 0.005587007999420166, 0.0057134079933166505, 0.005808576107025147, 0.005640543937683105, 0.005611167907714844, 0.005603328227996827, 0.005582240104675293, 0.005572224140167236, 0.005599967956542969, 0.005561600208282471, 0.005635072231292724, 0.005791168212890625, 0.005562943935394287, 0.005598239898681641, 0.005573599815368652, 0.005601280212402344, 0.005603295803070068, 0.005590496063232422, 0.00571449613571167, 0.005748479843139648, 0.005908448219299317, 0.005828479766845703, 0.005654943943023682, 0.005672959804534912, 0.005621344089508057, 0.005613984107971192, 0.005588607788085937, 0.005615200042724609, 0.005628704071044922, 0.005603328227996827, 0.005594783782958984, 0.005584415912628174, 0.005616543769836425, 0.0056109437942504885, 0.005659103870391846, 0.005634047985076904, 0.005586944103240967, 0.005629951953887939, 0.005659711837768555, 0.005616576194763184, 0.005606431961059571, 0.005612512111663818, 0.005652480125427246, 0.005611519813537597, 0.005621632099151611, 0.005603456020355225, 0.005615615844726562, 0.0056217598915100095, 0.005594560146331787, 0.0056059517860412595, 0.005599135875701904, 0.005616928100585937, 0.005638688087463379, 0.0055668802261352535, 0.005640160083770752, 0.005582752227783203, 0.0056217598915100095, 0.005620960235595703, 0.005571360111236572, 0.0055808000564575196, 0.005596320152282715, 0.005585855960845947, 0.005375936031341553, 0.005572800159454346, 0.005634751796722412, 0.005596255779266358, 0.005602208137512207, 0.00556441593170166, 0.005563936233520508, 0.005577184200286865, 0.0055623679161071774, 0.005613247871398926, 0.005595680236816406, 0.005551904201507568, 0.005631872177124023, 0.005574783802032471, 0.005588992118835449, 0.0055582718849182125, 0.005554175853729248, 0.005576704025268555, 0.005625152111053467, 0.005554463863372803, 0.005603744029998779, 0.005564703941345215, 0.005600992202758789, 0.005583136081695556, 0.005590464115142822, 0.0055790400505065915, 0.005560287952423096, 0.005601471900939942, 0.005729983806610108, 0.005621856212615967, 0.005621823787689209, 
0.005582848072052002, 0.005611519813537597, 0.005597184181213379, 0.005582431793212891, 0.005625823974609375, 0.0056936640739440915, 0.005648608207702637, 0.005609471797943116, 0.005612544059753418, 0.005598207950592041, 0.005588992118835449, 0.005619711875915527, 0.005594399929046631, 0.0055938239097595216, 0.005576704025268555, 0.0055474557876586915, 0.005591616153717041, 0.0055848960876464845, 0.005658048152923584, 0.005597536087036133, 0.005578976154327392, 0.005595136165618897, 0.005615520000457764, 0.005625951766967774, 0.0055848960876464845, 0.005586016178131104, 0.005907360076904297, 0.00556441593170166, 0.005599391937255859, 0.005594560146331787, 0.005580543994903564, 0.005618656158447265, 0.005347455978393555, 0.005714111804962159, 0.005601280212402344, 0.005615615844726562, 0.0055922560691833495, 0.005575488090515137, 0.005606656074523926, 0.005620480060577393, 0.005619904041290283, 0.005605184078216553, 0.0055920639038085935, 0.005618688106536865, 0.005592991828918457, 0.005619391918182373, 0.005583263874053955, 0.005570559978485107, 0.005586368083953857, 0.005589087963104248, 0.005625984191894531, 0.0056629438400268555, 0.005606527805328369, 0.005643551826477051, 0.005572319984436035, 0.005596511840820313, 0.00563267183303833, 0.005607423782348633, 0.005597087860107422, 0.005609568119049072, 0.005629439830780029, 0.0055895037651062015, 0.005590688228607177, 0.00564463996887207, 0.0055808000564575196, 0.005623839855194092, 0.005581952095031738, 0.005581920146942139, 0.00562713623046875, 0.005626368045806885, 0.005607552051544189, 0.005607295989990235, 0.0055760002136230466, 0.005628608226776123, 0.005578303813934326, 0.005639840126037598, 0.005589600086212158, 0.005580671787261963, 0.005603519916534424, 0.005578048229217529, 0.005658432006835938, 0.005609471797943116, 0.005600607872009277, 0.0056459841728210445, 0.005607423782348633, 0.005623807907104492, 0.005599232196807862, 0.005689343929290771, 0.005613247871398926, 0.0055790719985961915, 0.005605375766754151, 0.0057935361862182615, 0.005843200206756592, 0.005621407985687256, 0.005589344024658203, 0.005359615802764893, 0.005588992118835449, 0.005627903938293457, 0.005600351810455323, 0.005607935905456543, 0.00558735990524292, 0.0055764479637145994, 0.00561568021774292, 0.005962080001831055, 0.005650271892547607, 0.00561356782913208, 0.005608799934387207, 0.005634719848632812, 0.005581056118011475, 0.005631455898284912, 0.0055957441329956056, 0.005596864223480225, 0.005619487762451172, 0.005582143783569336, 0.005630879878997803, 0.005605375766754151, 0.005615231990814209, 0.005630239963531494, 0.005602431774139404, 0.005630943775177002, 0.005629951953887939, 0.005631040096282959, 0.005640960216522216, 0.005596704006195068, 0.00559990406036377, 0.005619584083557129, 0.005633408069610595, 0.0055857281684875484, 0.005574016094207763, 0.005589568138122559, 0.005603328227996827, 0.005576704025268555, 0.005641983985900879, 0.005595232009887695, 0.005625376224517822, 0.005601920127868652, 0.005615744113922119, 0.005615488052368164, 0.005597184181213379, 0.005728256225585937, 0.005671040058135986, 0.005633408069610595, 0.005808127880096436, 0.0055976958274841304, 0.005701632022857666, 0.005605216026306152, 0.005673120021820069, 0.005617152214050293, 0.005607935905456543, 0.0056258559226989744, 0.005607295989990235, 0.005611648082733154, 0.005658624172210694, 0.0056852478981018065, 0.005679103851318359, 0.005660672187805176, 0.005639552116394043, 0.005730432033538818, 0.0053366079330444335, 0.005619616031646729, 0.005648287773132324, 
0.005702527999877929, 0.00559827184677124, 0.005630559921264648, 0.005577087879180908, 0.0056044158935546875, 0.005632768154144287, 0.005777279853820801, 0.005920063972473144, 0.005594079971313477, 0.005649759769439698, 0.005605663776397705, 0.005640575885772705, 0.0056135358810424805, 0.005608575820922852, 0.005626016139984131, 0.006284031867980957, 0.006352543830871582, 0.006281023979187012, 0.005992512226104737, 0.005953440189361572, 0.005671487808227539, 0.005652480125427246, 0.005664480209350586, 0.005693727970123291, 0.005685408115386963, 0.005635583877563476, 0.005593311786651611, 0.005654655933380127, 0.005595136165618897, 0.005643551826477051, 0.005616352081298828, 0.005612800121307373, 0.005616384029388428, 0.0055848960876464845, 0.00564192008972168, 0.005636415958404541, 0.0055848960876464845, 0.005705728054046631, 0.005818143844604492, 0.005635488033294678, 0.005607391834259033, 0.005608287811279297, 0.005614880084991455, 0.0055979199409484865, 0.005593088150024414, 0.005605375766754151, 0.005615615844726562, 0.00562332820892334, 0.005609856128692627, 0.005625984191894531, 0.005609439849853516, 0.005595136165618897, 0.005617504119873047, 0.005593247890472412, 0.005637440204620361, 0.0056031041145324706, 0.005601439952850342, 0.005611519813537597, 0.005579520225524903, 0.005607423782348633, 0.0055354881286621095, 0.0056302080154418946, 0.005605375766754151, 0.005625279903411865, 0.005620287895202637, 0.005609471797943116, 0.005631999969482422, 0.005592607975006104, 0.0056427202224731444, 0.0056258559226989744, 0.005642240047454834, 0.005650239944458008, 0.005594719886779785, 0.005634655952453613, 0.005621471881866455, 0.005595424175262451, 0.005646207809448242, 0.0056152639389038085, 0.005620192050933838, 0.005603328227996827, 0.005611519813537597, 0.005638144016265869, 0.005593088150024414, 0.005633664131164551, 0.0056221442222595215, 0.005589248180389405, 0.005627647876739502, 0.005605728149414063, 0.005602975845336914, 0.005596704006195068, 0.005595104217529297, 0.0056427521705627445, 0.005609471797943116, 0.005619711875915527, 0.005640192031860352, 0.0056154241561889645, 0.005628032207489014, 0.00560748815536499, 0.005602880001068115, 0.005626304149627686, 0.006440959930419922, 0.005617568016052246, 0.005811647891998291, 0.0056241598129272465, 0.005601600170135498, 0.005647647857666016, 0.005601215839385987, 0.00560649585723877, 0.005590720176696777, 0.005574656009674072, 0.005638144016265869, 0.005619711875915527, 0.005631999969482422, 0.005607423782348633, 0.005605375766754151, 0.005633855819702148, 0.005606688022613525, 0.005602208137512207, 0.005615488052368164, 0.005611648082733154, 0.00563750410079956, 0.005589632034301758, 0.005623807907104492, 0.005357696056365967, 0.005615392208099365, 0.005610144138336182, 0.005662720203399658, 0.005636127948760986, 0.00562172794342041, 0.0056258878707885745, 0.005606880187988281, 0.005597311973571778, 0.005630335807800293, 0.005605375766754151, 0.005617631912231445, 0.005586976051330566, 0.005574656009674072, 0.005621503829956054, 0.005589248180389405, 0.005646624088287353, 0.00561078405380249, 0.005677504062652588, 0.005631872177124023, 0.005640160083770752, 0.005658783912658691, 0.005633952140808106, 0.005636191844940185, 0.005627903938293457, 0.005605375766754151, 0.005607423782348633, 0.005647679805755615, 0.005659167766571045, 0.005640192031860352, 0.0056303682327270505, 0.005684991836547851, 0.005611008167266846, 0.0056254081726074216, 0.005742847919464111, 0.00564521598815918, 0.00566044807434082, 0.005623167991638184, 
0.00564195203781128, 0.005622687816619873, 0.005656576156616211, 0.005615615844726562, 0.005617919921875, 0.005653439998626709, 0.005621920108795166, 0.005859839916229248, 0.006127776145935059, 0.005861343860626221, 0.005633056163787842, 0.005645408153533936, 0.005681056022644043, 0.005660672187805176, 0.005675072193145752, 0.005652160167694092, 0.005671040058135986, 0.005619679927825928, 0.0057077760696411135, 0.005691391944885254, 0.00563420820236206, 0.005642240047454834, 0.005809760093688965, 0.0060661759376525876, 0.005627744197845459, 0.005402912139892578, 0.005650432109832764, 0.0056310720443725586, 0.005659552097320557, 0.005699584007263184, 0.005703392028808593, 0.005613855838775635, 0.005617631912231445, 0.005639616012573242, 0.005603456020355225, 0.005628384113311767, 0.005604351997375488, 0.005596159934997558, 0.005651552200317383, 0.005622464179992676, 0.005635903835296631, 0.00564028787612915, 0.0057736320495605465, 0.005648575782775879, 0.005601088047027588, 0.005692575931549072, 0.005643104076385498, 0.005672959804534912, 0.005646336078643799, 0.0056640000343322755, 0.0059028158187866215, 0.0056239042282104496, 0.005629119873046875, 0.005641215801239013, 0.005639935970306396, 0.0056343040466308595, 0.005616640090942383, 0.0056492481231689455, 0.0056440639495849605, 0.005651904106140137, 0.00561407995223999, 0.005625311851501465, 0.005635039806365967, 0.005624896049499512, 0.005618624210357666, 0.005627264022827148, 0.005617472171783447, 0.005647168159484863, 0.00563750410079956, 0.005636735916137696, 0.005650432109832764, 0.005627456188201904, 0.005652927875518799, 0.005603328227996827, 0.005670015811920166, 0.005622655868530274, 0.005640192031860352, 0.005624864101409912, 0.005604320049285889, 0.0056258559226989744, 0.005612544059753418, 0.005628032207489014, 0.005606272220611572, 0.005631999969482422, 0.005609471797943116, 0.005629568099975586, 0.00564467191696167, 0.0056442880630493165, 0.005392384052276611, 0.005628223896026611, 0.00562278413772583, 0.005644224166870117, 0.005609344005584717, 0.00564518404006958, 0.005613152027130127, 0.005597599983215332, 0.0056258559226989744, 0.0056137280464172365, 0.005595200061798096, 0.005631775856018067, 0.005588992118835449, 0.005666944026947021, 0.00559500789642334, 0.005629727840423584, 0.005615359783172608, 0.005591519832611084, 0.005651679992675782, 0.0056061758995056155, 0.005640192031860352, 0.005605375766754151, 0.005584991931915283, 0.005654431819915771, 0.005826560020446778, 0.005627520084381103, 0.0056180481910705566, 0.005630015850067139, 0.005613024234771728, 0.00559113597869873, 0.0056236801147460935, 0.005609536170959472, 0.005638591766357422, 0.005629568099975586, 0.005754784107208252, 0.0056835517883300785, 0.005652607917785645, 0.005662720203399658, 0.005637695789337158, 0.005605823993682861, 0.005654528141021729, 0.005618847846984863, 0.005651296138763428, 0.0056483840942382815, 0.005646016120910645, 0.005632415771484375, 0.005599135875701904, 0.005643743991851807, 0.005634592056274414, 0.005645919799804688, 0.005633600234985351, 0.005624095916748047, 0.005687520027160644, 0.005640543937683105, 0.005658432006835938, 0.00564243221282959, 0.005607423782348633, 0.005652480125427246, 0.005600927829742431, 0.005658976078033447, 0.005617663860321045, 0.005599232196807862, 0.005646336078643799, 0.005373248100280762, 0.005667808055877685, 0.005599391937255859, 0.00562937593460083, 0.005628320217132568, 0.0056211199760437015, 0.005627711772918701, 0.005606207847595215, 0.005609471797943116, 0.005586559772491455, 
0.005560704231262207, 0.005619711875915527, 0.005566463947296142, 0.005599232196807862, 0.005583936214447021, 0.0055799040794372555, 0.005639071941375732, 0.005593311786651611, 0.005688000202178955, 0.005631999969482422, 0.005623807907104492, 0.005658239841461182, 0.005621600151062012, 0.005698336124420166, 0.005666336059570312, 0.005676928043365478, 0.005670591831207276, 0.005666528224945068, 0.005673920154571533, 0.005637663841247559, 0.005816800117492676, 0.00568236780166626, 0.005868447780609131, 0.00567849588394165, 0.00563865613937378, 0.005652480125427246, 0.005619872093200684, 0.00564796781539917, 0.005635903835296631, 0.0056406397819519045, 0.0057051520347595214, 0.005878335952758789, 0.0057019200325012205, 0.005631103992462159, 0.005673439979553223, 0.005615744113922119, 0.005610847949981689, 0.005648799896240234, 0.005646592140197754, 0.005660672187805176, 0.005642240047454834, 0.005629439830780029, 0.00565670394897461, 0.005616000175476074, 0.005628159999847412, 0.005633791923522949, 0.005629759788513183, 0.005635488033294678, 0.005613408088684082, 0.005641151905059814, 0.005627903938293457, 0.005650432109832764, 0.005628096103668213, 0.005391615867614746, 0.005603936195373535, 0.0056068158149719235, 0.005608191967010498, 0.005625631809234619, 0.005613247871398926, 0.0056241598129272465, 0.005600736141204834, 0.00563478422164917, 0.005750239849090576, 0.005603871822357178, 0.005610879898071289, 0.005611616134643555, 0.005654111862182617, 0.005636223793029785, 0.00563647985458374, 0.005613887786865234, 0.0056096000671386715, 0.005654016017913818, 0.005614240169525147, 0.005646175861358642, 0.0056258559226989744, 0.005621344089508057, 0.005663392066955567, 0.005615231990814209, 0.005680992126464844, 0.005644576072692871, 0.005668032169342041, 0.005655295848846435, 0.005627967834472656, 0.00566476821899414, 0.005615615844726562, 0.005635200023651123, 0.005624767780303955, 0.005627264022827148, 0.005717984199523926, 0.0056341438293457035, 0.005664544105529785, 0.0056464638710021975, 0.005617695808410645, 0.005681727886199951, 0.005624959945678711, 0.005687424182891846, 0.005649151802062988, 0.005652671813964844, 0.005670752048492432, 0.005657919883728027, 0.005671008110046387, 0.005624383926391602, 0.005765120029449463, 0.005711391925811767, 0.005634528160095215, 0.005660287857055664, 0.005626239776611328, 0.005668863773345947, 0.005634047985076904, 0.005666336059570312, 0.005622591972351074, 0.005610879898071289, 0.005744448184967041, 0.005611487865447998, 0.0056570878028869625, 0.005646336078643799, 0.005398591995239258, 0.005645023822784424, 0.005597280025482178, 0.005656544208526611, 0.005597216129302978, 0.005633887767791748, 0.005626016139984131, 0.005608640193939209, 0.0056360640525817875, 0.00561033582687378, 0.005647552013397217, 0.00565715217590332, 0.0058178558349609374, 0.005609856128692627, 0.0056277761459350584, 0.005628416061401367, 0.005615615844726562, 0.005629951953887939, 0.005611519813537597, 0.005615615844726562, 0.005634047985076904, 0.005609471797943116, 0.00562713623046875, 0.005624576091766358, 0.005608511924743652, 0.005651391983032226, 0.00563097620010376, 0.005670976161956787, 0.005669727802276612, 0.005640255928039551, 0.005724192142486572, 0.005666207790374756, 0.005723936080932617, 0.005642240047454834, 0.005661375999450683, 0.005699711799621582, 0.005635488033294678, 0.005679711818695068, 0.005642240047454834, 0.005646240234375, 0.0056054720878601074, 0.005605120182037354, 0.005646592140197754, 0.0056146240234375, 0.005630943775177002, 
0.005633503913879395, 0.005596896171569824, 0.005672863960266113, 0.0056267518997192385, 0.005643455982208252, 0.005632863998413086, 0.005673247814178467, 0.005649663925170898, 0.005616096019744873, 0.0056622719764709475, 0.005607359886169434, 0.005636608123779297, 0.005610976219177246, 0.005618207931518555, 0.005633535861968994, 0.0056202239990234375, 0.005636096000671387, 0.005631999969482422, 0.005351679801940918, 0.0056003198623657225, 0.005632991790771484, 0.005727519989013672, 0.0056388797760009764, 0.005646336078643799, 0.005638144016265869, 0.0056258559226989744, 0.005672095775604248, 0.00567145586013794, 0.005695648193359375, 0.005652544021606446, 0.0056341438293457035, 0.0056433920860290525, 0.005633056163787842, 0.005653535842895508, 0.005619904041290283, 0.005628255844116211, 0.005628128051757813, 0.0056098241806030276, 0.005678815841674805, 0.005650432109832764, 0.005643487930297852, 0.005616415977478027, 0.005648064136505127, 0.005635935783386231, 0.005601759910583496, 0.0056418561935424804, 0.005936927795410156, 0.005642848014831543, 0.0056250238418579105, 0.005624127864837646, 0.005634560108184815, 0.005629951953887939, 0.005646336078643799, 0.005619711875915527, 0.00561359977722168, 0.005664735794067383, 0.005609471797943116, 0.0056497278213500975, 0.005613791942596436, 0.0056232957839965824, 0.005667520046234131, 0.005625984191894531, 0.005632160186767578, 0.0056638078689575195, 0.005634943962097168, 0.005625823974609375, 0.005637631893157959, 0.0056735677719116214, 0.005656576156616211, 0.005687295913696289, 0.005697535991668701, 0.00564415979385376, 0.005662112236022949, 0.005637951850891113, 0.005651360034942627, 0.005748608112335205, 0.005675136089324951, 0.0056516480445861815, 0.005632448196411133, 0.005626239776611328, 0.005699584007263184, 0.005359712123870849, 0.005623167991638184, 0.0056284480094909665, 0.0056112961769104, 0.005625088214874268, 0.005630655765533447, 0.005624095916748047, 0.005631999969482422, 0.005639872074127197, 0.0056112961769104, 0.005655072212219238, 0.005627071857452393, 0.005654911994934082, 0.005632448196411133, 0.005637951850891113, 0.005666240215301514, 0.005645055770874024, 0.0056442880630493165, 0.005633696079254151, 0.00559116792678833, 0.005650527954101562, 0.0056275839805603025, 0.005645088195800781, 0.005648032188415527, 0.005647456169128418, 0.005626783847808838, 0.005641791820526123, 0.005642687797546387, 0.005654528141021729, 0.005666719913482666, 0.005681248188018799, 0.005616703987121582, 0.005692351818084717, 0.005592864036560058, 0.0059836478233337405, 0.006132415771484375, 0.005727935791015625, 0.005654047966003418, 0.005653120040893554, 0.005643775939941406, 0.005660575866699219, 0.005641088008880615, 0.005658624172210694, 0.005666816234588623, 0.005648447990417481, 0.0056070079803466795, 0.0056733121871948245, 0.005619711875915527, 0.00564134407043457, 0.005649280071258545, 0.005631648063659668, 0.005693439960479736, 0.0056200637817382815, 0.005695712089538574, 0.005643743991851807, 0.00566918420791626, 0.00564195203781128, 0.005624032020568848, 0.005900288105010986, 0.005596640110015869, 0.005650176048278809, 0.005622432231903076, 0.0055912318229675295, 0.005355616092681884, 0.005615615844726562, 0.005631999969482422, 0.005607135772705078, 0.005611680030822754, 0.005619584083557129, 0.005613632202148437, 0.005640384197235108, 0.005605375766754151, 0.005611519813537597, 0.005601280212402344, 0.005688576221466064, 0.005614336013793945, 0.005607423782348633, 0.0056442880630493165, 0.005631999969482422, 
0.0056239042282104496, 0.005652383804321289, 0.0056295042037963865, 0.0056713600158691405, 0.005662975788116455, 0.005635776042938232, 0.00560748815536499, 0.005619711875915527, 0.005683199882507324, 0.005601280212402344, 0.005617663860321045, 0.005618720054626465, 0.0055797438621521, 0.005756896018981933, 0.005601344108581543, 0.005631968021392823, 0.005629951953887939, 0.00561897611618042, 0.005646399974822998, 0.005585440158843994, 0.005641632080078125, 0.0056388797760009764, 0.005816256046295166, 0.0056341118812561035, 0.005597184181213379, 0.005629856109619141, 0.005622975826263428, 0.005854112148284912, 0.005631135940551758, 0.00562604808807373, 0.005663392066955567, 0.005687295913696289, 0.0056217598915100095, 0.0056217598915100095, 0.005758848190307617, 0.005726336002349853, 0.00560038423538208, 0.005608320236206055, 0.005636288166046143, 0.00562332820892334, 0.005646624088287353, 0.005607423782348633, 0.005659904003143311, 0.005585663795471192, 0.005621024131774902, 0.005611487865447998, 0.005690112113952637, 0.005453855991363526, 0.005606304168701172, 0.0056375679969787595, 0.005620160102844238, 0.005621183872222901, 0.005635776042938232, 0.005622367858886718, 0.005662528038024903, 0.005937280178070069, 0.00563043212890625, 0.0056130561828613285, 0.005636000156402588, 0.005609151840209961, 0.005593472003936768, 0.005627456188201904, 0.005616608142852783, 0.005656383991241455, 0.005609632015228272, 0.0055931200981140135, 0.005646336078643799, 0.005617663860321045, 0.005658624172210694, 0.0056557440757751464, 0.005640768051147461, 0.005670591831207276, 0.00563046407699585, 0.005632031917572022, 0.005650527954101562, 0.005650368213653565, 0.005703680038452149, 0.005650303840637207, 0.0056730880737304685, 0.005679103851318359, 0.005654528141021729, 0.005650368213653565, 0.0056310720443725586, 0.005663712024688721, 0.005848159790039062, 0.005644544124603272, 0.005648096084594726, 0.005657536029815674, 0.00579744005203247, 0.005595327854156494, 0.005713568210601806, 0.005626463890075683, 0.005631999969482422, 0.005636096000671387, 0.005617472171783447, 0.005642079830169678, 0.00560368013381958, 0.005652736186981202, 0.005630879878997803, 0.005639008045196533, 0.005669087886810303, 0.00566044807434082, 0.005652480125427246, 0.005661856174468994, 0.005663584232330323, 0.005618879795074463, 0.005614175796508789, 0.005652031898498535, 0.00562662410736084, 0.005636000156402588, 0.005375999927520752, 0.0056315197944641114, 0.0056713919639587405, 0.005623807907104492, 0.0056770238876342775, 0.005619743824005127, 0.0056341438293457035, 0.005662623882293701, 0.005659808158874511, 0.005697887897491455, 0.005615871906280517, 0.005660927772521972, 0.005638144016265869, 0.0056217598915100095, 0.00565177583694458, 0.005618368148803711, 0.005644320011138916, 0.0056295042037963865, 0.005616032123565674, 0.005615359783172608, 0.005607583999633789, 0.0056341438293457035, 0.005613344192504883, 0.0056304001808166505, 0.005625472068786621, 0.005598432064056397, 0.005622719764709473, 0.005629216194152832, 0.005669600009918213, 0.005627520084381103, 0.005623968124389649, 0.005648672103881836, 0.005606592178344727, 0.005630720138549805, 0.005629951953887939, 0.005588736057281494, 0.005650047779083252, 0.0055747518539428715, 0.00564031982421875, 0.005608191967010498, 0.005797120094299316, 0.005656991958618164, 0.005595136165618897, 0.005638144016265869, 0.005672192096710205, 0.005630144119262695, 0.005636191844940185, 0.005612095832824707, 0.005643455982208252, 0.005626656055450439, 0.005631872177124023, 
0.005635615825653076, 0.005594816207885742, 0.005630815982818603, 0.005617728233337402, 0.005615551948547363, 0.005640192031860352, 0.005582848072052002, 0.005607423782348633, 0.005590623855590821, 0.005623839855194092, 0.0055972480773925784, 0.005695807933807373, 0.005393184185028076, 0.005619711875915527, 0.005691391944885254, 0.005646336078643799, 0.005634047985076904, 0.005646336078643799, 0.0055948801040649416, 0.005628032207489014, 0.005593215942382813, 0.005654528141021729, 0.005608672142028808, 0.005633056163787842, 0.005641759872436524, 0.005624032020568848, 0.005621503829956054, 0.00578326416015625, 0.006793248176574707, 0.005968480110168457, 0.006121535778045654, 0.006065279960632324, 0.006225823879241943, 0.005679935932159424, 0.005709824085235596, 0.005649792194366455, 0.005668672084808349, 0.005650335788726806, 0.005661824226379395, 0.005689023971557617, 0.00562992000579834, 0.005666111946105957, 0.005671743869781494, 0.005660223960876465, 0.00564899206161499, 0.005675903797149658, 0.005711935997009278, 0.00565340805053711, 0.005711775779724121, 0.005658783912658691, 0.00566864013671875, 0.00566921615600586, 0.005686079978942871, 0.0056863360404968265, 0.00565180778503418, 0.005661280155181885, 0.005638144016265869, 0.0056275839805603025, 0.005677375793457031, 0.005662720203399658, 0.005703616142272949, 0.005717376232147217, 0.005651135921478271, 0.005695487976074219, 0.005649759769439698, 0.005667232036590576, 0.00566707181930542, 0.005662720203399658, 0.0056217598915100095, 0.005629951953887939, 0.005713920116424561, 0.006149824142456055, 0.005664351940155029, 0.005663455963134766, 0.005689343929290771, 0.005354015827178955, 0.005670911788940429, 0.005636096000671387, 0.005616703987121582, 0.005655007839202881, 0.005667295932769776, 0.0056741762161254886, 0.005665599822998047, 0.0056399679183959965, 0.005656511783599854, 0.00562614393234253, 0.005640192031860352, 0.005623519897460938, 0.005597472190856934, 0.00564793586730957, 0.005618112087249756, 0.005654528141021729, 0.00565772819519043, 0.005661568164825439, 0.005623807907104492, 0.005607423782348633, 0.005684607982635498, 0.005642879962921143, 0.005670911788940429, 0.0059169921875, 0.005620960235595703, 0.0061567678451538085, 0.0056260800361633305, 0.005664544105529785, 0.005631999969482422, 0.005646336078643799, 0.005646336078643799, 0.005640192031860352, 0.005671040058135986, 0.005652160167694092, 0.005652671813964844, 0.005668416023254395, 0.005740992069244385, 0.005679103851318359, 0.005678431987762451, 0.0056900157928466795, 0.005641600131988525, 0.005773151874542236, 0.005727007865905762, 0.005675007820129394, 0.005646336078643799, 0.005645567893981933, 0.0056921601295471195, 0.0056442880630493165, 0.00566431999206543, 0.005652575969696045, 0.00563750410079956, 0.005667808055877685, 0.005656576156616211, 0.005700640201568604, 0.005724319934844971, 0.005716032028198243, 0.005663487911224365, 0.00564134407043457, 0.005685887813568115, 0.005666975975036621, 0.0056976318359375, 0.005817728042602539]",tokens/s,174.95210150115787,, @@ -3726,7 +3726,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 14.12 MiB is free. Process 35522 has 14.72 GiB memory in use. 
Of the allocated memory 14.60 GiB is allocated by PyTorch, and 6.48 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 14.12 MiB is free. Process 29769 has 14.72 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 6.48 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,939.741184,12518.883328,0.0,12123.635712,12121.851904,s,1,7.0910576171875,7.0910576171875,0.0,7.0910576171875,7.0910576171875,7.0910576171875,7.0910576171875,[7.0910576171875],,kWh,5.995084929149167e-06,6.536890547941898e-07,3.3272248840046648e-06,9.975998867948022e-06,,MB,1327.08352,12544.049152,0.0,12138.315776,10311.21664,s,10,3.5256805114746097,0.352568051147461,0.009494322369546622,0.35446546936035156,0.3594026153564453,0.36019141998291015,0.36082246368408205,"[0.3253225402832031, 0.35563198852539063, 0.35113504028320314, 0.3545672607421875, 0.3541093444824219, 0.35436367797851565, 0.3530037536621094, 0.360980224609375, 0.3592273254394531, 0.35733935546875]",tokens/s,726.1009588555386,kWh,9.95890694555568e-06,1.0977229504491317e-06,6.659088660599758e-06,1.771571855660457e-05,tokens/kWh,14450444.060851319,MB,1356.386304,12550.340608,0.0,12144.607232,10311.2192,s,10,27.021487060546875,2.702148706054687,0.0018136617898095334,2.7023939208984373,2.7035677001953125,2.704416174316406,2.705094953613281,"[2.70197265625, 2.697788818359375, 2.702718505859375, 2.703379150390625, 2.7020693359375, 2.7013095703125, 2.7052646484375, 2.701328369140625, 2.70283154296875, 2.702824462890625]",tokens/s,23.314779034490705,kWh,7.923303308861126e-05,8.74009368760228e-06,5.241868082380049e-05,0.000140391807600014,tokens/kWh,448744.13312984305,,s,630,27.01925434875488,0.04288770531548394,0.00024527835886048937,0.042839584350585935,0.04304192733764648,0.04312481708526611,0.04451238708496094,"[0.04444156646728516, 0.04303683090209961, 0.042803009033203124, 0.0428120002746582, 0.042782943725585935, 0.042649375915527345, 0.04270486450195313, 0.04277657699584961, 0.04268854522705078, 0.042724735260009764, 0.04273401641845703, 0.04300614547729492, 0.042923038482666015, 0.043199455261230466, 0.043055103302001956, 0.04285235214233398, 0.04270406341552734, 0.04286547088623047, 0.04267827224731445, 
0.04275609588623047, 0.04283737564086914, 0.043025150299072265, 0.04293519973754883, 0.043248607635498044, 0.042877086639404295, 0.04295459365844727, 0.042799102783203126, 0.04283801651000976, 0.04288716888427734, 0.04286019134521484, 0.04292233657836914, 0.04276188659667969, 0.042799423217773434, 0.04267216110229492, 0.04277657699584961, 0.04289945602416992, 0.0430571517944336, 0.04291900634765625, 0.04287376022338867, 0.04286848068237305, 0.04288332748413086, 0.042778560638427734, 0.04279062271118164, 0.04290569686889648, 0.043151649475097656, 0.043040737152099606, 0.042969120025634765, 0.042941600799560546, 0.042879806518554685, 0.04281139373779297, 0.04283801651000976, 0.04283801651000976, 0.04276547241210937, 0.042807296752929686, 0.04292620849609375, 0.0428100814819336, 0.042906654357910155, 0.04269120025634766, 0.04271299362182617, 0.04276416015625, 0.042816062927246094, 0.042839103698730466, 0.04287910461425781, 0.044458560943603516, 0.04315571212768555, 0.04294451141357422, 0.042840065002441405, 0.0427147216796875, 0.042723232269287106, 0.04269311904907227, 0.04267737579345703, 0.04273651123046875, 0.0428807373046875, 0.04280319976806641, 0.0428172492980957, 0.04302396774291992, 0.042775104522705075, 0.04268646240234375, 0.04265532684326172, 0.04262790298461914, 0.042674175262451174, 0.04271923065185547, 0.04273971176147461, 0.0426343994140625, 0.042799583435058595, 0.04283536148071289, 0.04284902572631836, 0.04273916625976563, 0.04277078247070312, 0.04264729690551758, 0.042801822662353516, 0.04277596664428711, 0.042773086547851565, 0.04273971176147461, 0.042716320037841794, 0.042713951110839844, 0.04262412643432617, 0.04272012710571289, 0.04281536102294922, 0.042848384857177735, 0.04282745742797851, 0.04278300857543945, 0.042657825469970705, 0.04272127914428711, 0.042788864135742184, 0.04270428848266602, 0.042758750915527347, 0.0429854736328125, 0.04294607925415039, 0.0429859504699707, 0.04288211059570313, 0.042883071899414066, 0.042732479095458985, 0.0428438720703125, 0.04280963134765625, 0.0428741455078125, 0.04283670425415039, 0.04280115127563477, 0.04287807846069336, 0.04272422409057617, 0.04279667282104492, 0.04280358505249023, 0.04284393692016602, 0.04274739074707031, 0.04295139312744141, 0.0428581428527832, 0.04452614212036133, 0.043184127807617184, 0.04284764862060547, 0.04288572692871094, 0.042815486907958986, 0.04268761444091797, 0.04274879837036133, 0.04273561477661133, 0.04272860717773438, 0.04280972671508789, 0.04289728164672851, 0.04288735961914063, 0.04276675033569336, 0.042950016021728515, 0.042749824523925783, 0.042758750915527347, 0.042724544525146485, 0.042816062927246094, 0.04284662246704102, 0.04281753540039063, 0.04297318267822266, 0.04300822448730469, 0.04305887985229492, 0.04292822265625, 0.04297843170166016, 0.04286064147949219, 0.0428633918762207, 0.04301824188232422, 0.042893310546875, 0.042842113494873046, 0.042782463073730466, 0.04284646224975586, 0.04274358367919922, 0.04273955154418945, 0.04284454345703125, 0.04282777786254883, 0.042832927703857424, 0.042971134185791016, 0.04305750274658203, 0.04282953643798828, 0.04278953552246094, 0.04274639892578125, 0.04281894302368164, 0.04295654296875, 0.043026496887207034, 0.04306179046630859, 0.04304006576538086, 0.04287097549438477, 0.04292659378051758, 0.042847999572753905, 0.04279510498046875, 0.0428873291015625, 0.04297727966308594, 0.042907520294189455, 0.04285248184204102, 0.04279257583618164, 0.0427196159362793, 0.042760032653808594, 0.042770401000976566, 0.04306697463989258, 0.043055713653564455, 
0.04279814529418945, 0.04293523025512695, 0.04465673446655274, 0.04332950210571289, 0.04286671829223633, 0.04273971176147461, 0.04281081771850586, 0.042971710205078124, 0.04273551940917969, 0.04286268615722656, 0.04305920028686523, 0.042772159576416016, 0.042729312896728516, 0.04274428939819336, 0.042708992004394535, 0.04292144012451172, 0.042791454315185544, 0.042788864135742184, 0.04286259078979492, 0.04283801651000976, 0.04289945602416992, 0.042777854919433596, 0.04288179016113281, 0.043096065521240234, 0.04302547073364258, 0.042973407745361326, 0.04301078414916992, 0.04306470489501953, 0.04291798400878906, 0.04286019134521484, 0.04277478408813477, 0.042756736755371096, 0.042821632385253904, 0.04272515106201172, 0.04285257720947266, 0.042962944030761716, 0.042897407531738284, 0.04273107147216797, 0.042888992309570315, 0.042803230285644533, 0.04273011016845703, 0.04285440063476562, 0.042864543914794925, 0.042840160369873044, 0.04290316772460938, 0.04288550567626953, 0.043097633361816406, 0.042955135345458986, 0.042866783142089845, 0.0429035530090332, 0.04293222427368164, 0.04296908950805664, 0.043074817657470704, 0.04302710342407227, 0.042790302276611326, 0.04279536056518555, 0.04272572708129883, 0.04271462249755859, 0.04294451141357422, 0.04282624053955078, 0.04287676620483399, 0.042848289489746096, 0.04305263900756836, 0.04298761749267578, 0.04274732971191406, 0.044504478454589845, 0.043321758270263674, 0.042931713104248044, 0.04273196792602539, 0.04266009521484375, 0.04270284652709961, 0.04280428695678711, 0.04277139282226562, 0.042823680877685545, 0.042790912628173826, 0.04274380874633789, 0.04270489501953125, 0.042764190673828126, 0.04271452713012695, 0.04273011016845703, 0.04280489730834961, 0.042795425415039064, 0.042759521484375, 0.04268508911132812, 0.0426618881225586, 0.04265484619140625, 0.042801471710205076, 0.04325843048095703, 0.043374591827392575, 0.04311040115356445, 0.043054622650146486, 0.04292860794067383, 0.04284415817260742, 0.04280464172363281, 0.04297379302978516, 0.042840065002441405, 0.042872833251953124, 0.042790431976318356, 0.042834335327148435, 0.04277459335327148, 0.04281958389282227, 0.042794719696044925, 0.04278492736816406, 0.042741153717041014, 0.04291043090820312, 0.04297727966308594, 0.04278025436401367, 0.042764705657958986, 0.043034175872802734, 0.04291219329833985, 0.04300320053100586, 0.042998462677001956, 0.043036670684814454, 0.04295884704589844, 0.04306739044189453, 0.042939712524414066, 0.04281817626953125, 0.0427613754272461, 0.042824607849121094, 0.0428295669555664, 0.043014400482177736, 0.042780670166015625, 0.04286259078979492, 0.04277862548828125, 0.04272915267944336, 0.042925567626953126, 0.04286751937866211, 0.04281283187866211, 0.04451561737060547, 0.0431346549987793, 0.04294854354858398, 0.042864864349365234, 0.0428851203918457, 0.042896831512451175, 0.042818111419677736, 0.04284415817260742, 0.04273766326904297, 0.04276406478881836, 0.04275737762451172, 0.04272803115844727, 0.04288345718383789, 0.04293427276611328, 0.04278441619873047, 0.04280355072021484, 0.04278476715087891, 0.04280649566650391, 0.04267702484130859, 0.042782718658447266, 0.04291340637207031, 0.04305881500244141, 0.04311427307128906, 0.043058143615722654, 0.042891265869140625, 0.042870849609375, 0.04280928039550781, 0.042780670166015625, 0.04260825729370117, 0.042874496459960935, 0.04288710403442383, 0.04283065414428711, 0.04275609588623047, 0.04286185455322265, 0.042758880615234376, 0.04274585723876953, 0.042700801849365234, 0.04281958389282227, 0.04278179168701172, 
0.0427529296875, 0.04279286575317383, 0.04288931274414062, 0.04279500961303711, 0.0429117431640625, 0.0429117431640625, 0.04288716888427734, 0.043162784576416015, 0.043117408752441404, 0.04292607879638672, 0.042921760559082034, 0.042805152893066405, 0.04279328155517578, 0.042815486907958986, 0.04276428985595703, 0.042840065002441405, 0.04279635238647461, 0.04284070587158203, 0.042876991271972656, 0.042864639282226565, 0.04275923156738281, 0.042775390625, 0.04281967926025391, 0.042812767028808596, 0.044538944244384766, 0.04325062561035156, 0.04303247833251953, 0.04281100845336914, 0.04291836929321289, 0.042889217376708984, 0.042934207916259765, 0.04292147064208984, 0.042818111419677736, 0.0429854736328125, 0.04278681564331055, 0.04283526229858398, 0.04289401626586914, 0.042848255157470705, 0.04267612838745117, 0.042840415954589844, 0.04278656005859375, 0.0427760009765625, 0.04288774490356445, 0.0429051513671875, 0.04281593704223633, 0.04326399993896484, 0.04316364669799805, 0.043107902526855466, 0.04293875122070313, 0.042934337615966794, 0.042921375274658204, 0.04276079940795898, 0.04287897491455078, 0.04289535903930664, 0.04289535903930664, 0.04274995040893555, 0.04280934524536133, 0.042786785125732425, 0.04284204864501953, 0.042817726135253906, 0.042979328155517575, 0.043005790710449215, 0.042886238098144534, 0.04282262420654297, 0.04278476715087891, 0.042853694915771484, 0.04306172943115234, 0.042938591003417965, 0.04311014556884766, 0.04309148788452148, 0.04302511978149414, 0.042997760772705076, 0.043007137298583985, 0.042903968811035156, 0.043100574493408206, 0.0428928337097168, 0.04295731353759766, 0.0429752311706543, 0.04293017578125, 0.04294819259643555, 0.04300252914428711, 0.04290848159790039, 0.04269891357421875, 0.04276508712768555, 0.042805057525634765, 0.04282726287841797, 0.04284281539916992, 0.044598560333251956, 0.04320537567138672, 0.04294652938842773, 0.042893310546875, 0.042774528503417966, 0.04274585723876953, 0.04275814437866211, 0.04275948715209961, 0.04278956985473633, 0.04282572937011719, 0.042842113494873046, 0.042735294342041014, 0.04279328155517578, 0.042858497619628906, 0.04277766418457031, 0.04283603286743164, 0.04278499221801758, 0.04268304061889648, 0.042771903991699216, 0.042775104522705075, 0.04282767868041992, 0.04283564758300781, 0.04295065689086914, 0.04320707321166992, 0.04313087844848633, 0.04301824188232422, 0.04293427276611328, 0.042872833251953124, 0.0428353271484375, 0.04278540802001953, 0.04274358367919922, 0.042901729583740236, 0.04282572937011719, 0.042853824615478514, 0.0428650894165039, 0.042821758270263674, 0.04271615982055664, 0.042801822662353516, 0.04277196884155274, 0.04275491333007812, 0.04278476715087891, 0.04281865692138672, 0.04281564712524414, 0.04299033737182617, 0.04286975860595703, 0.04286800003051758, 0.04291926574707031, 0.042920318603515625, 0.04300799942016602, 0.04294041442871094, 0.04288889694213867, 0.0428895378112793, 0.04277862548828125, 0.04277657699584961, 0.04274585723876953, 0.04268396759033203, 0.042866657257080075, 0.04283622360229492, 0.04289257431030274, 0.04286313629150391, 0.04279951858520508, 0.04279478454589844, 0.042733440399169924, 0.04454291152954101, 0.04317184066772461, 0.042891265869140625, 0.042984798431396486, 0.0427977294921875, 0.042796993255615236, 0.04275791931152344, 0.04274332809448242, 0.04279372787475586, 0.04283715057373047, 0.04287910461425781, 0.042963680267333985, 0.042798751831054686, 0.042772830963134764, 0.042774528503417966, 0.042730720520019534, 0.042736030578613284, 
0.0426926383972168, 0.042655681610107424, 0.04276828765869141, 0.04301260757446289, 0.043063297271728515, 0.04310943984985351, 0.0429917106628418, 0.04308598327636719, 0.04282969665527344, 0.04291052627563476, 0.042969024658203125, 0.042993438720703124, 0.04285184097290039, 0.04280928039550781, 0.04294128036499024, 0.042816543579101564, 0.04276732635498047, 0.04271664047241211, 0.04279267120361328, 0.042762046813964845, 0.042894336700439455, 0.04282921600341797, 0.042715744018554686, 0.04268780899047851, 0.04278496170043945, 0.04280319976806641, 0.04286022567749023, 0.043175872802734376, 0.04305763244628906, 0.04308969497680664, 0.04295068740844726, 0.042947006225585935, 0.0428873291015625, 0.04284793472290039, 0.04280966567993164, 0.0428985595703125, 0.04303142547607422, 0.04288211059570313, 0.042855358123779295, 0.042902622222900394, 0.043117183685302735, 0.04273404693603516, 0.04279590225219727, 0.042838657379150394, 0.04283599853515625, 0.04287055969238281, 0.044707870483398436, 0.043235294342041014, 0.0429035530090332, 0.042829822540283204, 0.042788864135742184, 0.04274380874633789, 0.04267769622802734, 0.042693183898925784, 0.04277811050415039, 0.042732032775878906, 0.042774528503417966, 0.042772384643554685, 0.04272457504272461, 0.042756542205810544, 0.04267257690429688, 0.04265100860595703, 0.04278745651245117, 0.04279500961303711, 0.04265574264526367, 0.042788864135742184, 0.042858497619628906, 0.042898624420166016, 0.04300003051757813, 0.043012702941894534, 0.042872833251953124, 0.042942047119140625, 0.04347331237792969, 0.04296908950805664, 0.04287311935424805, 0.04278243255615234, 0.042831871032714845, 0.04279500961303711, 0.04280883026123047, 0.04277503967285156, 0.042883071899414066, 0.04278681564331055, 0.0429054069519043, 0.04280899047851563, 0.04272329711914063, 0.042695232391357425, 0.042764225006103516, 0.042925567626953126, 0.04290412902832031, 0.04292607879638672, 0.04303257751464844, 0.04301004791259765, 0.042893310546875, 0.04291788864135742, 0.043020286560058595, 0.04299980926513672, 0.04298294448852539, 0.04303071975708008, 0.04327657699584961, 0.04297068786621094, 0.04291219329833985, 0.04280313491821289, 0.04277814483642578, 0.04281331253051758, 0.04287276840209961, 0.04296777725219727, 0.04302796936035156, 0.042813953399658204, 0.04278659057617187]",tokens/s,23.31670563029553,, @@ -3770,7 +3770,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 400.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 312.12 MiB is free. Process 47525 has 14.43 GiB memory in use. Of the allocated memory 14.31 GiB is allocated by PyTorch, and 12.96 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 400.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 312.12 MiB is free. Process 41439 has 14.43 GiB memory in use. Of the allocated memory 14.31 GiB is allocated by PyTorch, and 12.96 MiB is reserved by PyTorch but unallocated. 
If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -3879,7 +3879,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 14.12 MiB is free. Process 58632 has 14.72 GiB memory in use. Of the allocated memory 14.61 GiB is allocated by PyTorch, and 4.70 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 14.12 MiB is free. Process 52220 has 14.72 GiB memory in use. Of the allocated memory 14.61 GiB is allocated by PyTorch, and 4.70 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -3922,7 +3922,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 560.00 MiB. 
GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 27981 has 14.73 GiB memory in use. Of the allocated memory 14.62 GiB is allocated by PyTorch, and 1.67 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 560.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 22376 has 14.73 GiB memory in use. Of the allocated memory 14.62 GiB is allocated by PyTorch, and 1.67 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,742.764544,6193.872896,0.0,5798.62528,5774.685184,s,1,7.30814697265625,7.30814697265625,0.0,7.30814697265625,7.30814697265625,7.30814697265625,7.30814697265625,[7.30814697265625],,kWh,4.32576990416654e-06,4.700026609990046e-07,2.377224124001931e-06,7.172996689167475e-06,,MB,1047.277568,6206.455808,0.0,5800.722432,5525.736448,s,10,2.3927240905761713,0.23927240905761718,0.009610978361010573,0.2413498229980469,0.24558556365966797,0.24645138931274413,0.24714404983520508,"[0.2117969207763672, 0.2372667236328125, 0.2394752655029297, 0.24016172790527343, 0.2419894714355469, 0.24731721496582032, 0.24520982360839844, 0.24539315795898436, 0.24071017456054689, 0.2434036102294922]",tokens/s,1069.9102374915062,kWh,6.58644672194437e-06,7.263188448296269e-07,4.35650965804443e-06,1.1669275224818428e-05,tokens/kWh,21937952.020836264,MB,1051.738112,6208.55296,0.0,5802.819584,5623.660032,s,10,17.640743408203125,1.7640743408203126,0.002191758134030772,1.76315966796875,1.7671169555664061,1.767554815673828,1.7679051037597657,"[1.76262060546875, 1.7623641357421875, 1.7670196533203124, 1.761020751953125, 1.7628199462890626, 1.7648861083984375, 1.76799267578125, 1.7661329345703125, 1.76238720703125, 1.7634993896484374]",tokens/s,35.71278065906472,kWh,5.2048872468055586e-05,5.739458893656275e-06,3.445896583875587e-05,9.224729720046773e-05,tokens/kWh,682946.8386818011,,s,630,17.638176202774062,0.02799710508376833,0.00031267521339151144,0.02794905662536621,0.02817639675140381,0.028387158775329588,0.029773395805358892,"[0.029443679809570314, 0.028571552276611328, 0.027992576599121095, 0.027808864593505858, 0.02789686393737793, 0.027770912170410156, 0.027739200592041015, 0.027689472198486328, 0.027746591567993164, 0.027676671981811524, 0.02773401641845703, 0.02784867286682129, 
0.027787296295166016, 0.02779961585998535, 0.02803296089172363, 0.0278154239654541, 0.02775059127807617, 0.02770102310180664, 0.02777142333984375, 0.027777023315429687, 0.027808927536010743, 0.028105567932128907, 0.02825356864929199, 0.028070240020751952, 0.027837823867797852, 0.02789878463745117, 0.02776678466796875, 0.027826175689697266, 0.027846656799316406, 0.02788675117492676, 0.02789401626586914, 0.027859264373779297, 0.027896095275878906, 0.027935007095336913, 0.02852854347229004, 0.02844633674621582, 0.028176511764526367, 0.028033151626586914, 0.027996095657348632, 0.02821126365661621, 0.028190656661987303, 0.028203008651733398, 0.027992063522338868, 0.028090368270874022, 0.027992095947265625, 0.027918560028076172, 0.02821865653991699, 0.0281246395111084, 0.027947488784790038, 0.02786886405944824, 0.027888479232788085, 0.027936767578125, 0.02787942314147949, 0.027903711318969727, 0.027988256454467772, 0.02800614356994629, 0.027918687820434572, 0.0279215030670166, 0.027949312210083007, 0.028006399154663086, 0.02790166473388672, 0.027915103912353516, 0.027994400024414064, 0.02965711975097656, 0.028714656829833984, 0.028100479125976564, 0.027877824783325195, 0.02780182456970215, 0.02776655960083008, 0.02771046447753906, 0.02781110382080078, 0.027743711471557617, 0.027674720764160155, 0.027699359893798826, 0.02783251190185547, 0.027789119720458985, 0.027770559310913087, 0.027732383728027343, 0.02778009605407715, 0.027673664093017577, 0.027684000015258788, 0.027780832290649413, 0.02774963188171387, 0.027659488677978517, 0.027762304306030272, 0.027835264205932617, 0.028010240554809572, 0.027754751205444336, 0.02776412773132324, 0.027880224227905273, 0.027821887969970704, 0.02782931137084961, 0.027871295928955077, 0.027858911514282228, 0.027855775833129884, 0.027867136001586915, 0.02799523162841797, 0.028023712158203123, 0.028037120819091797, 0.028163871765136718, 0.02815407943725586, 0.02814361572265625, 0.028098880767822267, 0.028174144744873047, 0.02891526412963867, 0.028252096176147462, 0.028002687454223633, 0.027975679397583008, 0.027850751876831056, 0.02790768051147461, 0.0280231990814209, 0.027973440170288084, 0.02785708808898926, 0.0279552001953125, 0.028215295791625978, 0.028062816619873046, 0.02811996841430664, 0.028019968032836913, 0.028074464797973632, 0.027971872329711912, 0.028000448226928713, 0.02789561653137207, 0.027987968444824218, 0.028006399154663086, 0.028024927139282226, 0.02810665512084961, 0.03003664016723633, 0.029128608703613282, 0.028288192749023437, 0.028132223129272462, 0.028026687622070313, 0.02781724739074707, 0.0277410888671875, 0.02798124885559082, 0.027775552749633788, 0.027807743072509765, 0.027850656509399413, 0.027868640899658202, 0.027816287994384764, 0.027851200103759764, 0.027843679428100586, 0.02793343925476074, 0.02778748893737793, 0.027842336654663086, 0.027757984161376953, 0.02784111976623535, 0.027804927825927736, 0.02794099235534668, 0.027890304565429687, 0.02783395195007324, 0.02781430435180664, 0.027852832794189455, 0.027858911514282228, 0.027879520416259764, 0.02803049659729004, 0.029004159927368163, 0.028035072326660155, 0.027971263885498046, 0.027991840362548828, 0.028027423858642576, 0.02816409683227539, 0.028089792251586913, 0.02815648078918457, 0.028012544631958007, 0.028051456451416015, 0.028039167404174805, 0.028008159637451173, 0.027990304946899414, 0.02792857551574707, 0.027978784561157228, 0.02790483283996582, 0.027936927795410155, 0.028025983810424804, 0.027967552185058593, 0.027942880630493164, 0.027911008834838866, 
0.027987968444824218, 0.028458400726318358, 0.028442975997924804, 0.028080127716064454, 0.028004608154296874, 0.0279837760925293, 0.028059776306152345, 0.028100448608398436, 0.02808844757080078, 0.027977567672729492, 0.028049631118774412, 0.028123071670532226, 0.028211200714111328, 0.029952224731445314, 0.028782495498657225, 0.02820355224609375, 0.02788483238220215, 0.027914016723632813, 0.0277142391204834, 0.027641855239868163, 0.027703296661376952, 0.027711488723754882, 0.02772377586364746, 0.027820287704467775, 0.027768287658691406, 0.027730207443237304, 0.027835968017578126, 0.027783008575439454, 0.027781280517578125, 0.027790943145751954, 0.027753343582153322, 0.027891328811645508, 0.02777052879333496, 0.028027584075927734, 0.027891712188720705, 0.02778508758544922, 0.027773056030273437, 0.02775654411315918, 0.02789583969116211, 0.02777225685119629, 0.027763328552246093, 0.027799264907836914, 0.027834144592285157, 0.02783078384399414, 0.02777529525756836, 0.02786604881286621, 0.027984607696533204, 0.028221376419067384, 0.028030815124511718, 0.02806809616088867, 0.02802284812927246, 0.027966623306274415, 0.02812291145324707, 0.028035167694091798, 0.02793769645690918, 0.027918399810791014, 0.027848224639892578, 0.027873952865600585, 0.027918079376220702, 0.02802284812927246, 0.02807539176940918, 0.027909791946411133, 0.02795564842224121, 0.027988447189331054, 0.028108192443847657, 0.027880096435546876, 0.027940959930419923, 0.027905696868896483, 0.027959487915039063, 0.027989023208618163, 0.028122079849243163, 0.02800828742980957, 0.027977888107299804, 0.027999359130859373, 0.028023679733276366, 0.02803264045715332, 0.029866783142089844, 0.02874563217163086, 0.02847145652770996, 0.028131519317626953, 0.02785251235961914, 0.02781337547302246, 0.02802943992614746, 0.027842336654663086, 0.02774822425842285, 0.027828575134277344, 0.02788688087463379, 0.027909055709838867, 0.02772764778137207, 0.02775859260559082, 0.02788262367248535, 0.02782912063598633, 0.027840511322021484, 0.028063743591308594, 0.027953216552734375, 0.028078144073486327, 0.027840639114379884, 0.02779680061340332, 0.027750688552856444, 0.027821344375610353, 0.02782912063598633, 0.027817983627319336, 0.027786815643310547, 0.02776054382324219, 0.027836671829223635, 0.027836704254150392, 0.02774220848083496, 0.02778688049316406, 0.028008832931518554, 0.028057600021362306, 0.02813337516784668, 0.028106752395629882, 0.028106719970703124, 0.027991167068481447, 0.028171167373657227, 0.02812495994567871, 0.02795132827758789, 0.02789580726623535, 0.027906047821044923, 0.027891712188720705, 0.02822524833679199, 0.02794268798828125, 0.02802070426940918, 0.02795779228210449, 0.027884960174560547, 0.027902143478393555, 0.0278799991607666, 0.027905887603759765, 0.02796134376525879, 0.02793471908569336, 0.02796134376525879, 0.027850879669189452, 0.027968896865844726, 0.028123647689819335, 0.02794495964050293, 0.027979904174804688, 0.02797350311279297, 0.027917728424072266, 0.028029472351074218, 0.029911008834838868, 0.02967747116088867, 0.028435968399047853, 0.028311391830444337, 0.027951871871948242, 0.02782841682434082, 0.02768227195739746, 0.02777948760986328, 0.027704832077026367, 0.0277774715423584, 0.02774233627319336, 0.027694976806640625, 0.027792383193969726, 0.02776166343688965, 0.02774220848083496, 0.02773606491088867, 0.027812864303588865, 0.027823104858398437, 0.027686975479125978, 0.0278035831451416, 0.02829120063781738, 0.02816806411743164, 0.02779136085510254, 0.02792188835144043, 0.027773471832275392, 
0.027768831253051757, 0.027844608306884764, 0.02790809631347656, 0.0278853759765625, 0.027888896942138672, 0.027929536819458006, 0.027830272674560546, 0.027899232864379883, 0.028008319854736327, 0.0279968318939209, 0.02810086441040039, 0.02799203109741211, 0.02801888084411621, 0.028101919174194336, 0.028114463806152342, 0.027998559951782225, 0.028013120651245116, 0.02815999984741211, 0.028128992080688475, 0.028018848419189453, 0.028219520568847658, 0.02811087989807129, 0.028026847839355468, 0.028016767501831054, 0.02800774383544922, 0.02793734359741211, 0.027983871459960938, 0.027926528930664062, 0.028039167404174805, 0.028026847839355468, 0.027977983474731447, 0.027981216430664063, 0.028245471954345704, 0.0279715518951416, 0.027988927841186523, 0.027987167358398436, 0.027995136260986327, 0.02799007987976074, 0.029646879196166993, 0.02871891212463379, 0.02826678466796875, 0.028129152297973633, 0.027922271728515625, 0.02784787178039551, 0.027771871566772462, 0.027811840057373048, 0.027785152435302735, 0.027886655807495116, 0.02798899269104004, 0.027795583724975585, 0.027916160583496094, 0.028022783279418945, 0.02794643211364746, 0.027846656799316406, 0.0279205436706543, 0.027876064300537108, 0.027932544708251954, 0.027846527099609376, 0.02783843231201172, 0.027876895904541017, 0.02795155143737793, 0.027932191848754884, 0.027951583862304688, 0.027985183715820313, 0.027863807678222656, 0.02784272003173828, 0.027879104614257813, 0.027855039596557617, 0.027789375305175782, 0.027862207412719726, 0.02793747138977051, 0.027992063522338868, 0.0281529598236084, 0.028181375503540038, 0.028096511840820314, 0.028042816162109376, 0.028105152130126952, 0.028077791213989258, 0.02948739242553711, 0.028118528366088868, 0.028002815246582033, 0.028010751724243162, 0.027967039108276366, 0.028028703689575194, 0.028020479202270507, 0.028015264511108397, 0.028071327209472655, 0.028127359390258788, 0.028112895965576173, 0.028071392059326173, 0.028103967666625977, 0.02812928009033203, 0.028184288024902342, 0.028209152221679686, 0.028160032272338868, 0.028196832656860352, 0.02798703956604004, 0.02809116744995117, 0.0281396484375, 0.028188928604125977, 0.028210880279541016, 0.02993731117248535, 0.028737024307250978, 0.02841846466064453, 0.0281112003326416, 0.027872383117675783, 0.027909088134765624, 0.027746368408203125, 0.02787708854675293, 0.02776038360595703, 0.027898208618164062, 0.027870912551879883, 0.027785728454589844, 0.02788140869140625, 0.027875328063964845, 0.027867136001586915, 0.027737632751464843, 0.027818464279174806, 0.02779862403869629, 0.027819936752319335, 0.02787833595275879, 0.02790928077697754, 0.028007328033447267, 0.02794905662536621, 0.027891712188720705, 0.027881471633911133, 0.027897823333740236, 0.02779743957519531, 0.027879680633544922, 0.02908361625671387, 0.02802060890197754, 0.02798201560974121, 0.02798182487487793, 0.027983680725097656, 0.028057600021362306, 0.02823097610473633, 0.02813164710998535, 0.028037504196166994, 0.02817638397216797, 0.02811635208129883, 0.02801318359375, 0.028001855850219727, 0.028045759201049805, 0.02797772789001465, 0.027950815200805664, 0.02808665657043457, 0.028022687911987306, 0.028006336212158204, 0.027955263137817384, 0.028005504608154298, 0.02795814323425293, 0.028006399154663086, 0.02797590446472168, 0.02808399963378906, 0.0279564151763916, 0.027960128784179687, 0.02794905662536621, 0.028080127716064454, 0.028026880264282225, 0.027960447311401366, 0.02800320053100586, 0.027987968444824218, 0.027983007431030275, 0.028234592437744142, 
0.029812576293945313, 0.02880953598022461, 0.02834889602661133, 0.0279117431640625, 0.028043712615966797, 0.027821760177612304, 0.027699520111083984, 0.02779136085510254, 0.02775654411315918, 0.027942783355712892, 0.027795743942260743, 0.027720575332641603, 0.02779849624633789, 0.027813791275024414, 0.027770975112915038, 0.02777907180786133, 0.02775654411315918, 0.027703296661376952, 0.027796863555908203, 0.027800384521484374, 0.027821887969970704, 0.027797504425048827, 0.02782339286804199, 0.027810495376586915, 0.027821439743041992, 0.027744672775268556, 0.02777891159057617, 0.02778972816467285, 0.027897151947021484, 0.02782908821105957, 0.02794713592529297, 0.027846015930175782, 0.027864479064941407, 0.027986656188964842, 0.02833430480957031, 0.02815795135498047, 0.028065792083740236, 0.028176223754882813, 0.028078208923339842, 0.02802403259277344, 0.028213920593261718, 0.028141727447509767, 0.02792959976196289, 0.027988832473754884, 0.028078559875488282, 0.028057376861572267, 0.028037023544311524, 0.02795699119567871, 0.027955455780029295, 0.027998207092285156, 0.02790934371948242, 0.027950944900512694, 0.027899999618530274, 0.02792108726501465, 0.027936735153198243, 0.027914560317993165, 0.02795039939880371, 0.027946624755859375, 0.027973663330078124, 0.028000383377075194, 0.027994911193847657, 0.02800873565673828, 0.028097248077392577, 0.029820928573608397, 0.028690240859985353, 0.028274879455566407, 0.02795929527282715, 0.02796771240234375, 0.027823904037475585, 0.027788736343383788, 0.027786111831665038, 0.027862720489501953, 0.027941024780273438, 0.02780143928527832, 0.027711488723754882, 0.02775449562072754, 0.02775449562072754, 0.027686464309692384, 0.02778067207336426, 0.027896703720092772, 0.027856992721557616, 0.02784675216674805, 0.02789356803894043, 0.02775040054321289, 0.027803712844848633, 0.0278035831451416, 0.027850751876831056, 0.027805152893066405, 0.027872800827026367, 0.02778828811645508, 0.02794291114807129, 0.027966720581054687, 0.027931392669677733, 0.027945119857788085, 0.027912031173706053, 0.027933952331542968, 0.027919103622436523, 0.02814156723022461, 0.02806915283203125, 0.028011232376098632, 0.02809196853637695, 0.028127359390258788, 0.02801020812988281, 0.0279736328125, 0.028240480422973634, 0.028536159515380858, 0.02796406364440918, 0.028086271286010742, 0.02806537628173828, 0.028131872177124023, 0.028052671432495117, 0.028035776138305664, 0.027901952743530273, 0.027937919616699218, 0.02795814323425293, 0.02795484733581543, 0.027935071945190428, 0.027962656021118165, 0.028005119323730468, 0.027985887527465822, 0.02798201560974121, 0.027914079666137695, 0.02796544075012207, 0.02799190330505371, 0.02798195266723633, 0.028090368270874022]",tokens/s,35.717978591285224,, @@ -3966,7 +3966,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 784.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 210.12 MiB is free. Process 132776 has 14.53 GiB memory in use. Of the allocated memory 14.41 GiB is allocated by PyTorch, and 5.21 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 784.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 210.12 MiB is free. Process 126347 has 14.53 GiB memory in use. Of the allocated memory 14.41 GiB is allocated by PyTorch, and 5.21 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -4013,7 +4013,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 12.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 88936 has 14.73 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 14.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 12.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 82553 has 14.73 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 14.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,818.118656,2127.495168,0.0,1732.247552,1728.316416,s,1,7.90771826171875,7.90771826171875,0.0,7.90771826171875,7.90771826171875,7.90771826171875,7.90771826171875,[7.90771826171875],,kWh,9.499211254145242e-06,1.040675102756e-06,3.082502466000836e-06,1.3622388822902077e-05,,MB,1110.9376,2328.82176,0.0,1923.088384,1891.2,s,10,0.48508626937866217,0.048508626937866214,0.0029105012856569454,0.047908863067626956,0.04953007087707519,0.05320087604522704,0.05613752017974854,"[0.056871681213378905, 0.048147838592529295, 0.04766988754272461, 0.0456190071105957, 0.04682697677612305, 0.04732592010498047, 0.048253406524658204, 0.047424320220947266, 0.04871433639526367, 0.04823289489746094]",tokens/s,5277.411795800891,kWh,1.8499921940927998e-06,2.040192039538802e-07,1.2289935992278652e-06,3.2830049972745454e-06,tokens/kWh,77977340.94603075,MB,1120.669696,2328.82176,0.0,1923.088384,1895.80032,s,10,12.342516357421875,1.2342516357421873,0.010719832370455143,1.2334990234375,1.2469368408203125,1.2498820556640624,1.2522382275390624,"[1.24167626953125, 1.2302158203125, 1.239170654296875, 1.2462823486328125, 1.2367822265625, 1.2528272705078125, 1.228632080078125, 1.227983154296875, 1.2144898681640626, 1.2244566650390625]",tokens/s,51.04307596247703,kWh,3.597472274965803e-05,3.967591333625105e-06,1.8983994793373237e-05,5.892630887665637e-05,tokens/kWh,1069131.9582204039,,s,630,12.339980672836315,0.019587270909263974,0.00045158069869127306,0.019530303955078127,0.01990556468963623,0.020072346115112303,0.02086324857711792,"[0.019812416076660157, 0.019705663681030272, 0.019674335479736328, 0.01954867172241211, 0.019330944061279297, 0.020469568252563478, 0.02086579132080078, 0.019900543212890625, 0.01983852767944336, 0.01953638458251953, 0.019448863983154298, 0.0195001277923584, 0.019581663131713868, 0.019593408584594726, 0.01961859130859375, 0.019644416809082032, 0.01983888053894043, 0.019578336715698242, 0.01976179122924805, 0.019737920761108398, 0.019557056427001954, 0.020711423873901368, 0.021198816299438476, 0.019617824554443358, 0.019466239929199217, 0.019545856475830077, 0.019523839950561523, 0.01981439971923828, 0.019611648559570313, 0.019566848754882814, 0.019605119705200194, 0.019908992767333985, 0.019617183685302735, 0.01945840072631836, 0.019310592651367187, 0.019538047790527344, 0.019540128707885743, 0.019650495529174805, 0.01965648078918457, 0.01973446464538574, 0.019793983459472655, 0.01971548843383789, 0.019530176162719726, 0.019464351654052733, 0.01949894332885742, 0.01960870361328125, 0.01951584053039551, 0.01951590347290039, 0.019480064392089845, 0.019564447402954103, 0.019626592636108397, 0.019834880828857423, 0.01981177520751953, 
0.019692256927490236, 0.01969136047363281, 0.019765247344970704, 0.019911712646484375, 0.019792863845825195, 0.01966640090942383, 0.019493408203125, 0.019562496185302734, 0.01957606315612793, 0.01969843292236328, 0.019678848266601563, 0.019524063110351562, 0.019511295318603517, 0.019324928283691405, 0.01941663932800293, 0.019515104293823242, 0.01956732749938965, 0.019517440795898438, 0.019328479766845704, 0.019384735107421874, 0.019412960052490234, 0.019599103927612306, 0.01948099136352539, 0.019326175689697266, 0.01927779197692871, 0.01957356834411621, 0.01943280029296875, 0.019587743759155275, 0.02006220817565918, 0.019517440795898438, 0.01957587242126465, 0.019624895095825195, 0.019646368026733398, 0.01965411186218262, 0.019597087860107422, 0.0193670711517334, 0.019575519561767576, 0.019530431747436523, 0.019435808181762694, 0.01941209602355957, 0.01928598403930664, 0.019555103302001952, 0.01952479934692383, 0.019637216567993165, 0.019605472564697267, 0.019529792785644533, 0.019593151092529296, 0.01942118453979492, 0.01951091194152832, 0.019318304061889648, 0.0194747200012207, 0.019573343276977538, 0.01951299285888672, 0.019451488494873048, 0.019372127532958985, 0.019557088851928712, 0.019744672775268556, 0.019781631469726564, 0.019568384170532225, 0.01992483139038086, 0.019501472473144533, 0.01936720085144043, 0.019409631729125975, 0.019597503662109376, 0.01942310333251953, 0.019425472259521483, 0.019443456649780273, 0.01956003189086914, 0.019443391799926758, 0.019442399978637694, 0.02000486373901367, 0.019402463912963866, 0.0195382080078125, 0.019469087600708007, 0.019619840621948242, 0.01944576072692871, 0.01977289581298828, 0.019482431411743165, 0.020191776275634767, 0.02085702323913574, 0.019685375213623048, 0.01961369514465332, 0.01950627136230469, 0.019452192306518554, 0.019260063171386718, 0.019414655685424803, 0.01946454429626465, 0.019826688766479493, 0.01928390312194824, 0.01918777656555176, 0.019335168838500977, 0.019343360900878907, 0.01927529525756836, 0.019466720581054687, 0.019537919998168944, 0.019728384017944335, 0.019806367874145508, 0.019565792083740235, 0.01952422332763672, 0.019351232528686525, 0.019390783309936523, 0.01956211280822754, 0.019507007598876955, 0.01941766357421875, 0.019363840103149413, 0.01927577590942383, 0.0194969596862793, 0.019306495666503908, 0.01927987289428711, 0.019414688110351564, 0.0196713924407959, 0.019738624572753907, 0.019647680282592773, 0.019612031936645506, 0.01945849609375, 0.01945599937438965, 0.02042790412902832, 0.019753856658935545, 0.01964195251464844, 0.019612064361572267, 0.019688608169555664, 0.01966374397277832, 0.020831615447998046, 0.02157606315612793, 0.020117727279663086, 0.019996480941772463, 0.019928319931030274, 0.020118463516235353, 0.019701984405517577, 0.01965648078918457, 0.019777536392211914, 0.01984547233581543, 0.019679967880249023, 0.019657663345336914, 0.01961759948730469, 0.019537471771240236, 0.019827552795410156, 0.01962940788269043, 0.019669696807861327, 0.019953056335449217, 0.019628543853759766, 0.019597375869750976, 0.019766271591186522, 0.019853311538696287, 0.01990518379211426, 0.019775903701782227, 0.019691328048706054, 0.019640064239501952, 0.01958540725708008, 0.01982796859741211, 0.019710847854614258, 0.019709823608398437, 0.019568479537963868, 0.019514751434326173, 0.019859903335571288, 0.019670751571655272, 0.019626720428466797, 0.019706975936889647, 0.019616479873657226, 0.019805503845214845, 0.01995475196838379, 0.019903488159179687, 0.019913440704345704, 0.019910655975341796, 
0.020494335174560546, 0.019853311538696287, 0.01973587226867676, 0.019932064056396484, 0.019552095413208008, 0.01975699234008789, 0.019773439407348634, 0.019877824783325195, 0.0201646728515625, 0.02000486373901367, 0.020002815246582033, 0.020330495834350586, 0.019965951919555663, 0.019933183670043944, 0.019937280654907227, 0.01986355209350586, 0.019958944320678712, 0.01985174369812012, 0.019871551513671874, 0.019755456924438478, 0.019779136657714844, 0.019685407638549805, 0.019509183883666993, 0.01955718421936035, 0.019519264221191407, 0.019793920516967774, 0.019775199890136718, 0.019566335678100587, 0.01959494400024414, 0.01967804718017578, 0.01963212776184082, 0.01963212776184082, 0.019525632858276368, 0.019501056671142578, 0.01982259178161621, 0.019886079788208007, 0.019711999893188475, 0.01964851188659668, 0.019625120162963867, 0.019503007888793944, 0.01937504005432129, 0.01930201530456543, 0.019267135620117188, 0.01944403266906738, 0.019579231262207033, 0.019525184631347656, 0.01957539176940918, 0.01967513656616211, 0.019684831619262697, 0.019632671356201174, 0.019599552154541015, 0.019656192779541014, 0.01986137580871582, 0.019732032775878906, 0.01990233612060547, 0.019679231643676756, 0.019647487640380858, 0.019512928009033204, 0.01971241569519043, 0.01960550308227539, 0.020002815246582033, 0.019564544677734375, 0.019486719131469727, 0.019441471099853516, 0.019546112060546874, 0.01964771270751953, 0.019746816635131836, 0.019745567321777343, 0.01970195198059082, 0.01979167938232422, 0.019599552154541015, 0.019695615768432616, 0.019762752532958984, 0.019816896438598634, 0.02004787254333496, 0.01954310417175293, 0.01939708709716797, 0.019503583908081056, 0.019451776504516603, 0.019779136657714844, 0.0198699836730957, 0.019851200103759764, 0.019763551712036132, 0.019659839630126952, 0.019915103912353516, 0.019663455963134766, 0.019548160552978516, 0.019475616455078126, 0.019438175201416014, 0.019484928131103516, 0.01945756721496582, 0.01946054458618164, 0.01955574417114258, 0.01959766387939453, 0.01952195167541504, 0.019593088150024415, 0.019492416381835936, 0.019562944412231446, 0.020164575576782227, 0.01981439971923828, 0.01967513656616211, 0.01957475280761719, 0.01961974334716797, 0.020203647613525392, 0.027420448303222655, 0.021888511657714844, 0.019779680252075195, 0.020435232162475586, 0.019833183288574217, 0.019912704467773438, 0.019854623794555663, 0.020156768798828124, 0.020091264724731446, 0.019713247299194336, 0.01957484817504883, 0.01959017562866211, 0.019619232177734376, 0.019826208114624023, 0.01963033676147461, 0.019452415466308593, 0.019442911148071288, 0.019478368759155273, 0.019630624771118165, 0.019357791900634767, 0.019720512390136717, 0.020288639068603516, 0.019825504302978515, 0.019886112213134764, 0.019459999084472657, 0.01944585609436035, 0.019501056671142578, 0.01960515213012695, 0.02037705612182617, 0.01965679931640625, 0.01939740753173828, 0.01939263916015625, 0.019355199813842774, 0.01946451187133789, 0.019494911193847657, 0.01927724838256836, 0.019249727249145508, 0.01951670455932617, 0.019647199630737303, 0.019689472198486328, 0.01943552017211914, 0.01941913604736328, 0.020133184432983398, 0.021340959548950194, 0.019939231872558593, 0.01986284828186035, 0.019925376892089845, 0.019730655670166016, 0.01960540771484375, 0.019660991668701173, 0.019572736740112305, 0.019382272720336914, 0.019288063049316406, 0.019517440795898438, 0.020083711624145507, 0.020059135437011717, 0.019608671188354493, 0.01992176055908203, 0.019710016250610352, 0.019525568008422853, 
0.019580703735351562, 0.019800191879272462, 0.019455904006958007, 0.019377952575683595, 0.0193089599609375, 0.019570688247680663, 0.019535680770874024, 0.019398847579956056, 0.01927577590942383, 0.019697664260864257, 0.01932195281982422, 0.019790752410888672, 0.0196997127532959, 0.019552255630493166, 0.019525568008422853, 0.01940275192260742, 0.019412479400634765, 0.019446176528930666, 0.01942252731323242, 0.019389280319213865, 0.019316736221313476, 0.01922867202758789, 0.01941094398498535, 0.019343360900878907, 0.01939241600036621, 0.019341407775878908, 0.01947644805908203, 0.019638303756713868, 0.019689151763916016, 0.019580352783203126, 0.01968124771118164, 0.019585952758789063, 0.019597312927246095, 0.01947161674499512, 0.019518016815185547, 0.019476512908935546, 0.019436800003051757, 0.01948054313659668, 0.01955936050415039, 0.0193832950592041, 0.019497983932495116, 0.01960140800476074, 0.01985740852355957, 0.019805952072143553, 0.019699296951293944, 0.019505023956298828, 0.01946659278869629, 0.019496927261352538, 0.0194069766998291, 0.01936345672607422, 0.019314783096313477, 0.019286655426025392, 0.01952902412414551, 0.01948847961425781, 0.01943440055847168, 0.019360927581787108, 0.019380352020263673, 0.019385120391845704, 0.019429567337036133, 0.019350431442260743, 0.01984921646118164, 0.019939327239990236, 0.019949567794799804, 0.019838016510009767, 0.01976211166381836, 0.019789663314819336, 0.019663007736206054, 0.019542015075683594, 0.019352832794189454, 0.01931248092651367, 0.019272607803344728, 0.01934115219116211, 0.02026019287109375, 0.019282751083374024, 0.01927577590942383, 0.01935478401184082, 0.01928201675415039, 0.019244096755981446, 0.01928876876831055, 0.019339935302734375, 0.019280223846435546, 0.019454015731811523, 0.01953171157836914, 0.0195379524230957, 0.019353567123413087, 0.019331071853637697, 0.01942732810974121, 0.019529727935791014, 0.019331071853637697, 0.02199519920349121, 0.019581247329711914, 0.019335168838500977, 0.019664896011352538, 0.019359743118286133, 0.01929360008239746, 0.019278047561645507, 0.01938470458984375, 0.019363840103149413, 0.019455808639526367, 0.019504640579223635, 0.01944646453857422, 0.019466239929199217, 0.019357696533203125, 0.019171327590942384, 0.019269567489624023, 0.019229791641235353, 0.019346399307250975, 0.01921558380126953, 0.01924892807006836, 0.019259584426879882, 0.019102527618408204, 0.019264896392822264, 0.019452512741088866, 0.01922380828857422, 0.019234943389892577, 0.019174272537231446, 0.020500255584716798, 0.019840576171875, 0.01998054313659668, 0.01927596855163574, 0.019269632339477538, 0.01919811248779297, 0.01927939224243164, 0.019228479385375977, 0.019286144256591798, 0.019247167587280272, 0.01930031967163086, 0.019257375717163086, 0.019080543518066408, 0.019159711837768555, 0.019212223052978514, 0.01912166404724121, 0.019226528167724608, 0.019077791213989257, 0.019147775650024415, 0.019212383270263672, 0.019465120315551757, 0.01920204734802246, 0.019164255142211914, 0.01913484764099121, 0.01951798439025879, 0.019194976806640625, 0.019213375091552735, 0.019060575485229492, 0.019191808700561523, 0.01923891258239746, 0.01924870491027832, 0.01927212715148926, 0.019197887420654296, 0.01929427146911621, 0.019451904296875, 0.019400703430175782, 0.019234560012817384, 0.01920742416381836, 0.019194879531860352, 0.019224576950073242, 0.019258752822875976, 0.019260032653808594, 0.019247007369995118, 0.01925948715209961, 0.01920796775817871, 0.019334688186645507, 0.01926144027709961, 0.019251903533935546, 
0.019167327880859376, 0.019211551666259766, 0.019399391174316407, 0.019759008407592774, 0.01940787124633789, 0.019243808746337892, 0.01916486358642578, 0.019185344696044923, 0.019573600769042968, 0.019156991958618166, 0.01922662353515625, 0.01920204734802246, 0.019277023315429687, 0.019385120391845704, 0.02008064079284668, 0.01925065612792969, 0.019330656051635742, 0.019352224349975584, 0.019329536437988282, 0.019299552917480468, 0.01925155258178711, 0.019240192413330078, 0.019551744461059572, 0.019479360580444336, 0.01919990348815918, 0.019531871795654295, 0.019371936798095703, 0.019929183959960937, 0.019296255111694336, 0.019318784713745117, 0.019320831298828126, 0.01934262466430664, 0.01942710494995117, 0.019311552047729493, 0.01943731117248535, 0.019506591796875, 0.019460960388183592, 0.019404800415039062, 0.019191808700561523, 0.0191441593170166, 0.019188255310058595, 0.0196011848449707, 0.01970812797546387, 0.019826688766479493, 0.019688608169555664, 0.019453887939453126, 0.019518367767333983, 0.019314687728881837, 0.019416767120361327, 0.01938163185119629, 0.01936684799194336, 0.020107616424560548, 0.01946998405456543, 0.01966080093383789, 0.0192491512298584, 0.019220224380493166, 0.019259647369384767, 0.019277824401855468, 0.019663999557495117, 0.01927891159057617, 0.019241823196411132, 0.019403968811035156, 0.0192325439453125, 0.019158559799194334, 0.019164703369140626, 0.019177471160888672, 0.01919276809692383, 0.019180864334106446, 0.019253952026367187, 0.019406688690185546, 0.019194015502929686, 0.019238304138183594, 0.019276384353637696, 0.01958502388000488, 0.01934329605102539, 0.019250560760498046, 0.019255136489868162, 0.019677728652954102, 0.019296575546264648, 0.019269632339477538, 0.020627231597900392, 0.020057695388793945, 0.019626623153686525, 0.01962188720703125, 0.01959446334838867]",tokens/s,51.05356456406804,, @@ -4059,7 +4059,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 192.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 190.12 MiB is free. Process 118004 has 14.55 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 1.55 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 192.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 190.12 MiB is free. Process 111723 has 14.55 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 1.55 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -4102,7 +4102,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 172.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 80.12 MiB is free. Process 73991 has 14.66 GiB memory in use. Of the allocated memory 14.55 GiB is allocated by PyTorch, and 791.00 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 172.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 80.12 MiB is free. Process 67136 has 14.66 GiB memory in use. Of the allocated memory 14.55 GiB is allocated by PyTorch, and 791.00 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -4145,7 +4145,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 268.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 180.12 MiB is free. Process 91963 has 14.56 GiB memory in use. 
Of the allocated memory 14.45 GiB is allocated by PyTorch, and 1.06 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 268.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 180.12 MiB is free. Process 85587 has 14.56 GiB memory in use. Of the allocated memory 14.45 GiB is allocated by PyTorch, and 1.06 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,784.490496,1835.99104,0.0,1440.743424,1427.743744,s,1,7.05362646484375,7.05362646484375,0.0,7.05362646484375,7.05362646484375,7.05362646484375,7.05362646484375,[7.05362646484375],,kWh,3.1605274458532523e-06,3.4151649806889433e-07,1.0275008220025916e-06,4.529544765924738e-06,,MB,1122.238464,1905.197056,0.0,1499.46368,1436.386304,s,12,0.5033521575927734,0.041946013132731115,0.002437993024933074,0.041618270874023436,0.042279608917236325,0.04542972679138183,0.048501982345581056,"[0.04927004623413086, 0.0406033935546875, 0.04117107009887695, 0.041630622863769534, 0.04228764724731445, 0.04175177764892578, 0.0382490234375, 0.041605918884277344, 0.04119152069091797, 0.042180385589599606, 0.04120348739624023, 0.0422072639465332]",tokens/s,6103.083007911408,kWh,1.6151644267725357e-06,1.7801160114274064e-07,1.0779382472486129e-06,2.871114275163889e-06,tokens/kWh,89163988.425848,MB,1134.297088,1947.140096,0.0,1541.40672,1461.974016,s,12,10.205782958984374,0.8504819132486979,0.004450718362695324,0.8499222717285156,0.8550165710449218,0.8571534606933594,0.8591819616699219,"[0.8453379516601562, 0.8471861572265625, 0.84700537109375, 0.8516913452148438, 0.855078857421875, 0.8440980834960937, 0.8474777221679688, 0.8596890869140625, 0.8511321411132813, 0.8539178466796875, 0.8544559936523437, 0.84871240234375]",tokens/s,74.07564936842759,kWh,2.4443933527395122e-05,2.6958722395343424e-06,1.371781433008491e-05,4.0857620097014364e-05,tokens/kWh,1541940.031025049,,s,756,10.199776342391967,0.013491767648666624,0.0003095512837554598,0.013427152156829835,0.013713024139404298,0.013854999780654909,0.014646281766891499,"[0.01340822410583496, 0.013647263526916503, 0.013458016395568848, 0.01344332790374756, 0.013385472297668457, 0.013342720031738281, 0.013332480430603028, 0.014006624221801758, 0.013272831916809082, 0.013309856414794922, 0.013288928031921386, 0.013208095550537109, 0.013518560409545898, 
0.013271231651306153, 0.013410400390625, 0.013361215591430664, 0.013408479690551757, 0.01322979164123535, 0.013428735733032226, 0.013270591735839844, 0.013275584220886231, 0.013336031913757325, 0.01352348804473877, 0.013321375846862793, 0.013333344459533691, 0.013344127655029297, 0.013388416290283204, 0.01339577579498291, 0.013289376258850098, 0.013285663604736328, 0.01333078384399414, 0.01328444766998291, 0.01333420753479004, 0.013455391883850098, 0.013326592445373535, 0.01332694435119629, 0.013494272232055664, 0.013565664291381835, 0.013385120391845704, 0.013366144180297851, 0.013340703964233399, 0.013662176132202148, 0.01349619197845459, 0.013391231536865235, 0.013453920364379883, 0.013270719528198242, 0.014025343894958495, 0.014036255836486817, 0.013308608055114746, 0.013387455940246581, 0.01335324764251709, 0.01330515193939209, 0.013455039978027343, 0.013410816192626953, 0.013345439910888672, 0.013300895690917969, 0.013264479637145997, 0.01327945613861084, 0.013420639991760254, 0.013355263710021973, 0.013418944358825683, 0.013468671798706054, 0.01372809600830078, 0.01319696044921875, 0.013486720085144044, 0.013742143630981446, 0.013416128158569336, 0.013461471557617188, 0.013457375526428223, 0.013316415786743164, 0.013350879669189453, 0.01339395236968994, 0.016293600082397462, 0.014012319564819336, 0.013524864196777344, 0.013332127571105957, 0.013531040191650391, 0.013469951629638672, 0.013308735847473144, 0.01332038402557373, 0.013338080406188965, 0.013319519996643067, 0.013375840187072753, 0.013427328109741211, 0.013338527679443359, 0.013627391815185547, 0.0134269437789917, 0.013291359901428223, 0.013361056327819825, 0.013373056411743163, 0.013236288070678712, 0.0133220796585083, 0.013337408065795898, 0.013377183914184571, 0.013356191635131836, 0.013342816352844239, 0.013502304077148437, 0.013654175758361817, 0.013277952194213866, 0.013316448211669923, 0.013428383827209473, 0.013324480056762695, 0.013279040336608887, 0.013289471626281739, 0.013314047813415527, 0.013381855964660645, 0.013432607650756836, 0.013284768104553223, 0.013314656257629395, 0.013371135711669922, 0.013319647789001465, 0.013308256149291993, 0.013320575714111329, 0.013322208404541016, 0.013276960372924805, 0.013396415710449219, 0.013391103744506836, 0.013347455978393555, 0.013483807563781739, 0.013443296432495117, 0.013432479858398438, 0.013350496292114258, 0.013302528381347656, 0.013484383583068848, 0.01351030445098877, 0.013400159835815429, 0.01305628776550293, 0.013363200187683106, 0.013541407585144043, 0.01337936019897461, 0.013598912239074707, 0.013531423568725586, 0.013371104240417481, 0.013404159545898438, 0.013520895957946777, 0.013326335906982421, 0.013740032196044923, 0.013465023994445801, 0.013394495964050294, 0.013385472297668457, 0.01391641616821289, 0.013385727882385253, 0.013502464294433594, 0.013448543548583984, 0.013494688034057617, 0.013322400093078614, 0.01336508846282959, 0.01334502410888672, 0.013443072319030762, 0.013334527969360351, 0.013436927795410156, 0.013346816062927246, 0.013340543746948243, 0.013359392166137695, 0.013380479812622071, 0.013283712387084961, 0.01342131233215332, 0.01337123203277588, 0.013348863601684571, 0.013537280082702637, 0.013381695747375489, 0.01333891201019287, 0.013424287796020508, 0.013277376174926758, 0.013459263801574707, 0.01361315155029297, 0.013475104331970215, 0.013499072074890137, 0.013500288009643554, 0.013486080169677735, 0.013534496307373047, 0.013417247772216798, 0.013428288459777832, 0.013474464416503906, 0.013387776374816895, 0.013714495658874512, 
0.013518912315368652, 0.013438719749450684, 0.01352079963684082, 0.013454336166381836, 0.01346560001373291, 0.01348198413848877, 0.01336729621887207, 0.013351072311401367, 0.013444255828857422, 0.013365728378295898, 0.013443296432495117, 0.01343283176422119, 0.013365023612976075, 0.013445152282714843, 0.013670368194580078, 0.01389363193511963, 0.013668000221252441, 0.013844415664672852, 0.013506655693054198, 0.013579808235168456, 0.01340681552886963, 0.013412544250488281, 0.013544544219970703, 0.01354640007019043, 0.013340671539306641, 0.013625344276428223, 0.013496128082275391, 0.013654208183288575, 0.013518143653869628, 0.013585087776184081, 0.01368883228302002, 0.013862175941467285, 0.013517536163330079, 0.013381983757019043, 0.013548928260803223, 0.013458047866821289, 0.013364288330078125, 0.013541983604431153, 0.013402112007141113, 0.013481344223022461, 0.013352704048156739, 0.013351743698120117, 0.013287487983703614, 0.013450240135192871, 0.013369791984558106, 0.01331884765625, 0.013378623962402345, 0.013407039642333984, 0.013369471549987792, 0.013414239883422852, 0.013477055549621583, 0.01341648006439209, 0.013447999954223633, 0.013365344047546386, 0.013301952362060547, 0.013393183708190919, 0.013314496040344239, 0.013352095603942871, 0.013357919692993164, 0.013282655715942383, 0.013346752166748048, 0.013581024169921876, 0.013391039848327636, 0.013487008094787598, 0.01352079963684082, 0.013613056182861329, 0.013782143592834473, 0.013744000434875489, 0.0137739839553833, 0.01427228832244873, 0.01369491195678711, 0.01354150390625, 0.0134717435836792, 0.013450592041015626, 0.013504223823547363, 0.013583295822143555, 0.01328774356842041, 0.013508607864379883, 0.013430111885070801, 0.013505184173583984, 0.013428447723388671, 0.013557279586791992, 0.013537887573242188, 0.013537535667419434, 0.013779935836791992, 0.01362831974029541, 0.01411689567565918, 0.013744256019592286, 0.013943872451782226, 0.01387609577178955, 0.013835455894470215, 0.013697792053222656, 0.013537599563598632, 0.013730496406555175, 0.01357852840423584, 0.013553407669067383, 0.01427295970916748, 0.013732383728027344, 0.013717503547668456, 0.013940799713134765, 0.014055328369140625, 0.013815839767456055, 0.014133248329162598, 0.013613056182861329, 0.013536704063415528, 0.013923935890197754, 0.01353212833404541, 0.0134717435836792, 0.01345529556274414, 0.013439040184020996, 0.013359135627746582, 0.013436896324157715, 0.013484064102172852, 0.013417856216430664, 0.013386336326599121, 0.013424223899841308, 0.013386143684387206, 0.01343283176422119, 0.013434687614440917, 0.013345184326171875, 0.01337116813659668, 0.013613056182861329, 0.013378687858581543, 0.013348896026611328, 0.013351743698120117, 0.013328415870666504, 0.013459456443786622, 0.013330431938171386, 0.013371392250061035, 0.013540736198425293, 0.013556351661682129, 0.013577471733093262, 0.013585503578186036, 0.013409279823303222, 0.01340073585510254, 0.013334624290466309, 0.01334876823425293, 0.013356672286987305, 0.013375871658325196, 0.013189120292663574, 0.01337929630279541, 0.013275424003601074, 0.013559679985046386, 0.013383808135986329, 0.013305855751037597, 0.013293408393859863, 0.013280768394470215, 0.013607487678527833, 0.013598464012145995, 0.013479488372802734, 0.013421183586120605, 0.013342944145202637, 0.013311936378479003, 0.013330431938171386, 0.013222975730895996, 0.01327785587310791, 0.013295904159545898, 0.013545503616333008, 0.013604576110839844, 0.013579520225524902, 0.013306879997253418, 0.013303263664245606, 0.013306400299072266, 
0.014053279876708985, 0.01340544033050537, 0.013426976203918457, 0.013216320037841798, 0.013614527702331543, 0.013408672332763672, 0.013406368255615234, 0.013430784225463867, 0.013444543838500977, 0.013392448425292969, 0.013703328132629394, 0.013373279571533203, 0.013720735549926758, 0.01353164768218994, 0.013404671669006347, 0.013342559814453125, 0.013473024368286133, 0.013214495658874512, 0.01333347225189209, 0.013247488021850586, 0.01329315185546875, 0.01334928035736084, 0.013363455772399902, 0.013242303848266601, 0.013319168090820312, 0.013223936080932617, 0.013257599830627442, 0.013401439666748047, 0.013256608009338379, 0.013238975524902344, 0.013502816200256347, 0.0133024320602417, 0.013310144424438477, 0.013302304267883302, 0.013311615943908691, 0.013327008247375488, 0.013455360412597657, 0.013381631851196289, 0.013362591743469238, 0.01315881633758545, 0.013442655563354493, 0.013373855590820313, 0.01336025619506836, 0.013410464286804199, 0.013302080154418945, 0.013273664474487305, 0.013451104164123534, 0.01343446445465088, 0.013415936470031739, 0.013366175651550292, 0.013602815628051757, 0.013381119728088378, 0.013449983596801758, 0.013314047813415527, 0.013498111724853516, 0.013358847618103027, 0.013291040420532226, 0.013425375938415527, 0.013330207824707032, 0.013285056114196777, 0.013379936218261718, 0.013373439788818359, 0.013357248306274414, 0.013774847984313965, 0.013658016204833985, 0.01364521598815918, 0.013893919944763184, 0.013737631797790526, 0.013673215866088868, 0.013637632369995116, 0.013557600021362306, 0.013457695960998535, 0.013634783744812012, 0.013416192054748536, 0.013355936050415039, 0.013399040222167969, 0.01359769630432129, 0.013463071823120117, 0.013420096397399902, 0.013398943901062011, 0.013385727882385253, 0.01359017562866211, 0.013733375549316406, 0.01360588836669922, 0.0133855037689209, 0.013579520225524902, 0.013425472259521485, 0.01345695972442627, 0.013344863891601562, 0.01333091163635254, 0.013332351684570313, 0.013292896270751952, 0.013351584434509277, 0.013404159545898438, 0.013340031623840332, 0.013387968063354492, 0.013299455642700195, 0.013388480186462402, 0.013527039527893067, 0.013305024147033691, 0.013390496253967285, 0.013375264167785644, 0.013187935829162598, 0.013324288368225098, 0.013229920387268066, 0.013330592155456542, 0.01332953643798828, 0.01343564796447754, 0.013451680183410645, 0.013403871536254882, 0.0133471040725708, 0.013383392333984376, 0.013256064414978027, 0.013317055702209472, 0.013346495628356934, 0.013234272003173828, 0.013270751953125, 0.013400128364562988, 0.013412287712097168, 0.013514687538146972, 0.013644351959228515, 0.013603584289550781, 0.013654975891113282, 0.013507967948913574, 0.013492287635803223, 0.013406304359436036, 0.013365728378295898, 0.01333897590637207, 0.013368960380554198, 0.013288800239562987, 0.013314463615417481, 0.014231295585632324, 0.015139583587646485, 0.014092320442199708, 0.014249728202819825, 0.013601792335510255, 0.013624320030212403, 0.013719552040100098, 0.013667424201965333, 0.01382691192626953, 0.014200063705444336, 0.014885536193847656, 0.013934752464294434, 0.01367676830291748, 0.013635199546813965, 0.013492095947265624, 0.013604831695556641, 0.013545791625976562, 0.013523072242736816, 0.013444992065429687, 0.013432479858398438, 0.014450528144836427, 0.016224992752075194, 0.014380831718444825, 0.01373369598388672, 0.013545663833618164, 0.013573439598083496, 0.0135600004196167, 0.013368160247802734, 0.013286111831665039, 0.013406271934509278, 0.013334783554077148, 0.01372873592376709, 
0.013534879684448243, 0.013379584312438965, 0.013154335975646973, 0.013436767578125, 0.013322400093078614, 0.013309568405151366, 0.013239680290222168, 0.013417471885681152, 0.013260767936706544, 0.01321894359588623, 0.013327263832092285, 0.013352800369262695, 0.013333919525146485, 0.013380319595336913, 0.013496352195739745, 0.013463520050048828, 0.01349465560913086, 0.013536928176879882, 0.013527039527893067, 0.013529088020324707, 0.013454591751098633, 0.013445152282714843, 0.013431232452392578, 0.013770432472229003, 0.013558431625366211, 0.01348761558532715, 0.013430303573608398, 0.013540255546569823, 0.013412256240844727, 0.013521023750305176, 0.013347135543823243, 0.013308768272399902, 0.01348691177368164, 0.013363295555114747, 0.013340479850769043, 0.013470975875854493, 0.013453920364379883, 0.013558015823364258, 0.01375267219543457, 0.013517663955688477, 0.013513759613037109, 0.01379139232635498, 0.013650976181030273, 0.013588864326477051, 0.013482208251953126, 0.013535231590270995, 0.013499903678894042, 0.013358912467956542, 0.013498656272888184, 0.01354793643951416, 0.013576319694519043, 0.01362502384185791, 0.013687232017517089, 0.013771776199340821, 0.01354422378540039, 0.013483231544494628, 0.013433792114257813, 0.013432640075683594, 0.013468768119812012, 0.01371337604522705, 0.013863615989685059, 0.013946304321289063, 0.013582367897033692, 0.013761311531066895, 0.013766143798828125, 0.017545183181762694, 0.014981120109558106, 0.013744128227233888, 0.01373139190673828, 0.013594847679138183, 0.013659520149230957, 0.013460639953613282, 0.013712672233581543, 0.01352131175994873, 0.013421792030334473, 0.013508671760559083, 0.013365983963012695, 0.01340403175354004, 0.013718879699707032, 0.013368063926696778, 0.013304863929748535, 0.013448127746582031, 0.013428223609924317, 0.013513248443603515, 0.013458944320678711, 0.013468000411987304, 0.01358403205871582, 0.013468192100524903, 0.013364607810974122, 0.013478752136230468, 0.013426464080810547, 0.013340319633483887, 0.013387743949890136, 0.013408448219299316, 0.013535743713378906, 0.01342835235595703, 0.013518912315368652, 0.013524991989135742, 0.013370495796203613, 0.01336793613433838, 0.013439104080200196, 0.013313183784484863, 0.013307200431823731, 0.013407551765441894, 0.013386079788208008, 0.013381631851196289, 0.013327360153198242, 0.013468192100524903, 0.0135665283203125, 0.013418399810791015, 0.013371392250061035, 0.013445440292358399, 0.013379263877868652, 0.013270688056945801, 0.013412416458129883, 0.013650208473205567, 0.013378879547119141, 0.013341376304626465, 0.01328553581237793, 0.013424480438232422, 0.013518815994262695, 0.013494175910949707, 0.013553055763244629, 0.013540063858032227, 0.013414400100708008, 0.013497823715209962, 0.01349891185760498, 0.013346015930175781, 0.013265983581542969, 0.01369388771057129, 0.013721599578857421, 0.013452896118164063, 0.013446944236755371, 0.013460096359252929, 0.013307904243469238, 0.013406016349792481, 0.013371583938598632, 0.013271039962768554, 0.013409631729125977, 0.013331071853637695, 0.013559712409973144, 0.016088479995727538, 0.014421792030334472, 0.013710335731506347, 0.013552895545959473, 0.013431776046752929, 0.013673760414123536, 0.013530591964721679, 0.013454303741455079, 0.013502495765686035, 0.013475808143615722, 0.013440959930419922, 0.01350214385986328, 0.01333523178100586, 0.01333625602722168, 0.013387776374816895, 0.013421919822692871, 0.013480607986450194, 0.013439264297485352, 0.01340608024597168, 0.013406047821044922, 0.013414400100708008, 
0.013426688194274903, 0.013448320388793945, 0.013335424423217774, 0.013364992141723632, 0.013394304275512695, 0.01339788818359375, 0.01346889591217041, 0.014002079963684083, 0.013525664329528808, 0.0135863676071167, 0.015020319938659668, 0.013725695610046386, 0.013516799926757812, 0.013505536079406738, 0.013541567802429199, 0.013402303695678712, 0.013486592292785645, 0.013396096229553223, 0.013385727882385253, 0.013565695762634278, 0.013357312202453613, 0.013466976165771484, 0.013393856048583985, 0.013417247772216798, 0.013852607727050782, 0.013526399612426758, 0.013433792114257813, 0.013498047828674316, 0.013420415878295898, 0.013270591735839844, 0.013412799835205078, 0.01350607967376709, 0.013365728378295898, 0.013545056343078614, 0.013685152053833008, 0.013596672058105469, 0.013448863983154298, 0.013770591735839844, 0.01367910385131836, 0.013684736251831055, 0.01406771183013916, 0.013525247573852539, 0.013422335624694824, 0.013424639701843261, 0.013389823913574218, 0.013389408111572266, 0.01344758415222168, 0.013356287956237793, 0.013380640029907227, 0.013453023910522461, 0.013321503639221192, 0.013586496353149414, 0.013456031799316407, 0.013325887680053711, 0.013464287757873535, 0.0134550724029541, 0.01358847999572754, 0.01370307159423828, 0.01363270378112793, 0.013635680198669434, 0.01342950439453125, 0.013340736389160156, 0.013404159545898438, 0.013271072387695312, 0.01328438377380371, 0.013447360038757324, 0.013423616409301758, 0.013270848274230957, 0.013330623626708985, 0.013438495635986328, 0.013371616363525391, 0.013604864120483399, 0.01338761615753174, 0.013346976280212402, 0.013465344429016113, 0.013562111854553223, 0.013656384468078613, 0.013416128158569336, 0.013345888137817383, 0.01353324794769287, 0.013357536315917969, 0.013326560020446777, 0.013439135551452637, 0.013342720031738281, 0.013330528259277344, 0.013381535530090333, 0.013297663688659669, 0.013287487983703614, 0.013334464073181153, 0.013524991989135742, 0.013743103981018067, 0.013544608116149902]",tokens/s,74.11927228815188,, @@ -4189,7 +4189,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 500.12 MiB is free. Process 44527 has 14.25 GiB memory in use. Of the allocated memory 14.13 GiB is allocated by PyTorch, and 8.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 500.12 MiB is free. Process 38504 has 14.25 GiB memory in use. Of the allocated memory 14.13 GiB is allocated by PyTorch, and 8.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -4232,7 +4232,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 400.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 316.12 MiB is free. Process 52528 has 14.43 GiB memory in use. Of the allocated memory 14.30 GiB is allocated by PyTorch, and 13.04 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 400.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 316.12 MiB is free. Process 46341 has 14.43 GiB memory in use. Of the allocated memory 14.30 GiB is allocated by PyTorch, and 13.04 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,816.074752,1129.250816,0.0,734.0032,709.336064,s,1,7.43693310546875,7.43693310546875,0.0,7.43693310546875,7.43693310546875,7.43693310546875,7.43693310546875,[7.43693310546875],,kWh,5.201868145816964e-06,5.662632469932491e-07,1.980001584002411e-06,7.748132976812624e-06,,MB,1107.88608,1276.051456,0.0,870.31808,809.960448,s,15,0.25941091346740724,0.01729406089782715,0.0005297255663439621,0.01726335906982422,0.01760234909057617,0.01818601589202881,0.018896005973815917,"[0.019073503494262695, 0.01717180824279785, 0.01726335906982422, 0.017280704498291017, 0.0169836483001709, 0.017047456741333008, 0.01691372871398926, 0.01683955192565918, 0.017281055450439453, 0.01729737663269043, 0.0178056640625, 0.017280031204223632, 0.016884416580200196, 0.016996864318847657, 0.017291744232177733]",tokens/s,14802.769662512534,kWh,6.447960063649882e-07,7.107573282732315e-08,4.25537500105971e-07,1.1414092392982822e-06,tokens/kWh,224284149.0904561,MB,1117.888512,1311.70304,0.0,905.969664,809.963008,s,15,9.905081726074219,0.6603387817382812,0.011113331088490127,0.6602666015625,0.672424609375,0.6754632690429687,0.6802393334960937,"[0.6602666015625, 0.6636019897460937, 0.6598812255859375, 0.663210693359375, 0.6421433715820313, 0.6421513671875, 0.6425101318359375, 0.6600648803710938, 0.6729046630859375, 0.681433349609375, 0.6664047241210938, 0.6534988403320312, 0.6589554443359374, 0.6717045288085938, 0.6663499145507813]",tokens/s,95.40557323342162,kWh,1.8989165730301262e-05,2.0941903259328287e-06,8.994136702293818e-06,3.0077492758527917e-05,tokens/kWh,2094589.482766562,,s,945,9.89892097187043,0.010475048647481926,0.00034942214293159305,0.010455455780029297,0.010799897193908692,0.010900223731994629,0.011734079666137695,"[0.010524831771850585, 0.011044447898864745, 0.010804767608642579, 0.01053769588470459, 0.010745183944702148, 0.010670751571655273, 0.010538944244384766, 0.010553279876708984, 0.010522751808166505, 0.010623040199279786, 0.010565567970275878, 0.01033407974243164, 0.010340703964233398, 0.01020633602142334, 0.010228128433227538, 0.010248448371887207, 0.010231807708740234, 0.010229791641235352, 0.010223584175109863, 0.010192831993103028, 0.010149951934814452, 0.010299136161804199, 0.010492064476013184, 0.010332256317138673, 0.010460960388183593, 0.010344672203063964, 0.010338303565979003, 0.010264575958251953, 0.01027187156677246, 0.010599007606506347, 0.010914079666137695, 0.010866687774658204, 0.010795007705688477, 0.01064857578277588, 0.010570624351501464, 0.01064179229736328, 0.010612480163574219, 0.01053286361694336, 0.010628992080688477, 0.010574175834655761, 0.010753120422363281, 0.010563584327697753, 0.010381728172302247, 
0.010374879837036133, 0.010424896240234375, 0.010231871604919434, 0.01022969627380371, 0.010226976394653321, 0.010394335746765137, 0.010491904258728027, 0.010414079666137695, 0.010389727592468262, 0.010287967681884766, 0.010316736221313476, 0.010220959663391113, 0.010297951698303222, 0.01044863986968994, 0.010514687538146973, 0.010532032012939454, 0.010435680389404296, 0.01058521556854248, 0.010692352294921876, 0.010808159828186035, 0.010562175750732422, 0.010714816093444825, 0.010645824432373047, 0.01053600025177002, 0.010519488334655762, 0.010887040138244628, 0.01085632038116455, 0.010617055892944337, 0.010618240356445312, 0.010597023963928223, 0.010751999855041505, 0.010502143859863282, 0.010500096321105956, 0.010448896408081054, 0.010335359573364258, 0.010203712463378907, 0.01036847972869873, 0.010305919647216798, 0.010455455780029297, 0.010528575897216797, 0.010365183830261231, 0.01032192039489746, 0.010554880142211913, 0.010589856147766114, 0.010404640197753907, 0.010297408103942871, 0.010327296257019043, 0.0102259521484375, 0.010539487838745117, 0.010960351943969727, 0.01083683204650879, 0.010840928077697754, 0.010965855598449707, 0.01082367992401123, 0.010553631782531738, 0.010542816162109375, 0.010512384414672851, 0.010647551536560058, 0.0106496000289917, 0.010687999725341797, 0.010473919868469237, 0.010450431823730469, 0.010472000122070313, 0.01069046401977539, 0.0107357120513916, 0.010489439964294434, 0.010465632438659668, 0.010470879554748536, 0.010446592330932617, 0.010420160293579101, 0.010240032196044921, 0.010281855583190919, 0.010358688354492187, 0.010215200424194337, 0.010230079650878906, 0.010329983711242675, 0.010254464149475097, 0.010201087951660156, 0.010233856201171876, 0.01031372833251953, 0.01070473575592041, 0.011079327583312988, 0.010897919654846192, 0.010456864356994628, 0.010522720336914062, 0.01052070426940918, 0.010590208053588868, 0.010569184303283692, 0.010629664421081543, 0.010823264122009277, 0.010660160064697265, 0.010504287719726562, 0.010473695755004883, 0.010469152450561523, 0.010438976287841796, 0.010337984085083008, 0.010372447967529296, 0.010510751724243164, 0.01049129581451416, 0.010369888305664062, 0.010210304260253907, 0.010255071640014648, 0.010192543983459472, 0.010182304382324219, 0.010142687797546387, 0.01010649585723877, 0.010172320365905761, 0.01024227237701416, 0.010199040412902831, 0.010194527626037597, 0.010254752159118653, 0.010178655624389648, 0.01028707218170166, 0.010684096336364746, 0.010882559776306153, 0.010722304344177246, 0.010713055610656738, 0.010653727531433106, 0.010561408042907715, 0.010559840202331543, 0.010671903610229492, 0.010686464309692383, 0.010729472160339355, 0.010584063529968261, 0.010483584403991698, 0.010444928169250488, 0.010520575523376464, 0.010780256271362304, 0.010393183708190918, 0.01028211212158203, 0.010316767692565919, 0.010276960372924806, 0.010239871978759765, 0.010244864463806153, 0.010446463584899902, 0.010401984214782715, 0.010434752464294433, 0.011067744255065917, 0.010900799751281737, 0.010617312431335449, 0.010598272323608398, 0.010414079666137695, 0.010274208068847657, 0.010244704246520997, 0.010274720191955567, 0.01090783977508545, 0.010463359832763672, 0.010580415725708009, 0.010575072288513183, 0.010600607872009277, 0.01046787166595459, 0.010377311706542968, 0.0103155517578125, 0.010293472290039062, 0.0102194242477417, 0.010303584098815918, 0.010467328071594239, 0.010647520065307618, 0.010472831726074218, 0.010435232162475586, 0.010393600463867187, 0.0104017915725708, 0.010264479637145996, 
0.010223615646362304, 0.010192864418029786, 0.010254048347473144, 0.010555808067321776, 0.010784511566162109, 0.010635392189025879, 0.010571999549865723, 0.010763392448425294, 0.01072822380065918, 0.010698080062866211, 0.010678943634033202, 0.01067523193359375, 0.010715488433837891, 0.010613375663757325, 0.0107642879486084, 0.010505408287048339, 0.01054751968383789, 0.01053542423248291, 0.01043455982208252, 0.010528767585754394, 0.010347840309143066, 0.010326720237731934, 0.010420224189758302, 0.010548576354980469, 0.010561375617980958, 0.010494688034057616, 0.01040345573425293, 0.010531231880187989, 0.010448479652404785, 0.010158559799194337, 0.010239999771118164, 0.01073971176147461, 0.010825568199157715, 0.010649760246276856, 0.010462271690368652, 0.010442815780639648, 0.01073036766052246, 0.010695679664611817, 0.011336095809936523, 0.010687071800231934, 0.010659839630126953, 0.010616064071655273, 0.010724096298217774, 0.010569567680358887, 0.010330240249633789, 0.010316991806030273, 0.010115008354187011, 0.01014742374420166, 0.010104384422302246, 0.01036137580871582, 0.010434464454650879, 0.010238431930541992, 0.010141695976257324, 0.010199007987976074, 0.010194623947143554, 0.010278528213500976, 0.0105315523147583, 0.010168319702148437, 0.01021350383758545, 0.010499839782714843, 0.010750080108642578, 0.010491904258728027, 0.010308735847473145, 0.01015078353881836, 0.010141823768615723, 0.010326047897338868, 0.010106752395629883, 0.010203104019165039, 0.010205183982849121, 0.010166272163391114, 0.010172384262084961, 0.010190688133239745, 0.010260416030883788, 0.010211584091186523, 0.010395648002624512, 0.010280960083007813, 0.010286272048950195, 0.010115424156188964, 0.010086496353149415, 0.010113311767578124, 0.010067999839782715, 0.010129728317260743, 0.010073311805725097, 0.010119711875915528, 0.010057056427001953, 0.010066592216491699, 0.010179903984069824, 0.01022156810760498, 0.010140352249145508, 0.010149375915527344, 0.010062335968017578, 0.010110943794250489, 0.010053664207458497, 0.010131456375122071, 0.010143744468688964, 0.010071136474609376, 0.01020406436920166, 0.010208992004394532, 0.010132800102233887, 0.010114015579223633, 0.010153056144714356, 0.010087455749511719, 0.010112192153930664, 0.010077887535095215, 0.010138463973999024, 0.010086560249328613, 0.010093791961669922, 0.01004419231414795, 0.010070015907287597, 0.010059071540832519, 0.010023776054382325, 0.0101212158203125, 0.010085791587829589, 0.010199647903442383, 0.01008358383178711, 0.010132224082946777, 0.010192895889282226, 0.010099871635437012, 0.01010364818572998, 0.010116671562194824, 0.010193344116210937, 0.010172160148620605, 0.010232288360595703, 0.01023363208770752, 0.01013759994506836, 0.010061823844909668, 0.01012940788269043, 0.010045632362365723, 0.010098496437072753, 0.01005568027496338, 0.010137727737426758, 0.010087583541870117, 0.01010694408416748, 0.010085023880004883, 0.010033056259155274, 0.01009059238433838, 0.010097824096679687, 0.010170623779296875, 0.010101344108581543, 0.011016192436218262, 0.010645503997802735, 0.011602016448974609, 0.01024300765991211, 0.010152607917785645, 0.010113632202148438, 0.010464991569519043, 0.010153984069824219, 0.010180607795715332, 0.010149888038635254, 0.010158080101013184, 0.010202752113342286, 0.01026460838317871, 0.010336607933044433, 0.010225664138793946, 0.010185888290405273, 0.010113471984863282, 0.010113151550292968, 0.01013584041595459, 0.010145792007446289, 0.010082304000854492, 0.010145792007446289, 0.010085536003112792, 0.010154784202575684, 
0.010350655555725098, 0.0101396484375, 0.010178720474243164, 0.010102527618408203, 0.010111071586608887, 0.010062975883483887, 0.010122112274169922, 0.01007414436340332, 0.010182623863220214, 0.010059359550476075, 0.01010483169555664, 0.010145792007446289, 0.01011302375793457, 0.010119168281555176, 0.010118783950805664, 0.010121600151062012, 0.010090496063232422, 0.010106880187988282, 0.010028127670288087, 0.010137760162353515, 0.01006492805480957, 0.010145376205444336, 0.010045696258544921, 0.010121088027954101, 0.010062047958374024, 0.010114080429077148, 0.010086688041687011, 0.010237631797790527, 0.010088640213012695, 0.01011193561553955, 0.010112671852111817, 0.010090496063232422, 0.010108832359313966, 0.010100640296936036, 0.010115263938903808, 0.010174464225769043, 0.010196800231933595, 0.014173919677734376, 0.010813568115234375, 0.010235456466674805, 0.01010153579711914, 0.010254336357116698, 0.010117119789123535, 0.01011302375793457, 0.010077919960021973, 0.010101023674011231, 0.010130463600158691, 0.010134495735168458, 0.010114720344543457, 0.010109215736389161, 0.01010044765472412, 0.010133855819702149, 0.010116191864013671, 0.01012009620666504, 0.010112480163574218, 0.010107423782348633, 0.010076160430908204, 0.010110112190246582, 0.010097503662109375, 0.010089792251586915, 0.0100765438079834, 0.010096960067749024, 0.010143967628479003, 0.010171296119689942, 0.010248255729675294, 0.010168448448181152, 0.010164928436279298, 0.010136608123779298, 0.010064607620239257, 0.010115103721618652, 0.01011734390258789, 0.010076383590698242, 0.01005452823638916, 0.010086400032043457, 0.010102687835693359, 0.01017369556427002, 0.010133824348449706, 0.010314271926879883, 0.010141695976257324, 0.01011302375793457, 0.010123135566711426, 0.01010912036895752, 0.010148927688598633, 0.01013644790649414, 0.010188672065734863, 0.010110783576965331, 0.010139967918395996, 0.010077695846557617, 0.010117664337158203, 0.010153951644897461, 0.010141695976257324, 0.010138879776000977, 0.010130080223083496, 0.010384575843811035, 0.010146656036376954, 0.01031174373626709, 0.010168160438537598, 0.010154144287109375, 0.010094112396240234, 0.010371552467346192, 0.010589664459228515, 0.010629664421081543, 0.010711039543151855, 0.01098134422302246, 0.010649632453918458, 0.010754048347473144, 0.01072332763671875, 0.01067580795288086, 0.010750368118286132, 0.010671327590942383, 0.010705696105957032, 0.010974559783935547, 0.010809856414794922, 0.010791071891784668, 0.010690560340881347, 0.01093120002746582, 0.01084006404876709, 0.010871135711669921, 0.010869279861450195, 0.01081603240966797, 0.01072208023071289, 0.010687264442443847, 0.010685855865478516, 0.010637120246887207, 0.010606816291809082, 0.010611295700073242, 0.010577119827270508, 0.01043507194519043, 0.010393280029296875, 0.010554080009460448, 0.010831392288208008, 0.010811648368835448, 0.010841664314270019, 0.010687135696411132, 0.010641280174255372, 0.010473440170288086, 0.01173094367980957, 0.010421728134155274, 0.010420767784118653, 0.010506239891052246, 0.010620896339416503, 0.010629152297973633, 0.010471424102783204, 0.010521856307983399, 0.010470144271850586, 0.010674176216125488, 0.01075814437866211, 0.010626079559326172, 0.010657024383544923, 0.010755807876586913, 0.010580032348632813, 0.010526656150817871, 0.01061580753326416, 0.010705727577209473, 0.01042198371887207, 0.010441184043884277, 0.01049180793762207, 0.010648768424987793, 0.010657952308654786, 0.01076095962524414, 0.010848256111145019, 0.010907487869262695, 0.01077286434173584, 
0.010581791877746582, 0.01063526439666748, 0.01073523235321045, 0.010561920166015626, 0.01054319953918457, 0.010606399536132812, 0.01075823974609375, 0.01073151969909668, 0.010825728416442871, 0.010672127723693848, 0.010721280097961425, 0.010612640380859375, 0.011736543655395507, 0.010707584381103516, 0.010795007705688477, 0.01075609588623047, 0.010532896041870117, 0.010577792167663575, 0.01058137607574463, 0.010705632209777832, 0.01064252758026123, 0.010522815704345703, 0.010433247566223144, 0.010495776176452637, 0.010909536361694335, 0.010805631637573242, 0.010820799827575684, 0.010730175971984864, 0.010681728363037109, 0.010714879989624023, 0.010604960441589355, 0.010606847763061523, 0.01059670352935791, 0.010636832237243652, 0.010678367614746094, 0.010394240379333496, 0.010662079811096192, 0.010668031692504883, 0.010630847930908204, 0.010744031906127929, 0.010736736297607422, 0.010791935920715333, 0.010707200050354004, 0.010648736000061035, 0.010588768005371094, 0.010800959587097168, 0.010748255729675292, 0.010786656379699708, 0.010704319953918457, 0.011899359703063965, 0.01069257640838623, 0.010632896423339843, 0.010624768257141114, 0.010676480293273926, 0.010631232261657715, 0.010627455711364746, 0.01076633644104004, 0.010704895973205567, 0.010751711845397948, 0.010864928245544433, 0.01070899200439453, 0.010693920135498048, 0.01059008026123047, 0.010574687957763671, 0.010579968452453613, 0.011530240058898926, 0.011794783592224122, 0.011101856231689453, 0.011558879852294921, 0.010742815971374511, 0.010646528244018554, 0.010691807746887208, 0.010523424148559571, 0.010675328254699707, 0.0106561279296875, 0.010590656280517578, 0.010953151702880859, 0.010806912422180176, 0.01074176025390625, 0.010696703910827637, 0.010602335929870605, 0.010631327629089355, 0.010792960166931152, 0.010727423667907715, 0.010620800018310547, 0.01065334415435791, 0.010861023902893067, 0.010798303604125977, 0.01062377643585205, 0.010659775733947753, 0.010749792098999024, 0.010733792304992676, 0.010757632255554199, 0.01066006374359131, 0.010600959777832031, 0.011044639587402344, 0.012755200386047364, 0.01128867244720459, 0.010575231552124023, 0.01075868797302246, 0.010573823928833008, 0.01056761646270752, 0.010555392265319824, 0.01083193588256836, 0.010665023803710937, 0.010771391868591308, 0.010734720230102539, 0.010641280174255372, 0.010673151969909669, 0.010705120086669922, 0.010657343864440918, 0.01073583984375, 0.011132927894592285, 0.010823904037475586, 0.010889344215393067, 0.01096992015838623, 0.01076921558380127, 0.010764320373535157, 0.010853568077087402, 0.010759296417236329, 0.010770079612731933, 0.010720352172851562, 0.010718144416809083, 0.010672127723693848, 0.010829824447631836, 0.010696703910827637, 0.010622976303100586, 0.01077222442626953, 0.0107010555267334, 0.010629119873046875, 0.01061513614654541, 0.010712415695190429, 0.010643136024475098, 0.010446623802185058, 0.010197855949401856, 0.010129376411437989, 0.010164256095886231, 0.010182592391967773, 0.010154047966003417, 0.01011302375793457, 0.01022150421142578, 0.010141119956970215, 0.010162816047668458, 0.010215392112731933, 0.010086400032043457, 0.01017039966583252, 0.010590208053588868, 0.010972543716430664, 0.01087551975250244, 0.010643072128295898, 0.01056982421875, 0.01044099235534668, 0.01039731216430664, 0.010405920028686523, 0.01031935977935791, 0.010574687957763671, 0.010601696014404296, 0.010570528030395508, 0.0104017915725708, 0.010288415908813476, 0.010421088218688965, 0.010324000358581543, 0.01019878387451172, 
0.0101048641204834, 0.010082304000854492, 0.010172160148620605, 0.010154399871826172, 0.010116959571838379, 0.010460543632507324, 0.010533056259155273, 0.01019545555114746, 0.010264255523681641, 0.010133760452270508, 0.01021951961517334, 0.010129695892333984, 0.010540767669677735, 0.01084620761871338, 0.010762240409851074, 0.010758272171020508, 0.01063481616973877, 0.010654239654541016, 0.010477343559265136, 0.01042841625213623, 0.010406144142150879, 0.010641375541687011, 0.010796832084655763, 0.010391551971435547, 0.010272543907165527, 0.010252511978149415, 0.010239232063293456, 0.01032374382019043, 0.01047651195526123, 0.010120512008666992, 0.010106559753417968, 0.010202112197875977, 0.010207232475280761, 0.010145440101623535, 0.01022339153289795, 0.010172991752624512, 0.010170368194580079, 0.010204959869384765, 0.010128671646118165, 0.010195903778076172, 0.010168319702148437, 0.010131391525268554, 0.010432576179504395, 0.010975232124328613, 0.011100128173828125, 0.010866720199584961, 0.010584063529968261, 0.010510335922241211, 0.010457375526428222, 0.010372832298278808, 0.010381312370300292, 0.010571167945861817, 0.010480223655700683, 0.01040505599975586, 0.010424192428588868, 0.010675135612487794, 0.01036291217803955, 0.01009660816192627, 0.010163264274597168, 0.010077119827270508, 0.010135552406311036, 0.010233792304992675, 0.010139360427856445, 0.010136896133422852, 0.010234848022460937, 0.010245696067810058, 0.010181056022644044, 0.010221152305603028, 0.010123680114746094, 0.010196864128112793, 0.01038144016265869, 0.010864416122436523, 0.010905823707580566, 0.010645503997802735, 0.010544639587402344, 0.010598655700683593, 0.010510592460632325, 0.010475520133972169, 0.010491519927978516, 0.010576255798339844, 0.010743807792663575, 0.010665984153747558, 0.010717503547668457, 0.010760064125061036, 0.010566656112670898, 0.010443584442138672, 0.010356736183166505, 0.010301440238952637, 0.010225664138793946, 0.010192447662353516, 0.010150336265563964, 0.010269696235656739, 0.010109951972961426, 0.010159296035766601, 0.010224448204040528, 0.010354432106018066, 0.010313088417053223, 0.010244864463806153, 0.0102457275390625, 0.010340736389160157, 0.010802720069885254, 0.01071769618988037, 0.011051136016845703, 0.010731103897094727, 0.010645376205444337, 0.010532928466796874, 0.010481984138488769, 0.010473183631896973, 0.010508735656738282, 0.010599455833435058, 0.010515423774719238, 0.010751999855041505, 0.01067625617980957, 0.010505887985229492, 0.010448448181152344, 0.010431232452392579, 0.010364128112792968, 0.010410335540771485, 0.010445247650146485, 0.010391136169433594, 0.010615200042724609, 0.010727392196655274, 0.01039686393737793, 0.01032192039489746, 0.01022163200378418, 0.010274815559387206, 0.010340224266052246, 0.010618176460266113, 0.010920255661010743, 0.010893376350402832, 0.010772352218627929, 0.010641599655151367, 0.010688032150268555, 0.010664799690246581, 0.010559488296508789, 0.010528191566467286, 0.010451519966125488, 0.010606880187988282, 0.010845919609069825, 0.011370368003845215, 0.01092416000366211, 0.013450783729553223, 0.011151840209960937, 0.010439807891845704, 0.010486656188964844, 0.010284704208374023, 0.01028326416015625, 0.010338624000549317, 0.010235487937927246, 0.010211359977722169, 0.010193056106567382, 0.010424063682556153, 0.010368895530700684, 0.012419455528259278, 0.012228608131408691, 0.01057151985168457, 0.01073305606842041, 0.011157440185546876, 0.010847040176391601, 0.01073971176147461, 0.010694656372070312, 0.01061888027191162, 
0.010748991966247559, 0.01073971176147461, 0.010601344108581543, 0.010803263664245605, 0.010754048347473144, 0.010724639892578125, 0.010717920303344727, 0.01066921615600586, 0.010721983909606934, 0.010490015983581543, 0.010389504432678222, 0.010297344207763673, 0.01030288028717041, 0.01024403190612793, 0.010357151985168457, 0.010244352340698242, 0.010182656288146973, 0.010291199684143066, 0.010338303565979003, 0.01028006362915039, 0.010279808044433594, 0.010235424041748048, 0.010203616142272949, 0.010436415672302246, 0.010874943733215332, 0.010575743675231933, 0.010669407844543457, 0.010572671890258789, 0.01062502384185791, 0.010559488296508789, 0.010422271728515625, 0.01053273582458496, 0.010489184379577637, 0.010504639625549317, 0.010652000427246094, 0.010757535934448241, 0.010534496307373046, 0.010414560317993165, 0.010295136451721192, 0.010309408187866211, 0.010205280303955079, 0.010306015968322754, 0.010328415870666505, 0.010364928245544434, 0.01040998363494873, 0.01042636775970459, 0.010424320220947265, 0.010362879753112793, 0.010315296173095703, 0.010355199813842773, 0.010469344139099121, 0.01095411205291748, 0.010895359992980956, 0.010807935714721679, 0.010749888420104981, 0.010729408264160157, 0.010608768463134766, 0.011374591827392578, 0.011780096054077148, 0.011974143981933593, 0.011467007637023926, 0.010658047676086425, 0.010682271957397462, 0.010715231895446778, 0.010476608276367188, 0.010235936164855956, 0.010195872306823731, 0.010436448097229004, 0.010171584129333496, 0.01013974380493164, 0.010244992256164551, 0.010225664138793946, 0.010465279579162597, 0.010340352058410645, 0.01023369598388672, 0.01022480010986328, 0.010238975524902343, 0.010270912170410157, 0.010480480194091797, 0.010795904159545898, 0.010854496002197265, 0.010772480010986327, 0.010704895973205567, 0.010529952049255371, 0.010611552238464356, 0.010553343772888184, 0.01067244815826416, 0.010739392280578613]",tokens/s,95.46495044110252,, @@ -4276,7 +4276,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 268.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 180.12 MiB is free. Process 77010 has 14.56 GiB memory in use. Of the allocated memory 14.45 GiB is allocated by PyTorch, and 1.06 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 268.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 180.12 MiB is free. Process 70281 has 14.56 GiB memory in use. Of the allocated memory 14.45 GiB is allocated by PyTorch, and 1.06 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -4319,7 +4319,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 166.12 MiB is free. Process 147520 has 14.58 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 1.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 166.12 MiB is free. Process 140775 has 14.58 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 1.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,839.241728,8760.786944,0.0,8365.539328,8230.228992,s,1,7.50484423828125,7.50484423828125,0.0,7.50484423828125,7.50484423828125,7.50484423828125,7.50484423828125,[7.50484423828125],,kWh,1.1066832612497515e-05,1.2130818759605153e-06,5.226393070004165e-06,1.7506307558462196e-05,,MB,1169.158144,8951.627776,0.0,8545.8944,8499.295232,s,10,2.6764699096679685,0.26764699096679684,0.010224015400804483,0.27015660095214844,0.2749579895019531,0.2763633331298828,0.2774876080322266,"[0.2387668762207031, 0.2687913208007813, 0.2687487487792969, 0.264208251953125, 0.26977490234375, 0.2708685607910156, 0.27053829956054687, 0.27235858154296877, 0.27464569091796875, 0.2777686767578125]",tokens/s,956.4837589814648,kWh,7.34776420145863e-06,8.100666222931695e-07,4.890156689900046e-06,1.3047987513651845e-05,tokens/kWh,19619883.8887723,MB,1174.2208,8953.724928,0.0,8547.991552,8499.297792,s,10,18.966721679687502,1.8966721679687502,0.004236313017153842,1.8971524658203125,1.8996663452148437,1.9014992492675782,1.9029655725097656,"[1.89731689453125, 1.8964061279296875, 1.8975816650390624, 1.895955078125, 1.8856767578125, 1.896988037109375, 1.89871826171875, 1.8954876708984374, 1.9033321533203125, 1.899259033203125]",tokens/s,33.216072373471974,kWh,5.602011279520714e-05,6.179156081395586e-06,3.704018240989933e-05,9.923945128650207e-05,tokens/kWh,634828.1775371814,,s,630,18.963937675476082,0.030101488373771548,0.0003737273369058032,0.030036399841308594,0.03032941131591797,0.030539182472229004,0.03224072696685791,"[0.03222934341430664, 0.03057254409790039, 0.030236448287963867, 0.03005414390563965, 0.029918912887573243, 0.02982304000854492, 0.02989302444458008, 0.029762943267822264, 0.029848512649536134, 0.02996944046020508, 0.02985241508483887, 0.029823200225830078, 0.0300316162109375, 0.030032127380371094, 0.030001279830932617, 0.030010976791381837, 0.030052223205566407, 0.029941375732421876, 0.029847583770751952, 0.029767711639404296, 0.029906656265258787, 0.030077951431274414, 0.030069856643676757, 0.029860767364501953, 0.029891775131225585, 0.029958976745605468, 0.029800447463989257, 0.030143583297729492, 0.029955135345458985, 0.02990652847290039, 0.030183231353759766, 0.030195775985717772, 0.03033251190185547, 0.030488927841186522, 0.030249120712280274, 0.030761247634887696, 0.030257152557373046, 0.029973600387573244, 0.030194591522216797, 0.030045215606689452, 0.030088159561157228, 0.030010400772094728, 0.030073823928833007, 0.03017728042602539, 0.030138111114501952, 0.029980224609375, 0.030036575317382814, 0.030038112640380858, 0.03025017547607422, 0.03026924705505371, 0.030079679489135744, 0.030007680892944335, 0.030168256759643554, 0.030157535552978516, 
0.030029855728149413, 0.03024105644226074, 0.030117599487304688, 0.030121280670166017, 0.03013088035583496, 0.030013439178466796, 0.030076223373413084, 0.030819007873535156, 0.030054399490356445, 0.032245376586914065, 0.030806367874145507, 0.030271488189697264, 0.029884416580200194, 0.02975062370300293, 0.029945663452148438, 0.030065311431884765, 0.02992918395996094, 0.029901472091674805, 0.02979596710205078, 0.02995327949523926, 0.029847904205322264, 0.029911167144775392, 0.029845279693603517, 0.0299051513671875, 0.030036224365234374, 0.02981011199951172, 0.029905664443969728, 0.03004966354370117, 0.030089088439941406, 0.03018582344055176, 0.0300032958984375, 0.029774080276489256, 0.02984351921081543, 0.030021120071411132, 0.030105344772338866, 0.02984217643737793, 0.029924863815307616, 0.029918943405151367, 0.029879072189331054, 0.030027135848999024, 0.030081151962280273, 0.03037164878845215, 0.0305097599029541, 0.03033888053894043, 0.03023993682861328, 0.03011862373352051, 0.0300731201171875, 0.030074880599975585, 0.03031449508666992, 0.03052694320678711, 0.030027711868286133, 0.030146656036376954, 0.029943552017211914, 0.029921279907226563, 0.03026940727233887, 0.030245664596557618, 0.029914495468139648, 0.030026464462280272, 0.029986719131469726, 0.030087167739868165, 0.030101152420043947, 0.030090816497802736, 0.02998137664794922, 0.03020400047302246, 0.02997987174987793, 0.03002556800842285, 0.029948320388793945, 0.03027203178405762, 0.030449663162231445, 0.030055776596069336, 0.030122655868530274, 0.030150592803955076, 0.032194526672363284, 0.030664703369140626, 0.030345216751098632, 0.02997622489929199, 0.029835615158081055, 0.029705631256103517, 0.029993024826049805, 0.029749248504638674, 0.03002217674255371, 0.030031328201293946, 0.029822784423828123, 0.02985443115234375, 0.02998886489868164, 0.029845504760742186, 0.030131231307983397, 0.03000419235229492, 0.029748607635498046, 0.029851295471191405, 0.029796672821044923, 0.029825696945190428, 0.029880319595336914, 0.030119935989379884, 0.029913087844848633, 0.029871488571166994, 0.029944448471069335, 0.029805696487426758, 0.0298353271484375, 0.030026559829711915, 0.03024684715270996, 0.03012339210510254, 0.030099264144897463, 0.03016691207885742, 0.030694400787353516, 0.030279680252075194, 0.030284896850585937, 0.030086048126220705, 0.030341119766235353, 0.030081024169921877, 0.03016294479370117, 0.03003392028808594, 0.029996192932128907, 0.029944671630859374, 0.030280832290649415, 0.03020684814453125, 0.03008230400085449, 0.03001590347290039, 0.030984384536743164, 0.029928607940673826, 0.03000831985473633, 0.03010767936706543, 0.03020716857910156, 0.03013916778564453, 0.03002572822570801, 0.03001753616333008, 0.03041231918334961, 0.03026540756225586, 0.030105920791625978, 0.03006854438781738, 0.03018351936340332, 0.030224576950073242, 0.030084800720214844, 0.030255104064941408, 0.030326528549194334, 0.03230550384521484, 0.03094432067871094, 0.030393280029296876, 0.02995609664916992, 0.030096576690673827, 0.029818975448608398, 0.02974550437927246, 0.029871648788452148, 0.03005084800720215, 0.030023231506347656, 0.029780736923217775, 0.029804191589355468, 0.030009695053100586, 0.030072832107543947, 0.029980127334594726, 0.02989302444458008, 0.030006687164306642, 0.02986240005493164, 0.029888128280639647, 0.029819488525390625, 0.02991923141479492, 0.030133344650268554, 0.02994473648071289, 0.02994175910949707, 0.029841407775878907, 0.02983526420593262, 0.02985958480834961, 0.029858047485351563, 0.030095359802246095, 
0.029910144805908204, 0.03023551940917969, 0.030087167739868165, 0.030093311309814453, 0.030523391723632814, 0.030308351516723633, 0.030062591552734375, 0.030066688537597655, 0.03007427215576172, 0.029997663497924806, 0.03028531265258789, 0.029975040435791016, 0.03001510429382324, 0.030140159606933593, 0.03006502342224121, 0.030185728073120116, 0.02996019172668457, 0.030320512771606446, 0.030172351837158204, 0.03004300880432129, 0.030001216888427735, 0.029997055053710937, 0.030315967559814454, 0.030028352737426756, 0.02998681640625, 0.030385440826416015, 0.02999123191833496, 0.030050207138061523, 0.030114303588867186, 0.030000896453857423, 0.030015743255615235, 0.03041279983520508, 0.030035871505737305, 0.03011782455444336, 0.032194561004638675, 0.03055961608886719, 0.03025974464416504, 0.0302061767578125, 0.029746400833129884, 0.029642784118652343, 0.029637344360351564, 0.029662879943847656, 0.029663616180419922, 0.029749248504638674, 0.029702335357666015, 0.029714239120483397, 0.02971980857849121, 0.029775808334350586, 0.029761344909667968, 0.029825216293334962, 0.029788991928100587, 0.02980575942993164, 0.029799232482910155, 0.029742687225341798, 0.029790271759033204, 0.029730527877807618, 0.029702783584594727, 0.02976335906982422, 0.029720800399780273, 0.02975334358215332, 0.02978201675415039, 0.029722623825073242, 0.0297325439453125, 0.02982943916320801, 0.030025760650634767, 0.02995622444152832, 0.03012915229797363, 0.030204063415527345, 0.030160640716552733, 0.03006355285644531, 0.030044160842895507, 0.02999091148376465, 0.029911039352416992, 0.029886463165283202, 0.029906944274902345, 0.029959936141967774, 0.02993382453918457, 0.02999728012084961, 0.0298855037689209, 0.02983750343322754, 0.02981942367553711, 0.02979430389404297, 0.029836479187011718, 0.029874975204467774, 0.029922943115234375, 0.029850015640258788, 0.029879840850830078, 0.029921728134155272, 0.029949983596801757, 0.029882368087768556, 0.0299233283996582, 0.029998336791992188, 0.030115711212158204, 0.030094207763671874, 0.0300437126159668, 0.03003343963623047, 0.03013500785827637, 0.03227036666870117, 0.03070774459838867, 0.030063167572021484, 0.0298221435546875, 0.029780799865722657, 0.029741056442260744, 0.02984342384338379, 0.029814815521240233, 0.02975129508972168, 0.02976563262939453, 0.029744768142700197, 0.029804927825927734, 0.029714431762695313, 0.02973695945739746, 0.029788223266601563, 0.029734367370605468, 0.02970204734802246, 0.029680192947387694, 0.02979635238647461, 0.029798080444335937, 0.03004419136047363, 0.030015775680541992, 0.03017523193359375, 0.030109695434570313, 0.030031871795654298, 0.03002524757385254, 0.030126367568969727, 0.03027987289428711, 0.02996441650390625, 0.03011737632751465, 0.030230911254882812, 0.030235967636108398, 0.030366399765014648, 0.030535680770874023, 0.03032035255432129, 0.03023209571838379, 0.030370559692382813, 0.030275583267211914, 0.030246912002563478, 0.030215904235839842, 0.030181663513183594, 0.030312320709228516, 0.030072959899902343, 0.030189376831054687, 0.030119871139526366, 0.030040319442749024, 0.030205856323242186, 0.030229856491088867, 0.03002579116821289, 0.030175935745239257, 0.030336736679077148, 0.03011008071899414, 0.030156351089477538, 0.030322368621826173, 0.030352031707763672, 0.030119935989379884, 0.029997312545776367, 0.02999679946899414, 0.030227935791015625, 0.03001807975769043, 0.030109695434570313, 0.030212032318115235, 0.030224447250366212, 0.03246089553833008, 0.030785535812377928, 0.030296064376831053, 0.030003200531005858, 
0.029838399887084963, 0.02984441566467285, 0.03003385543823242, 0.029978687286376954, 0.02977177619934082, 0.030101503372192383, 0.030007295608520508, 0.029728511810302734, 0.02976924705505371, 0.029909727096557617, 0.029968351364135743, 0.0299434871673584, 0.029846944808959962, 0.029975488662719728, 0.029908992767333983, 0.02994175910949707, 0.029962112426757812, 0.03015388870239258, 0.02987513542175293, 0.030248735427856447, 0.02988057518005371, 0.0299683837890625, 0.02990675163269043, 0.030146751403808594, 0.02997657585144043, 0.03017919921875, 0.030275039672851563, 0.03054204750061035, 0.030611648559570312, 0.0303372802734375, 0.03058016014099121, 0.030196287155151366, 0.030342720031738282, 0.030116287231445313, 0.03018121528625488, 0.030054559707641603, 0.03018956756591797, 0.030346687316894532, 0.030284351348876953, 0.029919071197509764, 0.02993078422546387, 0.0300184326171875, 0.030264511108398437, 0.030042848587036132, 0.030166624069213867, 0.030046720504760743, 0.02996220779418945, 0.029890592575073243, 0.030304256439208983, 0.03020150375366211, 0.02998512077331543, 0.03005232048034668, 0.03001683235168457, 0.030007999420166017, 0.030062623977661133, 0.030327871322631837, 0.03016729545593262, 0.030329376220703124, 0.03024470329284668, 0.033325214385986325, 0.031747360229492184, 0.030630399703979492, 0.030443231582641603, 0.03010201644897461, 0.029898752212524415, 0.029747200012207032, 0.029849599838256836, 0.029787263870239257, 0.029743999481201173, 0.029798015594482422, 0.029914527893066405, 0.029762527465820313, 0.030080608367919922, 0.03008348846435547, 0.029916511535644532, 0.03004425621032715, 0.0298417911529541, 0.03005459213256836, 0.029999103546142578, 0.029865503311157227, 0.030129919052124022, 0.02990358352661133, 0.029900543212890623, 0.029943712234497072, 0.029884096145629882, 0.029882816314697264, 0.030289119720458984, 0.03001651191711426, 0.029937664031982423, 0.030094783782958986, 0.03002217674255371, 0.030293439865112303, 0.030196607589721678, 0.030329727172851564, 0.030202720642089845, 0.030250751495361328, 0.030138208389282228, 0.030110111236572267, 0.030038015365600586, 0.029962175369262694, 0.029919296264648437, 0.02990438461303711, 0.02988310432434082, 0.029844671249389648, 0.02991574478149414, 0.029870080947875976, 0.029865983963012696, 0.029877471923828124, 0.02999171257019043, 0.02994700813293457, 0.029911584854125976, 0.029886816024780275, 0.029911039352416992, 0.02993152046203613, 0.0299597110748291, 0.030132768630981445, 0.030000991821289062, 0.029981983184814452, 0.030069568634033202, 0.030105600357055663, 0.030066688537597655, 0.030100576400756834, 0.03231948852539063, 0.030898143768310547, 0.030517248153686522, 0.0302073917388916, 0.029964895248413087, 0.029888416290283205, 0.030077024459838866, 0.02993715286254883, 0.030192031860351562, 0.02982512092590332, 0.029755392074584962, 0.030082176208496094, 0.03023551940917969, 0.030211103439331054, 0.030012351989746094, 0.029923360824584962, 0.030080223083496095, 0.03039516830444336, 0.03018956756591797, 0.030070783615112305, 0.030017023086547853, 0.02989926338195801, 0.030019584655761718, 0.02995609664916992, 0.030053855895996094, 0.030200319290161134, 0.030001119613647462, 0.03005638313293457, 0.030107776641845704, 0.030286048889160155, 0.030365472793579103, 0.03043231964111328, 0.030298336029052735, 0.030472671508789063, 0.030595327377319338, 0.030195711135864257, 0.030294015884399415, 0.031135744094848632, 0.030269439697265626, 0.030029312133789062, 0.030003679275512694, 0.029959615707397462, 
0.030313056945800783, 0.02992905616760254, 0.030305856704711913, 0.030077280044555663, 0.030072959899902343, 0.03001363182067871, 0.029970624923706054, 0.030193119049072265, 0.030144960403442382, 0.029935264587402345, 0.02997907257080078, 0.03078348731994629, 0.03033087921142578, 0.030126079559326172, 0.03022233581542969, 0.03003392028808594, 0.030023008346557616, 0.030261568069458008, 0.030318143844604493, 0.030155168533325196, 0.030433664321899413, 0.03231110382080078, 0.03096182441711426, 0.03017932891845703, 0.0299532470703125, 0.030118688583374024, 0.03018137550354004, 0.02980454444885254, 0.029800447463989257, 0.02979151916503906, 0.030052959442138674, 0.029849536895751955, 0.030277183532714844, 0.029979263305664063, 0.029915136337280275, 0.030113792419433592, 0.0298570556640625, 0.030149343490600587, 0.030085119247436523, 0.029816831588745117, 0.030126079559326172, 0.030058496475219725, 0.029834943771362303, 0.029780288696289063, 0.029863744735717773, 0.029990463256835936, 0.029991552352905272, 0.029853599548339844, 0.029851743698120117, 0.030260223388671875, 0.03029875183105469, 0.030181760787963866, 0.030283775329589844, 0.03032268714904785, 0.030320640563964843, 0.030232576370239257, 0.030253055572509766, 0.030107648849487304, 0.030191295623779296, 0.03000966453552246, 0.03007187271118164, 0.03032294464111328, 0.03018207931518555, 0.029961919784545897, 0.030002496719360353, 0.030135295867919923, 0.030277631759643556, 0.030091264724731445, 0.030021312713623047, 0.029951583862304686, 0.03130646324157715, 0.030169343948364256, 0.029867776870727537, 0.030291007995605468, 0.030268352508544923, 0.030053728103637694, 0.03010371208190918, 0.030046783447265624, 0.03003308868408203, 0.030317312240600587, 0.030013439178466796, 0.030104639053344727, 0.030155712127685547, 0.030236064910888674]",tokens/s,33.22094866482862,, @@ -4363,7 +4363,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 142.12 MiB is free. Process 95139 has 14.60 GiB memory in use. Of the allocated memory 14.48 GiB is allocated by PyTorch, and 1.53 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 142.12 MiB is free. Process 88612 has 14.60 GiB memory in use. Of the allocated memory 14.48 GiB is allocated by PyTorch, and 1.53 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -4482,7 +4482,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 64.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 30.12 MiB is free. Process 26226 has 14.71 GiB memory in use. Of the allocated memory 14.51 GiB is allocated by PyTorch, and 85.33 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 64.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 30.12 MiB is free. Process 20557 has 14.71 GiB memory in use. Of the allocated memory 14.51 GiB is allocated by PyTorch, and 85.33 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,742.989824,11799.560192,0.0,11404.312576,11388.314624,s,1,7.22953173828125,7.22953173828125,0.0,7.22953173828125,7.22953173828125,7.22953173828125,7.22953173828125,[7.22953173828125],,kWh,6.060062895841157e-06,6.604853918541069e-07,3.1891692180013864e-06,9.90971750569665e-06,,MB,1045.102592,11812.143104,0.0,11406.409728,11107.92192,s,10,4.078596130371094,0.4078596130371094,0.006914636060580647,0.4093941650390625,0.41454249267578125,0.4161641296386719,0.4174614392089844,"[0.3908428344726563, 0.40325701904296873, 0.4048968811035156, 0.40763894653320315, 0.4104405212402344, 0.41418212890625, 0.40905242919921875, 0.41076370239257814, 0.40973590087890627, 0.4177857666015625]",tokens/s,627.6669516104005,kWh,1.1848457179999816e-05,1.3057261868365596e-06,7.91846189032011e-06,2.1072645257156487e-05,tokens/kWh,12148451.078445397,MB,1049.894912,11814.240256,0.0,11408.50688,11305.031168,s,10,33.25393603515625,3.325393603515625,0.004680598481783512,3.3244874267578126,3.331663720703125,3.3319961669921874,3.332262124023438,"[3.32085595703125, 3.32172607421875, 3.32499609375, 3.320064453125, 3.3196875, 3.323978759765625, 3.32793017578125, 3.33232861328125, 3.33158984375, 3.330778564453125]",tokens/s,18.94512575395467,kWh,9.727351534833359e-05,1.0730509639033732e-05,6.439720707327956e-05,0.0001724012320606469,tokens/kWh,365426.6228088092,,s,630,33.250572078704806,0.05277868583921402,0.00030674911309846537,0.05275551986694336,0.053081284332275394,0.05319347534179687,0.054124218521118164,"[0.0541383056640625, 0.05286969757080078, 0.05241062545776367, 0.05242035293579102, 0.052539424896240236, 0.052261089324951174, 0.05237136077880859, 0.052418399810791015, 0.05233817672729492, 0.052375518798828125, 0.05235324859619141, 0.052402145385742185, 0.05252556610107422, 0.05238297653198242, 0.05253529739379883, 0.05237820816040039, 0.05243628692626953, 0.052414497375488284, 0.05265177536010742, 0.05280767822265625, 0.05262160110473633, 0.05304336166381836, 0.052717151641845705, 0.052778976440429684, 0.05249491119384766, 0.05253279876708984, 0.052677471160888674, 0.052567424774169924, 0.052643966674804685, 0.052544033050537106, 0.05244662475585937, 0.05290854263305664, 0.052652320861816405, 0.05259030532836914, 0.05252505493164063, 0.052580352783203124, 0.05269852828979492, 0.05272431945800781, 0.053174270629882815, 0.05282179260253906, 0.053018753051757815, 0.052811870574951174, 0.05289372634887695, 0.0526520004272461, 0.05276435089111328, 0.05268259048461914, 0.052684257507324216, 0.052634624481201174, 0.05271756744384765, 0.052739585876464844, 0.05284710311889648, 0.05268889617919922, 0.05295718383789062, 0.05281932830810547, 0.05289843368530273, 
0.05272371292114258, 0.05284592056274414, 0.05336131286621094, 0.05306982421875, 0.053190689086914066, 0.052962303161621094, 0.052933120727539064, 0.05286550521850586, 0.054295841217041015, 0.05278310394287109, 0.05225545501708984, 0.05222348785400391, 0.05213471984863281, 0.05276435089111328, 0.05241856002807617, 0.05244518280029297, 0.0523612174987793, 0.05259823989868164, 0.05237583923339844, 0.05227657699584961, 0.05245225524902344, 0.05256185531616211, 0.05246105575561524, 0.05245990371704102, 0.0522856330871582, 0.05280972671508789, 0.05276387023925781, 0.052833057403564455, 0.05261711883544922, 0.05257030487060547, 0.05251478576660156, 0.05252822494506836, 0.052620094299316404, 0.05253500747680664, 0.05258812713623047, 0.05267324829101563, 0.052838401794433595, 0.052598785400390625, 0.05249017715454102, 0.05257353591918945, 0.05297020721435547, 0.05268889617919922, 0.05284044647216797, 0.05280115127563476, 0.05288179016113281, 0.053029022216796874, 0.05291356658935547, 0.052753952026367186, 0.05279212951660156, 0.05269014358520508, 0.052716415405273435, 0.05270233535766602, 0.05283651351928711, 0.05289884948730469, 0.052725440979003904, 0.05300569534301758, 0.052718208312988284, 0.05275033569335937, 0.0526295051574707, 0.05269289779663086, 0.05280547332763672, 0.05272003173828125, 0.05290387344360352, 0.05283190536499023, 0.053141761779785156, 0.05346918487548828, 0.053028865814208986, 0.052942848205566405, 0.053008384704589843, 0.05283375930786133, 0.05295772933959961, 0.0541822738647461, 0.05274176025390625, 0.05234732818603516, 0.052550945281982425, 0.05255356979370117, 0.05251718521118164, 0.05244704055786133, 0.05243376159667969, 0.05239174270629883, 0.05232963180541992, 0.052342655181884766, 0.052356063842773436, 0.052547584533691405, 0.05264169692993164, 0.05256614303588867, 0.052591743469238283, 0.05249110412597656, 0.052542720794677734, 0.05268278503417969, 0.05272003173828125, 0.05251308822631836, 0.052518753051757815, 0.05262556838989258, 0.05255782318115235, 0.05266960144042969, 0.05263446426391601, 0.05271779251098633, 0.052727584838867185, 0.05248006439208985, 0.05255161666870117, 0.0526025276184082, 0.05267695999145508, 0.05294230270385742, 0.052908065795898435, 0.05303123092651367, 0.05357177734375, 0.05297488021850586, 0.052861663818359376, 0.05295539093017578, 0.05288499069213867, 0.053065982818603516, 0.052876960754394534, 0.05283795166015625, 0.05278799819946289, 0.0530882568359375, 0.05285174560546875, 0.05284281539916992, 0.05268764877319336, 0.05266985702514648, 0.05278153610229492, 0.052770782470703125, 0.05302217483520508, 0.05281849670410156, 0.05310025787353516, 0.05289769744873047, 0.05295756912231445, 0.05297151947021484, 0.05296102523803711, 0.052888961791992185, 0.0530645751953125, 0.053065727233886716, 0.052951038360595705, 0.05332992172241211, 0.05417824172973633, 0.05289571380615234, 0.05244675064086914, 0.05241219329833984, 0.052550430297851565, 0.05248604965209961, 0.05226015853881836, 0.05234479904174805, 0.052349056243896484, 0.05243145751953125, 0.05271142578125, 0.05235302352905274, 0.05244480133056641, 0.052440608978271484, 0.05248700714111328, 0.0523691520690918, 0.052289249420166016, 0.05249897766113281, 0.052850368499755856, 0.05290611267089844, 0.052828254699707033, 0.052533344268798826, 0.052478240966796874, 0.05251430511474609, 0.05239318466186523, 0.052429824829101565, 0.05249622344970703, 0.052488449096679685, 0.052442623138427735, 0.052731391906738284, 0.05265296173095703, 0.05241439819335938, 0.052496158599853515, 
0.05253763198852539, 0.052545055389404294, 0.05320268630981445, 0.05318729782104492, 0.052910079956054686, 0.052921409606933596, 0.05282297515869141, 0.05267251205444336, 0.05267171096801758, 0.05290063858032226, 0.05263161468505859, 0.052770751953125, 0.05267865753173828, 0.05271551895141602, 0.05280691146850586, 0.05285145568847656, 0.052733535766601565, 0.052744449615478514, 0.05276073455810547, 0.05272576141357422, 0.05313238525390625, 0.052916385650634765, 0.052813888549804684, 0.05304143905639649, 0.052922782897949217, 0.0530882568359375, 0.052909854888916016, 0.05281814575195312, 0.05285273742675781, 0.052891647338867184, 0.054089729309082034, 0.05288547134399414, 0.05237251281738281, 0.05233337783813476, 0.05233478546142578, 0.052393600463867186, 0.05241241455078125, 0.0523823356628418, 0.05242227172851562, 0.05236083221435547, 0.05278908920288086, 0.05237036895751953, 0.052472671508789065, 0.05242902374267578, 0.05275920104980469, 0.052555679321289066, 0.05271356964111328, 0.0526192626953125, 0.05252048110961914, 0.05265251159667969, 0.05267670440673828, 0.05266217422485352, 0.05276163101196289, 0.05257321548461914, 0.052557758331298825, 0.052499584197998043, 0.05245792007446289, 0.05243948745727539, 0.05244927978515625, 0.05246771240234375, 0.0525513916015625, 0.052494625091552734, 0.05247180938720703, 0.05246361541748047, 0.05320028686523438, 0.0529920654296875, 0.052733535766601565, 0.05265708923339844, 0.05290393447875977, 0.052724864959716795, 0.05337948989868164, 0.052705760955810546, 0.05294899368286133, 0.05284864044189453, 0.052872543334960935, 0.052568737030029296, 0.05271347045898438, 0.05271273422241211, 0.05285551834106445, 0.052708576202392575, 0.05266716766357422, 0.05255782318115235, 0.05283356857299805, 0.052711776733398434, 0.053072158813476565, 0.052754528045654295, 0.05274185562133789, 0.05279260635375976, 0.05277312088012695, 0.05285289764404297, 0.05288201522827148, 0.05281587219238281, 0.052967422485351565, 0.05408150482177734, 0.052770591735839846, 0.05258671951293945, 0.05242879867553711, 0.052424705505371094, 0.052391712188720706, 0.05248255920410156, 0.05252243041992188, 0.05233078384399414, 0.05242675018310547, 0.05241424179077148, 0.05258204650878906, 0.0529697265625, 0.05251513671875, 0.05253529739379883, 0.05252710342407227, 0.05259468841552734, 0.052711166381835935, 0.05284889602661133, 0.052830432891845705, 0.05274591827392578, 0.052676288604736325, 0.05271798324584961, 0.05254553604125976, 0.05266783905029297, 0.05261983871459961, 0.05264787292480469, 0.052617279052734375, 0.052623489379882815, 0.05266960144042969, 0.05278793716430664, 0.05263359832763672, 0.05275651168823242, 0.0525700798034668, 0.05270528030395508, 0.052682369232177735, 0.052834686279296876, 0.05288140869140625, 0.05290188980102539, 0.05278307342529297, 0.052817119598388675, 0.05302777481079102, 0.052786815643310545, 0.05273420715332031, 0.05282406234741211, 0.05277084732055664, 0.05295491027832031, 0.052744384765625, 0.053053760528564455, 0.05283808135986328, 0.05270528030395508, 0.05282611083984375, 0.05281792068481445, 0.05300617599487305, 0.05300204849243164, 0.05279369735717773, 0.052999488830566405, 0.05307696151733399, 0.052870880126953124, 0.05287097549438476, 0.053231136322021484, 0.05285520172119141, 0.053017887115478515, 0.05406307220458984, 0.05285583877563477, 0.05234790420532227, 0.052397247314453124, 0.052420894622802736, 0.05233513641357422, 0.05240537643432617, 0.05258329772949219, 0.052569534301757814, 0.05259273529052735, 0.05240812683105469, 
0.05234960174560547, 0.05247334289550781, 0.0528983039855957, 0.052566017150878906, 0.05257212829589844, 0.05280361557006836, 0.05270937728881836, 0.05275839996337891, 0.05278937530517578, 0.052746238708496096, 0.05259823989868164, 0.05263824081420899, 0.05264003372192383, 0.052737567901611326, 0.05265340805053711, 0.05283107376098633, 0.052762622833251956, 0.05260489654541016, 0.05267027282714844, 0.052863201141357424, 0.05324579238891602, 0.05301776123046875, 0.052863998413085936, 0.05271756744384765, 0.05290963363647461, 0.05293072128295898, 0.05283663940429688, 0.05303068923950195, 0.052989246368408204, 0.052978591918945314, 0.052899486541748045, 0.05295548629760742, 0.05285472106933594, 0.05299820709228516, 0.053017791748046876, 0.05292319869995117, 0.05290800094604492, 0.052875297546386715, 0.052784961700439455, 0.05288191986083984, 0.052802398681640626, 0.05292486572265625, 0.05304348754882812, 0.05314982223510742, 0.05293011093139648, 0.05305785751342774, 0.053055553436279296, 0.05322348785400391, 0.05311654281616211, 0.05297628784179687, 0.05293641662597656, 0.05308124923706055, 0.05461270523071289, 0.053144832611083985, 0.05265071868896484, 0.05251686477661133, 0.05243417739868164, 0.05247286224365234, 0.052538944244384767, 0.052396190643310546, 0.05261529541015625, 0.05266729736328125, 0.05251379013061523, 0.05374358367919922, 0.052701343536376954, 0.05273788833618164, 0.05269417572021484, 0.05274915313720703, 0.05264777755737305, 0.05265423965454102, 0.05267827224731445, 0.05319510269165039, 0.05292233657836914, 0.05308422470092773, 0.05280767822265625, 0.05284659194946289, 0.05279888153076172, 0.05276732635498047, 0.05276374435424805, 0.052809982299804686, 0.05270595169067383, 0.05268668746948242, 0.052719425201416016, 0.05276079940795898, 0.052754047393798825, 0.05283891296386719, 0.052868640899658204, 0.0528421745300293, 0.05279414367675781, 0.05278617477416992, 0.05300128173828125, 0.05289971160888672, 0.05314156723022461, 0.05289267349243164, 0.05293913650512695, 0.052738494873046875, 0.052961280822753906, 0.0527341423034668, 0.05281788635253906, 0.052799518585205076, 0.052811775207519535, 0.05273011016845703, 0.052813568115234376, 0.05289567947387695, 0.052916385650634765, 0.05315081787109375, 0.05319148635864258, 0.0529837760925293, 0.05310262298583984, 0.05332735824584961, 0.05320755386352539, 0.05337699127197266, 0.053346046447753905, 0.05318070220947266, 0.053106689453125, 0.05466521453857422, 0.05334339141845703, 0.052703102111816405, 0.05247894287109375, 0.05252828979492188, 0.05254377746582031, 0.052410816192626955, 0.052627582550048825, 0.05255168151855469, 0.05253324890136719, 0.052502113342285155, 0.05256233596801758, 0.052587646484375, 0.05255487823486328, 0.052612545013427735, 0.05280185699462891, 0.052482177734375, 0.05268876647949219, 0.052728031158447264, 0.05289539337158203, 0.05286310577392578, 0.05277494430541992, 0.05271708679199219, 0.05267327880859375, 0.052676063537597656, 0.05277030563354492, 0.05288828659057617, 0.052709022521972654, 0.05279792022705078, 0.05270105743408203, 0.05278464126586914, 0.05269903945922851, 0.05286563110351562, 0.052827327728271485, 0.05291228866577148, 0.05285270309448242, 0.05293331146240234, 0.052883712768554685, 0.05298688125610351, 0.0529409294128418, 0.05295798492431641, 0.053319454193115234, 0.05304121780395508, 0.0530063362121582, 0.05297971343994141, 0.05307494354248047, 0.05297840118408203, 0.052834465026855466, 0.05283654403686523, 0.05287519836425781, 0.052983806610107424, 0.052822017669677736, 
0.052956768035888675, 0.05294736099243164, 0.053182464599609375, 0.05314787292480469, 0.053106464385986325, 0.05320083236694336, 0.053137248992919925, 0.05315974426269531, 0.05315353775024414, 0.05341865539550781, 0.05304681777954102, 0.05451468658447266, 0.05318656158447266, 0.052654079437255856, 0.05249871826171875, 0.052536991119384764, 0.05242585754394531, 0.05259564971923828, 0.053082015991210936, 0.05249187088012695, 0.052679168701171876, 0.05255708694458008, 0.05245836639404297, 0.05275222396850586, 0.05255478286743164, 0.052591743469238283, 0.05266211318969727, 0.05263359832763672, 0.05268428802490235, 0.05275289535522461, 0.053082111358642575, 0.05294668960571289, 0.05282822418212891, 0.05271897506713867, 0.05253116989135742, 0.05270943832397461, 0.05268339157104492, 0.052842655181884766, 0.05267804718017578, 0.052660831451416014, 0.052719615936279295, 0.05269081497192383, 0.05266998291015625, 0.0528554573059082, 0.05276627349853515, 0.052913887023925785, 0.052947616577148436, 0.05287116622924805, 0.052789249420166016, 0.053037120819091794, 0.05297760009765625, 0.05308160018920898, 0.05293721771240235, 0.052967422485351565, 0.05287936019897461, 0.05300428771972656, 0.052848190307617185, 0.05299244689941406, 0.052754432678222656, 0.052819969177246094, 0.05288905715942383, 0.05279388809204102, 0.05301248168945313, 0.05294079971313476, 0.053032958984375, 0.05303910446166992, 0.05296332931518555, 0.053125118255615236, 0.05315996932983398, 0.05339907073974609, 0.05335254287719727, 0.05317232131958008, 0.05292262268066406, 0.05307148742675781]",tokens/s,18.94704243009041,, @@ -4526,7 +4526,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.27 GiB. GPU 0 has a total capacity of 14.74 GiB of which 172.12 MiB is free. Process 135692 has 14.57 GiB memory in use. Of the allocated memory 14.45 GiB is allocated by PyTorch, and 2.28 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.27 GiB. GPU 0 has a total capacity of 14.74 GiB of which 172.12 MiB is free. Process 129296 has 14.57 GiB memory in use. Of the allocated memory 14.45 GiB is allocated by PyTorch, and 2.28 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -4567,7 +4567,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 128.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 78.12 MiB is free. Process 140850 has 14.66 GiB memory in use. Of the allocated memory 14.54 GiB is allocated by PyTorch, and 12.56 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 128.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 78.12 MiB is free. Process 134286 has 14.66 GiB memory in use. Of the allocated memory 14.54 GiB is allocated by PyTorch, and 12.56 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -4610,7 +4610,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 224.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 170.12 MiB is free. 
Process 115141 has 14.57 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 1.46 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 224.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 170.12 MiB is free. Process 108594 has 14.57 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 1.46 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -4653,7 +4653,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 2.12 MiB is free. Process 98286 has 14.74 GiB memory in use. Of the allocated memory 14.62 GiB is allocated by PyTorch, and 6.49 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 2.12 MiB is free. Process 91702 has 14.74 GiB memory in use. Of the allocated memory 14.62 GiB is allocated by PyTorch, and 6.49 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,783.659008,1106.182144,0.0,710.934528,657.419264,s,1,7.05105859375,7.05105859375,0.0,7.05105859375,7.05105859375,7.05105859375,7.05105859375,[7.05105859375],,kWh,2.202895187519971e-06,2.3594645826461773e-07,0.0,2.438841645784589e-06,,MB,1154.048,1150.222336,0.0,744.48896,582.372352,s,21,0.34496889877319337,0.01642709041777111,0.0007742651983539773,0.016256927490234375,0.016360639572143554,0.016419744491577147,0.01918410911560059,"[0.019875200271606445, 0.01612614440917969, 0.0162511043548584, 0.01623356819152832, 0.016419744491577147, 0.01624127960205078, 0.016256927490234375, 0.01615715217590332, 0.01620195198059082, 0.01619811248779297, 0.016309696197509764, 0.01617840003967285, 0.016360639572143554, 0.01628441619873047, 0.016258655548095705, 0.016328128814697265, 0.016202848434448244, 0.016271615982055666, 0.016319295883178712, 0.01632364845275879, 0.01617036819458008]",tokens/s,15584.013570842391,kWh,6.129782504288806e-07,6.759974087109422e-08,4.079006531176538e-07,1.0884786444176286e-06,tokens/kWh,235190650.0994958,MB,1165.881344,1171.193856,0.0,765.46048,582.374912,s,21,9.861971099853514,0.46961767142159594,0.0015522680961810973,0.46928976440429687,0.4714923095703125,0.4716328125,0.47389763183593747,"[0.47017724609375, 0.46980416870117186, 0.47446383666992187, 0.4695044860839844, 0.4688964538574219, 0.469240234375, 0.4675565490722656, 0.4714923095703125, 0.46747354125976565, 0.4716328125, 0.47092059326171876, 0.47007916259765625, 0.46998968505859373, 0.46915087890625, 0.4682867736816406, 0.46879379272460936, 0.46901904296875, 0.46928976440429687, 0.4695080261230469, 0.46755807495117185, 0.4691336669921875]",tokens/s,134.15168089669734,kWh,1.3432311777348723e-05,1.481352660628371e-06,6.6641552690726686e-06,2.1577819707049762e-05,tokens/kWh,2919664.7694399385,,s,1323,9.856276663780205,0.007449944568239011,0.0001204959173266853,0.007427680015563965,0.007527481746673584,0.0076103262424469,0.007899578285217285,"[0.007407072067260742, 0.007580192089080811, 0.007585792064666748, 0.007608160018920899, 0.007400864124298095, 0.007393919944763184, 0.007436351776123047, 0.0074158720970153805, 0.007421567916870118, 0.007475840091705322, 0.007452415943145752, 0.007387135982513428, 0.007468255996704101, 0.007508768081665039, 0.007419648170471191, 0.007436575889587402, 0.007427296161651611, 0.0074882559776306154, 0.007559167861938477, 0.007354368209838867, 0.007419424057006836, 0.007387616157531738, 0.007409664154052734, 0.007437983989715576, 0.007419360160827637, 0.007389535903930664, 0.007455264091491699, 0.007455840110778809, 0.007428864002227783, 0.00744217586517334, 0.00741212797164917, 0.0073975038528442385, 0.007477119922637939, 
0.00740556812286377, 0.007437695980072021, 0.007452864170074463, 0.007414112091064453, 0.0073966398239135745, 0.0076471037864685055, 0.007437248229980469, 0.00738099193572998, 0.00740556812286377, 0.007440447807312012, 0.0074584641456604, 0.0074263358116149905, 0.007433504104614258, 0.007424960136413574, 0.00747708797454834, 0.007441823959350586, 0.007471648216247559, 0.007431551933288574, 0.007453311920166015, 0.00745027208328247, 0.007756256103515625, 0.00747705602645874, 0.007513567924499511, 0.007440832138061524, 0.0074683837890625, 0.007492288112640381, 0.007512191772460938, 0.007458816051483155, 0.007505375862121582, 0.007615007877349853, 0.007315360069274903, 0.007415040016174317, 0.007428864002227783, 0.007435872077941895, 0.0074817600250244145, 0.007426047801971435, 0.007429887771606445, 0.00740169620513916, 0.007442463874816894, 0.007539807796478271, 0.007417984008789062, 0.007439136028289795, 0.007409023761749268, 0.007393407821655274, 0.007364416122436523, 0.007374911785125732, 0.0074117441177368164, 0.007432000160217285, 0.0074202880859375, 0.007436831951141357, 0.007401343822479248, 0.007411712169647216, 0.007417856216430664, 0.007487520217895508, 0.007441760063171387, 0.0074369277954101564, 0.007448351860046386, 0.007407584190368652, 0.007387392044067383, 0.007401472091674805, 0.007441855907440185, 0.007426623821258545, 0.007428095817565918, 0.007431263923645019, 0.007513311862945557, 0.007406655788421631, 0.007678592205047607, 0.007600128173828125, 0.007513887882232666, 0.007694560050964356, 0.007522175788879395, 0.007507264137268066, 0.00761033582687378, 0.007455584049224854, 0.007415359973907471, 0.007388959884643555, 0.007451295852661133, 0.007460864067077637, 0.007479296207427978, 0.007480800151824951, 0.007470623970031738, 0.007416831970214844, 0.007391232013702393, 0.007452672004699707, 0.007497727870941162, 0.007448095798492432, 0.007465439796447754, 0.007469056129455566, 0.007407616138458252, 0.007442431926727295, 0.007532735824584961, 0.007550784111022949, 0.007448768138885498, 0.007382847785949707, 0.00748035192489624, 0.00744543981552124, 0.007409599781036377, 0.007370751857757568, 0.007407616138458252, 0.0074301438331604, 0.007458943843841553, 0.0074852161407470705, 0.007488768100738525, 0.007443295955657959, 0.007393280029296875, 0.007415840148925781, 0.007446335792541504, 0.007454912185668946, 0.007443903923034668, 0.007442848205566406, 0.007511648178100586, 0.0074287037849426266, 0.007440063953399658, 0.007423423767089844, 0.007391327857971191, 0.007450975894927978, 0.0074225602149963375, 0.007393055915832519, 0.00738099193572998, 0.007368351936340332, 0.0074568638801574706, 0.007427616119384766, 0.007444384098052978, 0.007439551830291748, 0.0074460158348083495, 0.007510144233703614, 0.007405663967132568, 0.007434144020080567, 0.007522592067718506, 0.007433951854705811, 0.007462528228759766, 0.008919424057006835, 0.009375519752502441, 0.008029664039611816, 0.007535359859466553, 0.0074992961883544925, 0.00748528003692627, 0.007486239910125732, 0.0074750399589538575, 0.007434400081634521, 0.0074217281341552735, 0.007513311862945557, 0.007480160236358642, 0.007527488231658936, 0.007543200016021728, 0.007480991840362549, 0.007480192184448242, 0.0075920639038085935, 0.007440256118774414, 0.007401472091674805, 0.0074629120826721195, 0.007831552028656007, 0.007510015964508057, 0.0076249918937683105, 0.007566112041473389, 0.007506879806518554, 0.007452640056610107, 0.007468959808349609, 0.007427807807922363, 0.007417952060699463, 0.007442527770996093, 
0.007432415962219239, 0.007439839839935303, 0.007483551979064942, 0.007399807929992676, 0.007415808200836181, 0.007389023780822754, 0.007397535800933838, 0.00771398401260376, 0.007424543857574463, 0.007419392108917237, 0.007381792068481445, 0.007391232013702393, 0.007407616138458252, 0.007442048072814941, 0.007442815780639649, 0.007460864067077637, 0.007447711944580078, 0.007435103893280029, 0.007383039951324463, 0.007362559795379638, 0.007517248153686523, 0.007460063934326172, 0.0074237117767333985, 0.007442431926727295, 0.007400544166564942, 0.007368800163269043, 0.007379776000976563, 0.007421887874603271, 0.007423615932464599, 0.007440832138061524, 0.007419551849365234, 0.00740835189819336, 0.0074297599792480466, 0.007420928001403809, 0.007437312126159668, 0.007411712169647216, 0.007753727912902832, 0.007716127872467041, 0.007525087833404541, 0.007485439777374267, 0.0074235520362854, 0.007436736106872559, 0.007527455806732177, 0.007490528106689453, 0.00747276782989502, 0.007473536014556885, 0.0074601278305053715, 0.007395423889160156, 0.007387775897979737, 0.007400864124298095, 0.007414400100708008, 0.007458591938018799, 0.007426239967346191, 0.007427807807922363, 0.007409887790679932, 0.00737286376953125, 0.00744652795791626, 0.007483391761779785, 0.007422143936157226, 0.007585951805114746, 0.007640575885772705, 0.00750438404083252, 0.00743612813949585, 0.007407616138458252, 0.00738099193572998, 0.007406816005706787, 0.007391615867614746, 0.007399648189544678, 0.007426080226898194, 0.007413919925689697, 0.007360191822052002, 0.0073173117637634275, 0.007405600070953369, 0.007395328044891358, 0.007776735782623291, 0.007437664031982422, 0.007567903995513916, 0.007434368133544922, 0.0074670081138610836, 0.007802879810333252, 0.007406847953796387, 0.00744649600982666, 0.007453472137451172, 0.0074301438331604, 0.007444447994232177, 0.007536672115325928, 0.007374847888946533, 0.007376575946807861, 0.007460319995880127, 0.007406271934509277, 0.0074403839111328125, 0.007419871807098389, 0.007452191829681397, 0.007372767925262451, 0.00735097599029541, 0.0074301438331604, 0.007407360076904297, 0.007417247772216797, 0.007450719833374023, 0.007387807846069336, 0.0074011521339416506, 0.007398848056793213, 0.0073769278526306156, 0.0075560321807861324, 0.007428095817565918, 0.007411903858184814, 0.00736627197265625, 0.0073118720054626465, 0.007388768196105957, 0.007389279842376709, 0.007413856029510498, 0.0073827199935913084, 0.007421664237976074, 0.007391007900238037, 0.007389120101928711, 0.007427999973297119, 0.007391808032989502, 0.007438079833984375, 0.007660096168518067, 0.007426047801971435, 0.007370751857757568, 0.007354207992553711, 0.007397600173950195, 0.007415584087371826, 0.007527455806732177, 0.007455711841583252, 0.0074481601715087895, 0.007407519817352295, 0.0073751678466796875, 0.007334080219268799, 0.007368288040161133, 0.007378528118133545, 0.007381984233856201, 0.00741974401473999, 0.007387423992156982, 0.007290080070495606, 0.00737334394454956, 0.00769209623336792, 0.00739958381652832, 0.007368703842163086, 0.007409664154052734, 0.007308928012847901, 0.007321983814239502, 0.007374335765838623, 0.007381504058837891, 0.007369984149932862, 0.007940864086151123, 0.007437952041625976, 0.007358880043029785, 0.007376863956451416, 0.007382847785949707, 0.00743228816986084, 0.007380256175994873, 0.007414591789245605, 0.007385024070739746, 0.0074263038635253905, 0.007372608184814453, 0.007368703842163086, 0.007439616203308105, 0.007448895931243897, 0.007502272129058838, 
0.007419072151184082, 0.007472991943359375, 0.0074122557640075686, 0.007456416130065918, 0.007459424018859863, 0.007434271812438965, 0.007768159866333008, 0.007684160232543946, 0.007593503952026367, 0.007549407958984375, 0.007499872207641601, 0.007534143924713135, 0.007540863990783691, 0.007573215961456299, 0.007495520114898682, 0.0075000319480896, 0.007479712009429932, 0.007510015964508057, 0.007399456024169922, 0.007374815940856934, 0.007408991813659668, 0.007397215843200683, 0.007425983905792236, 0.00738047981262207, 0.007551487922668457, 0.007374847888946533, 0.007343552112579346, 0.007354015827178955, 0.007488416194915771, 0.007394591808319092, 0.007375296115875244, 0.007397247791290283, 0.007342495918273926, 0.007376383781433105, 0.007406079769134521, 0.007397247791290283, 0.007417503833770752, 0.007436768054962158, 0.007397408008575439, 0.007335360050201416, 0.007328288078308105, 0.007387487888336181, 0.0076836800575256345, 0.007445792198181153, 0.00743609619140625, 0.007431136131286621, 0.0073842878341674805, 0.007342879772186279, 0.007415808200836181, 0.007376128196716308, 0.007487711906433106, 0.00743887996673584, 0.007378943920135498, 0.007329792022705078, 0.007319551944732666, 0.007378592014312744, 0.007395679950714111, 0.007423615932464599, 0.007360896110534668, 0.007347936153411865, 0.007358304023742676, 0.007383488178253174, 0.007364607810974121, 0.007419904232025146, 0.007387135982513428, 0.007358335971832276, 0.007286911964416504, 0.007387135982513428, 0.007447999954223633, 0.007424255847930908, 0.007395135879516602, 0.007380671977996826, 0.007723328113555908, 0.007463263988494873, 0.007483551979064942, 0.007490943908691406, 0.007450496196746826, 0.0074531202316284175, 0.007446847915649414, 0.007388288021087646, 0.00737779188156128, 0.007606272220611572, 0.0076943359375, 0.007422175884246826, 0.007450399875640869, 0.007413760185241699, 0.007372960090637207, 0.0077940158843994144, 0.007420576095581054, 0.007388319969177246, 0.007392096042633057, 0.007423999786376953, 0.007378655910491944, 0.0074932479858398435, 0.007708928108215332, 0.008310527801513671, 0.008026783943176269, 0.007952415943145752, 0.007511168003082275, 0.007591904163360596, 0.007392288208007813, 0.007366655826568603, 0.007411392211914063, 0.0074544320106506344, 0.007393343925476074, 0.00742416000366211, 0.007653600215911866, 0.007419392108917237, 0.0073671360015869145, 0.007442080020904541, 0.007440767765045166, 0.007423679828643798, 0.007395648002624512, 0.007377920150756836, 0.007325695991516113, 0.00733900785446167, 0.007447648048400879, 0.007426271915435791, 0.007381696224212647, 0.007378496170043945, 0.0073482880592346195, 0.0073381118774414065, 0.007358719825744629, 0.007440224170684815, 0.007422111988067627, 0.008077312469482421, 0.007475200176239013, 0.0074035201072692874, 0.0073424320220947265, 0.007351583957672119, 0.007418240070343017, 0.007383264064788818, 0.007390463829040527, 0.0073794879913330075, 0.007421440124511719, 0.007358975887298584, 0.007361599922180176, 0.007543680191040039, 0.00767955207824707, 0.00768665599822998, 0.0075304961204528805, 0.0074563522338867184, 0.007442431926727295, 0.007427775859832763, 0.0073796801567077635, 0.007462207794189453, 0.007448863983154297, 0.00746127986907959, 0.007429696083068847, 0.007374752044677735, 0.007442368030548096, 0.007401408195495606, 0.007373760223388672, 0.007421919822692871, 0.007397408008575439, 0.00738918399810791, 0.007427680015563965, 0.007356832027435303, 0.007309567928314209, 0.007367616176605225, 0.007445312023162841, 
0.007370751857757568, 0.007423647880554199, 0.00739686393737793, 0.007344992160797119, 0.007358560085296631, 0.0074198079109191895, 0.0074301438331604, 0.007413760185241699, 0.007540736198425293, 0.0074035201072692874, 0.007390336036682129, 0.007358496189117432, 0.007430592060089111, 0.007410079956054688, 0.007412864208221436, 0.007410560131072998, 0.007407616138458252, 0.007406720161437988, 0.007391520023345947, 0.007400383949279785, 0.007418591976165772, 0.007455679893493652, 0.007460864067077637, 0.0073400321006774905, 0.007350143909454346, 0.007530623912811279, 0.007385087966918945, 0.007391488075256348, 0.0074254398345947265, 0.007417600154876709, 0.007342688083648682, 0.0074629120826721195, 0.007442431926727295, 0.0074301438331604, 0.007433311939239502, 0.0074208321571350095, 0.007413536071777344, 0.00737718391418457, 0.007407551765441895, 0.007442431926727295, 0.007521279811859131, 0.007668928146362304, 0.007501023769378662, 0.007489920139312744, 0.007422175884246826, 0.00738918399810791, 0.0073619518280029295, 0.007420032024383545, 0.007465312004089356, 0.007407519817352295, 0.007407839775085449, 0.007373792171478271, 0.007370463848114014, 0.0073883838653564455, 0.007637824058532715, 0.007456768035888672, 0.007451968193054199, 0.00745472002029419, 0.0074414081573486324, 0.007372576236724854, 0.007309279918670654, 0.007415743827819824, 0.007435840129852295, 0.007417856216430664, 0.0073855361938476565, 0.007383039951324463, 0.007337120056152344, 0.00753872013092041, 0.007477119922637939, 0.007472064018249512, 0.00745472002029419, 0.007441792011260986, 0.007864960193634033, 0.00751529598236084, 0.007526527881622314, 0.008338144302368164, 0.007725056171417236, 0.007474431991577149, 0.007439104080200196, 0.007466271877288818, 0.007449312210083008, 0.007481344223022461, 0.0074503359794616695, 0.0074570560455322265, 0.0074275522232055665, 0.007432735919952392, 0.007501823902130127, 0.00740556812286377, 0.007450623989105225, 0.007444543838500977, 0.007413119792938233, 0.007363135814666748, 0.007387135982513428, 0.007434239864349365, 0.007563551902770996, 0.007470816135406494, 0.007419904232025146, 0.007391232013702393, 0.007383039951324463, 0.007472447872161865, 0.007438047885894776, 0.007432608127593994, 0.007492479801177979, 0.007435967922210693, 0.007408927917480468, 0.007396063804626465, 0.0073842878341674805, 0.007424799919128418, 0.007815167903900147, 0.007796735763549805, 0.007600128173828125, 0.007546879768371582, 0.007561215877532959, 0.0074670081138610836, 0.00742195177078247, 0.00738262414932251, 0.007324128150939941, 0.007419839859008789, 0.007368703842163086, 0.007436287879943848, 0.007368703842163086, 0.00738108777999878, 0.008138655662536621, 0.008627679824829102, 0.007445024013519287, 0.00740556812286377, 0.00739737606048584, 0.007443456172943115, 0.007414783954620361, 0.0074035201072692874, 0.007380256175994873, 0.00740825605392456, 0.007333759784698486, 0.007383296012878418, 0.0073862080574035646, 0.007526912212371826, 0.007465343952178955, 0.007472832202911377, 0.007377120018005371, 0.007395008087158203, 0.007360447883605957, 0.007432960033416748, 0.007436351776123047, 0.007419551849365234, 0.007577600002288819, 0.0074301438331604, 0.0074113597869873045, 0.007399775981903076, 0.007409664154052734, 0.007395328044891358, 0.007401792049407959, 0.0074011521339416506, 0.007358463764190673, 0.00739081621170044, 0.007375264167785644, 0.007415808200836181, 0.00740499210357666, 0.00742790412902832, 0.00745907211303711, 0.007436607837677002, 0.0073975038528442385, 
0.007505824089050293, 0.007488800048828125, 0.007597152233123779, 0.0076203842163085934, 0.007491583824157715, 0.007473152160644531, 0.007438208103179932, 0.0075285758972167965, 0.0075304961204528805, 0.007903232097625732, 0.00749289608001709, 0.007549920082092285, 0.007505663871765137, 0.00744217586517334, 0.00740172815322876, 0.00739686393737793, 0.007305471897125244, 0.007360320091247558, 0.007437856197357178, 0.007431136131286621, 0.007433504104614258, 0.0074700479507446285, 0.0074135041236877445, 0.007395328044891358, 0.008044544219970704, 0.007440159797668457, 0.007446559906005859, 0.00750816011428833, 0.00744652795791626, 0.00745472002029419, 0.0074702720642089845, 0.007406400203704834, 0.00738646411895752, 0.007617184162139893, 0.007479296207427978, 0.007491583824157715, 0.0075304961204528805, 0.007475200176239013, 0.007409023761749268, 0.007414400100708008, 0.007385087966918945, 0.007485439777374267, 0.007526400089263916, 0.0074338879585266115, 0.007626431941986084, 0.007477536201477051, 0.007487872123718262, 0.007419904232025146, 0.007427680015563965, 0.0074203200340271, 0.0073994240760803225, 0.007412064075469971, 0.007441376209259033, 0.007369408130645752, 0.007415167808532715, 0.007383679866790772, 0.007423999786376953, 0.007428095817565918, 0.007405600070953369, 0.007391200065612793, 0.007455935955047607, 0.0074225602149963375, 0.007565248012542725, 0.0074336638450622555, 0.007439199924468994, 0.007491583824157715, 0.007419904232025146, 0.007391232013702393, 0.007406943798065185, 0.007446944236755371, 0.007446752071380615, 0.0074486079216003415, 0.007439487934112548, 0.0074720001220703125, 0.007372479915618896, 0.007389503955841065, 0.0074403839111328125, 0.0074711360931396485, 0.007798367977142334, 0.007540287971496582, 0.007563712120056153, 0.007497727870941162, 0.007479296207427978, 0.007411231994628906, 0.007533023834228516, 0.00742195177078247, 0.007458816051483155, 0.007550655841827392, 0.007440703868865967, 0.007460031986236572, 0.007408671855926514, 0.007374623775482178, 0.007401408195495606, 0.0074169921875, 0.007418496131896973, 0.0074304318428039555, 0.007526656150817871, 0.007386879920959473, 0.007339583873748779, 0.007367104053497314, 0.007415520191192627, 0.007423871994018554, 0.007408031940460205, 0.00745030403137207, 0.007413919925689697, 0.007388864040374756, 0.007479775905609131, 0.007452672004699707, 0.007448575973510742, 0.007458367824554443, 0.007560736179351807, 0.007478176116943359, 0.007426047801971435, 0.0074217281341552735, 0.00746723222732544, 0.007437952041625976, 0.0076282558441162105, 0.007475776195526123, 0.007463263988494873, 0.007477375984191894, 0.00736243200302124, 0.007403295993804931, 0.007423327922821045, 0.007402368068695068, 0.007419904232025146, 0.007438240051269532, 0.007412064075469971, 0.007378464221954346, 0.0075155520439147945, 0.007463871955871582, 0.007443456172943115, 0.007430751800537109, 0.007419616222381592, 0.007465536117553711, 0.007428095817565918, 0.007413760185241699, 0.007446656227111817, 0.0076102399826049804, 0.007510015964508057, 0.0076574721336364745, 0.007514111995697022, 0.007511392116546631, 0.007523071765899658, 0.007569407939910889, 0.007491583824157715, 0.00743833589553833, 0.007493631839752197, 0.0074403839111328125, 0.007436287879943848, 0.007428095817565918, 0.007425568103790283, 0.007390687942504883, 0.007561215877532959, 0.00744755220413208, 0.007400703907012939, 0.00744649600982666, 0.007418144226074219, 0.007456607818603515, 0.007346848011016846, 0.007398655891418457, 0.007400383949279785, 
0.007483104228973389, 0.0074170241355896, 0.0074085121154785155, 0.0073825597763061525, 0.0074611520767211914, 0.007413983821868897, 0.007434239864349365, 0.007411712169647216, 0.007398719787597656, 0.007439040184020996, 0.007444447994232177, 0.007393311977386474, 0.007379199981689453, 0.007575295925140381, 0.007439871788024902, 0.0074882559776306154, 0.0074237117767333985, 0.007401855945587158, 0.007413407802581787, 0.007401472091674805, 0.0074997758865356446, 0.007466047763824463, 0.007564223766326904, 0.007479296207427978, 0.007432191848754883, 0.007407264232635498, 0.0073885760307312014, 0.007574463844299316, 0.0074479360580444335, 0.007455359935760498, 0.007464799880981445, 0.007559328079223633, 0.0074405760765075685, 0.007398655891418457, 0.0074143362045288085, 0.00740880012512207, 0.0074577279090881346, 0.007437376022338868, 0.007437151908874512, 0.007385087966918945, 0.00743552017211914, 0.007395296096801758, 0.007406623840332032, 0.0074319357872009275, 0.00738486385345459, 0.007407360076904297, 0.007766496181488037, 0.0076943359375, 0.00752569580078125, 0.007405663967132568, 0.007405824184417724, 0.007588160037994385, 0.007437439918518066, 0.007469984054565429, 0.0074237117767333985, 0.007376448154449463, 0.007404255867004395, 0.0073690562248229985, 0.007399072170257568, 0.007444575786590576, 0.007448480129241943, 0.007468704223632812, 0.007481152057647705, 0.00737337589263916, 0.007390655994415283, 0.007387680053710937, 0.0073990077972412105, 0.007410079956054688, 0.00742195177078247, 0.007417856216430664, 0.007344128131866455, 0.00739247989654541, 0.007396128177642822, 0.007407616138458252, 0.007449600219726562, 0.007411903858184814, 0.007356959819793701, 0.007446815967559814, 0.007407616138458252, 0.007407616138458252, 0.007437632083892822, 0.007407680034637452, 0.007431871891021728, 0.0074559998512268065, 0.00749241590499878, 0.0074126081466674805, 0.007424191951751709, 0.007415616035461426, 0.00746073579788208, 0.00737395191192627, 0.007387872219085693, 0.007395167827606201, 0.007418303966522217, 0.00740067195892334, 0.00740835189819336, 0.007394368171691894, 0.007358719825744629, 0.007348991870880127, 0.007358367919921875, 0.007444575786590576, 0.00742195177078247, 0.00742412805557251, 0.007561183929443359, 0.007392831802368164, 0.0074915518760681156, 0.007392928123474121, 0.007399648189544678, 0.00737065601348877, 0.007585311889648437, 0.007486015796661377, 0.007456831932067871, 0.007435840129852295, 0.007395423889160156, 0.007447135925292969, 0.007476736068725586, 0.007402847766876221, 0.007405824184417724, 0.007419519901275635, 0.007384031772613525, 0.007343520164489746, 0.007360127925872803, 0.007427040100097656, 0.007428319931030274, 0.007886623859405518, 0.0074668159484863285, 0.0074544639587402345, 0.0074592318534851074, 0.007425151824951172, 0.007443359851837158, 0.007409664154052734, 0.00738918399810791, 0.007421088218688965, 0.007418303966522217, 0.007317920207977295, 0.007359712123870849, 0.007398176193237305, 0.007395328044891358, 0.0074297599792480466, 0.007418240070343017, 0.007505919933319092, 0.007425407886505127, 0.007376704216003418, 0.007424831867218018, 0.007641088008880615, 0.007438623905181884, 0.0074503359794616695, 0.007475391864776611, 0.007395103931427002, 0.0073554558753967285, 0.007379936218261719, 0.007421599864959717, 0.007389408111572265, 0.0074154877662658695, 0.007376319885253906, 0.007459839820861816, 0.0075071358680725096, 0.007457087993621827, 0.007444128036499024, 0.007396192073822022, 0.007419360160827637, 0.007414271831512451, 
0.007413792133331299, 0.007429471969604492, 0.0074349122047424315, 0.007423999786376953, 0.0074301438331604, 0.007482528209686279, 0.007555935859680176, 0.00745472002029419, 0.007386528015136719, 0.007298751831054688, 0.0074652800559997555, 0.0074193282127380375, 0.0074471039772033695, 0.007477248191833496, 0.007444223880767822, 0.007436543941497803, 0.007573503971099854, 0.007650911808013916, 0.0076763200759887695, 0.007480639934539795, 0.0076622719764709475, 0.007489568233489991, 0.007480319976806641, 0.0074572482109069825, 0.007378687858581543, 0.007402239799499511, 0.007432447910308838, 0.00743398380279541, 0.007485439777374267, 0.0074403839111328125, 0.007407360076904297, 0.007409920215606689, 0.007356416225433349, 0.0073744959831237794, 0.007479648113250733, 0.007458816051483155, 0.00740556812286377, 0.007411712169647216, 0.007346496105194091, 0.007355743885040283, 0.007385183811187744, 0.007421567916870118, 0.007430784225463867, 0.007426047801971435, 0.007415103912353516, 0.00738483190536499, 0.007377855777740479, 0.007409664154052734, 0.007395328044891358, 0.007437664031982422, 0.007402143955230713, 0.007478816032409668, 0.0074572482109069825, 0.007405151844024658, 0.0074551358222961425, 0.0074301438331604, 0.007538591861724854, 0.007511616230010986, 0.007451168060302734, 0.007403456211090088, 0.007400896072387695, 0.007399487972259522, 0.0074654722213745115, 0.007441855907440185, 0.007434879779815674, 0.007417856216430664, 0.0074059200286865235, 0.007390143871307373, 0.007428800106048584, 0.0074108800888061524, 0.0074422721862792965, 0.007484384059906006, 0.007384768009185791, 0.00740556812286377, 0.007364607810974121, 0.007553311824798584, 0.007501535892486573, 0.007477248191833496, 0.007483200073242187, 0.007476672172546387, 0.007478015899658203, 0.00744652795791626, 0.007552927970886231, 0.007425183773040771, 0.007475776195526123, 0.007471487998962402, 0.007505472183227539, 0.007452864170074463, 0.007397823810577393, 0.0073807997703552244, 0.007491583824157715, 0.007438496112823486, 0.007454815864562988, 0.007425087928771973, 0.007422656059265137, 0.0074217281341552735, 0.007438208103179932, 0.0073753600120544435, 0.0074074559211730956, 0.007362368106842041, 0.0074180479049682614, 0.0074033279418945315, 0.007390399932861328, 0.007400415897369385, 0.007370783805847168, 0.007423391819000244, 0.007436895847320556, 0.007414944171905517, 0.007420767784118652, 0.007516160011291504, 0.0074035201072692874, 0.007380415916442871, 0.007457344055175781, 0.007866112232208252, 0.007475456237792969, 0.007447999954223633, 0.007459392070770263, 0.007374080181121826, 0.007372896194458008, 0.007385695934295654, 0.007450816154479981, 0.0074626879692077635, 0.00753059196472168, 0.007440320014953613, 0.007413824081420898, 0.0073722882270812985, 0.007404032230377197, 0.007432191848754883, 0.007458816051483155, 0.0074414401054382325, 0.007435232162475586, 0.007406688213348388, 0.0073693118095397945, 0.007425504207611084, 0.007719776153564453, 0.007370719909667969, 0.0073920321464538575, 0.007440032005310059, 0.007444479942321777, 0.007384575843811035, 0.007323647975921631, 0.007406079769134521, 0.007419904232025146, 0.007425536155700683, 0.007435935974121094, 0.007397280216217041, 0.007416768074035645, 0.007402624130249023, 0.0078017921447753906, 0.007726367950439453, 0.007510111808776855, 0.007517983913421631, 0.0075541439056396485, 0.00749126386642456, 0.007434239864349365, 0.00739737606048584, 0.007385087966918945, 0.007434239864349365, 0.007497727870941162, 0.007429279804229736, 
0.007428959846496582, 0.007427999973297119, 0.007390528202056885, 0.007346816062927246, 0.0074057278633117675, 0.007405151844024658, 0.007436384201049804, 0.007452991962432862, 0.007391232013702393, 0.007507967948913574, 0.0074301438331604, 0.0074301438331604, 0.00763862419128418, 0.007478847980499268, 0.007473311901092529, 0.007442527770996093, 0.007415775775909424, 0.007389120101928711, 0.007461567878723144, 0.007417856216430664, 0.007423327922821045, 0.0074349122047424315, 0.00741103982925415, 0.007395999908447266, 0.00737721586227417, 0.007433919906616211, 0.007442463874816894, 0.0074254398345947265, 0.0074834561347961424, 0.007457024097442627, 0.007503712177276611, 0.007458752155303955, 0.007427807807922363, 0.007461311817169189, 0.007455039978027344, 0.0074824318885803225, 0.007519167900085449, 0.007429471969604492, 0.007389535903930664, 0.007477248191833496, 0.0074141759872436526, 0.007448575973510742, 0.007409952163696289, 0.007431903839111328, 0.007427519798278809, 0.007442527770996093, 0.007365087985992432, 0.0073702077865600586, 0.007425759792327881, 0.007432064056396484, 0.007401855945587158, 0.007418432235717773, 0.007424032211303711, 0.00736681604385376, 0.007321407794952392, 0.007401023864746094, 0.007405663967132568, 0.007436575889587402, 0.007403584003448487, 0.007368703842163086, 0.00739302396774292, 0.007388480186462402, 0.007915743827819824, 0.007415616035461426, 0.0074126400947570805, 0.007550816059112549, 0.007433728218078613, 0.007363232135772705, 0.007391232013702393, 0.007391232013702393, 0.007413760185241699, 0.0073935680389404295, 0.0074563841819763185, 0.007415904045104981, 0.00738918399810791, 0.00734611177444458, 0.0074271678924560545, 0.007408607959747315, 0.00742195177078247, 0.007436287879943848, 0.007361792087554932, 0.007377247810363769, 0.0073703999519348145, 0.007486207962036133, 0.0073985280990600585, 0.007390207767486572, 0.0073788161277770995, 0.007368703842163086, 0.007464831829071045, 0.0074460158348083495, 0.0074154877662658695, 0.007406527996063232, 0.0073994240760803225, 0.007427807807922363, 0.007379231929779052, 0.007372799873352051, 0.007428095817565918, 0.007441792011260986, 0.007406208038330078, 0.007455904006958008, 0.007374720096588135, 0.0074290881156921385, 0.007436287879943848, 0.007396416187286377, 0.007447487831115722, 0.007448768138885498, 0.007438144207000732, 0.007436287879943848, 0.007382656097412109, 0.007389567852020264, 0.007370751857757568, 0.007396383762359619, 0.007451615810394287, 0.007420928001403809, 0.007565695762634278, 0.007374591827392578, 0.007317696094512939, 0.007388959884643555, 0.007411808013916016, 0.007525184154510498, 0.007819263935089112, 0.007655168056488037, 0.007550687789916992, 0.007457312107086182, 0.007350272178649903, 0.007395328044891358, 0.007411392211914063, 0.007461184024810791, 0.007456768035888672, 0.007456768035888672, 0.007401472091674805, 0.007415808200836181, 0.007362559795379638, 0.007639039993286132, 0.007485439777374267, 0.007468639850616455, 0.007450047969818115, 0.007506912231445313, 0.007417119979858398, 0.007436160087585449, 0.007389344215393066, 0.0074124159812927245, 0.007407904148101807, 0.007435999870300293, 0.0074035201072692874, 0.007391488075256348, 0.007367487907409668, 0.007447648048400879, 0.007462080001831055, 0.007447199821472168, 0.007439455986022949, 0.007427072048187256, 0.007501120090484619, 0.007405856132507324, 0.00743996810913086, 0.0074124479293823245, 0.007433919906616211, 0.0074693760871887204, 0.007474559783935547, 0.007369344234466553, 0.007403071880340576, 
0.0074514241218566896, 0.007447711944580078, 0.007429855823516846]",tokens/s,134.2291866523748,, @@ -4697,7 +4697,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.50 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.30 GiB is free. Process 85874 has 13.44 GiB memory in use. Of the allocated memory 13.33 GiB is allocated by PyTorch, and 1.86 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.50 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.30 GiB is free. Process 79581 has 13.44 GiB memory in use. Of the allocated memory 13.33 GiB is allocated by PyTorch, and 1.86 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -4740,7 +4740,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 480.12 MiB is free. Process 100611 has 14.27 GiB memory in use. Of the allocated memory 14.15 GiB is allocated by PyTorch, and 10.71 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 480.12 MiB is free. Process 105515 has 14.27 GiB memory in use. Of the allocated memory 14.15 GiB is allocated by PyTorch, and 10.71 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -4783,7 +4783,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 72.12 MiB is free. Process 127268 has 14.67 GiB memory in use. Of the allocated memory 14.55 GiB is allocated by PyTorch, and 2.19 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 72.12 MiB is free. Process 120943 has 14.67 GiB memory in use. Of the allocated memory 14.55 GiB is allocated by PyTorch, and 2.19 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -4826,7 +4826,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 30.12 MiB is free. Process 80617 has 14.71 GiB memory in use. 
Of the allocated memory 14.37 GiB is allocated by PyTorch, and 229.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 30.12 MiB is free. Process 74221 has 14.71 GiB memory in use. Of the allocated memory 14.37 GiB is allocated by PyTorch, and 229.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,837.947392,9637.39648,0.0,9242.148864,8603.568128,s,1,7.6634521484375,7.6634521484375,0.0,7.6634521484375,7.6634521484375,7.6634521484375,7.6634521484375,[7.6634521484375],,kWh,1.2463342704139297e-05,1.3674601049854078e-06,5.724449023997158e-06,1.9555251833121862e-05,,MB,1140.477952,9886.957568,0.0,9481.224192,8972.090368,s,10,7.041542114257813,0.7041542114257812,0.00480887993823181,0.7044410705566406,0.7089775390624999,0.7092788391113282,0.7095198791503906,"[0.6915390625, 0.7037074584960937, 0.7027904663085938, 0.7025294799804688, 0.707328369140625, 0.7038873291015625, 0.7049948120117188, 0.7062744140625, 0.7095801391601563, 0.7089105834960937]",tokens/s,363.5567264188446,kWh,2.0551944174999337e-05,2.266518104224068e-06,1.3657510926000834e-05,3.647597320522424e-05,tokens/kWh,7018318.5671200855,MB,1162.006528,9891.151872,0.0,9485.418496,8972.092928,s,10,25.263025390625,2.5263025390625002,0.014150736211522822,2.5282164306640627,2.541820458984375,2.5455982177734375,2.5486204248046875,"[2.50933154296875, 2.5493759765625, 2.54098095703125, 2.530982421875, 2.53126611328125, 2.530928466796875, 2.52550439453125, 2.523835205078125, 2.524316650390625, 2.496503662109375]",tokens/s,24.937630796737047,kWh,7.337199067916725e-05,8.093241882545913e-06,4.856909441079924e-05,0.0001300343269725124,tokens/kWh,484487.45394219947,,s,630,25.25983639907835,0.0400949784112355,0.0007732978727018159,0.03999001693725586,0.04052755584716797,0.041276592826843254,0.04312175277709963,"[0.04249353790283203, 0.043307422637939456, 0.04041104125976563, 0.04082649612426758, 0.039516448974609375, 0.04048089599609375, 0.03943430328369141, 0.03960422515869141, 0.03983273696899414, 0.03978736114501953, 0.040048641204833986, 0.03961222457885742, 0.039841983795166014, 0.039616512298583983, 0.0398315200805664, 0.03960160064697266, 0.039370529174804686, 0.039526657104492186, 0.039404094696044924, 
0.039636993408203126, 0.0396492805480957, 0.0395489273071289, 0.03973104095458985, 0.03973699188232422, 0.03969279861450195, 0.03982950210571289, 0.0396124153137207, 0.039546463012695314, 0.039659393310546874, 0.039483936309814456, 0.03946700668334961, 0.03945676803588867, 0.039907329559326174, 0.039407646179199216, 0.03943753433227539, 0.03988966369628906, 0.03963401412963867, 0.0395846061706543, 0.039653438568115235, 0.03948249435424805, 0.039720897674560544, 0.03951440048217773, 0.03964144134521484, 0.03988883209228516, 0.039820735931396484, 0.03991151809692383, 0.039537567138671875, 0.03972438430786133, 0.04002799987792969, 0.0399183349609375, 0.0400445442199707, 0.039894943237304685, 0.040130657196044923, 0.03968819046020508, 0.039223297119140625, 0.03971072006225586, 0.03956041717529297, 0.03962140655517578, 0.0394728012084961, 0.039738945007324215, 0.03996652984619141, 0.03994704055786133, 0.0397468147277832, 0.04230822372436523, 0.04061753463745117, 0.04019039916992188, 0.04031488037109375, 0.040381568908691406, 0.04056358337402344, 0.04083017730712891, 0.04029232025146484, 0.04026860809326172, 0.04024115371704102, 0.04027801513671875, 0.040235008239746094, 0.040065025329589846, 0.04008972930908203, 0.04031475067138672, 0.03991551971435547, 0.040182785034179686, 0.04011872100830078, 0.04032979202270508, 0.040613887786865234, 0.04000521469116211, 0.04030636978149414, 0.04048355102539063, 0.04027391815185547, 0.04000342559814453, 0.04035190582275391, 0.04004249572753906, 0.04007843017578125, 0.04011529541015625, 0.04025939178466797, 0.04039475250244141, 0.04056883239746094, 0.04006092834472656, 0.040030208587646485, 0.04024729537963867, 0.03990323257446289, 0.04020822525024414, 0.040249504089355466, 0.040359935760498046, 0.04177094268798828, 0.05037062454223633, 0.040304641723632816, 0.03995606231689453, 0.04008182525634765, 0.040570911407470704, 0.04038803100585937, 0.04024076843261719, 0.040403968811035154, 0.040560256958007815, 0.03999116897583008, 0.04015913772583008, 0.040331775665283204, 0.04028211212158203, 0.04038614273071289, 0.03992556762695312, 0.039981056213378906, 0.04011193466186523, 0.04023580932617187, 0.040226814270019534, 0.04014284896850586, 0.04012236785888672, 0.04028742218017578, 0.040119102478027344, 0.042237953186035154, 0.040529918670654294, 0.040189056396484374, 0.040264575958251954, 0.03995600128173828, 0.04003839874267578, 0.03977059173583984, 0.03991689682006836, 0.04018652725219726, 0.040130561828613284, 0.03990528106689453, 0.04024115371704102, 0.03989299011230469, 0.04024140930175781, 0.04001475143432617, 0.04149129486083984, 0.04015513610839844, 0.0400148811340332, 0.04007974243164063, 0.04015478515625, 0.04041366577148438, 0.0402619514465332, 0.04006108856201172, 0.04007491302490234, 0.04033980941772461, 0.040793441772460935, 0.043618976593017576, 0.04017356872558594, 0.03986022567749024, 0.040005630493164065, 0.040182880401611325, 0.040197025299072264, 0.040136703491210936, 0.04324726486206055, 0.040528160095214844, 0.04041494369506836, 0.040080928802490236, 0.04008348846435547, 0.04017641448974609, 0.03976380920410156, 0.04017942428588867, 0.04032761764526367, 0.04001587295532227, 0.040271198272705075, 0.04011804962158203, 0.039943038940429686, 0.039852001190185546, 0.03999337768554687, 0.04029849624633789, 0.0404029426574707, 0.04031401443481445, 0.04038896179199219, 0.04026371383666992, 0.041116416931152346, 0.04016505432128906, 0.040022048950195316, 0.040136703491210936, 0.039890113830566405, 0.040360767364501955, 0.040118270874023435, 
0.04021657562255859, 0.03993804931640625, 0.04049903869628906, 0.04219910430908203, 0.040132190704345705, 0.040548126220703126, 0.040122112274169924, 0.0401212158203125, 0.040339710235595704, 0.03994598388671875, 0.040343551635742186, 0.04240588760375977, 0.040908798217773434, 0.04048889541625977, 0.04038246536254883, 0.04024639892578125, 0.04031155014038086, 0.040116416931152345, 0.03985168075561524, 0.03987900924682617, 0.03994009780883789, 0.039725055694580076, 0.039882080078125, 0.03977072143554688, 0.04063852691650391, 0.040030208587646485, 0.04000358581542969, 0.04009369659423828, 0.039929855346679685, 0.03970364761352539, 0.03980527877807617, 0.03950249481201172, 0.03989807891845703, 0.04387321472167969, 0.04003190231323242, 0.04065299224853516, 0.03973068618774414, 0.0397823371887207, 0.04006943893432617, 0.040132545471191404, 0.03981727981567383, 0.03960211181640625, 0.03975743865966797, 0.039737407684326174, 0.03945568084716797, 0.040290145874023436, 0.04001792144775391, 0.04005401611328125, 0.03995929718017578, 0.03975167846679688, 0.039782398223876955, 0.039728416442871096, 0.0397790412902832, 0.03991686248779297, 0.040065216064453124, 0.04014883041381836, 0.03991619110107422, 0.03998886489868164, 0.04026816177368164, 0.04030025482177734, 0.04028995132446289, 0.039895103454589846, 0.03997753524780273, 0.04017935943603516, 0.03992995071411133, 0.040478046417236326, 0.04252345657348633, 0.04176688003540039, 0.03992374420166016, 0.040097793579101565, 0.03978035354614258, 0.04008038330078125, 0.03974403381347656, 0.03961459350585937, 0.0397633285522461, 0.03987760162353516, 0.039792640686035156, 0.04057510375976563, 0.04004441452026367, 0.0400261116027832, 0.04024428939819336, 0.039893184661865234, 0.04004735946655273, 0.03968000030517578, 0.03990323257446289, 0.040164958953857424, 0.040137054443359375, 0.04082284927368164, 0.0402655029296875, 0.04041046524047852, 0.04049359893798828, 0.04001177597045898, 0.041587039947509764, 0.03987875366210938, 0.040081024169921875, 0.039936286926269535, 0.039667713165283204, 0.04018918228149414, 0.03981593704223633, 0.03968729782104492, 0.04035820770263672, 0.04001811218261719, 0.04026124954223633, 0.03986412811279297, 0.039830463409423825, 0.0398636474609375, 0.03971343994140625, 0.039937343597412106, 0.039529151916503906, 0.039663646697998045, 0.0399318733215332, 0.04036198425292969, 0.041538719177246095, 0.04276924896240234, 0.041893470764160154, 0.039936416625976565, 0.03991926574707031, 0.04089478302001953, 0.03980233764648437, 0.039567039489746096, 0.03998348617553711, 0.039784286499023436, 0.04051545715332031, 0.039713569641113285, 0.04026313781738281, 0.03988124847412109, 0.0395489273071289, 0.03957756805419922, 0.03954691314697266, 0.042626911163330075, 0.03976003265380859, 0.03952409744262695, 0.03968022537231446, 0.03951004791259766, 0.03968819046020508, 0.039609760284423826, 0.03963875198364258, 0.039723743438720704, 0.03981123352050781, 0.03977948760986328, 0.0396317138671875, 0.03970457458496094, 0.039626750946044925, 0.03955507278442383, 0.039599777221679684, 0.03958204650878906, 0.039752960205078125, 0.03978931045532227, 0.04245913696289062, 0.03993804931640625, 0.04020412826538086, 0.04121567916870117, 0.0398135986328125, 0.039796382904052734, 0.039817184448242185, 0.03974959945678711, 0.0399117431640625, 0.039822654724121095, 0.03967055892944336, 0.03969023895263672, 0.0395994873046875, 0.04105484771728515, 0.04093337631225586, 0.039919296264648435, 0.04002848052978516, 0.04005462265014648, 0.04026339340209961, 
0.040126625061035155, 0.04013699340820313, 0.040226814270019534, 0.04046847915649414, 0.040269824981689455, 0.040271873474121096, 0.040267200469970704, 0.04038304138183594, 0.0405852165222168, 0.04038643264770508, 0.03994432067871094, 0.04101116943359375, 0.04034694290161133, 0.040089920043945314, 0.040272289276123044, 0.040458240509033204, 0.04013449478149414, 0.040185791015625, 0.04039680099487305, 0.04019836807250977, 0.04041475296020508, 0.04062019348144531, 0.04253523254394531, 0.040132766723632814, 0.040224609375, 0.0425750732421875, 0.04048934555053711, 0.04036662292480469, 0.04028598403930664, 0.04020230484008789, 0.040374271392822264, 0.040390655517578124, 0.04035500717163086, 0.04003923034667969, 0.04014617538452148, 0.04024396896362305, 0.0400992317199707, 0.040127071380615234, 0.04062822341918945, 0.040447681427001954, 0.04046675109863281, 0.041326431274414065, 0.04020598220825195, 0.04014745712280274, 0.04004191970825195, 0.04042195129394531, 0.040180862426757814, 0.04027891159057617, 0.04054425430297852, 0.04032067108154297, 0.04011452865600586, 0.04089785766601563, 0.03979945755004883, 0.03999542236328125, 0.039649185180664064, 0.0397694091796875, 0.03962073516845703, 0.04077017593383789, 0.04016035079956055, 0.03974854278564453, 0.039739166259765625, 0.039728862762451175, 0.04039120101928711, 0.03972911834716797, 0.03956307220458984, 0.03968819046020508, 0.03958784103393555, 0.04004188919067383, 0.040230846405029295, 0.03981939315795899, 0.039656158447265624, 0.0395546875, 0.039639423370361325, 0.03953823852539062, 0.039483329772949216, 0.039559680938720705, 0.039943294525146486, 0.04048166275024414, 0.039798782348632815, 0.03970172882080078, 0.03984054565429687, 0.039683391571044925, 0.03960022354125976, 0.03969836807250977, 0.03976668930053711, 0.03972476959228516, 0.039532257080078126, 0.04019878387451172, 0.042686431884765626, 0.040072574615478515, 0.04028684616088867, 0.040050048828125, 0.0437254409790039, 0.03998454284667969, 0.04017139053344727, 0.039885025024414066, 0.039629310607910154, 0.04011008071899414, 0.039631935119628904, 0.039328704833984374, 0.040013343811035156, 0.04003644943237305, 0.03962099075317383, 0.03975481414794922, 0.0398570556640625, 0.03965254211425781, 0.039871326446533205, 0.03969177627563476, 0.0398770866394043, 0.03965760040283203, 0.04024899291992187, 0.039974143981933594, 0.03990771102905273, 0.03967552185058594, 0.03973392105102539, 0.03979504013061524, 0.039462913513183595, 0.03963651275634766, 0.03969887924194336, 0.03991145706176758, 0.04006707382202149, 0.040174976348876956, 0.03981990432739258, 0.039886081695556644, 0.039639774322509765, 0.03974313735961914, 0.040053119659423826, 0.03975987243652344, 0.03970230484008789, 0.039491134643554686, 0.03988528060913086, 0.04009328079223633, 0.03975228881835938, 0.03974553680419922, 0.04007052612304687, 0.0397523193359375, 0.03972844696044922, 0.03989369583129883, 0.03988479995727539, 0.03985123062133789, 0.0399529914855957, 0.04143328094482422, 0.03999334335327148, 0.039800830841064457, 0.04371839904785156, 0.039946495056152345, 0.03993190383911133, 0.040308734893798825, 0.0398131217956543, 0.039817054748535155, 0.040161441802978516, 0.04222566223144531, 0.03999692916870117, 0.03972876739501953, 0.04028710556030273, 0.039657024383544924, 0.039728641510009766, 0.039791038513183594, 0.03967356872558594, 0.03973308944702148, 0.03974854278564453, 0.03970790481567383, 0.03975040054321289, 0.03960627365112305, 0.03949747085571289, 0.040175167083740235, 0.03974828720092773, 0.03984384155273438, 
0.0396124153137207, 0.039929855346679685, 0.03963900756835938, 0.04009539031982422, 0.03987494277954102, 0.03967583847045898, 0.03975600051879883, 0.039875839233398436, 0.040221473693847654, 0.03997257614135742, 0.039954784393310544, 0.040189697265625, 0.04098787307739258, 0.039983905792236325, 0.03960780715942383, 0.03994265747070312, 0.039782398223876955, 0.03944412612915039, 0.04019235229492187, 0.03976396942138672, 0.03990323257446289, 0.040030208587646485, 0.03968511962890625, 0.0428144645690918, 0.040089599609375, 0.04002816009521484, 0.03951747131347656, 0.03972784042358399, 0.040898399353027345, 0.040099647521972655, 0.04009603118896484, 0.04015929412841797, 0.040086849212646485, 0.04044796752929687, 0.04009664154052734, 0.04006076812744141, 0.04142710494995117, 0.04020198440551758, 0.040527488708496096, 0.0401638069152832, 0.04038860702514648, 0.04010598373413086, 0.039847934722900394, 0.04001315307617188, 0.040106655120849606, 0.04006467056274414, 0.042194847106933595, 0.03951193618774414, 0.03925651168823242, 0.03937737655639648, 0.039175552368164064, 0.03977484893798828, 0.039327743530273435, 0.03949977493286133, 0.03945798492431641, 0.03958249664306641, 0.03936259078979492, 0.03938825607299805, 0.03962563323974609, 0.039908607482910155, 0.03928128051757813, 0.0394013442993164, 0.03953241729736328, 0.039489601135253904, 0.039338302612304685, 0.03938508987426758, 0.03924972915649414, 0.03937094497680664, 0.04028982543945313, 0.04028464126586914, 0.04197785568237305, 0.039395328521728515, 0.03915724945068359, 0.03924579238891602, 0.03899756622314453, 0.03931235122680664, 0.039117855072021486, 0.03911164855957031, 0.039075328826904294, 0.03918502426147461, 0.03954390335083008, 0.039448673248291016, 0.03922118377685547, 0.039185150146484375, 0.03907516860961914, 0.03897139358520508, 0.03913897705078125, 0.03926323318481445, 0.03935980987548828, 0.03906351852416992, 0.039336673736572264, 0.03940966415405273, 0.039340000152587894, 0.039489566802978514, 0.03951520156860352, 0.039642047882080075, 0.03990937423706055, 0.040341503143310545, 0.040232158660888674, 0.040098686218261716, 0.04002374267578125, 0.04030691146850586, 0.03999884796142578, 0.040100479125976564, 0.04018175888061523, 0.04001094436645508, 0.04002899169921875, 0.0400711669921875, 0.040223873138427735]",tokens/s,24.940779110627425,, @@ -4872,7 +4872,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 258.12 MiB is free. Process 83645 has 14.49 GiB memory in use. Of the allocated memory 14.32 GiB is allocated by PyTorch, and 53.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 258.12 MiB is free. Process 77343 has 14.49 GiB memory in use. Of the allocated memory 14.32 GiB is allocated by PyTorch, and 53.40 MiB is reserved by PyTorch but unallocated. 
If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,813.715456,3354.329088,0.0,2959.081472,2942.567424,s,1,7.60247119140625,7.60247119140625,0.0,7.60247119140625,7.60247119140625,7.60247119140625,7.60247119140625,[7.60247119140625],,kWh,1.027498409583245e-05,1.124794598929495e-06,4.711670436002846e-06,1.611144913076479e-05,,MB,1118.08512,3545.16992,0.0,3139.436544,3105.830912,s,10,2.5615629425048834,0.2561562942504883,0.0025241374356524703,0.25539284515380856,0.26039235534667965,0.26045658416748046,0.26050796722412106,"[0.2603780822753906, 0.2574383544921875, 0.2528868408203125, 0.2541678009033203, 0.25575640869140626, 0.2550292816162109, 0.2546710662841797, 0.2536778869628906, 0.2605208129882812, 0.2570364074707031]",tokens/s,999.3898480966645,kWh,7.679832204056866e-06,8.469384555319108e-07,5.085201436578925e-06,1.3611972096167704e-05,tokens/kWh,18806973.610537585,MB,1144.561664,3587.11296,0.0,3181.379584,3162.0096,s,10,13.417264282226563,1.3417264282226564,0.013599975796685194,1.3432182006835938,1.3592529663085937,1.3610109436035156,1.3624173254394532,"[1.3265428466796876, 1.3588623046875, 1.3477608642578125, 1.3466318359375, 1.34207275390625, 1.3443636474609375, 1.3353577880859375, 1.31404443359375, 1.3627689208984375, 1.33885888671875]",tokens/s,46.954430258524575,kWh,3.888306745719286e-05,4.2885395841120465e-06,2.506946157602105e-05,6.824106861732596e-05,tokens/kWh,923197.7352711724,,s,630,13.414208038330063,0.021292393711635042,0.0005596877692547637,0.021259455680847167,0.021615834045410155,0.022005284976959225,0.02318074527740479,"[0.02122380828857422, 0.020987552642822267, 0.02083875274658203, 0.020719200134277343, 0.02167843246459961, 0.0205980167388916, 0.02066633605957031, 0.02083718490600586, 0.020845888137817382, 0.02263644790649414, 0.021711904525756835, 0.020838176727294922, 0.020822015762329102, 0.02083430480957031, 0.02065555191040039, 0.020869695663452148, 0.02089574432373047, 0.0209749755859375, 0.020836544036865235, 0.02113580894470215, 0.021360479354858398, 0.020811935424804688, 0.020969472885131835, 0.02066227149963379, 0.02062131118774414, 0.020551071166992188, 0.02053590393066406, 0.02062950325012207, 0.020549631118774413, 0.021269535064697264, 0.020786144256591796, 0.020676607131958007, 0.020793216705322266, 0.02068809509277344, 0.020581279754638672, 0.020642976760864257, 0.020616031646728514, 0.02067865562438965, 0.020774944305419922, 0.020943904876708986, 0.020870271682739257, 0.020727615356445312, 0.020727807998657227, 0.020917728424072267, 0.021040735244750978, 0.021232160568237304, 0.021495296478271485, 
0.021324703216552734, 0.02145052719116211, 0.021397727966308594, 0.02136195182800293, 0.021284927368164064, 0.022167680740356445, 0.021407808303833008, 0.02129484748840332, 0.02139561653137207, 0.021470048904418945, 0.02145155143737793, 0.02151910400390625, 0.021424320220947264, 0.021409887313842774, 0.021597728729248047, 0.021488000869750976, 0.021881311416625977, 0.021443904876708983, 0.02135465621948242, 0.021456735610961914, 0.02142473602294922, 0.021325759887695313, 0.021406976699829102, 0.02133475112915039, 0.02135481643676758, 0.021377952575683593, 0.021386016845703126, 0.022263168334960937, 0.021505760192871093, 0.021564064025878907, 0.021763423919677734, 0.02161552047729492, 0.02145894432067871, 0.02149580764770508, 0.021546016693115233, 0.021470176696777345, 0.021213279724121094, 0.02145471954345703, 0.021467168807983397, 0.022299840927124025, 0.021452831268310546, 0.021343008041381836, 0.021336063385009766, 0.021480640411376952, 0.021420864105224608, 0.02146633529663086, 0.02124880027770996, 0.02123075294494629, 0.0213656005859375, 0.021475040435791015, 0.021487648010253907, 0.02156972885131836, 0.023054399490356446, 0.022042623519897463, 0.02148761558532715, 0.021634592056274413, 0.021317375183105468, 0.02159814453125, 0.021299680709838866, 0.02141747283935547, 0.0214304313659668, 0.021364704132080078, 0.021347007751464843, 0.021420032501220702, 0.021348352432250976, 0.021336063385009766, 0.021317983627319338, 0.021400768280029295, 0.021154272079467774, 0.02126643180847168, 0.02125823974609375, 0.02151628875732422, 0.021640928268432617, 0.02147545623779297, 0.02143657684326172, 0.021393760681152344, 0.021406784057617187, 0.023015232086181642, 0.024418752670288087, 0.02168012809753418, 0.021169279098510744, 0.021162879943847655, 0.021434463500976563, 0.02137868881225586, 0.021133600234985353, 0.021174272537231444, 0.021131263732910157, 0.021227519989013673, 0.021198848724365234, 0.02211862373352051, 0.021614368438720704, 0.021710847854614256, 0.02145075225830078, 0.021223424911499023, 0.02128108787536621, 0.02122659111022949, 0.0211746883392334, 0.021403839111328125, 0.0215118408203125, 0.021544832229614258, 0.02135641670227051, 0.021267040252685547, 0.02140390396118164, 0.021192447662353515, 0.021213184356689452, 0.02110588836669922, 0.02128156852722168, 0.021301248550415038, 0.021151744842529296, 0.021102592468261717, 0.02126665687561035, 0.020952287673950194, 0.024524831771850587, 0.02122707176208496, 0.020874399185180664, 0.021102399826049806, 0.020743488311767578, 0.02100704002380371, 0.022416608810424805, 0.021646112442016602, 0.021581823348999024, 0.022279199600219728, 0.021314527511596678, 0.021263744354248045, 0.02117043113708496, 0.021361024856567382, 0.021446304321289064, 0.02125971221923828, 0.021273151397705078, 0.021256479263305664, 0.021198911666870116, 0.021267967224121095, 0.021387487411499023, 0.021221824645996094, 0.02156732749938965, 0.02130668830871582, 0.021289663314819338, 0.02168422317504883, 0.021261407852172853, 0.021424543380737304, 0.02136252784729004, 0.021185184478759767, 0.02188047981262207, 0.02115190315246582, 0.021336544036865236, 0.021114944458007812, 0.02106572723388672, 0.02136809539794922, 0.021592159271240235, 0.02159881591796875, 0.021263904571533203, 0.0210150089263916, 0.021147680282592774, 0.021376096725463867, 0.021313631057739257, 0.021316608428955077, 0.02214860725402832, 0.021268287658691407, 0.02139187240600586, 0.021180416107177736, 0.021311487197875977, 0.021194944381713866, 0.021243711471557618, 0.021204416275024413, 
0.021113407135009764, 0.02130473518371582, 0.021080160140991212, 0.021375423431396486, 0.021497919082641603, 0.021477344512939454, 0.021302623748779295, 0.0215631046295166, 0.021289440155029298, 0.02198358345031738, 0.02294528007507324, 0.02134310340881348, 0.021241216659545897, 0.02128486442565918, 0.021287391662597657, 0.021213119506835937, 0.021202943801879884, 0.02134364891052246, 0.02120355224609375, 0.022134559631347656, 0.021079263687133788, 0.02146611213684082, 0.021729280471801758, 0.021307392120361326, 0.02122547149658203, 0.021153791427612305, 0.02115692710876465, 0.02112403106689453, 0.02127872085571289, 0.02143846321105957, 0.021238912582397462, 0.02117932891845703, 0.021249984741210936, 0.0212541446685791, 0.021048479080200196, 0.02147769546508789, 0.02115135955810547, 0.02103593635559082, 0.021352447509765626, 0.022439071655273438, 0.021257055282592772, 0.021649152755737304, 0.021547359466552736, 0.0215097599029541, 0.02141744041442871, 0.021077024459838868, 0.021036991119384764, 0.02137654495239258, 0.02134009552001953, 0.021148191452026368, 0.021053440093994142, 0.020995264053344728, 0.021138240814208984, 0.021253952026367186, 0.02129859161376953, 0.021215744018554687, 0.02117238426208496, 0.02136457633972168, 0.021090591430664062, 0.021120351791381838, 0.021191328048706055, 0.021397504806518555, 0.02139952087402344, 0.021348384857177733, 0.021336063385009766, 0.021157888412475585, 0.021211135864257814, 0.021332000732421873, 0.021198816299438476, 0.021151744842529296, 0.020967424392700194, 0.02103091239929199, 0.02128281593322754, 0.021331199645996092, 0.022072063446044923, 0.021421760559082032, 0.021137311935424806, 0.021170591354370116, 0.021110784530639647, 0.02126028823852539, 0.021172224044799806, 0.021338111877441408, 0.021437471389770507, 0.021540895462036132, 0.021273536682128905, 0.021254207611083983, 0.021075904846191405, 0.021280128479003905, 0.021259199142456053, 0.021472543716430665, 0.021387680053710938, 0.021560672760009766, 0.021324447631835938, 0.02127769660949707, 0.02110361671447754, 0.021839872360229492, 0.021310848236083986, 0.02146771240234375, 0.02093881607055664, 0.02141788864135742, 0.021553247451782227, 0.02166783905029297, 0.021310623168945313, 0.021184576034545897, 0.02168822479248047, 0.02104470443725586, 0.021189599990844726, 0.021583871841430666, 0.02163408088684082, 0.022155807495117186, 0.021166528701782227, 0.02120412826538086, 0.021271392822265624, 0.02122137641906738, 0.021011680603027345, 0.021085311889648437, 0.021350048065185548, 0.024834367752075197, 0.021546592712402345, 0.02120841598510742, 0.021218048095703126, 0.021567487716674806, 0.02112512016296387, 0.021510143280029297, 0.021736703872680663, 0.021349119186401366, 0.021362176895141603, 0.02142255973815918, 0.021403743743896485, 0.021088191986083984, 0.0212807674407959, 0.021024480819702148, 0.021094688415527342, 0.021202943801879884, 0.021204736709594725, 0.021047552108764647, 0.021127168655395507, 0.02106572723388672, 0.021073919296264648, 0.02088960075378418, 0.02111692810058594, 0.021034112930297853, 0.02130828857421875, 0.020786687850952147, 0.021209152221679687, 0.0214716796875, 0.02141209602355957, 0.021454559326171876, 0.02116217613220215, 0.02119868850708008, 0.021134815216064452, 0.02127324867248535, 0.02127017593383789, 0.021012704849243165, 0.02123776054382324, 0.021180767059326172, 0.02208118438720703, 0.021354143142700194, 0.021305248260498046, 0.020998592376708983, 0.02143436813354492, 0.021390527725219727, 0.021293888092041014, 0.02107151985168457, 
0.02122172737121582, 0.02129088020324707, 0.02134614372253418, 0.02181340789794922, 0.0211680965423584, 0.0211343994140625, 0.021046207427978515, 0.02126438331604004, 0.021235712051391603, 0.021006336212158205, 0.021342208862304687, 0.02118377685546875, 0.021199199676513673, 0.021342592239379884, 0.02116761589050293, 0.021182464599609374, 0.02093516731262207, 0.021243072509765624, 0.021455007553100584, 0.021393312454223632, 0.021426048278808594, 0.021234560012817382, 0.021153791427612305, 0.02108940887451172, 0.021180479049682618, 0.02119353675842285, 0.020998144149780275, 0.022128639221191407, 0.021429279327392577, 0.021238527297973632, 0.021395103454589844, 0.022294431686401366, 0.021851903915405275, 0.021373023986816408, 0.021379295349121093, 0.021215839385986326, 0.02130067253112793, 0.021154367446899414, 0.02102681541442871, 0.020940576553344727, 0.020832223892211912, 0.02086265563964844, 0.021137344360351563, 0.020875936508178712, 0.021136959075927733, 0.02103113555908203, 0.021098688125610353, 0.020962560653686523, 0.020986623764038086, 0.021190464019775392, 0.021381183624267577, 0.021098047256469726, 0.020918176651000975, 0.02086524772644043, 0.02102112007141113, 0.021308544158935547, 0.021170751571655273, 0.021203487396240235, 0.021114656448364258, 0.02099836730957031, 0.020975391387939454, 0.02105548858642578, 0.020813695907592772, 0.020861055374145506, 0.020968479156494142, 0.021102783203125, 0.021472000122070314, 0.021866592407226562, 0.02105958366394043, 0.02079689598083496, 0.02075641632080078, 0.020970079421997072, 0.021650560379028322, 0.021046207427978515, 0.02075436782836914, 0.020714719772338866, 0.020769567489624025, 0.021236991882324217, 0.02075315284729004, 0.020750335693359375, 0.020612192153930665, 0.020847488403320312, 0.020883487701416015, 0.02086297607421875, 0.02074985694885254, 0.020619743347167967, 0.020775968551635743, 0.02055062484741211, 0.02057401657104492, 0.02053548812866211, 0.020682687759399413, 0.020643903732299806, 0.020594688415527345, 0.020606752395629882, 0.020705440521240234, 0.020650047302246094, 0.022099967956542968, 0.020821216583251954, 0.020620063781738283, 0.02063155174255371, 0.020683839797973634, 0.020650400161743163, 0.020617599487304687, 0.020750495910644533, 0.020766687393188477, 0.020670719146728515, 0.020709152221679687, 0.02067865562438965, 0.020672416687011717, 0.020721151351928712, 0.020613311767578125, 0.02174118423461914, 0.020697887420654298, 0.02087881660461426, 0.02068662452697754, 0.020760448455810546, 0.020687040328979493, 0.020649856567382812, 0.020766944885253907, 0.020908639907836913, 0.021407743453979493, 0.021163328170776367, 0.020883712768554687, 0.020911840438842772, 0.020646400451660156, 0.020817535400390625, 0.021161951065063477, 0.021058111190795897, 0.020738079071044923, 0.02147737693786621, 0.021331968307495116, 0.021101696014404297, 0.02119708824157715, 0.0213090877532959, 0.02144879913330078, 0.022098783493041993, 0.021712896347045898, 0.023810176849365233, 0.02156844711303711, 0.02154537582397461, 0.02191209602355957, 0.02149580764770508, 0.021452096939086913, 0.021663583755493165, 0.021344287872314453, 0.021490495681762697, 0.02209324836730957, 0.02146566390991211, 0.02143436813354492, 0.021311487197875977, 0.021370880126953123, 0.021312543869018555, 0.021388256072998046, 0.021352447509765626, 0.021579776763916016, 0.021448703765869142, 0.02128691291809082, 0.021403776168823243, 0.021410943984985352, 0.022600160598754884, 0.021603872299194336, 0.021618656158447266, 0.021604415893554687, 0.02153750419616699, 
0.021741567611694337, 0.02143027114868164, 0.021327871322631836, 0.021484703063964845, 0.021439327239990234, 0.021381343841552734, 0.02147052764892578, 0.021444608688354492, 0.0214102725982666, 0.02130352020263672, 0.021169952392578125, 0.021264192581176757, 0.021239999771118165, 0.021514240264892577, 0.02162073516845703, 0.02123075294494629, 0.021269344329833986, 0.02109235191345215, 0.021319135665893555, 0.023232351303100585, 0.02972947120666504, 0.021136640548706054, 0.02114761543273926, 0.02138528060913086, 0.020902624130249025, 0.020970592498779295, 0.02103388786315918, 0.02100662422180176, 0.021309440612792968, 0.021105663299560547, 0.02165225601196289, 0.021205087661743165, 0.02127020835876465, 0.021146047592163087, 0.0212807674407959, 0.021237119674682618, 0.021151968002319336, 0.02114761543273926, 0.021168447494506835, 0.020958879470825195, 0.021121503829956055, 0.02102252769470215, 0.021002431869506837, 0.021102304458618163, 0.021176607131958007, 0.02147532844543457, 0.02127462387084961, 0.02127257537841797, 0.021559295654296876, 0.021276416778564452, 0.021177696228027343, 0.021169055938720704, 0.02126233673095703, 0.021067520141601563, 0.020983680725097657, 0.021227615356445313, 0.02118275260925293, 0.021153791427612305, 0.021102592468261717, 0.021391008377075197, 0.0214531192779541, 0.02138319969177246, 0.021313440322875975, 0.022146528244018554, 0.021250688552856445, 0.021180448532104493, 0.021274911880493165, 0.021176223754882813, 0.021135135650634764, 0.021061632156372072, 0.020923839569091798, 0.021052032470703124, 0.021574752807617188, 0.021417823791503907, 0.021141952514648437, 0.021518911361694336, 0.021535999298095704, 0.021588735580444336, 0.021178367614746094, 0.023399456024169922, 0.022023040771484374, 0.021243999481201172, 0.021317472457885744, 0.021016288757324218, 0.021033311843872072, 0.02082195281982422, 0.02060310363769531, 0.0211778564453125, 0.02075276756286621, 0.02058559989929199, 0.020624319076538087]",tokens/s,46.96512818347703,, @@ -4908,7 +4908,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.96 GiB. GPU 0 has a total capacity of 14.74 GiB of which 662.12 MiB is free. Process 144041 has 14.09 GiB memory in use. Of the allocated memory 13.97 GiB is allocated by PyTorch, and 6.66 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.96 GiB. GPU 0 has a total capacity of 14.74 GiB of which 662.12 MiB is free. Process 137416 has 14.09 GiB memory in use. Of the allocated memory 13.97 GiB is allocated by PyTorch, and 6.66 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,809.92256,14639.104,0.0,14243.856384,14221.3376,s,1,7.50015673828125,7.50015673828125,0.0,7.50015673828125,7.50015673828125,7.50015673828125,7.50015673828125,[7.50015673828125],,kWh,1.5394994562500605e-05,1.6904588861759922e-06,7.184450191999921e-06,2.426990364067652e-05,,MB,1110.228992,14735.572992,0.0,14329.839616,14290.688,s,10,14.017546020507812,1.4017546020507814,0.005064467607014813,1.4028267822265625,1.4066118896484376,1.4069143432617188,1.4071563061523438,"[1.3934443359375, 1.3924166259765625, 1.399595947265625, 1.4004007568359376, 1.4030111083984376, 1.4026424560546875, 1.4064407958984375, 1.407216796875, 1.40583251953125, 1.406544677734375]",tokens/s,182.6282572038425,kWh,4.101319376291641e-05,4.523301419646922e-06,2.717318840519999e-05,7.270968358776332e-05,tokens/kWh,3520851.520293007,MB,1138.958336,14750.253056,0.0,14344.51968,14290.69056,s,10,39.382056884765625,3.9382056884765624,0.0028361852012258122,3.9377154541015624,3.9428391357421875,3.942949108886719,3.943037087402344,"[3.93264404296875, 3.937390380859375, 3.936491943359375, 3.9376533203125, 3.937684814453125, 3.93774609375, 3.938677978515625, 3.93789453125, 3.94305908203125, 3.942814697265625]",tokens/s,15.997132954315202,kWh,0.00011518598807333356,1.2704394243449077e-05,7.642061669200003e-05,0.00020431099900878268,tokens/kWh,308353.4430630033,,s,630,39.37825381469723,0.06250516478523374,0.00023357617037945695,0.06250548934936523,0.06278532638549804,0.06288896923065185,0.06312925102233886,"[0.06311766433715821, 0.06226121520996094, 0.06225932693481445, 0.06189859390258789, 0.06193881607055664, 0.06222883224487305, 0.062042686462402345, 0.06216089630126953, 0.06200115203857422, 0.06224281692504883, 0.062414207458496095, 0.06231216049194336, 0.06223750305175781, 0.06258502578735352, 0.06216080093383789, 0.062195838928222655, 0.06219948959350586, 0.06223212814331055, 0.062155391693115236, 0.061967552185058596, 0.06203680038452149, 0.06238412857055664, 0.062453758239746096, 0.06251529693603515, 0.062299297332763674, 0.06239932632446289, 0.06256835174560547, 0.06234688186645508, 0.06235583877563477, 0.0625022087097168, 0.06236435317993164, 0.06235955047607422, 0.06239231872558594, 0.062486526489257815, 0.0626104965209961, 0.062387134552001955, 0.062183425903320315, 0.06274252700805664, 0.06236774444580078, 0.06257020950317382, 0.062295360565185545, 0.06242812728881836, 0.06235100936889648, 0.06262566375732422, 0.06256089782714844, 0.06289523315429688, 0.06241558456420898, 0.062365280151367185, 0.06243158340454102, 0.0625992317199707, 0.06236108779907226, 0.06244403076171875, 0.06293017578125, 0.0627119369506836, 0.06256089782714844, 0.062718017578125, 0.06256991958618165, 
0.06255836868286133, 0.06274492645263671, 0.06263919830322266, 0.062446495056152344, 0.06305791854858399, 0.06260940933227539, 0.06337152099609375, 0.06236617660522461, 0.062189697265625, 0.0622059211730957, 0.06199219131469726, 0.06248444747924805, 0.0626480941772461, 0.0623089599609375, 0.062179550170898434, 0.06225324630737305, 0.06225100708007812, 0.06245321655273438, 0.06236959838867188, 0.062443359375, 0.06221836853027344, 0.06223244857788086, 0.06235635375976562, 0.06242083358764648, 0.06240480041503906, 0.0625781135559082, 0.06203776168823242, 0.06258774566650391, 0.06259241485595703, 0.062396064758300784, 0.06261407852172851, 0.06264172744750976, 0.06238899230957031, 0.06265856170654296, 0.06253936004638672, 0.06258870315551758, 0.062401153564453124, 0.06234316635131836, 0.06234223937988281, 0.06262614440917968, 0.06252953720092773, 0.06270214462280274, 0.06232451248168945, 0.06226908874511719, 0.062341697692871095, 0.06239846420288086, 0.06252105712890625, 0.06245404815673828, 0.062389633178710935, 0.06264281463623046, 0.06261920166015625, 0.0628813133239746, 0.06250588989257813, 0.062437374114990236, 0.0626480941772461, 0.06256371307373047, 0.06261398315429688, 0.06275305557250976, 0.06250102233886719, 0.06256019210815429, 0.062453407287597656, 0.0627305908203125, 0.0628223991394043, 0.06261491012573242, 0.06262643051147461, 0.06271155166625976, 0.06254111862182617, 0.06269382476806641, 0.0625805778503418, 0.06323279953002929, 0.06244480133056641, 0.06195462417602539, 0.06197174453735352, 0.06202057647705078, 0.06210355377197266, 0.062117889404296876, 0.062156097412109375, 0.062370494842529295, 0.06252544021606446, 0.062461952209472656, 0.06241888046264649, 0.06259267044067383, 0.06221993637084961, 0.06219209671020508, 0.06256262588500977, 0.06238361740112305, 0.06256662368774414, 0.06222463989257813, 0.062279678344726565, 0.06249241638183594, 0.0623372802734375, 0.062230270385742185, 0.06227788925170898, 0.06233472061157227, 0.0628364486694336, 0.06253366470336914, 0.062470657348632816, 0.06265001678466797, 0.06252579116821289, 0.06264012908935547, 0.06253948974609375, 0.062384449005126956, 0.06255379104614257, 0.062331199645996094, 0.06238934326171875, 0.062281856536865236, 0.06225382232666016, 0.06267903900146485, 0.0625541114807129, 0.06258895874023437, 0.06283039855957032, 0.06266073608398437, 0.06249065780639648, 0.06242899322509766, 0.06250310516357421, 0.0625450553894043, 0.062402721405029296, 0.06244579315185547, 0.06283929443359375, 0.06248566436767578, 0.06252767944335938, 0.06247283172607422, 0.06247219085693359, 0.06278511810302734, 0.0625316162109375, 0.06254767990112305, 0.06276079940795898, 0.06269209671020508, 0.06261142349243164, 0.06286959838867187, 0.06285472106933594, 0.06267744064331054, 0.06311296081542969, 0.063023681640625, 0.06239980697631836, 0.062185665130615235, 0.062368255615234375, 0.06248646545410156, 0.062292030334472656, 0.06258009719848633, 0.06228649520874024, 0.06237334442138672, 0.06241888046264649, 0.062293952941894534, 0.06242367935180664, 0.06255001449584961, 0.06257664108276367, 0.06236569595336914, 0.062296062469482424, 0.062438560485839845, 0.06245379257202149, 0.06234195327758789, 0.06222438430786133, 0.062273536682128906, 0.06208512115478516, 0.06271721649169922, 0.06257056045532226, 0.06263792037963867, 0.06243411254882812, 0.06236502456665039, 0.06232950210571289, 0.062381790161132815, 0.06241923141479492, 0.062394367218017575, 0.062210079193115234, 0.06262688064575195, 0.06285609436035157, 0.06252953720092773, 0.06239641571044922, 
0.06255001449584961, 0.06234255981445312, 0.06252934265136718, 0.06247504043579102, 0.06254182434082031, 0.06264236831665039, 0.06281609725952149, 0.06253065490722656, 0.06274288177490234, 0.06250960159301758, 0.06239401626586914, 0.06245830535888672, 0.0625516471862793, 0.062439743041992186, 0.06271139144897461, 0.06240431976318359, 0.06270022583007813, 0.0625041618347168, 0.06275913619995117, 0.06271446228027344, 0.06268937683105469, 0.06248819351196289, 0.06251264190673828, 0.06257535934448243, 0.0625334701538086, 0.06250486373901368, 0.06326476669311523, 0.06257171249389648, 0.062003265380859374, 0.06205094528198242, 0.061884449005126956, 0.062217662811279294, 0.062077598571777345, 0.06223052978515625, 0.06232252883911133, 0.06264233779907226, 0.06252463912963867, 0.06259519958496093, 0.062415519714355466, 0.06256006240844726, 0.0626157455444336, 0.06241487884521484, 0.06250508880615234, 0.06264815902709961, 0.06249881744384766, 0.0624824333190918, 0.06230019378662109, 0.0625561294555664, 0.06217113494873047, 0.06223801422119141, 0.062281566619873045, 0.06251708984375, 0.06240972900390625, 0.06280825424194336, 0.0623675537109375, 0.06285702514648438, 0.06253587341308593, 0.06262579345703125, 0.06268713760375977, 0.06254982376098633, 0.06233116912841797, 0.06246809768676758, 0.06227084732055664, 0.0624251823425293, 0.06218191909790039, 0.06240179061889648, 0.062400447845458985, 0.06284735870361328, 0.06265647888183594, 0.062519775390625, 0.06236569595336914, 0.06251043319702149, 0.06238684844970703, 0.06243139266967773, 0.062449504852294925, 0.06248857498168945, 0.06268915176391601, 0.0627256965637207, 0.06258723068237304, 0.06278720092773438, 0.06251100921630859, 0.062462337493896486, 0.06264438247680663, 0.06260559844970703, 0.06244486236572266, 0.06257030487060547, 0.0629502067565918, 0.06282969665527344, 0.0629349136352539, 0.06313398361206055, 0.062306304931640626, 0.061986175537109375, 0.06216870498657227, 0.06230876922607422, 0.062469982147216795, 0.062294784545898436, 0.06219980621337891, 0.06226947021484375, 0.06256022262573242, 0.06264131164550782, 0.06254806518554687, 0.062470943450927734, 0.06226953506469726, 0.06219555282592774, 0.06238332748413086, 0.06248860931396484, 0.06250783920288086, 0.062375358581542965, 0.062324703216552736, 0.06210390472412109, 0.06235891342163086, 0.062196575164794925, 0.0625398063659668, 0.06229913711547851, 0.06261417770385742, 0.0629865608215332, 0.0625316162109375, 0.06247011184692383, 0.062484127044677734, 0.0624268798828125, 0.0625011215209961, 0.062443294525146485, 0.06257846450805664, 0.06254230499267578, 0.062403999328613284, 0.062364574432373046, 0.06262287902832031, 0.06239718246459961, 0.06246342468261719, 0.06266947174072265, 0.06256972885131835, 0.0625456657409668, 0.06253676986694336, 0.06256768035888671, 0.06252819061279297, 0.06265964889526367, 0.062491134643554686, 0.06285734558105469, 0.06281849670410156, 0.06255801773071289, 0.062488895416259765, 0.06267084884643555, 0.06258393478393555, 0.06242598342895508, 0.06245785522460937, 0.06271385574340821, 0.06294937515258789, 0.06274867248535156, 0.06264435195922852, 0.06258838272094727, 0.06258131027221679, 0.06251897430419921, 0.06338969421386718, 0.06230764770507812, 0.06221667098999024, 0.06216847991943359, 0.0619958381652832, 0.06206399917602539, 0.06206531143188477, 0.0622562255859375, 0.06222476959228516, 0.06220982360839844, 0.06274329757690429, 0.06271392059326172, 0.0625266227722168, 0.06247663879394531, 0.06232310485839844, 0.062362945556640625, 0.06251187133789063, 
0.06235504150390625, 0.062306655883789065, 0.06257664108276367, 0.06219776153564453, 0.062321727752685546, 0.062446529388427735, 0.062281726837158206, 0.06234883117675781, 0.0623985595703125, 0.06262211227416992, 0.06258256149291992, 0.06240604782104492, 0.06253647994995117, 0.06243081665039062, 0.06266694259643554, 0.06258505630493164, 0.06255820846557616, 0.06245158386230469, 0.06241225433349609, 0.06237820816040039, 0.0623724479675293, 0.062370849609375, 0.06228255844116211, 0.062434879302978516, 0.06264214324951171, 0.06268473434448242, 0.06282332611083985, 0.06271356964111328, 0.06252691268920899, 0.06268012619018555, 0.06284265518188477, 0.06255372619628906, 0.06265894317626954, 0.062475936889648434, 0.06269987106323242, 0.06254169464111328, 0.06267712020874024, 0.06287155151367188, 0.06269337463378906, 0.06264124679565429, 0.06267919921875, 0.0626879997253418, 0.0628809928894043, 0.06277920150756836, 0.06287263870239258, 0.06273993682861329, 0.06307872009277343, 0.06253891372680664, 0.062354270935058596, 0.06221014404296875, 0.062033790588378906, 0.062272670745849606, 0.06225913619995117, 0.06230518341064453, 0.06236972808837891, 0.06242867279052734, 0.06242707061767578, 0.06253760147094727, 0.062314369201660155, 0.06243779373168945, 0.06252297592163086, 0.06257548904418946, 0.06270326232910156, 0.06265420913696289, 0.06231228637695312, 0.06224972915649414, 0.06216022491455078, 0.06218112182617187, 0.062192543029785156, 0.06210697555541992, 0.062169761657714845, 0.06227475357055664, 0.06244160079956055, 0.0626572151184082, 0.06275459289550782, 0.06264585494995117, 0.06250969696044922, 0.06244966506958008, 0.06251830291748046, 0.0626115837097168, 0.06230223846435547, 0.06219232177734375, 0.062425247192382814, 0.0626003189086914, 0.06239113616943359, 0.062461952209472656, 0.0623135986328125, 0.06242755126953125, 0.06236620712280273, 0.06269705581665039, 0.06277772903442383, 0.06264371109008789, 0.06244809722900391, 0.06266681671142578, 0.06259913635253907, 0.06306204986572266, 0.06258070373535156, 0.06246793746948242, 0.06249283218383789, 0.06253148651123047, 0.06256204986572265, 0.0623985595703125, 0.062445217132568356, 0.06278358459472656, 0.06269321441650391, 0.0630951042175293, 0.06310960006713867, 0.06295849609375, 0.06276559829711914, 0.06363750457763671, 0.062493854522705075, 0.062117855072021486, 0.06217001724243164, 0.062152671813964847, 0.0625541114807129, 0.062363296508789065, 0.06243977737426758, 0.062228511810302735, 0.06235340881347656, 0.06246192169189453, 0.06279894256591798, 0.06251359939575195, 0.062454238891601566, 0.06237913513183594, 0.06274665451049805, 0.06229414367675781, 0.062333343505859375, 0.062303550720214845, 0.062235649108886716, 0.0620637435913086, 0.06254681777954102, 0.06274867248535156, 0.06261920166015625, 0.06242758560180664, 0.06263603210449219, 0.06255363082885743, 0.06261193466186524, 0.062441375732421874, 0.06254409790039063, 0.06260015869140625, 0.0626902084350586, 0.06256991958618165, 0.06255465698242188, 0.06277059173583985, 0.06254451370239258, 0.062304256439208984, 0.06237712097167969, 0.06239068984985351, 0.06246591949462891, 0.06242569732666016, 0.06299440002441406, 0.06295347213745117, 0.06295670318603516, 0.0626328010559082, 0.06287974548339843, 0.06260534286499024, 0.06251830291748046, 0.06257145690917969, 0.06277436828613281, 0.0627680320739746, 0.06278144073486328, 0.06265174484252929, 0.06286403274536133, 0.06257846450805664, 0.06270793533325195, 0.06261964797973633, 0.0626729278564453, 0.0627199363708496, 0.06285654449462891, 
0.06297833633422852, 0.06302560043334961, 0.06266876983642578, 0.06329708862304688, 0.062414302825927734, 0.062134815216064454, 0.062142688751220705, 0.06235161590576172, 0.062339038848876954, 0.06250451278686524, 0.062354942321777344, 0.062443649291992184, 0.06258319854736329, 0.0622022705078125, 0.06255408096313476, 0.0625992317199707, 0.06256995010375976, 0.062335487365722655, 0.06255615997314454, 0.06250499343872071, 0.0625458869934082, 0.0623185920715332, 0.062217601776123045, 0.062173534393310546, 0.06231606292724609, 0.06236630249023437, 0.062457408905029294, 0.062487136840820315, 0.06252544021606446, 0.06254796981811524, 0.06251472091674805, 0.06262179183959961, 0.06260089492797852, 0.06244217681884766, 0.06255136108398437, 0.0625835189819336, 0.06261270523071288, 0.0625334701538086, 0.06253456115722657, 0.062408737182617184, 0.06248239898681641, 0.0625576629638672, 0.06263407897949219, 0.06251359939575195, 0.06258073425292969, 0.06271088027954101, 0.06274863815307617, 0.06261446380615235, 0.06283059310913086, 0.06292214584350586, 0.06275337600708007, 0.06265804672241211, 0.06294681549072266, 0.06256313705444336, 0.06280774307250976, 0.06289596939086914, 0.06260147094726562, 0.06283283233642578, 0.06293116760253906, 0.06290537643432617, 0.06275993728637695, 0.06263804626464843, 0.06274665451049805, 0.06286844635009765, 0.06285209655761718, 0.06282649612426758]",tokens/s,15.998677924231961,, @@ -4955,7 +4955,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 130585 has 14.73 GiB memory in use. Of the allocated memory 14.61 GiB is allocated by PyTorch, and 3.97 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 124109 has 14.73 GiB memory in use. Of the allocated memory 14.61 GiB is allocated by PyTorch, and 3.97 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,814.669824,569.311232,0.0,174.063616,172.57984,s,1,7.17195458984375,7.17195458984375,0.0,7.17195458984375,7.17195458984375,7.17195458984375,7.17195458984375,[7.17195458984375],,kWh,4.49210194999902e-06,4.882781367663077e-07,1.981946030002457e-06,6.962326116767785e-06,,MB,1108.475904,640.6144,0.0,234.881024,215.589888,s,25,0.27508755302429205,0.011003502120971677,0.00015233908185133062,0.010978240013122559,0.011162656211853028,0.011172537803649903,0.011461617774963379,"[0.01155247974395752, 0.010974176406860351, 0.011006848335266113, 0.010918208122253418, 0.011093888282775879, 0.011083488464355468, 0.010961407661437989, 0.010906815528869629, 0.010854496002197265, 0.010844511985778809, 0.010817952156066894, 0.010858304023742675, 0.011097536087036133, 0.011025247573852539, 0.011046751976013184, 0.01100153636932373, 0.011167136192321778, 0.011155936241149902, 0.011173888206481934, 0.010980640411376953, 0.010856415748596191, 0.010905407905578613, 0.010951775550842285, 0.010978240013122559, 0.010874464035034179]",tokens/s,23265.320184933415,kWh,3.5411350660398743e-07,3.90525435950372e-08,2.336999577257331e-07,6.268660079247578e-07,tokens/kWh,408380733.3045366,MB,1135.460352,642.711552,0.0,236.978176,215.592448,s,25,9.833397613525392,0.39333590454101564,0.02841497875139694,0.38773126220703125,0.3964486999511719,0.3983803649902344,0.498144299316406,"[0.3969692077636719, 0.5295372924804688, 0.39566793823242186, 0.3912528991699219, 0.39300204467773436, 0.388822998046875, 0.38113458251953125, 0.37845510864257814, 0.3840166931152344, 0.37589962768554686, 0.3793076171875, 0.39152328491210936, 0.3906401062011719, 0.3927539978027344, 0.39408251953125, 0.392140869140625, 0.398733154296875, 0.38739495849609373, 0.3850337829589844, 0.38190274047851563, 0.38393341064453124, 0.38520547485351564, 0.38467071533203123, 0.3835853271484375, 0.38773126220703125]",tokens/s,160.16844450931782,kWh,1.0956496214870327e-05,1.2083142877942698e-06,4.543194889428047e-06,1.670800539209264e-05,tokens/kWh,3770647.574115332,,s,1575,9.820896668434136,0.006235489948212153,0.0033837550131982762,0.0061016960144042965,0.006408895969390869,0.006480585527420044,0.006702362804412841,"[0.006441760063171387, 0.006477503776550293, 0.007278592109680176, 0.006242335796356201, 0.006197023868560791, 0.006123712062835693, 0.00619536018371582, 0.0062871999740600586, 0.0062791681289672855, 0.006250271797180176, 0.006262400150299072, 0.00640777587890625, 0.006451871871948242, 0.006435488224029541, 0.0064787201881408695, 0.006379903793334961, 0.006322336196899414, 0.006346879959106445, 0.006325600147247315, 0.006332896232604981, 0.006299583911895752, 0.0063175358772277835, 
0.006257599830627442, 0.006154208183288574, 0.0061010241508483885, 0.006008416175842285, 0.00629750394821167, 0.006022655963897705, 0.006118144035339355, 0.006304160118103027, 0.006664127826690674, 0.006571743965148926, 0.006684864044189453, 0.00659660816192627, 0.006536287784576416, 0.006404416084289551, 0.006496863842010498, 0.006506591796875, 0.006361055850982666, 0.006279104232788086, 0.006217728137969971, 0.006187007904052734, 0.00610537576675415, 0.006141664028167725, 0.006139488220214844, 0.0062119998931884765, 0.006354015827178955, 0.006132031917572021, 0.006070847988128662, 0.006041632175445556, 0.006061952114105224, 0.006266335964202881, 0.0062523198127746584, 0.006249343872070313, 0.006047743797302246, 0.006020480155944824, 0.006121088027954102, 0.006068895816802979, 0.006124063968658448, 0.0061190400123596195, 0.006299839973449707, 0.006440864086151123, 0.006282815933227539, 0.006154208183288574, 0.006459455966949463, 0.006365056037902832, 0.006270336151123047, 0.006281504154205322, 0.0063179202079772945, 0.006151008129119873, 0.006214687824249268, 0.006289663791656494, 0.00641868782043457, 0.00624396800994873, 0.006178719997406006, 0.006220736026763916, 0.006245855808258057, 0.14023097229003906, 0.006438432216644287, 0.006478464126586914, 0.006367008209228516, 0.006177023887634277, 0.006741055965423584, 0.006666848182678223, 0.006194975852966309, 0.006236991882324219, 0.006126751899719238, 0.006145696163177491, 0.006063039779663086, 0.0061320638656616215, 0.006153183937072754, 0.0061016960144042965, 0.006318079948425293, 0.0062912960052490235, 0.006263199806213379, 0.006190336227416992, 0.006142079830169678, 0.006067615985870361, 0.0063088321685791015, 0.006425695896148682, 0.006314911842346191, 0.006272895812988281, 0.006056159973144531, 0.00615558385848999, 0.006147903919219971, 0.0063373122215271, 0.0063508481979370115, 0.006336512088775635, 0.006254591941833496, 0.00695091199874878, 0.006221824169158936, 0.006158239841461181, 0.006170720100402832, 0.006177087783813476, 0.006147520065307617, 0.006162687778472901, 0.006333727836608887, 0.006486815929412842, 0.006649504184722901, 0.006252831935882568, 0.006184415817260742, 0.006132256031036377, 0.0061831679344177244, 0.0062082881927490235, 0.006208127975463868, 0.006134111881256104, 0.005865471839904785, 0.006037856101989746, 0.006028960227966309, 0.006107135772705078, 0.006444416046142578, 0.00649894380569458, 0.006588704109191894, 0.006657760143280029, 0.006494016170501709, 0.006662591934204102, 0.006512447834014893, 0.006442016124725342, 0.006470560073852539, 0.0063610877990722655, 0.006357279777526856, 0.006414048194885254, 0.006186272144317627, 0.006176896095275879, 0.006113887786865235, 0.006187104225158692, 0.00617193603515625, 0.006304192066192627, 0.0063203201293945315, 0.006230016231536865, 0.006182015895843506, 0.006160352230072021, 0.006350048065185547, 0.0063259520530700684, 0.006202720165252686, 0.006091423988342285, 0.006133312225341797, 0.0062689919471740725, 0.006090464115142823, 0.006234784126281739, 0.006102240085601807, 0.006074975967407226, 0.006121664047241211, 0.006028992176055908, 0.006142591953277588, 0.0065491838455200195, 0.006555808067321777, 0.006338240146636963, 0.006260863780975342, 0.006132832050323486, 0.006200287818908691, 0.006116479873657227, 0.006169439792633057, 0.006230016231536865, 0.006195199966430664, 0.0062259202003479, 0.006518784046173095, 0.0065413122177124024, 0.0064430079460144046, 0.006467584133148193, 0.006606847763061524, 0.006338560104370118, 0.006399328231811523, 
0.00623308801651001, 0.006174143791198731, 0.006082784175872802, 0.006190815925598145, 0.0060104641914367676, 0.0060824317932128905, 0.0061421761512756344, 0.00657366418838501, 0.0065446081161499026, 0.006555808067321777, 0.006583104133605957, 0.006516064167022705, 0.006380127906799316, 0.0063571839332580565, 0.006253856182098389, 0.0062368960380554195, 0.006162240028381348, 0.0061420159339904785, 0.006054944038391114, 0.00614246416091919, 0.006173151969909668, 0.006033408164978027, 0.00603110408782959, 0.0060646400451660155, 0.0060698561668396, 0.006082719802856446, 0.006064127922058105, 0.006088223934173584, 0.006042079925537109, 0.006090752124786377, 0.006047743797302246, 0.006150144100189209, 0.006136127948760986, 0.0060638079643249515, 0.006078271865844726, 0.006041088104248047, 0.006166207790374756, 0.006115615844726563, 0.0062984638214111325, 0.006290847778320312, 0.0061831998825073245, 0.006312287807464599, 0.006327744007110596, 0.006338592052459717, 0.00621401596069336, 0.006223872184753418, 0.006199295997619629, 0.006153952121734619, 0.006160448074340821, 0.006117216110229492, 0.006191167831420898, 0.006327807903289795, 0.006329152107238769, 0.006367392063140869, 0.006283103942871094, 0.006315167903900147, 0.006409056186676025, 0.006300831794738769, 0.0062206401824951175, 0.006278848171234131, 0.006219295978546142, 0.006136608123779297, 0.006045087814331055, 0.006036064147949219, 0.006017024040222168, 0.006071424007415772, 0.006059135913848877, 0.006045440196990967, 0.006107135772705078, 0.006348832130432129, 0.006434815883636475, 0.006508607864379883, 0.006668320178985596, 0.0067721281051635746, 0.006377151966094971, 0.0064234561920166015, 0.0063610877990722655, 0.006407392024993896, 0.006359295845031738, 0.006272575855255127, 0.006247712135314941, 0.006297279834747314, 0.006217728137969971, 0.006209856033325196, 0.006119455814361572, 0.006096127986907959, 0.006143487930297851, 0.006663167953491211, 0.006080416202545166, 0.00609830379486084, 0.006070015907287597, 0.006091231822967529, 0.006020991802215576, 0.006253087997436523, 0.006494400024414063, 0.006410048007965088, 0.006289247989654541, 0.006141503810882568, 0.006091360092163086, 0.006208896160125732, 0.006910528182983398, 0.006057888031005859, 0.006135871887207031, 0.00620358419418335, 0.006245855808258057, 0.00627945613861084, 0.006191264152526856, 0.006190847873687744, 0.006051519870758057, 0.00606166410446167, 0.005978752136230469, 0.006058656215667725, 0.006114816188812256, 0.006309120178222656, 0.006243264198303223, 0.00638972806930542, 0.006241663932800293, 0.006118048191070557, 0.006101088047027588, 0.0061131839752197265, 0.006191103935241699, 0.006060031890869141, 0.0061168642044067386, 0.006089119911193847, 0.006089824199676514, 0.006071296215057373, 0.006104415893554688, 0.006050464153289795, 0.006069952011108398, 0.006017343997955322, 0.006122591972351074, 0.00630617618560791, 0.006260799884796143, 0.0063446397781372075, 0.006395904064178467, 0.00641974401473999, 0.006273727893829345, 0.006219711780548096, 0.006221920013427734, 0.006196320056915283, 0.0062262721061706544, 0.00628495979309082, 0.006218656063079834, 0.0061131839752197265, 0.006133855819702149, 0.006064127922058105, 0.006159391880035401, 0.006024159908294678, 0.006062079906463623, 0.006039135932922364, 0.00635097599029541, 0.006383039951324463, 0.006588287830352783, 0.00664467191696167, 0.006448959827423096, 0.006289792060852051, 0.006215136051177978, 0.006246880054473877, 0.006176544189453125, 0.006204576015472412, 0.006123551845550537, 
0.00604256010055542, 0.006075520038604737, 0.006012063980102539, 0.006055232048034668, 0.0060133438110351566, 0.006082464218139649, 0.006002560138702393, 0.006215968132019043, 0.006258624076843262, 0.006158336162567139, 0.0060860800743103025, 0.006007359981536865, 0.00601907205581665, 0.006003903865814209, 0.0060076799392700195, 0.005959616184234619, 0.006174111843109131, 0.005993055820465088, 0.0060416641235351565, 0.005990623950958252, 0.006008416175842285, 0.005994624137878418, 0.006017183780670166, 0.005955584049224853, 0.006011839866638184, 0.00609168004989624, 0.006328256130218506, 0.006506559848785401, 0.006327807903289795, 0.006261248111724854, 0.006199391841888427, 0.006086688041687011, 0.006033279895782471, 0.006017024040222168, 0.0057554559707641605, 0.005979135990142822, 0.006034463882446289, 0.0059576001167297365, 0.005983967781066895, 0.00596940803527832, 0.005995296001434326, 0.005969823837280274, 0.006023519992828369, 0.0059818878173828124, 0.006021183967590332, 0.005967264175415039, 0.005986911773681641, 0.005952576160430908, 0.006038688182830811, 0.005959455966949463, 0.006041376113891601, 0.006084832191467285, 0.007065279960632324, 0.006388031959533692, 0.006017024040222168, 0.006012928009033203, 0.00597811222076416, 0.006039552211761475, 0.0059935998916625976, 0.006026112079620362, 0.006000639915466309, 0.006014944076538086, 0.005980224132537842, 0.0059732160568237306, 0.005970751762390137, 0.0059697279930114745, 0.006072256088256836, 0.006136000156402588, 0.006228991985321045, 0.005983168125152588, 0.005959743976593017, 0.0061354880332946775, 0.005955327987670898, 0.006042175769805909, 0.005967455863952637, 0.0060208959579467775, 0.005972608089447022, 0.006039231777191162, 0.005978271961212158, 0.006039711952209472, 0.006003712177276611, 0.0061224961280822755, 0.005964191913604736, 0.006037087917327881, 0.0059592962265014645, 0.0060207037925720215, 0.006015103816986084, 0.006032032012939453, 0.006418496131896973, 0.006033567905426025, 0.005982175827026367, 0.0060347518920898435, 0.006017536163330078, 0.0059593281745910645, 0.005968224048614502, 0.006455264091491699, 0.0059987521171569825, 0.005822688102722168, 0.005946527957916259, 0.005976704120635987, 0.006002463817596435, 0.005988416194915772, 0.005937376022338867, 0.006008287906646728, 0.005966303825378418, 0.006005023956298828, 0.00594217586517334, 0.005980991840362549, 0.005929152011871338, 0.005975872039794922, 0.005917695999145508, 0.005995488166809082, 0.005975903987884522, 0.006008831977844238, 0.006013472080230713, 0.006019968032836914, 0.005978911876678467, 0.00601907205581665, 0.005935200214385986, 0.005986112117767334, 0.005942495822906494, 0.006029727935791015, 0.005998112201690674, 0.006013023853302002, 0.006009056091308594, 0.006053567886352539, 0.006023359775543213, 0.006052127838134766, 0.005986783981323242, 0.006036863803863525, 0.00612175989151001, 0.006025568008422851, 0.006076416015625, 0.00601907205581665, 0.0059411201477050785, 0.006060192108154297, 0.0059411201477050785, 0.0059905281066894535, 0.005918687820434571, 0.0060104641914367676, 0.0059415998458862305, 0.005976352214813232, 0.0059489598274230955, 0.006432576179504395, 0.0059584641456604005, 0.006020927906036377, 0.005933023929595947, 0.006121088027954102, 0.005955743789672852, 0.006004511833190918, 0.005968160152435302, 0.006020415782928467, 0.006044415950775146, 0.006006912231445312, 0.006170400142669678, 0.006030496120452881, 0.005959968090057373, 0.005988959789276123, 0.005946400165557861, 0.005996607780456543, 0.005866015911102295, 
0.006158720016479493, 0.005962912082672119, 0.007720128059387207, 0.008408767700195312, 0.00780617618560791, 0.007717919826507568, 0.0070100479125976565, 0.005971519947052002, 0.005986656188964844, 0.005963840007781982, 0.005984255790710449, 0.005922560214996338, 0.005966080188751221, 0.005912576198577881, 0.005987552165985108, 0.00588265609741211, 0.006002463817596435, 0.0059699521064758305, 0.005950784206390381, 0.005971136093139648, 0.005990079879760742, 0.005945087909698487, 0.005932384014129639, 0.005895071983337402, 0.005936927795410156, 0.005925087928771972, 0.0060026879310607914, 0.005938943862915039, 0.005990655899047852, 0.005951583862304688, 0.005928864002227783, 0.0059435200691223145, 0.005924479961395264, 0.0059671678543090825, 0.005953824043273926, 0.0059500160217285155, 0.0059269118309021, 0.005910528182983398, 0.005937151908874512, 0.005947391986846923, 0.005967872142791748, 0.005944736003875732, 0.005964384078979492, 0.005908480167388916, 0.0058951997756958, 0.0059647679328918455, 0.005904160022735596, 0.005949247837066651, 0.005894495964050293, 0.00592083215713501, 0.0059678077697753905, 0.005975488185882568, 0.005884543895721435, 0.00593452787399292, 0.005900864124298096, 0.0059658241271972655, 0.005898528099060059, 0.005941055774688721, 0.005913760185241699, 0.0059275197982788085, 0.005965983867645264, 0.00594870376586914, 0.005664127826690674, 0.005906688213348389, 0.005962111949920654, 0.005951231956481934, 0.0059558401107788084, 0.005912223815917969, 0.0059415998458862305, 0.005984255790710449, 0.006062079906463623, 0.0059205121994018554, 0.005947648048400879, 0.005887519836425781, 0.006045728206634521, 0.005915008068084717, 0.005945280075073243, 0.005939616203308106, 0.005927807807922363, 0.005907360076904297, 0.005953536033630371, 0.005914559841156006, 0.005943168163299561, 0.005936384201049805, 0.005938144207000733, 0.0061801280975341795, 0.005986944198608398, 0.005943168163299561, 0.005965727806091309, 0.005927231788635254, 0.005988255977630615, 0.005937280178070069, 0.00595136022567749, 0.005937119960784912, 0.005950975894927979, 0.0059227199554443355, 0.0059584641456604005, 0.005932864189147949, 0.005969088077545166, 0.006034175872802735, 0.0059515519142150876, 0.005980160236358643, 0.0059550080299377445, 0.005952064037322998, 0.005922815799713135, 0.005988351821899414, 0.005933055877685547, 0.005988639831542969, 0.005928063869476318, 0.00599510383605957, 0.005927167892456055, 0.005991968154907226, 0.005951712131500244, 0.0062979841232299804, 0.005985983848571777, 0.005965760231018066, 0.006059455871582031, 0.005966400146484375, 0.0059269118309021, 0.005943552017211914, 0.0059246401786804195, 0.005973983764648438, 0.0059617919921875, 0.005983295917510986, 0.005919616222381591, 0.005689343929290771, 0.005947455883026123, 0.005965760231018066, 0.005934144020080566, 0.005941247940063477, 0.0059211840629577635, 0.006122079849243164, 0.007403456211090088, 0.006957056045532227, 0.0062156801223754886, 0.0059688959121704105, 0.005952511787414551, 0.005935232162475586, 0.005983520030975342, 0.005941855907440186, 0.005958752155303955, 0.005910431861877442, 0.005935679912567138, 0.00593887996673584, 0.005985023975372314, 0.005918879985809326, 0.005928639888763428, 0.005926559925079346, 0.006015679836273193, 0.006008607864379883, 0.005953567981719971, 0.005951039791107178, 0.005949535846710205, 0.0060635838508605955, 0.005949600219726563, 0.00594374418258667, 0.00595747184753418, 0.005957183837890625, 0.00599948787689209, 0.005953536033630371, 0.005937151908874512, 
0.005975135803222656, 0.005939551830291748, 0.005976672172546387, 0.005942431926727295, 0.005974080085754395, 0.005902112007141113, 0.005958623886108398, 0.005910528182983398, 0.006109087944030762, 0.0060498881340026855, 0.00601043176651001, 0.005929408073425293, 0.005969503879547119, 0.005916063785552979, 0.005978879928588868, 0.005904928207397461, 0.005973728179931641, 0.0059324798583984375, 0.00602784013748169, 0.005980160236358643, 0.006082560062408447, 0.006088160037994385, 0.006142240047454834, 0.006040095806121826, 0.006086368083953858, 0.005998432159423828, 0.006023327827453614, 0.005827936172485351, 0.006211872100830078, 0.006291872024536133, 0.006391583919525146, 0.0063482561111450194, 0.006269248008728028, 0.006226431846618652, 0.006270976066589356, 0.006367072105407715, 0.006162496089935303, 0.006154304027557373, 0.00603872013092041, 0.006025728225708008, 0.005963967800140381, 0.006080671787261963, 0.006012224197387696, 0.006118080139160156, 0.006209536075592041, 0.00617087984085083, 0.006147071838378906, 0.006057888031005859, 0.006024032115936279, 0.0060026879310607914, 0.006038688182830811, 0.006154719829559326, 0.006179200172424316, 0.00610748815536499, 0.006158304214477539, 0.006168288230895996, 0.006375391960144043, 0.006330687999725342, 0.006268608093261719, 0.006367231845855713, 0.00638105583190918, 0.006407904148101807, 0.006279935836791992, 0.0063266558647155765, 0.006227583885192871, 0.0061645121574401855, 0.006158527851104737, 0.006137663841247558, 0.006098944187164307, 0.0060661759376525876, 0.006227456092834473, 0.006087007999420166, 0.0062846078872680665, 0.00647049617767334, 0.006552927970886231, 0.006453120231628418, 0.006269696235656738, 0.006259871959686279, 0.006187136173248291, 0.00618943977355957, 0.006101376056671143, 0.006158336162567139, 0.006090752124786377, 0.006023263931274414, 0.006039167881011963, 0.006266304016113281, 0.006155104160308838, 0.006387455940246582, 0.006463935852050781, 0.006512032032012939, 0.005967423915863037, 0.006191008090972901, 0.006179327964782715, 0.006275360107421875, 0.006350592136383056, 0.006295551776885986, 0.006327616214752197, 0.006430399894714355, 0.006392288208007813, 0.006359392166137695, 0.006389472007751465, 0.006377855777740479, 0.006456895828247071, 0.006438752174377441, 0.006408736228942871, 0.0064139838218688966, 0.00637337589263916, 0.006441472053527832, 0.0063192639350891115, 0.006246975898742676, 0.006361375808715821, 0.006244095802307129, 0.006202688217163086, 0.006146719932556152, 0.006285600185394287, 0.006205440044403076, 0.006162367820739746, 0.006121535778045654, 0.006080512046813965, 0.006096896171569824, 0.006042943954467773, 0.006105279922485352, 0.006131648063659668, 0.006113247871398926, 0.006105984210968017, 0.006102303981781006, 0.006078911781311035, 0.006082464218139649, 0.005999743938446045, 0.006074592113494873, 0.006146111965179443, 0.006275775909423828, 0.006165887832641602, 0.006128255844116211, 0.006019040107727051, 0.006021152019500733, 0.006053887844085694, 0.006060128211975098, 0.006184864044189453, 0.006217728137969971, 0.006178815841674804, 0.006133247852325439, 0.006124032020568848, 0.0061214399337768555, 0.006066048145294189, 0.006052000045776367, 0.006092512130737305, 0.005996672153472901, 0.0061166400909423825, 0.006114528179168701, 0.006098176002502441, 0.006242112159729004, 0.006140768051147461, 0.00602726411819458, 0.006256671905517578, 0.006212992191314697, 0.006238719940185547, 0.006159743785858154, 0.006120255947113037, 0.0061420159339904785, 0.00606601619720459, 
0.006109183788299561, 0.006094848155975342, 0.006072319984436035, 0.0060499200820922855, 0.0061626238822937015, 0.006196928024291992, 0.006330592155456543, 0.0063528637886047365, 0.006278560161590576, 0.006265247821807861, 0.006199295997619629, 0.006172671794891358, 0.006117087841033935, 0.00626470422744751, 0.006071936130523681, 0.006139808177947998, 0.006394112110137939, 0.006197855949401855, 0.006326047897338867, 0.0064924159049987796, 0.006612991809844971, 0.006840415954589844, 0.00640934419631958, 0.006416672229766845, 0.006408703804016113, 0.006343999862670899, 0.006365888118743897, 0.006369279861450195, 0.00623638391494751, 0.0060778241157531734, 0.00602563190460205, 0.005991680145263672, 0.006054656028747558, 0.006199295997619629, 0.00638156795501709, 0.006262784004211426, 0.006258975982666015, 0.006280928134918213, 0.006245888233184814, 0.006208000183105469, 0.006141024112701416, 0.0060834879875183104, 0.006049791812896729, 0.006100992202758789, 0.00603545618057251, 0.006067615985870361, 0.006199903964996338, 0.0064245758056640625, 0.00630998420715332, 0.006158143997192383, 0.006049312114715576, 0.00611084794998169, 0.006308703899383545, 0.00630406379699707, 0.006362912178039551, 0.005897439956665039, 0.00616534423828125, 0.006092735767364502, 0.00611737585067749, 0.006205440044403076, 0.006292736053466797, 0.00623308801651001, 0.006235263824462891, 0.006324160099029541, 0.006265247821807861, 0.006299935817718506, 0.0060677118301391605, 0.006086880207061767, 0.006038976192474365, 0.006107872009277344, 0.006119552135467529, 0.006217567920684814, 0.006217887878417969, 0.006168575763702393, 0.006154240131378174, 0.006098944187164307, 0.006134975910186768, 0.00616099214553833, 0.006314208030700683, 0.006379231929779052, 0.006312096118927002, 0.00620966386795044, 0.006117280006408691, 0.006115327835083008, 0.00610211181640625, 0.006093823909759521, 0.0063318080902099606, 0.006295904159545899, 0.006119103908538819, 0.006462368011474609, 0.006688767910003662, 0.006082208156585694, 0.0062847681045532225, 0.006187263965606689, 0.006215968132019043, 0.006256383895874023, 0.006211679935455322, 0.006125823974609375, 0.006379392147064209, 0.006526783943176269, 0.006590784072875977, 0.006518688201904297, 0.006479135990142822, 0.006424511909484863, 0.006437695980072021, 0.006342016220092774, 0.0063630399703979495, 0.0064617919921875, 0.006400352001190186, 0.006414048194885254, 0.006240543842315674, 0.006280735969543457, 0.006132192134857177, 0.006188159942626953, 0.006201759815216064, 0.006191264152526856, 0.006251071929931641, 0.0061478400230407714, 0.00613478422164917, 0.00637440013885498, 0.006485568046569824, 0.006318111896514892, 0.006315487861633301, 0.0061874880790710445, 0.006168384075164795, 0.006094624042510986, 0.006034624099731445, 0.006061535835266113, 0.006031583786010742, 0.006145023822784424, 0.0062863359451293946, 0.006276768207550049, 0.0061586880683898925, 0.00636627197265625, 0.006429632186889648, 0.0064204797744750975, 0.006291679859161377, 0.006173791885375976, 0.006222527980804443, 0.006141952037811279, 0.006155488014221192, 0.006160639762878418, 0.006238080024719238, 0.006093183994293213, 0.006111519813537598, 0.006350783824920654, 0.0063303041458129886, 0.006332543849945069, 0.0062518720626831056, 0.006242976188659668, 0.0062791681289672855, 0.0061851201057434085, 0.006068064212799072, 0.006004735946655273, 0.006038911819458008, 0.006015615940093994, 0.006028831958770752, 0.006009088039398193, 0.00608892822265625, 0.006134047985076905, 0.006310688018798828, 
0.0063639039993286135, 0.0063526082038879396, 0.006220384120941162, 0.006207071781158448, 0.006164768218994141, 0.006038943767547608, 0.006070528030395508, 0.006048416137695313, 0.0060514240264892575, 0.006055871963500977, 0.006183072090148926, 0.006347104072570801, 0.006454944133758545, 0.006505951881408691, 0.006426464080810547, 0.006367487907409668, 0.00624889612197876, 0.006256608009338379, 0.006230048179626465, 0.006424799919128418, 0.006057983875274659, 0.006498559951782227, 0.006460447788238525, 0.006382304191589356, 0.0064471039772033695, 0.006289408206939697, 0.006297344207763672, 0.006307871818542481, 0.0063879361152648926, 0.0064143362045288085, 0.0062873601913452145, 0.006250688076019287, 0.006131455898284912, 0.006221888065338135, 0.006342175960540771, 0.0063820481300354, 0.006338304042816162, 0.00623638391494751, 0.006258143901824951, 0.006285888195037842, 0.006350336074829102, 0.0065090560913085935, 0.006334464073181153, 0.006316031932830811, 0.00620688009262085, 0.006375487804412842, 0.006523104190826416, 0.006490143775939941, 0.006472224235534668, 0.006325215816497803, 0.006255392074584961, 0.006174975872039795, 0.006133503913879394, 0.006199295997619629, 0.006174719810485839, 0.006164480209350586, 0.006131711959838867, 0.006171807765960693, 0.006201663970947266, 0.0063508481979370115, 0.006567935943603515, 0.006392384052276611, 0.006473760128021241, 0.006891456127166748, 0.006373663902282715, 0.006431903839111328, 0.00642310380935669, 0.006405151844024658, 0.006299967765808105, 0.0061938238143920896, 0.00626204776763916, 0.006171360015869141, 0.006129407882690429, 0.006087135791778564, 0.006098720073699951, 0.0060661759376525876, 0.006061279773712158, 0.006371583938598633, 0.006484511852264404, 0.006563615798950195, 0.006442560195922852, 0.0063883838653564454, 0.006408192157745361, 0.006060031890869141, 0.0062269439697265625, 0.006217055797576905, 0.006143519878387451, 0.0060797438621521, 0.006110079765319824, 0.00604307222366333, 0.0060505599975585935, 0.005973440170288086, 0.005990816116333008, 0.005987552165985108, 0.005990431785583496, 0.005960415840148925, 0.006063519954681396, 0.006291200160980224, 0.006267744064331055, 0.006080512046813965, 0.006150144100189209, 0.005996543884277344, 0.006103040218353272, 0.006000639915466309, 0.00601635217666626, 0.005994175910949707, 0.006044640064239502, 0.006025184154510498, 0.00612559986114502, 0.006193376064300537, 0.006233888149261475, 0.006188416004180909, 0.006079103946685791, 0.006084928035736084, 0.006065855979919434, 0.006127295970916748, 0.0060910720825195315, 0.006051839828491211, 0.006017024040222168, 0.006033184051513672, 0.006013440132141113, 0.006033152103424072, 0.006117216110229492, 0.006416512012481689, 0.006428671836853027, 0.006489952087402343, 0.0065414719581604, 0.0064839677810668945, 0.0064692158699035645, 0.006375840187072754, 0.006318143844604492, 0.006313920021057129, 0.00614739179611206, 0.006134367942810058, 0.006090559959411621, 0.006202976226806641, 0.006089312076568603, 0.006100480079650879, 0.006111839771270752, 0.006103040218353272, 0.006029439926147461, 0.006084479808807373, 0.006189184188842773, 0.006073311805725097, 0.006073247909545898, 0.006238207817077636, 0.006025728225708008, 0.006159872055053711, 0.00615664005279541, 0.0061132159233093265, 0.006126815795898437, 0.00610745620727539, 0.006160255908966065, 0.006083168029785156, 0.00608028793334961, 0.006062528133392334, 0.006122432231903076, 0.0060342721939086914, 0.006114880084991455, 0.006039999961853028, 0.006031360149383545, 
0.006002848148345947, 0.006000607967376709, 0.005936351776123047, 0.005968544006347656, 0.005928256034851074, 0.006011360168457032, 0.005955264091491699, 0.005996928215026855, 0.005961343765258789, 0.006068287849426269, 0.006064223766326904, 0.0060993280410766605, 0.006006175994873047, 0.006093376159667969, 0.006051167964935302, 0.006216383934020996, 0.006110911846160889, 0.006062528133392334, 0.0060126399993896485, 0.005975488185882568, 0.006025951862335205, 0.005996511936187744, 0.006082496166229248, 0.005963456153869629, 0.005976480007171631, 0.006033408164978027, 0.006270495891571045, 0.006408671855926514, 0.006436992168426514, 0.00635481595993042, 0.006428864002227783, 0.006422336101531982, 0.006315743923187256, 0.006238495826721192, 0.006180863857269287, 0.0061996479034423825, 0.006098495960235596, 0.006084703922271729, 0.006024608135223389, 0.006019455909729004, 0.005974239826202392, 0.006000256061553955, 0.005927584171295166, 0.00599183988571167, 0.005986271858215332, 0.00624291181564331, 0.006526048183441162, 0.006341279983520508, 0.005863423824310303, 0.006123519897460937, 0.006124767780303955, 0.00605398416519165, 0.006046400070190429, 0.006004831790924072, 0.00598419189453125, 0.00603439998626709, 0.006181024074554444, 0.0060136961936950685, 0.00599283218383789, 0.005920447826385498, 0.005955967903137207, 0.005933856010437012, 0.005976128101348877, 0.005927264213562012, 0.005923295974731445, 0.005905695915222168, 0.005988863945007325, 0.005998176097869873, 0.006101632118225097, 0.006268032073974609, 0.006323071956634521, 0.0065491518974304195, 0.006379871845245361, 0.0061494078636169435, 0.006073376178741455, 0.0060044159889221195, 0.0059920320510864255, 0.005978528022766113, 0.005963776111602783, 0.005942912101745606, 0.0059498238563537595, 0.00591212797164917, 0.0059433279037475585, 0.005898848056793213, 0.005949567794799804, 0.005912255764007568, 0.005931295871734619, 0.005997568130493164, 0.006108928203582764, 0.006393983840942383, 0.006373888015747071, 0.006304096221923828, 0.006280288219451904, 0.006288127899169922, 0.00618668794631958, 0.006004672050476074, 0.005966368198394775, 0.005976096153259277, 0.005947360038757324, 0.006011072158813476, 0.005924352169036865, 0.006003007888793945, 0.005971968173980713, 0.00606822395324707, 0.005928959846496582, 0.005998623847961426, 0.005969823837280274, 0.0061502718925476076, 0.00624019193649292, 0.0061328959465026854, 0.006015071868896485, 0.0057849278450012205, 0.0059688959121704105, 0.0060002880096435545, 0.005978271961212158, 0.005995744228363037, 0.005912320137023926, 0.0059827518463134765, 0.006021471977233886, 0.006016895771026611, 0.006072447776794433, 0.005980127811431884, 0.005994527816772461, 0.006014976024627685, 0.006030432224273682, 0.005996511936187744, 0.005995456218719482, 0.006004735946655273, 0.006012224197387696, 0.005935808181762695, 0.005937280178070069, 0.00604966402053833, 0.006230016231536865, 0.006453248023986816, 0.006352255821228028, 0.006274847984313965, 0.00631712007522583, 0.006347743988037109, 0.006470143795013428, 0.006451519966125488, 0.006684351921081543, 0.006367775917053222, 0.006267712116241455, 0.006167520046234131, 0.005980160236358643, 0.006047743797302246, 0.006021344184875489, 0.006053696155548096, 0.005959360122680664, 0.006027008056640625, 0.005945888042449951, 0.005984384059906006, 0.005894015789031983, 0.006018176078796387, 0.00593395185470581, 0.005955584049224853, 0.0060269122123718264, 0.006312287807464599, 0.006440095901489258, 0.006320608139038086, 0.006125823974609375, 
0.006037888050079346, 0.0059983677864074705, 0.0060412797927856445, 0.006050079822540283, 0.005973311901092529, 0.005982975959777832, 0.006041728019714356, 0.006018847942352295, 0.006092832088470459, 0.0060702719688415525, 0.00601043176651001, 0.006035583972930908, 0.005996863842010498, 0.00571830415725708, 0.00597760009765625, 0.005939231872558594, 0.0059732160568237306, 0.005960512161254883, 0.005957632064819336, 0.005944767951965332, 0.005994688034057618, 0.006013472080230713, 0.006033184051513672, 0.005986559867858887, 0.006039775848388672, 0.0060433921813964845, 0.006076704025268555, 0.006024064064025879, 0.006093152046203613, 0.006001120090484619, 0.006060031890869141, 0.006020448207855225, 0.006078176021575928, 0.006093791961669922, 0.00602726411819458, 0.005966047763824463, 0.005991487979888916, 0.005982336044311524, 0.005960544109344482, 0.005969664096832275, 0.006103199958801269, 0.006364511966705322, 0.006425087928771973, 0.006424352169036866, 0.006443071842193604, 0.0065168957710266115, 0.006370975971221924, 0.006370687961578369, 0.006353663921356201, 0.0061561279296875, 0.006211904048919678, 0.0061010560989379885, 0.006090752124786377, 0.006010015964508056, 0.00604860782623291, 0.006236159801483154, 0.005990399837493897, 0.00601087999343872, 0.005996672153472901, 0.00602294397354126, 0.006016511917114258, 0.006050399780273438, 0.006316031932830811, 0.006278783798217774, 0.006130080223083496, 0.006166016101837158, 0.006275551795959473, 0.006389472007751465, 0.00618287992477417, 0.006101503849029541, 0.006090496063232422, 0.0061051521301269535, 0.006059967994689942, 0.006326432228088379, 0.006035359859466552, 0.006033472061157227, 0.005752831935882568, 0.0060356159210205075, 0.005936031818389893, 0.005987360000610352, 0.005955488204956055, 0.006002560138702393, 0.005959743976593017, 0.006010367870330811, 0.005978687763214112, 0.00606822395324707, 0.006017087936401367, 0.006042623996734619, 0.006076960086822509, 0.006073823928833008, 0.006103936195373535, 0.0060910720825195315, 0.006091839790344238, 0.006062975883483887, 0.006149312019348144, 0.006130303859710693, 0.006112576007843018, 0.006180736064910888, 0.006131840229034424, 0.0060217280387878415, 0.006031455993652343, 0.005939328193664551, 0.005975935935974121, 0.005959360122680664, 0.006052031993865967, 0.006234208106994629, 0.00655731201171875, 0.00655951976776123, 0.006455872058868408, 0.006457151889801026, 0.0064330239295959475, 0.006379007816314697, 0.006131392002105713, 0.006113696098327637, 0.006167263984680176, 0.00608403205871582, 0.006092160224914551, 0.006030144214630127, 0.006004672050476074, 0.005966207981109619, 0.005994304180145264, 0.005958784103393555, 0.005989183902740478, 0.005955584049224853, 0.005969120025634766, 0.006038368225097656, 0.006219711780548096, 0.0061562881469726565, 0.006203104019165039, 0.006199584007263183, 0.006162432193756104, 0.006068287849426269, 0.0060638079643249515, 0.005968128204345703, 0.0060201921463012694, 0.005993728160858154, 0.00617468786239624, 0.006400864124298095, 0.006084671974182129, 0.005830495834350586, 0.00610313606262207, 0.006026559829711914, 0.006130176067352295, 0.00604150390625, 0.006032896041870117, 0.0060217280387878415, 0.005990784168243408, 0.005957503795623779, 0.005926400184631348, 0.005952000141143799, 0.0060208640098571775, 0.006024608135223389, 0.006037407875061035, 0.006036416053771972, 0.0060085439682006836, 0.0060152640342712406, 0.006039552211761475, 0.006014944076538086, 0.006035520076751709, 0.006027232170104981, 0.006017216205596924, 
0.005966976165771485, 0.005962431907653809, 0.005986303806304932, 0.005989408016204834, 0.00602950382232666, 0.005972767829895019, 0.006166528224945069, 0.006420447826385498, 0.0064980158805847165, 0.00632863998413086, 0.006455296039581298, 0.006471712112426758, 0.006507743835449218, 0.006408959865570068, 0.006246335983276367, 0.006293568134307861, 0.006256319999694824, 0.006134079933166504, 0.006110496044158935, 0.006025951862335205, 0.006008831977844238, 0.006017024040222168, 0.005963007926940918, 0.006025375843048096, 0.005999199867248535, 0.005965151786804199, 0.005992159843444824, 0.0059539518356323245, 0.006113311767578125, 0.006276768207550049, 0.00624073600769043, 0.006053664207458496, 0.0060136961936950685, 0.005996511936187744, 0.006004096031188965, 0.005982592105865479, 0.005983967781066895, 0.006072735786437989, 0.005949440002441406, 0.005951488018035888, 0.00601859188079834, 0.0060785279273986815, 0.006256703853607178, 0.006608831882476807, 0.006103104114532471, 0.005994336128234863, 0.005976416110992431, 0.0060165758132934575, 0.006070816040039063, 0.0060415358543396, 0.006125408172607422, 0.005971583843231201, 0.006031551837921143, 0.0060126399993896485, 0.0060824317932128905, 0.005994016170501709, 0.006110079765319824, 0.006024511814117431, 0.0060750718116760255, 0.006047935962677002, 0.0060661759376525876, 0.006077983856201172, 0.006032800197601319, 0.006023263931274414, 0.0060152320861816405, 0.006079360008239746, 0.006043456077575684, 0.006074048042297363, 0.006452991962432862, 0.006256608009338379, 0.00640880012512207, 0.006524767875671387, 0.006377471923828125, 0.0063805441856384275, 0.006441952228546143, 0.006280352115631103, 0.0062657279968261715, 0.006302815914154053, 0.006370463848114014, 0.0062991042137146, 0.006215968132019043, 0.006140223979949951, 0.006065695762634277, 0.006123680114746094, 0.005989376068115234, 0.005999839782714843, 0.005955359935760498, 0.006092512130737305, 0.005951712131500244, 0.00603276777267456, 0.006220064163208008, 0.006422880172729492, 0.006346176147460938, 0.0061543679237365725, 0.006097536087036132, 0.006162303924560547, 0.006078464031219482, 0.006067999839782715, 0.006039775848388672, 0.006053088188171386, 0.006179615974426269, 0.006032735824584961, 0.006146687984466553, 0.006250527858734131]",tokens/s,160.37232171093808,, @@ -5003,7 +5003,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 150.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 132.12 MiB is free. Process 48286 has 14.61 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 21.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 150.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 132.12 MiB is free. Process 42184 has 14.61 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 21.89 MiB is reserved by PyTorch but unallocated. 
If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -5113,7 +5113,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 42.12 MiB is free. Process 28721 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 1.64 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 42.12 MiB is free. Process 23090 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 1.64 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,739.258368,3450.79808,0.0,3055.550464,2937.680896,s,1,7.3439462890625,7.3439462890625,0.0,7.3439462890625,7.3439462890625,7.3439462890625,7.3439462890625,[7.3439462890625],,kWh,7.681751145829215e-06,8.40129576004978e-07,3.286391518003695e-06,1.1808272239837887e-05,,MB,1068.863488,3520.004096,0.0,3114.27072,2817.473024,s,10,2.6321105651855463,0.2632110565185547,0.002689435497496622,0.26245011901855464,0.26671708374023434,0.26688536682128905,0.2670199932861328,"[0.26238107299804686, 0.2625191650390625, 0.26112960815429687, 0.26030831909179686, 0.2659143371582031, 0.25960931396484377, 0.26705364990234376, 0.26563711547851565, 0.2666796875, 0.2608782958984375]",tokens/s,972.603519723168,kWh,7.710339589473723e-06,8.499682402763044e-07,5.08705085326314e-06,1.3647358683013168e-05,tokens/kWh,18758208.525627933,MB,1094.914048,3520.004096,0.0,3114.27072,2877.80864,s,10,11.568907958984372,1.1568907958984376,0.0022785457809861944,1.1568141479492189,1.1604347778320312,1.1604911926269532,1.1605363244628906,"[1.1604222412109375, 1.15336181640625, 1.15709375, 1.1544727783203126, 1.1566710205078126, 1.160547607421875, 1.156957275390625, 1.1546573486328124, 1.156302001953125, 1.158422119140625]",tokens/s,54.45630670012757,kWh,3.367687294385985e-05,3.7132154301009313e-06,2.2316998847736745e-05,5.970708722169752e-05,tokens/kWh,1055151.1207719047,,s,630,11.56644199371338,0.01835943173605298,0.0003209969920466255,0.018289616584777832,0.018546284675598143,0.0187432110786438,0.019744583721160894,"[0.019453887939453126, 0.018830528259277345, 0.01853696060180664, 0.018365087509155272, 0.018214624404907228, 0.01823315238952637, 0.01832512092590332, 0.01828883171081543, 0.018188735961914063, 0.018196352005004884, 0.018296735763549805, 0.018154783248901366, 0.018243871688842773, 0.01816636848449707, 0.018135232925415037, 0.018240959167480468, 0.018158111572265625, 0.018204383850097657, 0.018325632095336913, 0.018389152526855468, 0.018384735107421876, 0.01846067237854004, 0.0198656005859375, 0.01984102439880371, 0.018390335083007813, 0.018277088165283204, 0.018292800903320312, 0.018261920928955077, 0.018371904373168945, 0.01853004837036133, 0.018463680267333984, 0.01850102424621582, 0.018246240615844726, 0.01829020881652832, 0.018315807342529297, 0.01853593635559082, 0.0183055362701416, 0.018237567901611327, 0.018239295959472657, 0.01830297660827637, 0.018343936920166014, 0.01822275161743164, 0.01828803253173828, 0.018283456802368165, 0.01830873680114746, 0.018313600540161134, 0.018397184371948243, 0.01855414390563965, 0.018504512786865233, 0.018479007720947266, 0.018550783157348632, 0.018451839447021483, 0.018412160873413085, 0.018460159301757813, 
0.01839699172973633, 0.018536256790161132, 0.01848566436767578, 0.018379232406616212, 0.018358272552490236, 0.018347936630249022, 0.018329376220703124, 0.018392608642578124, 0.018329727172851563, 0.019767904281616212, 0.01886630439758301, 0.01852332878112793, 0.01838368034362793, 0.018229248046875, 0.018263168334960937, 0.018137983322143555, 0.01817804718017578, 0.018249727249145507, 0.018104320526123048, 0.018143072128295898, 0.018239648818969726, 0.018147232055664063, 0.018089855194091797, 0.01811244773864746, 0.018112384796142578, 0.01812931251525879, 0.018153472900390624, 0.018151071548461913, 0.018116767883300782, 0.01916876792907715, 0.018240032196044923, 0.01825939178466797, 0.018201311111450194, 0.018190336227416993, 0.018190336227416993, 0.01817804718017578, 0.018300256729125976, 0.01826220893859863, 0.018134880065917968, 0.018178688049316407, 0.018118656158447266, 0.018307104110717773, 0.018170015335083008, 0.01837808036804199, 0.018197151184082033, 0.01817990493774414, 0.018240991592407226, 0.01823798370361328, 0.01827235221862793, 0.01822096061706543, 0.018210271835327148, 0.018172447204589843, 0.01827599906921387, 0.018237791061401366, 0.01822710418701172, 0.01826348876953125, 0.018206687927246095, 0.018285247802734376, 0.018279455184936524, 0.018396127700805665, 0.01829478454589844, 0.018339168548583983, 0.018384639739990234, 0.018406303405761718, 0.018521631240844726, 0.018417823791503907, 0.01848556709289551, 0.018378751754760742, 0.018345407485961914, 0.018546239852905273, 0.018379776000976563, 0.01834592056274414, 0.019687488555908204, 0.018814207077026367, 0.018500192642211914, 0.01828659248352051, 0.01818435287475586, 0.01825584030151367, 0.0180861759185791, 0.01811404800415039, 0.018075103759765624, 0.01810518455505371, 0.018165760040283203, 0.01836025619506836, 0.01811414337158203, 0.018104736328125, 0.018167808532714845, 0.018744895935058594, 0.019987136840820312, 0.019451263427734376, 0.01813747215270996, 0.01819647979736328, 0.018137088775634767, 0.018198528289794923, 0.01819148826599121, 0.018149728775024413, 0.018317855834960938, 0.018155519485473632, 0.018275936126708983, 0.0182706241607666, 0.01825529670715332, 0.018264223098754882, 0.018223520278930663, 0.01828976058959961, 0.018393375396728515, 0.01833184051513672, 0.018155168533325196, 0.018279296875, 0.018202272415161133, 0.01826201629638672, 0.018218399047851563, 0.018207712173461912, 0.01820044708251953, 0.0181777286529541, 0.01820198440551758, 0.018498495101928712, 0.01827840042114258, 0.018233343124389647, 0.01822105598449707, 0.01830297660827637, 0.01827020835876465, 0.018379968643188478, 0.018319328308105468, 0.018445152282714844, 0.01840447998046875, 0.018498432159423827, 0.018579456329345705, 0.018525888442993164, 0.01848521614074707, 0.01847881507873535, 0.018461376190185546, 0.01853398323059082, 0.018358911514282227, 0.018388864517211913, 0.01829052734375, 0.01924390411376953, 0.018632095336914064, 0.018447071075439452, 0.018308191299438475, 0.018262624740600586, 0.018225151062011717, 0.0180731201171875, 0.01809401512145996, 0.01814790344238281, 0.018214912414550782, 0.018124544143676757, 0.018218816757202147, 0.018151872634887694, 0.018225151062011717, 0.01819443130493164, 0.018356224060058594, 0.018220895767211913, 0.018206880569458007, 0.018116607666015624, 0.018200159072875977, 0.018231296539306642, 0.01842598342895508, 0.018198816299438477, 0.018211904525756835, 0.01816419219970703, 0.01823967933654785, 0.01822559928894043, 0.01823289680480957, 0.018163904190063477, 0.018222623825073243, 
0.01816428756713867, 0.018370559692382812, 0.018294015884399415, 0.018372480392456054, 0.0183604793548584, 0.01831599998474121, 0.018300832748413084, 0.01837065505981445, 0.01823539161682129, 0.018266111373901366, 0.018211904525756835, 0.018289600372314453, 0.018324960708618165, 0.018330144882202148, 0.018253055572509766, 0.01826278305053711, 0.018278079986572264, 0.018397504806518555, 0.018386943817138672, 0.01836851119995117, 0.018331647872924805, 0.018429439544677736, 0.01839923286437988, 0.018561119079589843, 0.018417888641357422, 0.018688192367553712, 0.018374656677246092, 0.018471200942993163, 0.01852582359313965, 0.018583263397216797, 0.018430335998535156, 0.018493440628051756, 0.018388256072998047, 0.019920671463012695, 0.019418975830078126, 0.018680192947387694, 0.018445472717285156, 0.01834480094909668, 0.018225151062011717, 0.01832352066040039, 0.018296512603759765, 0.018256128311157225, 0.01824947166442871, 0.018143487930297852, 0.01812227249145508, 0.018078176498413086, 0.018151071548461913, 0.018390752792358397, 0.0181429443359375, 0.018141120910644532, 0.018113504409790038, 0.018273759841918945, 0.018268159866333008, 0.018188831329345703, 0.01816166305541992, 0.018621856689453126, 0.018188896179199218, 0.018192800521850586, 0.018207679748535155, 0.018196863174438478, 0.018162975311279295, 0.018192895889282225, 0.018260480880737305, 0.018288639068603514, 0.018251264572143554, 0.018283008575439453, 0.018249887466430664, 0.01823030471801758, 0.018250560760498045, 0.01829478454589844, 0.018269664764404298, 0.01827280044555664, 0.018268159866333008, 0.018311168670654295, 0.018414880752563475, 0.01833462333679199, 0.018227008819580077, 0.018296831130981444, 0.018272256851196288, 0.018333120346069334, 0.01839366340637207, 0.018423999786376953, 0.01839286422729492, 0.018361568450927734, 0.018349056243896485, 0.018536224365234374, 0.018741151809692384, 0.018443391799926757, 0.01846784019470215, 0.018459903717041017, 0.018608896255493165, 0.018515552520751953, 0.01834435272216797, 0.018333311080932616, 0.018378400802612306, 0.018428512573242187, 0.01960960006713867, 0.018894847869873048, 0.01864076805114746, 0.018396863937377928, 0.01821446418762207, 0.018225471496582032, 0.018145439147949218, 0.018182559967041014, 0.018228607177734377, 0.01814790344238281, 0.018217023849487306, 0.01817366409301758, 0.018188352584838866, 0.01827043151855469, 0.01821696090698242, 0.018155296325683593, 0.01822492790222168, 0.018319807052612304, 0.018386335372924806, 0.018415647506713866, 0.018256351470947264, 0.018255231857299804, 0.018138944625854494, 0.018260896682739256, 0.0183045768737793, 0.018299327850341798, 0.018298879623413086, 0.01827436828613281, 0.01820460891723633, 0.018488895416259764, 0.018288383483886717, 0.018250431060791016, 0.018181535720825197, 0.01825424003601074, 0.018218591690063478, 0.018301536560058593, 0.01820467185974121, 0.018259967803955078, 0.018282112121582032, 0.018393184661865233, 0.01848758316040039, 0.018542015075683593, 0.021207616806030272, 0.018765823364257812, 0.018479103088378905, 0.01841766357421875, 0.018520320892333984, 0.018386016845703124, 0.01827702331542969, 0.018343936920166014, 0.01840332794189453, 0.018583839416503906, 0.01858460807800293, 0.01841231918334961, 0.01863462448120117, 0.01839926338195801, 0.0184597110748291, 0.018508384704589844, 0.01838729667663574, 0.018396480560302735, 0.018436800003051756, 0.018476736068725585, 0.018506048202514648, 0.019605951309204103, 0.019136512756347656, 0.01873945617675781, 0.01846451187133789, 
0.018296831130981444, 0.0182825927734375, 0.018261920928955077, 0.018325504302978517, 0.018173952102661133, 0.01813827133178711, 0.01819343948364258, 0.018146976470947266, 0.01817795181274414, 0.018106048583984374, 0.01817888069152832, 0.018148128509521484, 0.018239967346191405, 0.018333471298217774, 0.018268896102905274, 0.018224992752075196, 0.018268512725830077, 0.018243392944335936, 0.01820057678222656, 0.018336864471435548, 0.018174879074096678, 0.018181535720825197, 0.018180320739746094, 0.0182030086517334, 0.01832246398925781, 0.018213855743408204, 0.01823299217224121, 0.018257728576660158, 0.018354463577270507, 0.018555135726928712, 0.01847222328186035, 0.01841744041442871, 0.018292831420898437, 0.018260448455810548, 0.01829052734375, 0.018235200881958007, 0.018256032943725586, 0.018210752487182617, 0.0182806396484375, 0.018275871276855468, 0.01835238456726074, 0.01826883125305176, 0.018274303436279296, 0.018284543991088868, 0.018354175567626953, 0.01838489532470703, 0.018422975540161132, 0.018328096389770506, 0.018657087326049804, 0.01851430320739746, 0.018394880294799805, 0.01840995216369629, 0.018582592010498045, 0.01836934471130371, 0.018296192169189453, 0.01834566307067871, 0.018328447341918946, 0.018535999298095702, 0.019448320388793947, 0.019371519088745116, 0.018810911178588866, 0.01856787109375, 0.018382303237915038, 0.018267711639404296, 0.018181951522827148, 0.018045024871826174, 0.018043296813964844, 0.01807200050354004, 0.018036800384521483, 0.01808332824707031, 0.018125247955322266, 0.01801420783996582, 0.01801625633239746, 0.01823744010925293, 0.018078720092773438, 0.01812761688232422, 0.018141311645507814, 0.018135168075561522, 0.018102272033691406, 0.018104320526123048, 0.018193471908569337, 0.018127552032470705, 0.018102527618408203, 0.018108415603637695, 0.018481151580810547, 0.01951900863647461, 0.018546688079833985, 0.018257919311523436, 0.018321887969970703, 0.018427616119384767, 0.018204959869384765, 0.018239168167114257, 0.01843132781982422, 0.018236703872680664, 0.01811404800415039, 0.018187488555908203, 0.01828963279724121, 0.01835212707519531, 0.018501312255859374, 0.018237760543823242, 0.01828220748901367, 0.01820086479187012, 0.018354175567626953, 0.018368543624877928, 0.018208703994750976, 0.018222143173217773, 0.018323551177978514, 0.018485536575317384, 0.01850569534301758, 0.01850022315979004, 0.018366464614868162, 0.0183767032623291, 0.018483327865600585, 0.018435007095336915, 0.01855526351928711, 0.018348415374755858, 0.018546880722045897, 0.018314720153808594, 0.018475391387939452, 0.018448671340942382, 0.018421472549438475, 0.01836828804016113, 0.01953596878051758, 0.01903545570373535, 0.018678112030029295, 0.01847270393371582, 0.018279008865356446, 0.018192384719848635, 0.018163711547851562, 0.018322656631469727, 0.01822115135192871, 0.018344640731811523, 0.018388351440429686, 0.01833228874206543, 0.018481151580810547, 0.01835212707519531, 0.018696191787719727, 0.01840153694152832, 0.01828428840637207, 0.018237056732177733, 0.018213375091552735, 0.018179616928100585, 0.01824188804626465, 0.018106367111206053, 0.01816991996765137, 0.01833568000793457, 0.018694143295288086, 0.018351551055908202, 0.01835475158691406, 0.01827164840698242, 0.018219743728637695, 0.018211936950683592, 0.018237344741821288, 0.018387840270996093, 0.018210079193115233, 0.01819107246398926, 0.01822892761230469, 0.018198495864868165, 0.018346208572387696, 0.018391168594360352, 0.018366464614868162, 0.018323455810546875, 0.0182108154296875, 0.01823766326904297, 
0.018157407760620116, 0.018175935745239256, 0.01824563217163086, 0.01819647979736328, 0.018183839797973632, 0.018283039093017577, 0.01824924850463867, 0.0182989444732666, 0.01838307189941406, 0.018414976119995118, 0.01850227165222168, 0.01844428825378418, 0.018307104110717773, 0.018318431854248047, 0.018342720031738282, 0.01848860740661621, 0.018481952667236328, 0.018446079254150392, 0.01841177558898926, 0.018291711807250977, 0.018338048934936523, 0.019400863647460936, 0.018782207489013672, 0.018579391479492186, 0.018427967071533203, 0.018279808044433594, 0.018155616760253908, 0.01806800079345703, 0.018061216354370118, 0.01883145523071289, 0.018276351928710938, 0.01805721664428711, 0.018070592880249023, 0.018035648345947265, 0.018136287689208986, 0.018141759872436523, 0.01809667205810547, 0.01801593589782715, 0.018129215240478516, 0.018242496490478516, 0.018379520416259766, 0.018542400360107424, 0.018299072265625, 0.018155519485473632, 0.01817215919494629, 0.018267135620117187, 0.01833839988708496, 0.01826972770690918, 0.018170560836791992, 0.018120800018310547, 0.018116640090942382, 0.018321056365966797, 0.018155839920043944, 0.01816582489013672, 0.01814313507080078, 0.018141056060791017, 0.018317312240600587, 0.018141183853149414, 0.018183231353759766, 0.018112991333007814, 0.018133216857910157, 0.021786880493164063, 0.01957683181762695, 0.018312736511230467, 0.01821129608154297, 0.01817190361022949, 0.018241535186767577, 0.018714399337768556, 0.018602207183837892, 0.018382848739624022, 0.01840460777282715, 0.018228992462158203, 0.018355199813842774, 0.018331680297851562, 0.018513343811035157, 0.01847555160522461, 0.01839030456542969, 0.01856988716125488, 0.018596960067749024, 0.018422271728515623, 0.018411455154418947, 0.018291072845458986, 0.018446495056152343, 0.018296831130981444]",tokens/s,54.46791678395302,, @@ -5159,7 +5159,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 98.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 44.12 MiB is free. Process 133559 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 3.80 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 98.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 44.12 MiB is free. Process 127103 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 3.80 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -5202,7 +5202,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 12.12 MiB is free. Process 89713 has 14.73 GiB memory in use. Of the allocated memory 12.32 GiB is allocated by PyTorch, and 2.30 GiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 12.12 MiB is free. Process 83287 has 14.73 GiB memory in use. Of the allocated memory 12.32 GiB is allocated by PyTorch, and 2.30 GiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.403008,1326.383104,0.0,931.135488,917.648384,s,1,7.63307861328125,7.63307861328125,0.0,7.63307861328125,7.63307861328125,7.63307861328125,7.63307861328125,[7.63307861328125],,kWh,1.0338465666666252e-05,1.13285999688864e-06,3.105280261997412e-06,1.4576605925552304e-05,,MB,1135.603712,1456.406528,0.0,1050.673152,1018.330112,s,10,0.7034482269287109,0.0703448226928711,0.0008076623861901718,0.07017057418823242,0.0707837059020996,0.0716950527191162,0.07242413017272949,"[0.0726063995361328, 0.07038086700439453, 0.06963465881347657, 0.07012895965576171, 0.06993052673339843, 0.07021218872070313, 0.07058118438720704, 0.06974800109863281, 0.07038400268554687, 0.06984143829345703]",tokens/s,3639.21593942611,kWh,2.329240217592651e-06,2.56763201417403e-07,1.5366215996666443e-06,4.122625018676698e-06,tokens/kWh,62096358.228130154,MB,1162.858496,1473.183744,0.0,1067.450368,1032.767488,s,10,13.302551025390624,1.3302551025390623,0.024940005579764144,1.3159708251953126,1.3628881225585936,1.3655794982910157,1.367732598876953,"[1.3622900390625, 1.3682708740234375, 1.3511224365234376, 1.3586226806640624, 1.3164345703125, 1.3095928955078124, 1.307552978515625, 1.302308349609375, 1.315507080078125, 1.31084912109375]",tokens/s,47.3593372276879,kWh,3.8109842997824186e-05,4.20321723531936e-06,1.767769932733434e-05,5.999075956047789e-05,tokens/kWh,1050161.7325996421,,s,630,13.296805097579947,0.0211060398374285,0.0005385663482820034,0.020997008323669436,0.02172949161529541,0.021857902526855467,0.02261669342041016,"[0.022597631454467772, 0.021526527404785157, 0.02134342384338379, 0.0214182071685791, 0.021647872924804686, 0.02165353584289551, 0.02230284881591797, 0.021929983139038087, 0.02179680061340332, 0.02147532844543457, 0.02156470489501953, 0.021756288528442382, 0.02178492736816406, 0.02179836845397949, 0.021703359603881835, 0.02156732749938965, 0.021453855514526367, 0.021554143905639648, 0.021456480026245117, 0.021507711410522462, 0.021325952529907228, 0.02159174346923828, 0.021931072235107422, 0.02183065605163574, 0.021742431640625, 0.021707103729248046, 0.02154863929748535, 0.021587648391723634, 0.021436864852905274, 0.021700927734375, 0.0222096004486084, 0.02158451271057129, 0.02149177551269531, 0.02155232048034668, 0.021584127426147463, 0.021605056762695314, 0.021738880157470702, 0.0214136962890625, 0.021658239364624025, 0.021723455429077148, 0.02163596725463867, 0.021720191955566407, 0.021691583633422853, 0.02162259292602539, 0.021740224838256834, 0.02165555191040039, 0.02157583999633789, 0.021528032302856444, 0.021591615676879884, 0.021457056045532226, 0.021041183471679686, 0.02081046485900879, 0.02141584014892578, 
0.021598207473754884, 0.021302688598632814, 0.021457504272460938, 0.021532447814941406, 0.021798208236694337, 0.021652383804321287, 0.021726272583007813, 0.021573631286621094, 0.02139814376831055, 0.021291328430175782, 0.021510143280029297, 0.02159119987487793, 0.02144470405578613, 0.021570304870605468, 0.02223308753967285, 0.02168012809753418, 0.021338111877441408, 0.02104876708984375, 0.021000288009643556, 0.02151641654968262, 0.021782655715942383, 0.022624479293823243, 0.02234339141845703, 0.021600831985473634, 0.02215228843688965, 0.02167788887023926, 0.02157401657104492, 0.021508895874023437, 0.021968544006347655, 0.021540319442749024, 0.02161292839050293, 0.02156764793395996, 0.021411584854125976, 0.021298944473266603, 0.021227136611938476, 0.02191244888305664, 0.021729280471801758, 0.021763744354248046, 0.02145110321044922, 0.021521856307983398, 0.021525056838989257, 0.021354496002197267, 0.021484735488891602, 0.021832319259643556, 0.021975200653076173, 0.022322656631469727, 0.021938751220703125, 0.022347776412963868, 0.022716415405273437, 0.021846048355102538, 0.021401567459106444, 0.021297407150268555, 0.021462783813476563, 0.021796863555908205, 0.021540864944458008, 0.02166988754272461, 0.021497856140136717, 0.021683328628540038, 0.02194428825378418, 0.02155788803100586, 0.02177872085571289, 0.021589311599731445, 0.021657440185546876, 0.021940288543701173, 0.02181427192687988, 0.021816511154174805, 0.022708831787109376, 0.021864511489868163, 0.021682111740112305, 0.0216964168548584, 0.02168025588989258, 0.021612512588500978, 0.021380096435546874, 0.021208703994750975, 0.021369375228881837, 0.02177039909362793, 0.021630655288696288, 0.021644607543945312, 0.022151872634887694, 0.021448703765869142, 0.021169919967651368, 0.02114358329772949, 0.021694143295288085, 0.021748191833496095, 0.021297536849975585, 0.021289888381958007, 0.021183263778686522, 0.021206111907958985, 0.02091859245300293, 0.021012351989746093, 0.0214268798828125, 0.021451967239379883, 0.021248863220214843, 0.0211267204284668, 0.0213222713470459, 0.02145849609375, 0.0216210880279541, 0.02145280075073242, 0.021587039947509764, 0.02152579116821289, 0.021505279541015623, 0.02154319953918457, 0.021606496810913086, 0.021522432327270507, 0.021383167266845703, 0.02147532844543457, 0.021537919998168946, 0.021441408157348633, 0.02217513656616211, 0.021218048095703126, 0.021415359497070314, 0.021092800140380858, 0.021483488082885742, 0.021340383529663085, 0.021753631591796874, 0.02155264091491699, 0.021606016159057616, 0.021639711380004884, 0.0217545280456543, 0.021277952194213866, 0.020963903427124023, 0.020832128524780273, 0.021217279434204102, 0.02180499267578125, 0.02173139190673828, 0.021690271377563478, 0.021427871704101563, 0.02144095993041992, 0.02148953628540039, 0.021319807052612303, 0.02125619125366211, 0.02129305648803711, 0.021514240264892577, 0.021602304458618164, 0.02127667236328125, 0.021186559677124024, 0.02140812873840332, 0.02176576042175293, 0.021624191284179688, 0.021646656036376954, 0.021587648391723634, 0.021403648376464843, 0.021587968826293946, 0.02150918388366699, 0.021399648666381835, 0.02099292755126953, 0.02099510383605957, 0.021535648345947265, 0.02160867118835449, 0.02149558448791504, 0.021563135147094726, 0.021566848754882812, 0.021711936950683595, 0.022163040161132814, 0.021571807861328125, 0.021524511337280273, 0.021698816299438477, 0.021481184005737303, 0.02207043266296387, 0.024928895950317383, 0.02184982490539551, 0.021383167266845703, 0.02158233642578125, 0.021397504806518555, 
0.021315616607666017, 0.02128700828552246, 0.0214649600982666, 0.02128291130065918, 0.02143577575683594, 0.020889888763427733, 0.021646751403808593, 0.02177680015563965, 0.02154745674133301, 0.02135264015197754, 0.02127177619934082, 0.0215347843170166, 0.021653215408325197, 0.021590656280517578, 0.021583999633789062, 0.021612607955932617, 0.021534719467163087, 0.02271027183532715, 0.021413888931274414, 0.020996095657348633, 0.021243743896484375, 0.02168764877319336, 0.021784959793090822, 0.0214881591796875, 0.0211691837310791, 0.021033504486083984, 0.021164384841918946, 0.021284799575805664, 0.02158006477355957, 0.021384992599487306, 0.021288415908813477, 0.021391904830932618, 0.021606399536132814, 0.021489664077758788, 0.02142617607116699, 0.02091663932800293, 0.021643264770507813, 0.021413888931274414, 0.021665023803710937, 0.022551551818847656, 0.021619808197021483, 0.0214800968170166, 0.02152889633178711, 0.021487295150756838, 0.0211494083404541, 0.02095337677001953, 0.020762624740600585, 0.020727807998657227, 0.020707359313964845, 0.020774879455566407, 0.02126665687561035, 0.021325056076049804, 0.021209983825683593, 0.021097152709960938, 0.020837503433227537, 0.020583904266357422, 0.020625791549682616, 0.020488000869750975, 0.02047609519958496, 0.02048723220825195, 0.020454336166381835, 0.020557695388793946, 0.020559999465942384, 0.020563968658447264, 0.020561759948730468, 0.020437152862548828, 0.02056972885131836, 0.020474239349365233, 0.020502527236938475, 0.020598688125610352, 0.020615455627441406, 0.020645727157592775, 0.02071548843383789, 0.020731903076171874, 0.02085068893432617, 0.020609024047851563, 0.020658176422119142, 0.020764671325683593, 0.020694623947143553, 0.020728031158447267, 0.021235071182250976, 0.02084947204589844, 0.02065328025817871, 0.020948991775512696, 0.02064259147644043, 0.020862464904785157, 0.02090991973876953, 0.02174131202697754, 0.020972000122070313, 0.020951007843017577, 0.020821855545043944, 0.02077743911743164, 0.020721824645996093, 0.020710655212402344, 0.020760799407958986, 0.020799488067626954, 0.020748832702636718, 0.020717567443847656, 0.02096758460998535, 0.021202623367309572, 0.020926624298095702, 0.020901023864746095, 0.020617696762084962, 0.020652416229248047, 0.020700639724731445, 0.020554271697998047, 0.020690944671630858, 0.020692991256713866, 0.020738048553466795, 0.020572160720825194, 0.020590591430664062, 0.020768768310546876, 0.02099612808227539, 0.021086463928222655, 0.02112483215332031, 0.021112831115722656, 0.020860576629638673, 0.020709888458251953, 0.023066272735595705, 0.020792959213256836, 0.020821920394897463, 0.02054742431640625, 0.020437183380126952, 0.02080620765686035, 0.020719680786132812, 0.021372127532958984, 0.020670944213867188, 0.020645343780517578, 0.0210565128326416, 0.020676448822021486, 0.020586496353149415, 0.020554784774780274, 0.02155766487121582, 0.0205963191986084, 0.02075894355773926, 0.020826688766479494, 0.020780799865722656, 0.02061747169494629, 0.02046976089477539, 0.020463615417480468, 0.020597856521606447, 0.02041529655456543, 0.020491424560546874, 0.020487104415893555, 0.020418560028076172, 0.020600128173828124, 0.020888256072998046, 0.020961280822753905, 0.021006080627441408, 0.020869312286376954, 0.020778751373291014, 0.02073017692565918, 0.020817920684814452, 0.02062745666503906, 0.020590591430664062, 0.020485248565673828, 0.020646976470947265, 0.02061497688293457, 0.020662559509277343, 0.02057366371154785, 0.02053116798400879, 0.020390527725219727, 0.020537343978881836, 0.020815872192382814, 
0.020912128448486327, 0.021594112396240234, 0.020979007720947265, 0.02063350486755371, 0.02062214469909668, 0.020557792663574218, 0.020448959350585938, 0.020511039733886717, 0.020639999389648438, 0.0205185604095459, 0.020424800872802733, 0.020496383666992187, 0.02066201591491699, 0.02184217643737793, 0.021061632156372072, 0.020510879516601563, 0.020465696334838867, 0.02052012825012207, 0.020386432647705077, 0.02046063995361328, 0.020568992614746092, 0.020609024047851563, 0.02061484718322754, 0.02051718330383301, 0.02067036819458008, 0.0205164794921875, 0.02058243179321289, 0.020588991165161132, 0.020508607864379882, 0.02056608009338379, 0.023451648712158202, 0.020735712051391603, 0.02062735939025879, 0.020516544342041015, 0.02051862335205078, 0.020495328903198242, 0.02046281623840332, 0.02062345504760742, 0.020520832061767576, 0.02062745666503906, 0.02058336067199707, 0.020694143295288087, 0.02105187225341797, 0.020827423095703124, 0.020581344604492187, 0.020508703231811524, 0.021874624252319334, 0.02118662452697754, 0.020649984359741212, 0.020619264602661135, 0.021360095977783204, 0.020636192321777345, 0.020743711471557617, 0.020643455505371094, 0.020519775390625, 0.02053513526916504, 0.02164486312866211, 0.02097417640686035, 0.02055561637878418, 0.020544864654541015, 0.020424671173095703, 0.020645151138305663, 0.020695808410644532, 0.020649984359741212, 0.020510719299316405, 0.02052412796020508, 0.02068355178833008, 0.020743871688842775, 0.020494783401489258, 0.020450912475585937, 0.020611488342285156, 0.020564096450805664, 0.020563840866088867, 0.020756832122802736, 0.020581695556640626, 0.020576608657836913, 0.02047177505493164, 0.02052092742919922, 0.02062339210510254, 0.020449312210083007, 0.020463615417480468, 0.020404096603393554, 0.020804895401000976, 0.020849760055541993, 0.020788991928100586, 0.020620479583740234, 0.020656095504760743, 0.020458240509033204, 0.020463712692260744, 0.020602880477905275, 0.020561920166015626, 0.020619264602661135, 0.020553407669067384, 0.02051513671875, 0.020653663635253908, 0.020644256591796875, 0.020707328796386718, 0.02069196891784668, 0.0206812801361084, 0.020532800674438478, 0.02058464050292969, 0.020918975830078124, 0.020563968658447264, 0.02050214385986328, 0.020504255294799805, 0.02041516876220703, 0.022337535858154296, 0.020785280227661133, 0.02076201629638672, 0.020655616760253907, 0.020655071258544922, 0.020642879486083985, 0.020847551345825194, 0.021036096572875976, 0.021221855163574218, 0.02116399955749512, 0.020859392166137695, 0.02081558418273926, 0.02051100730895996, 0.020512767791748047, 0.02049843215942383, 0.02058639907836914, 0.020586591720581054, 0.02040652847290039, 0.02061235237121582, 0.020551551818847658, 0.020430912017822267, 0.020560447692871093, 0.020539392471313478, 0.021753984451293944, 0.020602176666259766, 0.020643903732299806, 0.02060873603820801, 0.020570560455322264, 0.020472095489501952, 0.020466943740844727, 0.0204083194732666, 0.020564800262451173, 0.020482303619384766, 0.02060406494140625, 0.020533855438232423, 0.02086092758178711, 0.021056575775146483, 0.020751615524291993, 0.020930143356323243, 0.02113545608520508, 0.021727231979370116, 0.021716543197631836, 0.021051296234130858, 0.021044767379760743, 0.020877504348754884, 0.02225663948059082, 0.021180416107177736, 0.020877119064331054, 0.020776960372924806, 0.020705087661743164, 0.020738239288330077, 0.02072812843322754, 0.02070319938659668, 0.020997888565063478, 0.02068191909790039, 0.020789087295532225, 0.020587455749511718, 0.020760576248168947, 
0.02087500762939453, 0.020519168853759765, 0.02087731170654297, 0.02064588737487793, 0.02083839988708496, 0.021583520889282226, 0.021025119781494142, 0.02166374397277832, 0.02077401542663574, 0.02108095932006836, 0.020578304290771485, 0.020857887268066408, 0.02072675132751465, 0.02066431999206543, 0.02059644889831543, 0.02176950454711914, 0.021230527877807617, 0.021084224700927735, 0.02123366355895996, 0.021002464294433594, 0.020777055740356445, 0.0208155517578125, 0.02128892707824707, 0.02117635154724121, 0.021127168655395507, 0.021153087615966796, 0.021492576599121092, 0.021489503860473633, 0.021152095794677736, 0.020925151824951173, 0.02084566307067871, 0.021243999481201172, 0.021345983505249022, 0.021276191711425783, 0.02126902389526367, 0.02118396759033203, 0.02096623992919922, 0.020848320007324218, 0.020796800613403322, 0.020834943771362305, 0.020602880477905275, 0.020587776184082033, 0.02049420738220215, 0.020490816116333008, 0.020631296157836914, 0.02058425521850586, 0.02077568054199219, 0.02062131118774414, 0.020572160720825194, 0.020752384185791017, 0.020981760025024415, 0.02136412811279297, 0.021004095077514648, 0.020795200347900392, 0.020568735122680665, 0.020582304000854493, 0.02071139144897461, 0.02068115234375, 0.020676383972167967, 0.020512256622314453, 0.0205599365234375, 0.020578975677490233, 0.02048204803466797, 0.020570112228393556, 0.020520959854125977, 0.02051024055480957, 0.02057881546020508, 0.020772512435913087, 0.020777280807495118, 0.020813215255737306, 0.020719648361206055, 0.02123404884338379, 0.020660415649414062, 0.02060492706298828, 0.020510656356811523, 0.020465728759765624, 0.020989952087402345, 0.02068604850769043, 0.020591392517089843, 0.020531200408935548, 0.020551136016845703, 0.020476415634155275, 0.020449312210083007, 0.020600223541259767, 0.020673023223876954]",tokens/s,47.37980254479786,, @@ -5248,7 +5248,7 @@ ChildProcessError: Traceback (most recent call last): self.w1 = nn.Parameter(torch.empty(moe_num_experts * ffn_hidden_size, hidden_size)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.17 GiB is free. Process 118744 has 13.57 GiB memory in use. Of the allocated memory 13.45 GiB is allocated by PyTorch, and 1.36 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.17 GiB is free. Process 112474 has 13.57 GiB memory in use. Of the allocated memory 13.45 GiB is allocated by PyTorch, and 1.36 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -5283,7 +5283,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.16 GiB. GPU 0 has a total capacity of 14.74 GiB of which 774.12 MiB is free. Process 74761 has 13.98 GiB memory in use. Of the allocated memory 13.72 GiB is allocated by PyTorch, and 148.73 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.16 GiB. GPU 0 has a total capacity of 14.74 GiB of which 774.12 MiB is free. Process 67951 has 13.98 GiB memory in use. Of the allocated memory 13.72 GiB is allocated by PyTorch, and 148.73 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -5326,7 +5326,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 52.12 MiB is free. Process 92734 has 14.69 GiB memory in use. 
Of the allocated memory 14.47 GiB is allocated by PyTorch, and 108.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 52.12 MiB is free. Process 86280 has 14.69 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 108.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,783.159296,1133.44512,0.0,738.197504,715.772928,s,1,7.06663818359375,7.06663818359375,0.0,7.06663818359375,7.06663818359375,7.06663818359375,7.06663818359375,[7.06663818359375],,kWh,3.0683363375070863e-06,3.3134657421837375e-07,9.327785240043074e-07,4.332461435729768e-06,,MB,1147.338752,1173.291008,0.0,767.557632,723.637248,s,11,0.6937036476135254,0.06306396796486595,0.0010367258932583786,0.06303084945678711,0.06340774536132812,0.0646856803894043,0.06570802841186524,"[0.06596361541748047, 0.06277628707885742, 0.062066654205322265, 0.06303247833251953, 0.061806304931640625, 0.06340774536132812, 0.062698974609375, 0.06335663986206054, 0.062423137664794924, 0.06314096069335938, 0.06303084945678711]",tokens/s,4059.3703228858376,kWh,2.0431546406759623e-06,2.251474102397912e-07,1.3615395507692388e-06,3.6298416016849926e-06,tokens/kWh,70526493.46493891,MB,1172.320256,1213.136896,0.0,807.40352,735.775744,s,11,9.432381652832031,0.8574892411665483,0.002954941198204269,0.858698486328125,0.8597750854492188,0.8612584533691406,0.8624451477050782,"[0.8552522583007812, 0.8554146118164062, 0.8523182983398437, 0.8535504150390625, 0.8597698974609375, 0.858861083984375, 0.8591412353515625, 0.8597750854492188, 0.8627418212890625, 0.858698486328125, 0.8568584594726563]",tokens/s,73.47030956830821,kWh,2.4743069268035864e-05,2.7288963556497834e-06,1.1863748807231034e-05,3.933571443091668e-05,tokens/kWh,1601597.9603127257,,s,693,9.426552733421326,0.01360252919685617,0.00023421402319323897,0.013552384376525879,0.013768626976013184,0.013935711860656739,0.014409163513183595,"[0.013556032180786133, 0.01360867214202881, 0.013643168449401855, 0.013607808113098144, 0.013897727966308594, 0.013780896186828612, 0.013569439888000488, 0.013677248001098633, 0.013581727981567383, 0.013580767631530762, 0.013491552352905273, 0.013515104293823242, 0.01357209587097168, 0.013429183959960938, 
0.013487199783325195, 0.013463616371154785, 0.013419424057006836, 0.013576128005981446, 0.013453696250915527, 0.013432095527648926, 0.013537631988525391, 0.013701120376586913, 0.013501536369323731, 0.013560447692871094, 0.013496543884277344, 0.01355577564239502, 0.013422592163085938, 0.013420736312866211, 0.013576000213623048, 0.013495519638061523, 0.013654111862182617, 0.013584575653076171, 0.01352467155456543, 0.014236703872680664, 0.013633312225341797, 0.013686783790588379, 0.013621248245239258, 0.01368892765045166, 0.013557184219360351, 0.013543904304504395, 0.013649920463562011, 0.013537280082702637, 0.013489407539367676, 0.013580927848815918, 0.013553631782531739, 0.01371894359588623, 0.013517151832580567, 0.013500415802001953, 0.01354793643951416, 0.013451264381408692, 0.013613056182861329, 0.013500415802001953, 0.013490367889404296, 0.01363657569885254, 0.013552127838134765, 0.013656415939331056, 0.013567999839782715, 0.013522368431091308, 0.013539775848388672, 0.013377951622009278, 0.013418208122253418, 0.013526304244995117, 0.013456128120422363, 0.013297280311584473, 0.013674976348876954, 0.013577471733093262, 0.013691295623779296, 0.013680992126464843, 0.013742143630981446, 0.01377683162689209, 0.013554783821105957, 0.013631936073303222, 0.013457887649536133, 0.01415708827972412, 0.013691743850708009, 0.01367852783203125, 0.013649855613708496, 0.013482239723205567, 0.013604607582092285, 0.013459263801574707, 0.01344438362121582, 0.013597599983215332, 0.01349238395690918, 0.01378105640411377, 0.013592351913452148, 0.013514368057250977, 0.013615488052368164, 0.01350211238861084, 0.013508992195129394, 0.013545503616333008, 0.013444895744323731, 0.013527199745178223, 0.0135697603225708, 0.013493951797485351, 0.013578847885131836, 0.013469023704528809, 0.013508416175842286, 0.01353600025177002, 0.013377056121826172, 0.013496895790100098, 0.01356326389312744, 0.013900032043457031, 0.013701248168945312, 0.013561599731445313, 0.013674976348876954, 0.01350864028930664, 0.013592864036560058, 0.013526752471923828, 0.013393183708190919, 0.013515487670898437, 0.013495967864990235, 0.013547871589660644, 0.01375382423400879, 0.013484576225280761, 0.013595840454101563, 0.013462016105651856, 0.01343507194519043, 0.013498496055603028, 0.013480159759521484, 0.013495391845703125, 0.013486751556396484, 0.01341875171661377, 0.013522047996520997, 0.013865632057189941, 0.013502559661865234, 0.01352070426940918, 0.013373215675354004, 0.013569279670715332, 0.013509599685668946, 0.01344540786743164, 0.013448384284973145, 0.01351529598236084, 0.013539104461669922, 0.01356208038330078, 0.013615103721618652, 0.013464608192443848, 0.013549856185913086, 0.013511360168457031, 0.013445119857788086, 0.013667360305786132, 0.013450207710266114, 0.013592160224914551, 0.01354793643951416, 0.013403615951538085, 0.013936767578125, 0.013773440361022949, 0.013564991950988769, 0.01358512020111084, 0.01351471996307373, 0.013674304008483887, 0.013461759567260742, 0.013497407913208009, 0.013537983894348144, 0.013449376106262206, 0.013522815704345704, 0.01353916835784912, 0.013471839904785156, 0.01360307216644287, 0.013428735733032226, 0.013398015975952148, 0.013472031593322754, 0.01336515235900879, 0.0136145601272583, 0.01343727970123291, 0.013407808303833008, 0.013479616165161133, 0.013441951751708984, 0.013457247734069824, 0.013542464256286621, 0.013415360450744629, 0.013567999839782715, 0.01381772804260254, 0.013456992149353027, 0.013455967903137207, 0.013420479774475098, 0.01352899169921875, 0.013512800216674805, 
0.013367136001586915, 0.013463168144226075, 0.013469471931457519, 0.013492351531982421, 0.013480223655700684, 0.013484383583068848, 0.01375641632080078, 0.013678624153137206, 0.013485983848571777, 0.0135414400100708, 0.013495871543884277, 0.013496959686279297, 0.013339232444763184, 0.013522720336914063, 0.013553152084350586, 0.013628064155578613, 0.013455424308776855, 0.013434880256652832, 0.01348624038696289, 0.013373279571533203, 0.013475839614868163, 0.01343283176422119, 0.013404159545898438, 0.013578335762023925, 0.01344054412841797, 0.01339395236968994, 0.013486432075500489, 0.0133754243850708, 0.013420607566833496, 0.013500415802001953, 0.01343280029296875, 0.01352883243560791, 0.01345952033996582, 0.013418720245361329, 0.013619199752807617, 0.013436927795410156, 0.013423904418945313, 0.013564031600952148, 0.01359727954864502, 0.014243295669555664, 0.014405920028686523, 0.014350784301757813, 0.013569055557250976, 0.013648672103881836, 0.013567584037780762, 0.013482399940490723, 0.013524991989135742, 0.01345910358428955, 0.013573472023010253, 0.01347481632232666, 0.013389823913574218, 0.013499872207641601, 0.013460000038146973, 0.013514752388000489, 0.013450528144836426, 0.013445440292358399, 0.013495967864990235, 0.013451456069946289, 0.013426431655883789, 0.013529024124145507, 0.013380031585693359, 0.013482239723205567, 0.013893343925476075, 0.013512415885925293, 0.013607423782348632, 0.013541407585144043, 0.01356822395324707, 0.013553791999816895, 0.013488224029541016, 0.013600704193115234, 0.013578399658203125, 0.013479295730590821, 0.013603424072265625, 0.013450976371765136, 0.013508447647094727, 0.013562432289123535, 0.013839360237121581, 0.013609472274780274, 0.01367910385131836, 0.013600895881652832, 0.013494144439697266, 0.013610783576965333, 0.01345967960357666, 0.013525343894958497, 0.013534879684448243, 0.01369324779510498, 0.01360863971710205, 0.015853407859802247, 0.013883551597595216, 0.013639679908752441, 0.013628543853759766, 0.01354377555847168, 0.013548064231872558, 0.013503968238830566, 0.013558303833007813, 0.013504544258117675, 0.013549375534057616, 0.013512864112854003, 0.013500415802001953, 0.013611007690429687, 0.013562208175659179, 0.013480799674987793, 0.013568832397460937, 0.013500415802001953, 0.01345315170288086, 0.013508768081665038, 0.01347379207611084, 0.014075648307800294, 0.013514880180358887, 0.013555839538574218, 0.013854207992553711, 0.013648384094238282, 0.01358614444732666, 0.013479583740234375, 0.013598752021789551, 0.013549280166625977, 0.013503264427185059, 0.013543328285217286, 0.013467840194702149, 0.013452768325805664, 0.013504032135009766, 0.013437952041625977, 0.013496319770812988, 0.013426688194274903, 0.013477343559265137, 0.013676799774169923, 0.013648159980773926, 0.013596672058105469, 0.013549568176269532, 0.01362339210510254, 0.013535200119018554, 0.01354304027557373, 0.013863583564758301, 0.01356275177001953, 0.013450016021728515, 0.013529439926147462, 0.014190239906311035, 0.01471014404296875, 0.013615103721618652, 0.014029984474182128, 0.01356272029876709, 0.013633536338806153, 0.013504159927368165, 0.013507136344909668, 0.013647647857666015, 0.013728032112121582, 0.013745823860168456, 0.01355065631866455, 0.013442048072814941, 0.013684479713439942, 0.013565471649169922, 0.013595487594604492, 0.013486144065856933, 0.013535039901733398, 0.013630559921264648, 0.013678591728210449, 0.013591456413269042, 0.013512895584106446, 0.013418304443359374, 0.013604928016662597, 0.01346275234222412, 0.013662976264953613, 0.013527327537536622, 
0.013479616165161133, 0.014118304252624512, 0.013556320190429688, 0.013535231590270995, 0.013625344276428223, 0.013730079650878906, 0.01365782356262207, 0.013461759567260742, 0.013782303810119629, 0.013610655784606934, 0.01350489616394043, 0.013564191818237304, 0.013535391807556153, 0.01377280044555664, 0.013622976303100586, 0.013629759788513184, 0.013801471710205078, 0.013550623893737793, 0.01363475227355957, 0.013446399688720703, 0.013556511878967285, 0.013501440048217773, 0.013552384376525879, 0.013626879692077636, 0.013484800338745117, 0.013538784027099609, 0.013709152221679687, 0.013726143836975098, 0.013758336067199707, 0.013740480422973633, 0.013620927810668945, 0.013527039527893067, 0.01345638370513916, 0.013455904006958008, 0.013492704391479492, 0.013489727973937988, 0.013608927726745606, 0.014916288375854491, 0.013785152435302735, 0.013815584182739258, 0.013614303588867187, 0.013490559577941895, 0.013904735565185547, 0.015267552375793457, 0.014446463584899902, 0.01364185619354248, 0.01358233642578125, 0.013496319770812988, 0.013557760238647461, 0.013705216407775878, 0.013434623718261719, 0.013664064407348632, 0.013443679809570312, 0.01350380802154541, 0.013660832405090332, 0.013413344383239746, 0.013466879844665528, 0.013475040435791015, 0.013380031585693359, 0.013505760192871094, 0.013535584449768066, 0.01348038387298584, 0.013501536369323731, 0.013593152046203613, 0.013469504356384278, 0.01359280014038086, 0.013457728385925292, 0.013668383598327637, 0.013778911590576173, 0.013677887916564942, 0.013496447563171386, 0.01351689624786377, 0.01353286361694336, 0.013462431907653808, 0.013551487922668456, 0.01433568000793457, 0.013592608451843262, 0.013508864402770996, 0.0134202241897583, 0.013438336372375488, 0.013437376022338868, 0.013462047576904296, 0.014327391624450684, 0.013492639541625977, 0.013769951820373535, 0.01373468780517578, 0.013450559616088867, 0.013687616348266601, 0.01347980785369873, 0.013639967918395996, 0.013571935653686524, 0.013657983779907226, 0.0135098876953125, 0.013538047790527344, 0.013492223739624023, 0.01347379207611084, 0.013479935646057128, 0.013737792015075684, 0.013559743881225586, 0.013537535667419434, 0.01372163200378418, 0.01360636806488037, 0.014370783805847168, 0.013732416152954102, 0.01363702392578125, 0.013486080169677735, 0.013498016357421875, 0.013437888145446778, 0.01345644760131836, 0.013657024383544922, 0.013501472473144531, 0.013460288047790528, 0.014229663848876953, 0.013506112098693847, 0.013533503532409667, 0.013484160423278808, 0.013602463722229004, 0.013711711883544923, 0.01356390380859375, 0.013704992294311524, 0.013576288223266602, 0.013789312362670898, 0.013578240394592284, 0.013844703674316406, 0.013860639572143554, 0.013588831901550294, 0.01363321590423584, 0.01358784008026123, 0.013615167617797852, 0.013594240188598632, 0.01368899154663086, 0.013669119834899903, 0.013576512336730956, 0.013610560417175293, 0.013611136436462402, 0.013538944244384765, 0.013836095809936524, 0.014023232460021972, 0.014206496238708496, 0.013935008049011231, 0.013930303573608398, 0.013758496284484863, 0.013613280296325683, 0.013380800247192383, 0.013679712295532227, 0.013554464340209961, 0.01334992027282715, 0.013492128372192384, 0.013506943702697755, 0.013421695709228515, 0.013550080299377442, 0.013433888435363769, 0.01356278419494629, 0.013715519905090331, 0.013567999839782715, 0.013665375709533691, 0.013612256050109864, 0.01364345645904541, 0.013694560050964356, 0.01349891185760498, 0.01357376003265381, 0.013457216262817383, 0.01339027214050293, 
0.013645824432373046, 0.013316672325134277, 0.013495424270629882, 0.013538175582885743, 0.013456543922424317, 0.013488991737365723, 0.01349836826324463, 0.014232704162597656, 0.013593855857849122, 0.013465279579162598, 0.015788064002990723, 0.015582592010498046, 0.01374028778076172, 0.013781087875366211, 0.013642208099365234, 0.013458463668823242, 0.013510944366455079, 0.013443584442138673, 0.013588383674621582, 0.013643199920654298, 0.013728320121765137, 0.013558879852294922, 0.013515680313110352, 0.013538911819458007, 0.013589216232299804, 0.013446847915649414, 0.013645600318908692, 0.01360256004333496, 0.013805312156677246, 0.013556127548217773, 0.013539392471313477, 0.013538847923278809, 0.013598912239074707, 0.013754912376403809, 0.013739711761474609, 0.013825823783874511, 0.014125599861145019, 0.013623295783996582, 0.01356118392944336, 0.01405404758453369, 0.01374944019317627, 0.013544256210327149, 0.013685888290405273, 0.01356214427947998, 0.013407135963439941, 0.013455231666564942, 0.013483039855957031, 0.013749024391174317, 0.013723775863647461, 0.01359654426574707, 0.013676544189453126, 0.013571328163146972, 0.013763327598571778, 0.013523039817810058, 0.013457088470458985, 0.013598496437072755, 0.013422880172729492, 0.01358358383178711, 0.013542143821716309, 0.014073472023010253, 0.013711872100830079, 0.013549599647521973, 0.013632543563842773, 0.013540032386779786, 0.013492351531982421, 0.013695648193359375, 0.01356227207183838, 0.0137576322555542, 0.013758591651916504, 0.01353983974456787, 0.01366972827911377, 0.013532959938049316, 0.013599360466003418, 0.013473952293395996, 0.013462719917297363, 0.013650848388671874, 0.014149632453918457, 0.013674495697021484, 0.013740032196044923, 0.013854240417480469, 0.013668064117431641, 0.013615776062011718, 0.013684127807617187, 0.01358028793334961, 0.013591232299804688, 0.013491904258728028, 0.013503040313720703, 0.01363327980041504, 0.013512703895568847, 0.013575327873229981, 0.013586784362792969, 0.013636416435241699, 0.013653696060180664, 0.013450688362121582, 0.013603391647338868, 0.013813535690307617, 0.014044768333435058, 0.013981568336486816, 0.013904640197753906, 0.013791359901428223, 0.013729887962341309, 0.013659232139587403, 0.013565952301025391, 0.013736543655395507, 0.013584159851074218, 0.013622591972351075, 0.013671520233154297, 0.013536383628845215, 0.013671199798583984, 0.013549056053161621, 0.013607423782348632, 0.013701120376586913, 0.013537088394165038, 0.013578335762023925, 0.01366256046295166, 0.013428031921386719, 0.013576640129089355, 0.013367391586303711, 0.013402015686035156, 0.01351193618774414, 0.013484864234924317, 0.013475711822509766, 0.013438816070556641, 0.01348761558532715, 0.013702048301696777, 0.013442015647888184, 0.013484895706176758, 0.013399200439453125, 0.013638527870178222, 0.013588095664978027, 0.013707615852355958, 0.013645759582519531, 0.013723775863647461, 0.013538463592529297, 0.013648799896240234, 0.013532608032226563, 0.013521023750305176, 0.013729280471801757, 0.01359177589416504, 0.013604063987731934, 0.013546272277832032, 0.01345529556274414, 0.013507648468017578, 0.013445728302001953, 0.013684864044189454, 0.013615167617797852, 0.013416031837463378, 0.013664735794067382, 0.013746399879455567, 0.013702879905700684, 0.013813695907592773, 0.013629055976867676, 0.013563712120056153, 0.013429311752319336, 0.013528127670288086, 0.013661375999450684, 0.01360051155090332, 0.013623295783996582, 0.013541376113891602, 0.013626655578613282, 0.013630175590515136, 0.013462944030761719, 
0.013544032096862793, 0.013438976287841797, 0.013447296142578126, 0.013491104125976563, 0.013450400352478027, 0.013465503692626953, 0.013468671798706054, 0.013419424057006836, 0.013379584312438965, 0.013355008125305176, 0.013522239685058593, 0.013638208389282227, 0.01351471996307373, 0.014046624183654785, 0.01411689567565918, 0.014015199661254883, 0.01394611167907715, 0.014023712158203125, 0.013598431587219238, 0.013592255592346192, 0.013680480003356933, 0.0135316162109375, 0.013569952011108399, 0.013488256454467773, 0.013467616081237792, 0.013527199745178223, 0.013412192344665527, 0.01340556812286377]",tokens/s,73.51574001628468,,
@@ -5370,7 +5370,7 @@ ChildProcessError: Traceback (most recent call last):
self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs)
-torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 182.12 MiB is free. Process 45311 has 14.56 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 13.08 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
+torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 182.12 MiB is free. Process 39226 has 14.56 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 13.08 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last):
@@ -5413,7 +5413,7 @@ ChildProcessError: Traceback (most recent call last):
self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs)
-torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 200.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 164.12 MiB is free. Process 53252 has 14.58 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 4.94 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 200.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 164.12 MiB is free. Process 47031 has 14.58 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 4.94 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,814.907392,806.289408,0.0,411.041792,391.374848,s,1,7.3405439453125,7.3405439453125,0.0,7.3405439453125,7.3405439453125,7.3405439453125,7.3405439453125,[7.3405439453125],,kWh,4.876233570833695e-06,5.308025702608459e-07,1.983334919991586e-06,7.3903710610861275e-06,,MB,1112.3712,879.689728,0.0,473.956352,454.832128,s,14,0.35395107078552246,0.025282219341823033,0.0006675251077587096,0.02511552047729492,0.025283087921142578,0.02612307538986206,0.027355866603851316,"[0.027664064407348633, 0.025009151458740234, 0.0250250244140625, 0.025128000259399413, 0.02510304069519043, 0.02513587188720703, 0.024959232330322264, 0.02525923156738281, 0.024972927093505858, 0.025055456161499023, 0.0250283203125, 0.02516223907470703, 0.02515519905090332, 0.025293312072753905]",tokens/s,10125.69333960776,kWh,8.802714563880312e-07,9.703644657400391e-08,5.748766312530154e-07,1.5521845342150508e-06,tokens/kWh,164928843.41838953,MB,1139.023872,906.952704,0.0,501.219328,454.834688,s,14,10.088037719726563,0.7205741228376116,0.007503769931621452,0.7197563781738281,0.7292069396972656,0.7304784545898437,0.7312623901367188,"[0.7006243286132813, 0.7314583740234375, 0.7256400146484375, 0.7224078979492188, 0.7274712524414062, 0.72591748046875, 0.714115234375, 0.7185491943359374, 0.71629638671875, 0.7185131225585938, 0.7195335083007812, 0.7299508056640625, 0.719979248046875, 0.7175808715820312]",tokens/s,87.43028371863649,kWh,2.0470741543611325e-05,2.2576304109255806e-06,8.849634876604323e-06,3.157800683114122e-05,tokens/kWh,1995059.4202124062,,s,882,10.07979100131987,0.011428334468616649,0.00034070346924705217,0.011425951957702636,0.01164567985534668,0.011714222478866578,0.012337450504302967,"[0.011187392234802246, 0.01128428840637207, 0.011154335975646973, 0.011010080337524415, 0.010950079917907715, 0.011192223548889161, 0.01102511978149414, 0.01102400016784668, 0.010981760025024413, 0.011669504165649413, 0.011978719711303711, 0.013942432403564454, 0.011230751991271973, 0.011288736343383789, 0.011254464149475098, 0.011176959991455078, 0.01110102367401123, 0.011054783821105958, 0.012296671867370606, 0.011003168106079101, 
0.010989760398864746, 0.010976960182189941, 0.010957216262817383, 0.010914239883422851, 0.011362303733825683, 0.011315199851989746, 0.010932288169860839, 0.010882623672485351, 0.010912287712097167, 0.010935232162475586, 0.011019167900085449, 0.010912960052490234, 0.010942815780639648, 0.01100870418548584, 0.011087648391723633, 0.010894559860229492, 0.010933247566223145, 0.010874912261962891, 0.010816800117492675, 0.010888895988464355, 0.010926464080810546, 0.010907648086547851, 0.010867232322692872, 0.010972928047180176, 0.010893535614013673, 0.010917792320251465, 0.010840031623840332, 0.010933823585510254, 0.010846688270568847, 0.01088652801513672, 0.010914048194885255, 0.010934623718261718, 0.01090294361114502, 0.010934911727905273, 0.010950655937194824, 0.011067647933959961, 0.011034367561340332, 0.010991616249084473, 0.010966015815734862, 0.011037152290344238, 0.011231743812561035, 0.0112326078414917, 0.011455136299133301, 0.011290047645568848, 0.01157363224029541, 0.011560928344726563, 0.011636768341064453, 0.011632479667663575, 0.011688096046447754, 0.011600192070007325, 0.011626175880432129, 0.011542752265930175, 0.011669376373291015, 0.011519904136657716, 0.011464768409729004, 0.011421631813049317, 0.01149289608001709, 0.01174176025390625, 0.011482560157775878, 0.01153395175933838, 0.011540863990783691, 0.011558752059936523, 0.011541119575500488, 0.01148470401763916, 0.011463135719299316, 0.011498496055603028, 0.01141427230834961, 0.01158176040649414, 0.011614144325256348, 0.011487232208251954, 0.011552767753601074, 0.012955615997314453, 0.012981951713562012, 0.011619903564453126, 0.01168012809753418, 0.011649439811706543, 0.012210176467895508, 0.01161404800415039, 0.011614368438720704, 0.011426048278808594, 0.01187228775024414, 0.01154428768157959, 0.011496512413024903, 0.011527104377746582, 0.011610367774963378, 0.011675392150878906, 0.011970815658569335, 0.01173299217224121, 0.011515647888183593, 0.011440128326416015, 0.011456512451171874, 0.011669568061828613, 0.011499456405639649, 0.011447808265686036, 0.01148134422302246, 0.011630240440368653, 0.011630623817443848, 0.01154310417175293, 0.011495455741882325, 0.011386207580566406, 0.011491680145263672, 0.011349632263183593, 0.011459551811218261, 0.011456319808959961, 0.01133513641357422, 0.011241888046264649, 0.011032575607299805, 0.011427840232849122, 0.0115447998046875, 0.011566975593566894, 0.011640735626220703, 0.011513728141784668, 0.011403039932250976, 0.011448672294616699, 0.011531840324401856, 0.011629216194152832, 0.011405088424682618, 0.011272192001342773, 0.011266048431396485, 0.01133516788482666, 0.011606687545776368, 0.011623776435852051, 0.011602432250976562, 0.011648320198059081, 0.011510463714599609, 0.011593728065490723, 0.011746527671813965, 0.011553728103637695, 0.011612000465393067, 0.011527199745178223, 0.011545568466186524, 0.011470720291137696, 0.011489567756652832, 0.011444064140319824, 0.0113536958694458, 0.011292703628540039, 0.011383071899414062, 0.011434080123901368, 0.011660544395446778, 0.011631360054016113, 0.011698176383972168, 0.011501567840576172, 0.011489279747009277, 0.011423680305480957, 0.01154047966003418, 0.011556927680969237, 0.011454463958740235, 0.011410783767700196, 0.01142585563659668, 0.011599679946899415, 0.011473600387573241, 0.011390368461608886, 0.011399168014526367, 0.011524736404418945, 0.01147116756439209, 0.0114749755859375, 0.011458271980285645, 0.01151910400390625, 0.011521023750305176, 0.011621536254882813, 0.011596608161926269, 0.011699616432189941, 0.011405599594116212, 
0.011509119987487793, 0.011631520271301269, 0.011700160026550293, 0.011671551704406738, 0.011577343940734864, 0.011525664329528808, 0.011155008316040039, 0.011452863693237305, 0.011413503646850585, 0.011390975952148438, 0.0117391357421875, 0.011433792114257813, 0.011395584106445313, 0.011449983596801758, 0.011585599899291992, 0.011503616333007812, 0.011351200103759765, 0.011457375526428223, 0.011540831565856934, 0.011529855728149414, 0.01152012825012207, 0.01148470401763916, 0.011538016319274902, 0.011522720336914063, 0.011432064056396485, 0.011393024444580077, 0.011362303733825683, 0.011294719696044921, 0.011253824234008789, 0.011340831756591797, 0.011393535614013671, 0.011483551979064942, 0.011493375778198242, 0.01132953643798828, 0.011302016258239745, 0.011290656089782716, 0.011190688133239746, 0.011202048301696778, 0.011407936096191406, 0.01164352035522461, 0.011464127540588379, 0.011445664405822753, 0.01151478385925293, 0.011559200286865235, 0.011677408218383789, 0.01147052764892578, 0.011540127754211425, 0.011597503662109375, 0.011625503540039063, 0.011628576278686523, 0.011564959526062011, 0.011460288047790528, 0.011513567924499512, 0.011544768333435058, 0.011425503730773926, 0.01152070426940918, 0.01142745590209961, 0.011405856132507323, 0.011462400436401367, 0.011374143600463868, 0.011401663780212403, 0.011468480110168457, 0.011552255630493164, 0.011510944366455078, 0.0115217924118042, 0.011342016220092774, 0.011411264419555664, 0.01157475185394287, 0.011516448020935058, 0.011628543853759766, 0.013717184066772461, 0.011911487579345703, 0.011515904426574707, 0.011476415634155274, 0.011471103668212891, 0.01145248031616211, 0.01158351993560791, 0.011587712287902832, 0.01161843204498291, 0.011538399696350097, 0.011611840248107911, 0.011534111976623535, 0.011663935661315917, 0.01159119987487793, 0.011714240074157715, 0.01161292839050293, 0.011497792243957519, 0.011520031929016114, 0.01140003204345703, 0.011350879669189453, 0.0116627836227417, 0.011468607902526856, 0.01151369571685791, 0.011412320137023925, 0.011347711563110352, 0.011539775848388672, 0.01151478385925293, 0.01142307186126709, 0.011346688270568847, 0.011347935676574708, 0.011470656394958496, 0.01146236801147461, 0.01150819206237793, 0.011436032295227052, 0.01145241641998291, 0.011460448265075683, 0.011327360153198242, 0.011418047904968262, 0.011382368087768555, 0.01136076831817627, 0.011364095687866211, 0.011499808311462403, 0.011531871795654297, 0.01148969554901123, 0.011503328323364257, 0.011501855850219726, 0.0114518404006958, 0.011479328155517578, 0.011526111602783204, 0.011524127960205079, 0.011575296401977539, 0.011652768135070801, 0.011631135940551757, 0.011695327758789063, 0.01159228801727295, 0.011429887771606445, 0.011500896453857422, 0.01155958366394043, 0.011505663871765137, 0.01140940761566162, 0.011296319961547851, 0.011322943687438965, 0.011124128341674805, 0.011658207893371582, 0.011484512329101563, 0.011401568412780762, 0.011434144020080566, 0.011333791732788086, 0.011294719696044921, 0.012070624351501465, 0.011448512077331543, 0.011585280418395997, 0.011502079963684082, 0.011352160453796386, 0.0113570556640625, 0.01141823959350586, 0.011584063529968262, 0.011595775604248047, 0.011562463760375976, 0.011581727981567383, 0.011623616218566895, 0.01154047966003418, 0.011493120193481445, 0.01152444839477539, 0.01154105567932129, 0.011789983749389648, 0.01166585636138916, 0.011690079689025879, 0.011476479530334472, 0.011508128166198731, 0.011454591751098633, 0.011439935684204101, 0.011477055549621583, 
0.011583488464355468, 0.011573023796081543, 0.011528127670288086, 0.011341152191162109, 0.011378879547119141, 0.011806528091430664, 0.011367487907409667, 0.011396703720092773, 0.011426079750061035, 0.011646976470947265, 0.012785216331481934, 0.011399295806884766, 0.011305024147033691, 0.011204863548278808, 0.011456640243530273, 0.011435903549194337, 0.01124687957763672, 0.01163747215270996, 0.011558912277221679, 0.011531904220581054, 0.011328895568847657, 0.011306303977966309, 0.011480768203735352, 0.011487104415893555, 0.011569503784179688, 0.011489055633544921, 0.01159926414489746, 0.0117990083694458, 0.011484992027282715, 0.011437824249267578, 0.011461183547973633, 0.011306271553039551, 0.011020383834838866, 0.011834495544433594, 0.011561599731445313, 0.011286751747131347, 0.011161536216735839, 0.011355487823486329, 0.011458239555358886, 0.01140220832824707, 0.01137052822113037, 0.01127830410003662, 0.01122713565826416, 0.011318400382995605, 0.011293567657470704, 0.011399168014526367, 0.011270496368408203, 0.011131775856018067, 0.011176735877990722, 0.01117734432220459, 0.011146271705627441, 0.011998815536499024, 0.011028608322143555, 0.011007648468017578, 0.011438303947448731, 0.011671551704406738, 0.01168716812133789, 0.01162668800354004, 0.011517631530761718, 0.01148630428314209, 0.011489055633544921, 0.011382399559020996, 0.011325535774230957, 0.01123750400543213, 0.011352224349975586, 0.011182368278503418, 0.01112179183959961, 0.011201120376586915, 0.011259455680847168, 0.011270112037658691, 0.011230815887451171, 0.01116044807434082, 0.011175423622131348, 0.01147481632232666, 0.011647616386413574, 0.011395071983337402, 0.011228192329406738, 0.011287520408630371, 0.011319231986999512, 0.011201919555664063, 0.011356863975524902, 0.011401439666748047, 0.011351840019226074, 0.011560959815979004, 0.01133743953704834, 0.011536928176879882, 0.01147980785369873, 0.011171039581298827, 0.01118505573272705, 0.0111494722366333, 0.011057439804077149, 0.010963104248046874, 0.010989151954650878, 0.011173695564270019, 0.011473119735717774, 0.01198265552520752, 0.011597791671752929, 0.011545023918151855, 0.011427231788635254, 0.011345727920532227, 0.011336480140686035, 0.011364480018615723, 0.011267104148864747, 0.011203071594238282, 0.011192192077636718, 0.011129311561584473, 0.011318559646606446, 0.011594207763671874, 0.011469056129455566, 0.011294719696044921, 0.01147935962677002, 0.011491007804870605, 0.011404831886291504, 0.011237824440002441, 0.011218015670776366, 0.011250335693359375, 0.011219584465026855, 0.011129695892333985, 0.011235967636108398, 0.011397184371948242, 0.011354240417480468, 0.011288576126098633, 0.011122271537780762, 0.01111900806427002, 0.011082847595214844, 0.011055968284606933, 0.010969152450561523, 0.01092403221130371, 0.011327263832092285, 0.011657024383544922, 0.011564736366271972, 0.01153536033630371, 0.011541664123535156, 0.011529024124145509, 0.011448224067687989, 0.011937952041625977, 0.01209926414489746, 0.011427840232849122, 0.012511296272277832, 0.011321120262145996, 0.011315360069274902, 0.011306912422180175, 0.011337408065795898, 0.011501983642578125, 0.011503968238830566, 0.011515551567077637, 0.011311103820800781, 0.011324704170227051, 0.011393759727478027, 0.011472895622253418, 0.01140940761566162, 0.0112576322555542, 0.011265536308288575, 0.011412192344665527, 0.011472415924072266, 0.011475104331970215, 0.011409728050231934, 0.011332991600036622, 0.011019136428833008, 0.011167743682861327, 0.011103263854980469, 0.01126307201385498, 0.011672960281372071, 
0.011581695556640625, 0.011669055938720703, 0.011662015914916992, 0.011577343940734864, 0.011488415718078614, 0.011420512199401855, 0.01122713565826416, 0.011093376159667968, 0.011343903541564941, 0.011179807662963868, 0.01117471981048584, 0.011050335884094238, 0.011138943672180176, 0.01135696029663086, 0.01152409553527832, 0.011718463897705079, 0.011607904434204102, 0.01125158405303955, 0.011282848358154298, 0.01134598445892334, 0.01123145580291748, 0.011202527999877929, 0.011310912132263184, 0.011481087684631347, 0.011390975952148438, 0.011356160163879395, 0.011567104339599609, 0.011396575927734375, 0.011172479629516601, 0.011018176078796387, 0.010997376441955566, 0.011055711746215821, 0.011443679809570312, 0.011701567649841309, 0.011623456001281738, 0.011539999961853026, 0.01150614356994629, 0.011373855590820313, 0.011313983917236328, 0.011251744270324706, 0.011206463813781738, 0.01123737621307373, 0.011249024391174317, 0.011266752243041992, 0.01152627182006836, 0.01147475242614746, 0.011463744163513184, 0.011296799659729004, 0.011213600158691406, 0.01144547176361084, 0.011470815658569337, 0.011340543746948243, 0.011379167556762696, 0.01149510383605957, 0.011437824249267578, 0.011421759605407715, 0.011489503860473633, 0.011449503898620605, 0.011141119956970215, 0.011389984130859374, 0.011142047882080078, 0.011192383766174317, 0.011583200454711914, 0.011540639877319335, 0.011683903694152831, 0.01154054355621338, 0.011491328239440919, 0.011576800346374512, 0.01147548770904541, 0.01132755184173584, 0.011380672454833984, 0.011302911758422851, 0.011222463607788086, 0.011253567695617675, 0.011407487869262696, 0.011457216262817383, 0.011417792320251465, 0.011248415946960449, 0.01128060817718506, 0.011538271903991698, 0.011560959815979004, 0.011395456314086914, 0.011333312034606934, 0.011373408317565918, 0.011511584281921387, 0.011472352027893066, 0.01138764762878418, 0.011552448272705079, 0.011434304237365722, 0.01139065647125244, 0.011394880294799804, 0.011239935874938965, 0.011222528457641602, 0.011260416030883789, 0.011309056282043458, 0.011084863662719727, 0.011158432006835937, 0.011400320053100587, 0.011684767723083496, 0.011705856323242187, 0.011612223625183106, 0.011616928100585937, 0.01155782413482666, 0.011719264030456544, 0.011303168296813965, 0.01109228801727295, 0.01127619171142578, 0.011396096229553223, 0.011303071975708007, 0.011543007850646972, 0.011401375770568848, 0.011226719856262207, 0.011202336311340332, 0.01122374439239502, 0.011194304466247558, 0.011431936264038087, 0.011388928413391113, 0.011466303825378417, 0.011571935653686524, 0.011447456359863281, 0.01147049617767334, 0.011185471534729003, 0.01138268756866455, 0.011340576171875, 0.011339776039123535, 0.011591520309448242, 0.011524255752563477, 0.01160752010345459, 0.011643424034118652, 0.011480640411376954, 0.011534784317016602, 0.011390432357788086, 0.01132307243347168, 0.011221792221069336, 0.010997823715209961, 0.010950655937194824, 0.011142304420471191, 0.011305824279785156, 0.01115884780883789, 0.01132806396484375, 0.01159603214263916, 0.011435327529907226, 0.011380672454833984, 0.011412096023559571, 0.011396639823913575, 0.011386816024780273, 0.011530783653259278, 0.011334976196289063, 0.011316255569458008, 0.011413215637207031, 0.011395008087158202, 0.011255071640014649, 0.01150211238861084, 0.01143558406829834, 0.01192204761505127, 0.011956512451171875, 0.011598912239074707, 0.011410079956054687, 0.011385919570922852, 0.011469759941101075, 0.0114617919921875, 0.011524959564208985, 0.011692031860351563, 
0.011333632469177245, 0.011304960250854493, 0.011593728065490723, 0.01165721607208252, 0.011419872283935546, 0.011534111976623535, 0.011802463531494141, 0.01147100830078125, 0.011757823944091797, 0.011482463836669923, 0.01131497573852539, 0.011324159622192383, 0.011302783966064453, 0.011247008323669434, 0.011336288452148437, 0.011482272148132324, 0.011504480361938477, 0.011187552452087402, 0.011014335632324219, 0.011006431579589844, 0.011196352005004883, 0.011448543548583984, 0.011786016464233398, 0.011518143653869628, 0.0113438081741333, 0.011450336456298829, 0.011713888168334961, 0.014022527694702148, 0.011655872344970702, 0.011632384300231934, 0.011481120109558105, 0.011462880134582519, 0.011303263664245606, 0.011177632331848144, 0.011169695854187011, 0.01145251178741455, 0.01146675205230713, 0.011394271850585938, 0.011402303695678712, 0.011368224143981934, 0.011376799583435059, 0.011758624076843261, 0.011551103591918944, 0.011362688064575195, 0.011249407768249512, 0.011210623741149903, 0.011183679580688477, 0.011477791786193847, 0.011648927688598633, 0.011614368438720704, 0.011456480026245117, 0.011481216430664062, 0.011396991729736327, 0.01120687961578369, 0.01105465602874756, 0.011036800384521485, 0.011175423622131348, 0.011260512351989747, 0.011165696144104004, 0.011392831802368164, 0.01124953556060791, 0.011456831932067871, 0.011280256271362305, 0.01153651237487793, 0.011376768112182618, 0.011472767829895019, 0.01147606372833252, 0.011362815856933594, 0.011204352378845215, 0.011276960372924805, 0.011318431854248048, 0.011479328155517578, 0.011385439872741699, 0.011321311950683593, 0.01127785587310791, 0.011346559524536132, 0.012291872024536132, 0.016046464920043944, 0.015373920440673828, 0.011554911613464355, 0.011342111587524415, 0.011253600120544433, 0.011236703872680665, 0.011177760124206543, 0.011076031684875488, 0.011390496253967285, 0.011491904258728028, 0.011560256004333497, 0.011459168434143066, 0.011396703720092773, 0.011481504440307617, 0.011438079833984375, 0.011304960250854493, 0.01139913558959961, 0.01140944004058838, 0.011347135543823243, 0.011286656379699707, 0.011404064178466797, 0.01150496006011963, 0.011397407531738282, 0.011417887687683105, 0.01113491153717041, 0.011211903572082519, 0.011645919799804687, 0.011658368110656738, 0.011613280296325683, 0.011558367729187011, 0.011491647720336914, 0.011978752136230468, 0.011326560020446777, 0.011172767639160155, 0.011209919929504394, 0.01115772819519043, 0.011117183685302734, 0.011470911979675293, 0.01134768009185791, 0.011332799911499023, 0.011502271652221679, 0.01136575984954834, 0.011318207740783692, 0.011222208023071289, 0.011400159835815429, 0.011456543922424317, 0.011430047988891601, 0.01132630443572998, 0.011294591903686523, 0.011506624221801758, 0.011339039802551269, 0.011465439796447754, 0.011308320045471191, 0.011220000267028808, 0.011183744430541992, 0.011493087768554688, 0.011569503784179688, 0.011720447540283204, 0.011773951530456543, 0.011538687705993653, 0.011544544219970703, 0.011444576263427734, 0.011851648330688476, 0.011355072021484376, 0.0114901762008667, 0.011317248344421387, 0.011271552085876465, 0.011473631858825684, 0.011474495887756347, 0.011527839660644532, 0.011315808296203614, 0.011546688079833984, 0.011382783889770508, 0.01132953643798828, 0.01128809642791748, 0.011333503723144532, 0.011448415756225586, 0.011491840362548827, 0.011517824172973633, 0.011509311676025391, 0.011655679702758789, 0.011647295951843262, 0.011771648406982423, 0.011629887580871582, 0.011601823806762696, 
0.011498496055603028, 0.011361536026000976, 0.011356703758239746, 0.011286175727844239, 0.011306976318359375, 0.011382207870483399, 0.01155571174621582, 0.011362367630004883, 0.011471936225891113, 0.011461376190185547, 0.011335743904113769, 0.011432064056396485, 0.011531904220581054, 0.011321727752685546, 0.011353983879089355, 0.011331520080566405, 0.01130515193939209, 0.011405311584472656, 0.011334815979003906, 0.01132153606414795, 0.011439007759094238, 0.011288607597351074, 0.011207776069641113, 0.011099072456359863, 0.011328831672668457, 0.01164735984802246, 0.011677696228027343, 0.012042207717895508, 0.011521599769592284, 0.01209596824645996, 0.011251711845397949, 0.01110540771484375, 0.01113491153717041, 0.011086400032043458, 0.011023776054382324, 0.011246560096740722, 0.011314944267272949, 0.011133184432983398, 0.011300864219665528, 0.011206751823425292, 0.011374879837036132, 0.011299679756164551, 0.01111734390258789, 0.011167743682861327, 0.011170880317687988, 0.011125823974609376, 0.011093215942382812, 0.011279168128967286]",tokens/s,87.50181426227068,, @@ -5457,7 +5457,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 52.12 MiB is free. Process 77762 has 14.69 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 108.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 52.12 MiB is free. Process 71231 has 14.69 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 108.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last):
@@ -5500,7 +5500,7 @@ ChildProcessError: Traceback (most recent call last):
self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs)
-torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 144.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 34.12 MiB is free. Process 148263 has 14.71 GiB memory in use. Of the allocated memory 14.59 GiB is allocated by PyTorch, and 1.69 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
+torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 144.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 34.12 MiB is free. Process 141627 has 14.71 GiB memory in use. Of the allocated memory 14.59 GiB is allocated by PyTorch, and 1.69 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,836.673536,4675.534848,0.0,4280.287232,4115.121152,s,1,7.6016826171875,7.6016826171875,0.0,7.6016826171875,7.6016826171875,7.6016826171875,7.6016826171875,[7.6016826171875],,kWh,1.0649561300040963e-05,1.1669500490514966e-06,4.535559183990734e-06,1.6352070533083194e-05,,MB,1141.620736,4981.71904,0.0,4575.985664,4408.408064,s,10,3.0703495788574213,0.3070349578857422,0.0031113508746365526,0.30735542297363283,0.30992298278808594,0.3105379837036133,0.3110299844360352,"[0.30563629150390625, 0.3078035583496094, 0.30978631591796874, 0.305809814453125, 0.31115298461914065, 0.3053193359375, 0.3092148132324219, 0.30690728759765623, 0.29955322265625, 0.30916595458984375]",tokens/s,833.7812793788322,kWh,9.109229113762307e-06,1.0042019392834388e-06,6.0174627264245135e-06,1.6130893779470258e-05,tokens/kWh,15870168.355197433,MB,1163.149312,4981.71904,0.0,4575.985664,4408.410624,s,10,15.703744506835939,1.570374450683594,0.010764770164829358,1.5709315795898438,1.5774057006835938,1.5861736145019532,1.5931879455566407,"[1.5635303955078126, 1.5949415283203126, 1.575457275390625, 1.567982177734375, 1.57343115234375, 1.5576832275390624, 1.5684320068359374, 1.5746732177734375, 1.5737548828125, 1.553858642578125]",tokens/s,40.117820289661296,kWh,4.525882735623948e-05,4.99226833244792e-06,2.9779704800174935e-05,8.003080048886235e-05,tokens/kWh,787196.9243737294,,s,630,15.701173805236804,0.024922498103550503,0.0004272402564130739,0.024874271392822264,0.02528845100402832,0.025565788555145262,0.026496753330230712,"[0.026102687835693358, 0.025196224212646483, 0.024997535705566405, 0.024463615417480468, 0.025049472808837892, 0.02510438346862793, 0.025030656814575194, 0.02476032066345215, 0.024825344085693358, 0.02540390396118164, 0.02489958381652832, 0.024928255081176756, 0.024862016677856445, 0.024836799621582032, 0.024797183990478516, 0.025020639419555665, 0.026484415054321288, 0.024778112411499024, 0.024988384246826173, 0.024788991928100586, 0.024749536514282227, 0.024975904464721678, 0.02467840003967285, 0.024720415115356446, 0.024675296783447265, 0.02478451156616211, 0.0245863037109375, 0.024650047302246094, 0.02476851272583008, 0.024532991409301756, 0.024600576400756836, 0.024489311218261717, 0.024676416397094728, 0.024684383392333985, 0.024685312271118164, 0.02476851272583008, 0.024755680084228515, 0.02483987236022949, 0.024685407638549806, 0.024723455429077147, 0.024803327560424804, 0.024874431610107422, 0.02490220832824707, 0.024626592636108398, 0.024721088409423827, 0.02501696014404297, 0.024639232635498047, 0.024637983322143554, 0.02453708839416504, 0.024753503799438477, 0.02453708839416504, 0.02462905693054199, 0.02465878486633301, 0.02449407958984375, 
0.024461311340332033, 0.02452889633178711, 0.024820831298828124, 0.024779680252075196, 0.024526687622070314, 0.024524192810058593, 0.0245533447265625, 0.024827775955200197, 0.025082176208496093, 0.025601760864257812, 0.025077600479125977, 0.025168256759643556, 0.025092096328735353, 0.025173759460449217, 0.025379072189331053, 0.02611199951171875, 0.02509823989868164, 0.024986976623535155, 0.026501792907714844, 0.025174016952514647, 0.02531283187866211, 0.025022911071777343, 0.02510643196105957, 0.025440256118774415, 0.025280511856079102, 0.025208831787109375, 0.02533990478515625, 0.025112031936645508, 0.025060928344726563, 0.02517705535888672, 0.025235679626464842, 0.026005279541015624, 0.025058752059936525, 0.02528108787536621, 0.02527846336364746, 0.025186239242553712, 0.025094207763671876, 0.025026559829711914, 0.025834720611572267, 0.028717248916625977, 0.025162080764770507, 0.025092063903808595, 0.025484895706176756, 0.02531808090209961, 0.025070720672607422, 0.025049983978271486, 0.02565555191040039, 0.024929376602172853, 0.025104320526123047, 0.02495382308959961, 0.025081600189208984, 0.02486800003051758, 0.02741744041442871, 0.025100160598754882, 0.025208255767822266, 0.025230016708374024, 0.02483404731750488, 0.02494259262084961, 0.02655561637878418, 0.025391584396362306, 0.025010496139526366, 0.024936447143554686, 0.025272319793701172, 0.025309183120727538, 0.0249487361907959, 0.024983903884887696, 0.025122079849243164, 0.02492185592651367, 0.02502038383483887, 0.024807231903076172, 0.024941408157348632, 0.024813568115234375, 0.02556159973144531, 0.024815807342529295, 0.025108480453491212, 0.02476201629638672, 0.024852832794189452, 0.025132352828979493, 0.025187007904052733, 0.02479859161376953, 0.02477324867248535, 0.024879104614257814, 0.02485043144226074, 0.024979455947875977, 0.0246778564453125, 0.024840736389160158, 0.024905920028686523, 0.02506528091430664, 0.024967231750488282, 0.02499772834777832, 0.025595935821533203, 0.02516543960571289, 0.025006528854370116, 0.024720800399780272, 0.024848352432250975, 0.02477120018005371, 0.024868864059448242, 0.02488115119934082, 0.024781984329223634, 0.02485536003112793, 0.024868896484375, 0.024856447219848633, 0.024764543533325196, 0.025228511810302733, 0.02533452796936035, 0.024896896362304688, 0.02492483139038086, 0.02471731185913086, 0.024895488739013674, 0.02493235206604004, 0.024669792175292967, 0.02498192024230957, 0.0248353271484375, 0.024867456436157228, 0.024909952163696288, 0.024764415740966796, 0.02496512031555176, 0.02529280090332031, 0.025018367767333984, 0.024927520751953126, 0.024864896774291993, 0.025127519607543947, 0.024861696243286133, 0.024695808410644532, 0.024676000595092774, 0.024912191390991212, 0.024989984512329103, 0.02495257568359375, 0.026462207794189452, 0.02704310417175293, 0.025580255508422852, 0.025200096130371094, 0.024760160446166992, 0.02485318374633789, 0.02488934326171875, 0.025630720138549806, 0.025101696014404297, 0.02508198356628418, 0.02480179214477539, 0.02466160011291504, 0.024838144302368165, 0.02480684852600098, 0.024687583923339845, 0.024453119277954103, 0.02474777603149414, 0.02509971237182617, 0.024995935440063476, 0.02474985694885254, 0.02482681655883789, 0.024737951278686523, 0.025045984268188475, 0.024750495910644533, 0.024869344711303712, 0.024811807632446288, 0.025155296325683595, 0.024747711181640625, 0.02469875144958496, 0.024842687606811523, 0.02551398468017578, 0.025205888748168946, 0.02493529510498047, 0.024768192291259764, 0.024743776321411132, 0.024828384399414063, 
0.024922111511230468, 0.02475212860107422, 0.0249051513671875, 0.0246625919342041, 0.024785919189453123, 0.024638463973999023, 0.024786815643310547, 0.025782400131225586, 0.025222591400146484, 0.024950944900512695, 0.02475200080871582, 0.02473628807067871, 0.025057279586791992, 0.024915136337280274, 0.024777536392211915, 0.02476995277404785, 0.024771167755126954, 0.02488528060913086, 0.024692703247070312, 0.02451251220703125, 0.025055007934570314, 0.025018592834472657, 0.02471651268005371, 0.024728160858154297, 0.024671903610229494, 0.024715808868408202, 0.02510643196105957, 0.024880607604980468, 0.025005983352661132, 0.02481558418273926, 0.02496169662475586, 0.024729600906372072, 0.024724863052368165, 0.025129600524902342, 0.025726688385009765, 0.025105791091918947, 0.02500495910644531, 0.024823808670043947, 0.024723295211791993, 0.024918176651000976, 0.025014272689819338, 0.02489753532409668, 0.024782848358154298, 0.02474809646606445, 0.024735679626464845, 0.024827903747558593, 0.02471900749206543, 0.024858879089355468, 0.02515567970275879, 0.024952831268310546, 0.024961023330688475, 0.02480931282043457, 0.02479283142089844, 0.025078176498413086, 0.024954879760742187, 0.024763904571533202, 0.02482431983947754, 0.024799232482910157, 0.025038175582885742, 0.024758495330810548, 0.02511644744873047, 0.026067615509033203, 0.025126911163330077, 0.025062976837158205, 0.024733951568603516, 0.025063583374023438, 0.02494063949584961, 0.02482975959777832, 0.024748159408569337, 0.02488096046447754, 0.024774848937988283, 0.024870336532592772, 0.024797760009765624, 0.024991743087768553, 0.025287967681884765, 0.02530748748779297, 0.024847808837890624, 0.02485958480834961, 0.02489952087402344, 0.02494441604614258, 0.024877344131469727, 0.02489753532409668, 0.0254748477935791, 0.024923648834228516, 0.024926464080810548, 0.02488368034362793, 0.025202592849731444, 0.025151584625244142, 0.024704191207885744, 0.025014591217041016, 0.02481203269958496, 0.025081439971923827, 0.025279232025146484, 0.025085248947143556, 0.024858976364135744, 0.02487411117553711, 0.025213823318481446, 0.025489887237548827, 0.02481564712524414, 0.024809663772583007, 0.02466377639770508, 0.02482614326477051, 0.024738815307617186, 0.024490400314331053, 0.024510879516601563, 0.024731840133666992, 0.02461033630371094, 0.02467238426208496, 0.024725727081298828, 0.024763967514038084, 0.02464614486694336, 0.02443199920654297, 0.024465375900268555, 0.024623327255249024, 0.024692960739135742, 0.024436511993408204, 0.024893951416015626, 0.02728246307373047, 0.02475926399230957, 0.02447100830078125, 0.024568447113037108, 0.02450979232788086, 0.024838720321655273, 0.024409311294555664, 0.02447849655151367, 0.024905439376831054, 0.025569215774536132, 0.024965471267700195, 0.024682079315185547, 0.02456220817565918, 0.024739072799682616, 0.024527488708496095, 0.024380672454833986, 0.025199359893798828, 0.024696832656860353, 0.02468659210205078, 0.02437443161010742, 0.024912736892700196, 0.02471673583984375, 0.024672672271728514, 0.024545024871826172, 0.024434431076049805, 0.024951007843017577, 0.02498147201538086, 0.024844768524169922, 0.02477414321899414, 0.02460723114013672, 0.024713279724121094, 0.024472864151000976, 0.024849056243896484, 0.02474630355834961, 0.02487468719482422, 0.024475616455078127, 0.0243917121887207, 0.024475648880004884, 0.024614431381225585, 0.02432044792175293, 0.024348352432250978, 0.024633695602416992, 0.024465055465698243, 0.02547884750366211, 0.024681856155395508, 0.02468118476867676, 0.024413759231567383, 
0.024253215789794922, 0.025208671569824218, 0.024379392623901368, 0.02430735969543457, 0.02432035255432129, 0.02427449607849121, 0.024344255447387695, 0.02431667137145996, 0.02409267234802246, 0.02410851287841797, 0.024443424224853516, 0.02451878356933594, 0.02455779266357422, 0.02455311965942383, 0.024497600555419923, 0.02435744094848633, 0.024377344131469726, 0.0245166072845459, 0.02454732894897461, 0.024450624465942383, 0.024590784072875977, 0.02469478416442871, 0.024866559982299804, 0.024920320510864256, 0.025042943954467774, 0.02532307243347168, 0.025065919876098634, 0.02529859161376953, 0.025080160140991212, 0.025182207107543944, 0.02574131202697754, 0.025229312896728515, 0.024907039642333983, 0.025221855163574218, 0.02509414482116699, 0.025332927703857422, 0.02503763198852539, 0.0252969913482666, 0.025105663299560547, 0.025259807586669923, 0.025178720474243164, 0.02539507293701172, 0.025026208877563478, 0.02499580764770508, 0.025058080673217773, 0.02500806427001953, 0.025158815383911133, 0.024957056045532226, 0.024912448883056642, 0.025775711059570314, 0.02559449577331543, 0.02534604835510254, 0.025188352584838865, 0.02511257553100586, 0.024952192306518555, 0.02512249565124512, 0.024984384536743166, 0.02547110366821289, 0.02495078468322754, 0.02573311996459961, 0.02510438346862793, 0.024982816696166994, 0.025018304824829102, 0.0251297607421875, 0.024991743087768553, 0.024993791580200195, 0.02484809684753418, 0.024926496505737306, 0.02512009620666504, 0.024891040802001954, 0.024787967681884765, 0.02516377639770508, 0.025157312393188476, 0.02498796844482422, 0.025059328079223633, 0.02479497528076172, 0.025075872421264647, 0.024937952041625976, 0.025044607162475585, 0.02483228874206543, 0.02491827201843262, 0.024852319717407225, 0.024936479568481447, 0.024752639770507814, 0.02457747268676758, 0.02515385627746582, 0.025128351211547852, 0.024994848251342773, 0.02500966453552246, 0.024864288330078126, 0.025039264678955078, 0.02497747230529785, 0.02494211196899414, 0.024867231369018555, 0.024985984802246095, 0.024811103820800783, 0.02484003257751465, 0.024891807556152345, 0.02502467155456543, 0.025081823348999024, 0.025304895401000976, 0.024956863403320314, 0.024762655258178713, 0.02482713508605957, 0.024888032913208007, 0.024864927291870117, 0.02474380874633789, 0.02486262321472168, 0.025010271072387694, 0.02507161521911621, 0.024821760177612305, 0.024944032669067383, 0.02484694480895996, 0.02502182388305664, 0.025830015182495118, 0.02495692825317383, 0.025053152084350584, 0.025466911315917967, 0.02503987121582031, 0.024826400756835936, 0.02504038429260254, 0.025039392471313475, 0.025901952743530274, 0.025040319442749023, 0.02484486389160156, 0.0249467830657959, 0.024981407165527342, 0.02522915267944336, 0.027841856002807617, 0.025258848190307617, 0.025022464752197264, 0.025313247680664064, 0.024809503555297853, 0.02530860710144043, 0.024846912384033203, 0.024901216506958007, 0.024909568786621095, 0.024836767196655275, 0.0247127685546875, 0.024854976654052733, 0.0248090877532959, 0.024789375305175783, 0.02532099151611328, 0.025024991989135742, 0.02510438346862793, 0.02495692825317383, 0.02495078468322754, 0.024860448837280273, 0.024867040634155273, 0.02503887939453125, 0.024852447509765625, 0.02490096092224121, 0.02482441520690918, 0.024846399307250976, 0.024912927627563478, 0.024914016723632814, 0.025157983779907227, 0.025118976593017577, 0.024872928619384765, 0.024856895446777345, 0.025040447235107424, 0.024844736099243165, 0.02526518440246582, 0.025076416015625, 0.024860416412353516, 
0.024975168228149415, 0.02530352020263672, 0.024951040267944338, 0.024993791580200195, 0.02516713523864746, 0.024832735061645506, 0.024532991409301756, 0.024606016159057616, 0.024582048416137696, 0.024684576034545897, 0.024572479248046876, 0.024503936767578127, 0.024566335678100584, 0.025038848876953124, 0.024696352005004883, 0.02490825653076172, 0.024750080108642578, 0.024944639205932616, 0.024710559844970705, 0.0245827522277832, 0.02560406494140625, 0.0246343994140625, 0.024334592819213866, 0.024294111251831056, 0.024363008499145508, 0.024772287368774414, 0.02504025650024414, 0.02467715263366699, 0.025089311599731445, 0.024744831085205077, 0.024397823333740236, 0.024467552185058594, 0.024422592163085937, 0.024391359329223632, 0.024465248107910155, 0.02428767967224121, 0.02412928009033203, 0.024260608673095704, 0.024224767684936522, 0.02447257614135742, 0.024227840423583984, 0.024276992797851563, 0.024223743438720705, 0.02434662437438965, 0.024319616317749024, 0.024489471435546875, 0.024296319961547852, 0.024321792602539062, 0.024270368576049806, 0.024369407653808593, 0.024230367660522462, 0.024231071472167968, 0.024239999771118164, 0.024346879959106445, 0.024429279327392577, 0.024449024200439453, 0.02481155204772949, 0.024452896118164064, 0.02439151954650879, 0.024455360412597656, 0.02438159942626953, 0.025869983673095703, 0.02511907196044922, 0.024579904556274415, 0.024358495712280274, 0.024795743942260744, 0.024793088912963866, 0.024440832138061523, 0.024411455154418945, 0.02466217613220215, 0.02484217643737793, 0.024576608657836913, 0.024827327728271485, 0.02606867218017578, 0.026467199325561522, 0.02553152084350586, 0.025037120819091797, 0.024994144439697264, 0.025038656234741212, 0.025051136016845704, 0.025047391891479493, 0.025286720275878905, 0.02515558433532715]",tokens/s,40.12438864856562,, @@ -5544,7 +5544,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 258.12 MiB is free. Process 95935 has 14.49 GiB memory in use. Of the allocated memory 14.32 GiB is allocated by PyTorch, and 53.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 258.12 MiB is free. Process 89440 has 14.49 GiB memory in use. Of the allocated memory 14.32 GiB is allocated by PyTorch, and 53.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last):
@@ -5667,7 +5667,7 @@ ChildProcessError: Traceback (most recent call last):
self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs)
-torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 162.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 120.12 MiB is free. Process 136402 has 14.62 GiB memory in use. Of the allocated memory 14.51 GiB is allocated by PyTorch, and 2.29 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
+torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 162.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 120.12 MiB is free. Process 130092 has 14.62 GiB memory in use. Of the allocated memory 14.51 GiB is allocated by PyTorch, and 2.29 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1035.137024,10975.379456,0.0,10580.13184,10162.029568,s,1,11.9223359375,11.9223359375,0.0,11.9223359375,11.9223359375,11.9223359375,11.9223359375,[11.9223359375],,kWh,7.199695445816208e-06,7.769629309695722e-07,3.88500310800205e-06,1.186166148478783e-05,,MB,1304.940544,11094.91712,0.0,10689.183744,9358.065152,s,10,8.74055206298828,0.874055206298828,0.0030499145467565874,0.8741604309082032,0.8765967407226563,0.8782952270507812,0.8796540161132812,"[0.8684814453125, 0.8739532470703125, 0.8738178100585937, 0.8712845458984375, 0.8759263916015625, 0.871088623046875, 0.8799937133789062, 0.8754193725585937, 0.8743676147460937, 0.8762192993164063]",tokens/s,292.88767820974107,kWh,2.5562439670831813e-05,2.8187286797481014e-06,1.693825429133269e-05,4.5319422641912595e-05,tokens/kWh,5648792.175106937,MB,1330.46272,11094.91712,0.0,10689.183744,9397.6704,s,10,30.260364257812498,3.0260364257812498,0.006906549708564813,3.0271842041015624,3.032440185546875,3.0346143798828127,3.0363537353515624,"[3.017893310546875, 3.017557373046875, 3.01461865234375, 3.02799169921875, 3.02494384765625, 3.03678857421875, 3.03181982421875, 3.030417236328125, 3.026376708984375, 3.03195703125]",tokens/s,20.81931316597913,kWh,8.862553406583476e-05,9.774984543296727e-06,5.896827865606729e-05,0.00015736879726519878,tokens/kWh,400333.491103907,,s,630,30.256514354705796,0.048026213261437786,0.00030421240737976484,0.047994447708129884,0.04833654632568359,0.04852303009033203,0.049130749359130865,"[0.04891849517822266, 0.04798441696166992, 0.04778211212158203, 0.0475681266784668, 0.04751385498046875, 0.04756710433959961, 0.04762361526489258, 0.04757183837890625, 0.04763846588134766, 0.04765695953369141, 0.047582942962646486, 0.04776700973510742, 0.04768851089477539, 0.047900543212890626, 0.04771184158325195, 0.047624736785888674, 0.048045406341552736, 0.04780303955078125, 0.04794582366943359, 0.047857566833496096, 0.04783718490600586, 0.048133983612060546, 0.04780252838134766, 0.04798659133911133, 0.047715713500976566, 0.0477949104309082, 0.047590656280517576, 0.047827518463134766, 0.04771648025512695, 0.048003135681152345, 0.04777337646484375, 0.04888739013671875, 0.04814102554321289, 0.04781260681152344, 0.04777983856201172, 0.04798668670654297, 0.0477388801574707, 0.047882240295410154, 0.04790000152587891, 0.04789465713500977, 0.047884990692138675, 0.047994239807128906, 0.04806243133544922, 0.04896979141235352, 0.04839059066772461, 0.04791910552978516, 0.047890430450439454, 0.047652126312255856, 0.04801545715332031, 0.047982814788818356, 0.04804003143310547, 0.04805372619628906, 0.04786057662963867, 0.047751167297363284, 0.04800662231445312, 0.04789072036743164, 
0.047798526763916015, 0.047634078979492185, 0.04788054275512695, 0.048056320190429686, 0.04799465560913086, 0.04797872161865235, 0.04788947296142578, 0.048936767578125, 0.04811139297485351, 0.04753654479980469, 0.047512958526611325, 0.04758156967163086, 0.047743198394775394, 0.04771023941040039, 0.04762003326416016, 0.04752297592163086, 0.04779100799560547, 0.04794367980957031, 0.04768767929077149, 0.04777308654785156, 0.04773129653930664, 0.048920574188232424, 0.047726593017578124, 0.04777983856201172, 0.047850784301757814, 0.047559391021728514, 0.04774092864990234, 0.04802969741821289, 0.04813820648193359, 0.04767951965332031, 0.0477388801574707, 0.04767334365844727, 0.047876094818115236, 0.04770406341552735, 0.04779008102416992, 0.048353279113769534, 0.04785587310791015, 0.047701663970947265, 0.04795097732543945, 0.04820256042480469, 0.04795929718017578, 0.04787907028198242, 0.047898624420166014, 0.04792444610595703, 0.04791542434692383, 0.04778140640258789, 0.04795068740844727, 0.04781603240966797, 0.04797507095336914, 0.0481927375793457, 0.047817665100097655, 0.04779401779174805, 0.047833087921142575, 0.04791296005249023, 0.04793139266967773, 0.047967647552490236, 0.047931999206542966, 0.04765081787109375, 0.04801945495605469, 0.04796211242675781, 0.04798831939697266, 0.04779459381103516, 0.04808415985107422, 0.04788716888427735, 0.047923198699951174, 0.04799871826171875, 0.047846881866455075, 0.047991134643554687, 0.04814393615722656, 0.047966911315917966, 0.048801631927490235, 0.04803379058837891, 0.04772975921630859, 0.047713184356689455, 0.04758323287963867, 0.04768153762817383, 0.04747673416137695, 0.047621150970458985, 0.04745929718017578, 0.04759961700439453, 0.04760543823242187, 0.04747043228149414, 0.04771683120727539, 0.048107521057128906, 0.04774256134033203, 0.04758774566650391, 0.04754431915283203, 0.04755580902099609, 0.047473438262939455, 0.04771558380126953, 0.047741439819335936, 0.04776716613769531, 0.04753062438964844, 0.04785712051391602, 0.04784592056274414, 0.047688831329345704, 0.0476864013671875, 0.04777791976928711, 0.04782889556884766, 0.047564640045166015, 0.04756006240844726, 0.04754496002197266, 0.04815238571166992, 0.0498221435546875, 0.04794572830200195, 0.0478699836730957, 0.04760367965698242, 0.04799897766113281, 0.04766515350341797, 0.047816703796386716, 0.04779417419433594, 0.047851520538330077, 0.047662334442138674, 0.04828448104858398, 0.047881919860839846, 0.04785177612304688, 0.04785971069335938, 0.04778406524658203, 0.047932735443115236, 0.04801923370361328, 0.04779292678833008, 0.047922977447509764, 0.0479664306640625, 0.04776937484741211, 0.04810720062255859, 0.04807939147949219, 0.04789209747314453, 0.04802803039550781, 0.048024574279785154, 0.048021663665771486, 0.04806934356689453, 0.04809036636352539, 0.04808793640136719, 0.048917152404785155, 0.04823859024047852, 0.04758937454223633, 0.0478532485961914, 0.04785120010375977, 0.04803417587280273, 0.04773455810546875, 0.04761183929443359, 0.047882110595703124, 0.048399009704589845, 0.04778927993774414, 0.047877185821533205, 0.04773638534545899, 0.047953983306884766, 0.04765910339355469, 0.04790393447875976, 0.04758201599121094, 0.0478474235534668, 0.047777793884277345, 0.048166526794433596, 0.04821347045898437, 0.048218879699707035, 0.047947902679443356, 0.047965248107910155, 0.04796460723876953, 0.04808902359008789, 0.0480846061706543, 0.04782783889770508, 0.048154495239257813, 0.0478394546508789, 0.04801289749145508, 0.047990398406982424, 0.047859584808349606, 0.047952224731445316, 
0.04782735824584961, 0.04813020706176758, 0.048027393341064456, 0.04833715057373047, 0.04856614303588867, 0.047978271484375, 0.0481099853515625, 0.047992767333984374, 0.04799283218383789, 0.04799897766113281, 0.04801740646362305, 0.04804636764526367, 0.0483521614074707, 0.048021793365478516, 0.04803142547607422, 0.04805718231201172, 0.04789657592773437, 0.048130046844482424, 0.04825702285766602, 0.048174816131591795, 0.04811932754516601, 0.04817081451416016, 0.04812486267089844, 0.048418815612792966, 0.04820751953125, 0.04812243270874023, 0.04925212860107422, 0.04845977783203125, 0.04827660751342774, 0.04901744079589844, 0.048205249786376955, 0.0476717758178711, 0.047867904663085936, 0.047661056518554686, 0.047615550994873045, 0.04756649780273438, 0.04786460876464844, 0.047884288787841796, 0.047855072021484375, 0.047921791076660156, 0.04789238357543945, 0.04797644805908203, 0.04792835235595703, 0.0478873291015625, 0.04783420944213867, 0.04763536071777344, 0.04792729568481445, 0.04792934417724609, 0.048901729583740235, 0.0485601921081543, 0.048173408508300784, 0.04826931381225586, 0.04796982574462891, 0.047765888214111325, 0.04800931167602539, 0.04767129516601563, 0.047935489654541016, 0.047933441162109375, 0.04797340774536133, 0.04808393478393555, 0.04780803298950195, 0.04801993560791016, 0.047933441162109375, 0.04788864135742187, 0.04811750411987305, 0.04812799835205078, 0.04786742401123047, 0.04802608108520508, 0.04793958282470703, 0.048096702575683596, 0.048000896453857425, 0.047962783813476566, 0.04815372848510742, 0.04791321563720703, 0.04803855895996094, 0.04801923370361328, 0.047943809509277346, 0.04798883056640625, 0.04814137649536133, 0.04801836776733399, 0.04812799835205078, 0.04803379058837891, 0.047890430450439454, 0.04809891128540039, 0.04806492614746094, 0.04824448013305664, 0.048197856903076174, 0.04804816055297852, 0.04813955307006836, 0.04803247833251953, 0.04819968032836914, 0.048121856689453124, 0.049148223876953126, 0.048205825805664064, 0.047836318969726566, 0.04775203323364258, 0.0478267822265625, 0.04773289489746094, 0.0479268798828125, 0.047954334259033206, 0.04792729568481445, 0.0481743049621582, 0.047964897155761715, 0.04796627044677734, 0.04816617584228516, 0.048056896209716794, 0.04815068817138672, 0.04811907196044922, 0.047909343719482425, 0.048236801147460935, 0.047967521667480466, 0.04825980758666992, 0.0482242546081543, 0.0481607666015625, 0.04800102233886719, 0.04791676712036133, 0.04784912109375, 0.04829248046875, 0.04843110275268555, 0.04815052795410156, 0.047981983184814454, 0.048097217559814456, 0.04824044799804687, 0.04806268692016601, 0.048005950927734374, 0.048153472900390626, 0.048202529907226566, 0.048342239379882815, 0.04815353775024414, 0.04839424133300781, 0.048311489105224606, 0.04827628707885742, 0.048527359008789066, 0.048353374481201174, 0.04813737487792969, 0.04829436874389648, 0.048160224914550784, 0.048261470794677734, 0.04810316848754883, 0.04806291198730469, 0.048009185791015624, 0.04827791976928711, 0.048256256103515624, 0.0482966079711914, 0.048244735717773435, 0.048261119842529294, 0.04845363235473633, 0.048611328125, 0.048814079284667966, 0.04883251190185547, 0.04831436920166016, 0.04836966323852539, 0.04862774276733398, 0.04838396835327148, 0.04817667388916016, 0.04935887908935547, 0.048524513244628906, 0.0486506233215332, 0.048002975463867184, 0.04809571075439453, 0.047783935546875, 0.047833087921142575, 0.047736831665039066, 0.047818016052246094, 0.04787606430053711, 0.047971073150634765, 0.04782284927368164, 0.04803529739379883, 
0.04801385498046875, 0.04788019180297851, 0.048057376861572264, 0.04794214248657227, 0.04796163177490234, 0.04788729476928711, 0.04818329620361328, 0.048078529357910155, 0.04815699386596679, 0.04809231948852539, 0.04797552108764649, 0.04908796691894531, 0.04814470291137695, 0.04790678405761719, 0.04806390380859375, 0.04788489532470703, 0.047939521789550785, 0.0478331527709961, 0.04804822540283203, 0.048072608947753906, 0.048092193603515625, 0.04789145660400391, 0.04784950256347656, 0.04794262313842773, 0.04797539138793945, 0.04799235153198242, 0.04861385726928711, 0.0481525764465332, 0.04844553756713867, 0.04853359985351562, 0.04810895919799805, 0.048029376983642576, 0.048317054748535156, 0.04823459243774414, 0.048107521057128906, 0.04793974304199219, 0.04814937591552734, 0.048161758422851565, 0.048213630676269534, 0.048038272857666015, 0.04825702285766602, 0.04796575927734375, 0.04796051025390625, 0.04825497436523438, 0.048325984954833985, 0.04799910354614258, 0.048140640258789065, 0.048250686645507815, 0.04824496078491211, 0.048468128204345706, 0.04905292892456055, 0.04824671936035156, 0.048029823303222655, 0.0479667854309082, 0.04791843032836914, 0.0477949104309082, 0.04793142318725586, 0.04793753433227539, 0.04820560073852539, 0.04796847915649414, 0.047912639617919923, 0.04797062301635742, 0.04787152099609375, 0.048019073486328126, 0.048096096038818356, 0.047946975708007815, 0.04772534561157227, 0.04820572662353516, 0.047734878540039063, 0.04808499145507812, 0.048322528839111326, 0.04835023880004883, 0.04815564727783203, 0.04824169540405274, 0.047807456970214844, 0.04823859024047852, 0.047925247192382815, 0.04808086395263672, 0.047962142944335935, 0.04820377731323242, 0.047998046875, 0.04795452880859375, 0.04788665771484375, 0.048078014373779294, 0.04798486328125, 0.04789670562744141, 0.047702495574951174, 0.04799283218383789, 0.04806860733032227, 0.047908863067626956, 0.04800710296630859, 0.04842313766479492, 0.048080734252929684, 0.04803763198852539, 0.0502110710144043, 0.048091136932373046, 0.04792272186279297, 0.048056800842285155, 0.04823040008544922, 0.048115711212158206, 0.04801891326904297, 0.048108062744140624, 0.0480008316040039, 0.04805855941772461, 0.04802764892578125, 0.048227424621582034, 0.04800198364257813, 0.04799484634399414, 0.04805980682373047, 0.048257568359375, 0.04804364776611328, 0.048159168243408206, 0.048510974884033206, 0.04917436981201172, 0.048175998687744144, 0.04767465591430664, 0.047561439514160156, 0.04771635055541992, 0.0476956787109375, 0.04784352111816406, 0.04875263977050781, 0.047711872100830076, 0.047798656463623045, 0.04791910552978516, 0.04774092864990234, 0.04790476989746094, 0.04793753433227539, 0.04785091018676758, 0.04792486572265625, 0.04782179260253906, 0.04760780715942383, 0.04834099197387695, 0.04852121734619141, 0.04793660736083984, 0.04809328079223633, 0.0479136962890625, 0.04776121520996094, 0.047925537109375, 0.04816486358642578, 0.047742462158203124, 0.0477209587097168, 0.047876094818115236, 0.04782489776611328, 0.04789443206787109, 0.047914913177490234, 0.047556224822998046, 0.04804169464111328, 0.04801385498046875, 0.04795129776000977, 0.047954784393310544, 0.0479595832824707, 0.047621761322021484, 0.04811407852172851, 0.04804191970825195, 0.04801567840576172, 0.04801087951660156, 0.04833647918701172, 0.048053249359130856, 0.04817919921875, 0.0482344970703125, 0.04918044662475586, 0.048311614990234376, 0.048177345275878906, 0.048024158477783206, 0.04823871994018555, 0.048138240814208984, 0.04818739318847656, 0.048105472564697264, 
0.04814451217651367, 0.048347007751464846, 0.048137313842773435, 0.0480307502746582, 0.048094303131103515, 0.04804307174682617, 0.048223968505859374, 0.04807238388061524, 0.04890236663818359, 0.04805785751342773, 0.04768928146362305, 0.04806729507446289, 0.04760400009155273, 0.047767551422119144, 0.048347137451171876, 0.04781238555908203, 0.04766326522827148, 0.04795808029174805, 0.04799478530883789, 0.0480313606262207, 0.04850735855102539, 0.047923198699951174, 0.04818739318847656, 0.04799270248413086, 0.047782142639160155, 0.048108928680419924, 0.04811196899414062, 0.04793487930297852, 0.048331775665283204, 0.048468929290771484, 0.04821894454956055, 0.04793324661254883, 0.04802163314819336, 0.048240703582763673, 0.04807475280761719, 0.04819148635864258, 0.04782470321655274, 0.04802988815307617, 0.04796364974975586, 0.048083553314208986, 0.048076160430908205, 0.048347679138183594, 0.04788188934326172, 0.04817750549316406, 0.048162815093994144, 0.04819148635864258, 0.04833280181884766, 0.04834415817260742, 0.04809532928466797, 0.04803462219238281, 0.048035072326660155, 0.04814720153808594, 0.04826012802124023, 0.04834336090087891, 0.048091808319091794, 0.04797257614135742, 0.04807190322875977, 0.048035808563232425, 0.047981151580810545, 0.048082782745361326, 0.047945758819580075, 0.04811750411987305, 0.04816099166870117, 0.04817318344116211, 0.04834243011474609, 0.04833135986328125, 0.04816444778442383, 0.048323009490966795, 0.04841062545776367, 0.048578048706054686, 0.04847244644165039]",tokens/s,20.821962259575876,, @@ -5711,7 +5711,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 24.12 MiB is free. Process 115843 has 14.71 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 141.44 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 24.12 MiB is free. Process 109382 has 14.71 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 141.44 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,837.378048,14689.435648,0.0,14294.188032,14284.158464,s,1,7.67521337890625,7.67521337890625,0.0,7.67521337890625,7.67521337890625,7.67521337890625,7.67521337890625,[7.67521337890625],,kWh,1.4913201391709664e-05,1.6374225206308957e-06,7.240561347987562e-06,2.379118526032812e-05,,MB,1108.60288,14993.522688,0.0,14587.789312,14512.892416,s,10,13.831203735351563,1.3831203735351563,0.005871257155461055,1.3818245849609374,1.391693359375,1.3923574829101562,1.3928887817382811,"[1.3721275634765624, 1.3819266357421875, 1.3801014404296874, 1.3817225341796875, 1.37926953125, 1.3808548583984375, 1.3878416748046876, 1.3827921142578126, 1.3915457763671875, 1.3930216064453125]",tokens/s,185.08873479007644,kWh,4.046693066208491e-05,4.4630393216569606e-06,2.6789104764601834e-05,7.17190747483437e-05,tokens/kWh,3569482.747766655,MB,1123.889152,15098.380288,0.0,14692.646912,14646.153216,s,10,43.86718408203125,4.386718408203125,0.0031396886923479163,4.385417236328125,4.38997939453125,4.391894970703125,4.3934274316406245,"[4.3893564453125, 4.383873046875, 4.38734375, 4.38417919921875, 4.38458154296875, 4.38401171875, 4.38422119140625, 4.3862529296875, 4.3895537109375, 4.393810546875]",tokens/s,14.36153273075166,kWh,0.000128290075229163,1.4151362859655802e-05,8.529765157139767e-05,0.00022773908966021648,tokens/kWh,276632.3519339395,,s,630,43.86357635498046,0.06962472437298486,0.0003836243782883348,0.06962299346923828,0.06999763946533202,0.0701092010498047,0.07131899414062501,"[0.07148953247070312, 0.06933856201171874, 0.06915724945068359, 0.06906285095214844, 0.06904985809326172, 0.06947686767578125, 0.06943856048583984, 0.06923971557617188, 0.06963814544677735, 0.06922444915771485, 0.06935346984863282, 0.06929730987548828, 0.06950313568115235, 0.06970639801025391, 0.06972013092041016, 0.06969136047363281, 0.06957833862304688, 0.06917491149902344, 0.069153564453125, 0.06951094055175781, 0.06960326385498047, 0.06940262603759766, 0.06939826965332031, 0.06927823638916016, 0.06958080291748046, 0.06958284759521484, 0.06937728118896484, 0.06945049285888671, 0.0696258544921875, 0.0697630386352539, 0.06975081634521485, 0.06960521697998047, 0.06968335723876953, 0.0696975326538086, 0.06953369903564453, 0.07010099029541016, 0.069984130859375, 0.06956633758544922, 0.069604736328125, 0.06987436676025391, 0.06936790466308594, 0.0693853759765625, 0.06999468994140624, 0.06988992309570312, 0.06990275573730469, 0.07011484527587891, 0.07140643310546875, 0.06964166259765625, 0.06981903839111328, 0.06985507202148437, 0.06988339233398437, 0.06975350189208984, 0.06977913665771485, 0.0696671371459961, 0.0697891845703125, 0.06981683349609374, 0.06974435424804687, 
0.06986959838867188, 0.06980226898193359, 0.06995555114746094, 0.06989209747314454, 0.06975667572021485, 0.06962217712402344, 0.07135222625732422, 0.06936370849609375, 0.06899712371826172, 0.06900297546386719, 0.06910348510742187, 0.06912380981445312, 0.06887699127197265, 0.06901145935058593, 0.069146240234375, 0.06937580871582032, 0.06944620513916015, 0.06926335906982421, 0.06919071960449219, 0.06935561370849609, 0.06974345397949219, 0.06968038177490235, 0.06923545837402344, 0.06927696228027344, 0.06910435485839844, 0.06937190246582031, 0.06935247802734375, 0.06953398132324219, 0.06913504028320312, 0.06951692962646484, 0.06967539215087891, 0.06966585540771485, 0.06957766723632812, 0.06952140808105468, 0.06944153594970703, 0.06963552093505859, 0.06984665679931641, 0.06970841979980469, 0.06940499114990234, 0.06934912109375, 0.06944576263427735, 0.06943142700195312, 0.06958451080322266, 0.06940300750732421, 0.06932275390625, 0.069570556640625, 0.06950819396972656, 0.06963292694091797, 0.06971596527099609, 0.06979366302490235, 0.06987789154052734, 0.069914306640625, 0.06977158355712891, 0.06962947082519531, 0.06972259521484375, 0.06952345275878906, 0.06967295837402344, 0.06967478179931641, 0.06976051330566406, 0.06990643310546875, 0.07005027008056641, 0.06974899291992187, 0.06984422302246093, 0.07026262664794922, 0.0700560302734375, 0.07003014373779297, 0.07040204620361327, 0.06994944000244141, 0.06991667175292969, 0.07123763275146484, 0.06943475341796874, 0.06907564544677734, 0.06919366455078126, 0.06899251556396484, 0.06901401519775391, 0.0690847396850586, 0.06907129669189453, 0.06923209381103515, 0.06926595306396484, 0.06939647674560546, 0.06921398162841796, 0.06941907501220704, 0.06978166198730469, 0.07022502136230468, 0.06957965087890625, 0.06956031799316406, 0.06957875061035156, 0.0694824981689453, 0.06914662170410156, 0.06923878479003906, 0.0692462387084961, 0.06963228607177735, 0.06935750579833984, 0.07023056030273438, 0.06960739135742187, 0.06967622375488282, 0.06983763122558594, 0.06990348815917968, 0.06985552215576171, 0.0697534408569336, 0.06964019012451172, 0.06963404846191407, 0.069316162109375, 0.06940624237060547, 0.06959401702880859, 0.06942924499511718, 0.06940672302246094, 0.06938540649414063, 0.06959801483154297, 0.0694988784790039, 0.06971548461914062, 0.06986799621582031, 0.0699669418334961, 0.06987257385253906, 0.069846435546875, 0.06980172729492187, 0.06991545867919922, 0.06973849487304687, 0.0696627197265625, 0.06974771118164062, 0.0695367660522461, 0.06954179382324219, 0.0698345947265625, 0.06991487884521484, 0.0698490219116211, 0.07006623840332031, 0.0699901123046875, 0.07022783660888672, 0.06992694091796875, 0.07000511932373046, 0.06994287872314453, 0.06979145812988281, 0.07158169555664062, 0.0693616943359375, 0.06920598602294922, 0.06920396423339843, 0.06907904052734375, 0.06913433837890624, 0.069123779296875, 0.0690643539428711, 0.06907766723632812, 0.06913200378417969, 0.06921449279785157, 0.06922022247314454, 0.06925865936279296, 0.06963385772705079, 0.06965862274169922, 0.06957711791992187, 0.06964889526367188, 0.06946406555175781, 0.06913228607177735, 0.06930786895751953, 0.06925965118408203, 0.06913606262207031, 0.06927613067626953, 0.0692674560546875, 0.06921401977539063, 0.06956050872802734, 0.06967910766601562, 0.06985731506347656, 0.06983881378173828, 0.06957846069335938, 0.07005983734130859, 0.06970825958251953, 0.06968112182617188, 0.06942912292480469, 0.0693814697265625, 0.06960006713867188, 0.06935266876220703, 0.06939523315429688, 
0.07006963348388671, 0.06970406341552735, 0.06964864349365234, 0.06967091369628907, 0.06994310760498047, 0.06971206665039062, 0.0698502426147461, 0.06966675567626954, 0.06977632141113281, 0.06981427001953125, 0.06956963348388671, 0.06976092529296875, 0.06970265960693359, 0.06981807708740234, 0.06962579345703125, 0.06952339172363281, 0.06967132568359374, 0.06972402954101563, 0.0699024658203125, 0.06981836700439453, 0.06987558746337891, 0.0698694076538086, 0.07016067504882813, 0.06986547088623046, 0.0697092514038086, 0.07101286315917969, 0.06932505798339844, 0.06958454132080077, 0.06906095886230469, 0.06921193695068359, 0.0694151382446289, 0.06912409973144532, 0.06899468994140626, 0.06902742767333984, 0.06910975646972656, 0.06946431732177734, 0.06959926605224609, 0.06925772857666015, 0.069615234375, 0.06980032348632813, 0.069644287109375, 0.06918527984619141, 0.06913459014892578, 0.06914457702636718, 0.06926239776611329, 0.06924143981933593, 0.06921660614013672, 0.06917027282714844, 0.06918646240234375, 0.06943949127197266, 0.0693446044921875, 0.06938848114013672, 0.06991209411621094, 0.06993196868896484, 0.06977651214599609, 0.06958684539794922, 0.06940499114990234, 0.06962246704101563, 0.06975389099121093, 0.06944242858886719, 0.06974470520019531, 0.06949478149414062, 0.0693511962890625, 0.06948681640625, 0.06940643310546875, 0.06951350402832031, 0.06969139099121094, 0.07001910400390625, 0.06987158203125, 0.06981171417236329, 0.06973900604248047, 0.06990227508544922, 0.0697242202758789, 0.06982450866699219, 0.06977065277099609, 0.06951382446289063, 0.06979379272460938, 0.0696844482421875, 0.06960822296142578, 0.0696627197265625, 0.06997401428222656, 0.07002835083007812, 0.07021231842041016, 0.06982383728027344, 0.06993106842041015, 0.07039046478271484, 0.0699920654296875, 0.0699148178100586, 0.07136966705322266, 0.06942924499511718, 0.06906060791015625, 0.06910361480712891, 0.06933229064941407, 0.06928864288330078, 0.06911385345458984, 0.06926950073242187, 0.0690268783569336, 0.06917113494873046, 0.06912716674804688, 0.06924214172363281, 0.06921062469482422, 0.06965443420410156, 0.06963641357421875, 0.06942896270751953, 0.06951760101318359, 0.06914662170410156, 0.06940057373046875, 0.06948770904541016, 0.06956256103515625, 0.06950166320800781, 0.069168701171875, 0.0692040023803711, 0.06946173095703125, 0.06963629150390625, 0.06932077026367188, 0.06954000091552734, 0.06968144226074219, 0.06971139526367187, 0.06969391632080078, 0.06949874877929688, 0.06948876953125, 0.06937737274169922, 0.06966521453857422, 0.06973190307617187, 0.06942991638183593, 0.06932473754882812, 0.06936991882324219, 0.06970687866210938, 0.06935142517089844, 0.06942604827880859, 0.0697343978881836, 0.06979763031005859, 0.06985343933105469, 0.06974259185791015, 0.06971600341796876, 0.06965245056152344, 0.06945331573486328, 0.06983238220214844, 0.06994412994384766, 0.06974022674560547, 0.06960364532470703, 0.07042864227294922, 0.0698120346069336, 0.06976448059082031, 0.0698499526977539, 0.0700211181640625, 0.06990233612060547, 0.06990636444091797, 0.06999660491943359, 0.070076416015625, 0.06994944000244141, 0.07112908935546874, 0.06944153594970703, 0.06910771179199218, 0.06920396423339843, 0.06904994964599609, 0.06911138916015624, 0.06896636962890625, 0.06906124877929687, 0.06908902740478516, 0.06913686370849609, 0.06932479858398438, 0.06919891357421876, 0.06932572937011719, 0.06940879821777343, 0.06940467071533203, 0.06931635284423829, 0.06919602966308594, 0.06920191955566406, 0.06977519989013672, 0.06944579315185546, 
0.06919782257080079, 0.06916268920898437, 0.069281982421875, 0.06962351989746093, 0.06941903686523437, 0.0693905258178711, 0.06978169250488281, 0.06978678131103516, 0.06952582550048828, 0.06979843139648438, 0.06958258819580078, 0.06958220672607422, 0.06942400360107422, 0.06928793334960938, 0.06970982360839843, 0.06955964660644531, 0.06970774078369141, 0.0696951675415039, 0.07006060791015625, 0.06964268493652344, 0.06992870330810547, 0.06993536376953124, 0.06962588500976563, 0.06974848175048828, 0.06974281311035156, 0.069930908203125, 0.06965257263183594, 0.06965042877197265, 0.06936083221435548, 0.06958573150634766, 0.06957164764404297, 0.06999750518798828, 0.06988777923583984, 0.06984111785888672, 0.06988582611083985, 0.0699208984375, 0.06986281585693359, 0.06980258941650391, 0.06999244689941406, 0.0698936996459961, 0.06990892791748046, 0.07008016204833985, 0.06992316436767577, 0.07142601776123046, 0.06943334197998047, 0.06899472045898437, 0.06901996612548827, 0.06916909027099609, 0.06907810974121094, 0.06906963348388671, 0.06919522857666016, 0.06920054626464844, 0.06921382141113282, 0.06952540588378907, 0.06939907073974609, 0.06955741119384766, 0.06974140930175782, 0.0699513931274414, 0.06965567779541015, 0.069333984375, 0.06922835540771484, 0.06917129516601563, 0.06928115081787109, 0.06926972961425781, 0.06931097412109374, 0.06932246398925782, 0.06922882843017578, 0.06941081237792969, 0.06967446136474609, 0.06946604919433594, 0.069552734375, 0.06974854278564453, 0.06982470703125, 0.06980515289306641, 0.07046377563476562, 0.06951705932617187, 0.06967724609375, 0.06934188842773438, 0.06961357116699218, 0.06940876770019531, 0.06930355072021484, 0.06956639862060547, 0.06951200103759765, 0.06944563293457032, 0.06945996856689453, 0.0700967025756836, 0.07010230255126954, 0.0700384292602539, 0.06978150177001953, 0.06976102447509766, 0.0696556167602539, 0.06966368103027344, 0.0696094741821289, 0.06994329833984375, 0.06966067504882813, 0.06963404846191407, 0.06972608184814454, 0.06978892517089844, 0.06986953735351563, 0.06990873718261718, 0.07002713775634765, 0.06994818878173828, 0.07004774475097657, 0.07004364776611328, 0.06999654388427734, 0.07003472137451172, 0.07095539093017578, 0.06944541168212891, 0.06913184356689453, 0.06909008026123047, 0.06914252471923828, 0.06910361480712891, 0.06931660461425782, 0.06918553924560547, 0.06922147369384765, 0.06920694732666016, 0.06938371276855469, 0.06932720184326172, 0.06940275573730469, 0.06963404846191407, 0.06965846252441406, 0.0699024658203125, 0.06981145477294921, 0.06921218872070313, 0.06909625244140626, 0.0693656997680664, 0.06919782257080079, 0.06940009307861328, 0.06948912048339843, 0.0693780517578125, 0.06926131439208984, 0.06955830383300782, 0.06982176208496094, 0.06976934051513672, 0.06990617370605469, 0.06990723419189453, 0.06957465362548829, 0.06962995147705078, 0.06959913635253906, 0.06938419342041016, 0.06941295623779296, 0.0693759994506836, 0.06954300689697265, 0.06951209259033203, 0.06935955047607421, 0.06955219268798828, 0.0697548828125, 0.07003103637695313, 0.06999884796142578, 0.06997772979736328, 0.0697041244506836, 0.06974463653564453, 0.06983270263671874, 0.06979923248291016, 0.0697946548461914, 0.06991241455078125, 0.07024832153320312, 0.06991462707519532, 0.06990563201904297, 0.06985932922363282, 0.07044528198242188, 0.07016313934326172, 0.07026467132568359, 0.0701822738647461, 0.07001372528076172, 0.07036431884765625, 0.0700681915283203, 0.06992371368408203, 0.07003529357910156, 0.07164012908935546, 0.06991149139404297, 
0.06931251525878906, 0.06922374725341797, 0.06925142669677735, 0.06909372711181641, 0.06908665466308593, 0.06913286590576172, 0.06979174041748047, 0.06960694122314454, 0.06955260467529296, 0.0693759994506836, 0.06929129791259765, 0.06982886505126953, 0.06971849822998047, 0.06948863983154296, 0.06930809783935547, 0.06935078430175781, 0.06947936248779298, 0.06957997131347657, 0.07076537322998047, 0.06935529327392578, 0.06926771545410157, 0.06966614532470704, 0.06944217681884765, 0.0693207015991211, 0.06960511779785156, 0.06945613098144532, 0.06966681671142579, 0.07002480316162109, 0.07021609497070312, 0.06984633636474609, 0.06961017608642578, 0.06972621154785157, 0.06979993438720702, 0.07004080200195313, 0.06964304351806641, 0.069607421875, 0.06955548858642578, 0.06996355438232423, 0.06996399688720703, 0.06979452514648438, 0.06999244689941406, 0.06994124603271484, 0.06991462707519532, 0.0700145263671875, 0.06962630462646484, 0.06962179565429688, 0.07027247619628907, 0.06970829010009766, 0.06970982360839843, 0.06992272186279297, 0.06955836486816407, 0.06988390350341797, 0.06996355438232423, 0.06986774444580078, 0.06993852996826172, 0.06986953735351563, 0.070008544921875, 0.07031084442138671, 0.0700203857421875, 0.0699411849975586, 0.06998713684082031]",tokens/s,14.362713949759069,, @@ -5756,7 +5756,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 768.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 286.12 MiB is free. Process 86676 has 14.46 GiB memory in use. Of the allocated memory 14.30 GiB is allocated by PyTorch, and 41.77 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 768.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 286.12 MiB is free. Process 80327 has 14.46 GiB memory in use. Of the allocated memory 14.30 GiB is allocated by PyTorch, and 41.77 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -5799,7 +5799,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 172.12 MiB is free. Process 101419 has 14.57 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 14.15 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 172.12 MiB is free. Process 106315 has 14.57 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 14.15 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,782.934016,14129.496064,0.0,13734.248448,13728.777216,s,1,7.401724609375,7.401724609375,0.0,7.401724609375,7.401724609375,7.401724609375,7.401724609375,[7.401724609375],,kWh,7.16384237918722e-06,7.825425540195544e-07,3.5797250860017393e-06,1.1526110019208514e-05,,MB,1137.598464,14142.078976,0.0,13736.3456,13487.53408,s,10,12.767971313476561,1.276797131347656,0.0034546170262645267,1.2778217773437501,1.2802987060546875,1.2804129638671875,1.2805043701171874,"[1.2690040283203126, 1.27655322265625, 1.273699462890625, 1.2774561767578125, 1.2802733154296875, 1.273830322265625, 1.27937548828125, 1.2805272216796875, 1.279064697265625, 1.2781873779296875]",tokens/s,200.50170361033994,kWh,3.725450471458354e-05,4.10871512767824e-06,2.4681547523000557e-05,6.604476736526235e-05,tokens/kWh,3876158.705869689,MB,1162.518528,14142.078976,0.0,13736.3456,13661.262848,s,10,37.650972656250005,3.7650972656249997,0.0023110677335511625,3.765232421875,3.768010693359375,3.7682917236328124,3.7685165478515628,"[3.76295849609375, 3.766229248046875, 3.7679482421875, 3.76857275390625, 3.766557373046875, 3.765681884765625, 3.760609375, 3.764782958984375, 3.76463818359375, 3.762994140625]",tokens/s,16.73263545544609,kWh,0.00010977200966958206,1.2107495557714114e-05,7.312919739219867e-05,0.00019500870261949485,tokens/kWh,323062.5051792019,,s,630,37.64779961776732,0.05975841209169418,0.00019985936006495185,0.05975465583801269,0.059999047088623046,0.0600776029586792,0.06035248321533203,"[0.060187488555908206, 0.059701248168945314, 0.059469825744628904, 0.05955081558227539, 0.05928847885131836, 0.059463680267333986, 0.05941183853149414, 0.05939468765258789, 0.05959884643554687, 0.06039756774902344, 0.0595333137512207, 0.059588481903076175, 0.05944316864013672, 0.059616737365722657, 0.059562686920166016, 0.05978112030029297, 0.059770591735839845, 0.05977088165283203, 0.05964009475708008, 0.05964083099365235, 0.0595561294555664, 0.05955452728271484, 0.0594977912902832, 0.05954220962524414, 0.05954150390625, 0.059686912536621096, 0.059510784149169924, 0.05954764938354492, 0.05949161529541016, 0.0596910400390625, 0.05975315093994141, 0.05966438293457031, 0.059795425415039065, 0.059856895446777345, 0.059672607421875, 0.05982393646240235, 0.05967686462402344, 0.059805694580078124, 0.05965414428710938, 0.059584510803222655, 0.06037299346923828, 0.05977907180786133, 0.059772926330566405, 0.059719680786132816, 0.05973929595947266, 0.059705760955810545, 0.05970169448852539, 0.05989718246459961, 0.05993715286254883, 0.05998825454711914, 0.05991424179077148, 0.05996502304077148, 0.05989382553100586, 0.059913761138916014, 0.05985363388061524, 0.05983027267456055, 0.05979689788818359, 
0.059961761474609375, 0.06000864028930664, 0.059807743072509766, 0.0597995834350586, 0.05982204818725586, 0.05977679824829102, 0.06014617538452149, 0.05969142532348633, 0.05938307189941406, 0.059437793731689455, 0.05940140914916992, 0.05948259353637695, 0.05948982238769531, 0.05949545669555664, 0.05949792098999023, 0.05951932907104492, 0.05962319946289062, 0.05962339019775391, 0.059581886291503905, 0.05959148788452148, 0.059568126678466796, 0.05955910491943359, 0.059893695831298825, 0.059818878173828124, 0.05970534515380859, 0.0596357421875, 0.05951631927490234, 0.05954412841796875, 0.05958041763305664, 0.059579681396484375, 0.059592575073242185, 0.05967116928100586, 0.05959027099609375, 0.05976028823852539, 0.05978412628173828, 0.059791358947753906, 0.05969305419921875, 0.05969919967651367, 0.059756542205810545, 0.059842655181884766, 0.05976873779296875, 0.05978889465332031, 0.060110622406005856, 0.05999004745483399, 0.05984438323974609, 0.059732833862304685, 0.05977494430541992, 0.060090465545654295, 0.05975187301635742, 0.05982374572753906, 0.05973078536987304, 0.05991628646850586, 0.05994496154785156, 0.059854270935058594, 0.05996319961547852, 0.06004169464111328, 0.060068126678466796, 0.059942401885986325, 0.059994014739990234, 0.05999593734741211, 0.060005184173583984, 0.06001804733276367, 0.059970176696777344, 0.05978112030029297, 0.05990399932861328, 0.06063036727905274, 0.06003692626953125, 0.06001321411132812, 0.05986016082763672, 0.060125503540039066, 0.059625537872314456, 0.05962924957275391, 0.05940310287475586, 0.05945657730102539, 0.05948652648925781, 0.05942284774780274, 0.05973446273803711, 0.06014166259765625, 0.05984601593017578, 0.059585121154785155, 0.059615264892578124, 0.05959676742553711, 0.05955535888671875, 0.059576801300048825, 0.05978112030029297, 0.05980979156494141, 0.05969097518920898, 0.059607105255126955, 0.059762657165527346, 0.05961884689331055, 0.05973654556274414, 0.05978217697143555, 0.05955683135986328, 0.05962937545776367, 0.06022505569458008, 0.059966110229492185, 0.059834369659423826, 0.05976883316040039, 0.059774974822998046, 0.05967443084716797, 0.059748382568359376, 0.06004444885253906, 0.05995212936401367, 0.05979692840576172, 0.05976326370239258, 0.059840511322021485, 0.05981919860839844, 0.05986387252807617, 0.05986304092407226, 0.05970684814453125, 0.05994345474243164, 0.05978857421875, 0.059880062103271486, 0.059784385681152345, 0.05993529510498047, 0.05986953735351563, 0.05992784118652344, 0.059928512573242186, 0.060305633544921876, 0.059925056457519534, 0.05987123107910156, 0.05985279846191406, 0.05994496154785156, 0.05991219329833984, 0.05993859100341797, 0.059776512145996094, 0.05989007949829102, 0.05986540985107422, 0.05988662338256836, 0.05993068695068359, 0.060040096282958984, 0.0600002555847168, 0.060268543243408204, 0.059686912536621096, 0.05954079818725586, 0.0594600944519043, 0.05957603073120117, 0.05950921630859375, 0.05954764938354492, 0.0596190071105957, 0.059644222259521484, 0.05963776016235352, 0.059598560333251956, 0.05971177673339844, 0.05955583953857422, 0.059651615142822266, 0.05965046310424805, 0.05953532791137695, 0.05972572708129883, 0.05982352066040039, 0.05971212768554687, 0.05959596633911133, 0.05960188674926758, 0.05956787109375, 0.05972316741943359, 0.05976559829711914, 0.059789310455322264, 0.05977052688598633, 0.059647327423095704, 0.05968707275390625, 0.05972409439086914, 0.0597628173828125, 0.05972623825073242, 0.05965363311767578, 0.05974272155761719, 0.05997772979736328, 0.06014976119995117, 
0.06020438385009766, 0.059646591186523434, 0.05971868896484375, 0.059887775421142576, 0.05975481414794922, 0.05998982238769531, 0.059880062103271486, 0.05974009704589844, 0.059725345611572264, 0.059810367584228516, 0.05985696029663086, 0.05979750442504883, 0.0599818229675293, 0.05998284912109375, 0.060006401062011716, 0.059896671295166015, 0.06006697463989258, 0.05991731262207031, 0.06008195114135742, 0.06011312103271484, 0.060022785186767576, 0.060184574127197264, 0.0602149772644043, 0.060335647583007815, 0.06002463912963867, 0.05997260665893555, 0.06005075073242187, 0.06004908752441406, 0.0604956169128418, 0.059609153747558596, 0.05955440139770508, 0.05954716873168946, 0.059424705505371093, 0.05978988647460937, 0.05958041763305664, 0.060014591217041016, 0.059573726654052736, 0.059587039947509766, 0.05951059341430664, 0.059698497772216794, 0.05980051040649414, 0.05970684814453125, 0.059612766265869144, 0.05961004638671875, 0.059717632293701174, 0.059797374725341794, 0.05974556732177734, 0.059568992614746095, 0.05954889678955078, 0.059634273529052734, 0.05976287841796875, 0.059676673889160155, 0.05972518539428711, 0.05967526245117188, 0.0596049919128418, 0.059652000427246096, 0.05975839996337891, 0.05993913650512695, 0.0598773422241211, 0.05976601409912109, 0.0598392333984375, 0.059842559814453126, 0.059789310455322264, 0.05975449752807617, 0.05976886367797852, 0.05987705612182617, 0.05959267044067383, 0.05974867248535156, 0.05974972915649414, 0.05991462326049805, 0.05976927947998047, 0.05966937637329101, 0.05979849624633789, 0.059768318176269535, 0.05986095809936524, 0.059951648712158204, 0.0599444465637207, 0.05988195037841797, 0.05994895935058594, 0.060069408416748044, 0.05995993423461914, 0.0598546257019043, 0.059892990112304687, 0.05980460739135742, 0.059824127197265625, 0.0600384635925293, 0.059953857421875, 0.05994905471801758, 0.059936767578125, 0.05992038345336914, 0.059969600677490235, 0.06006070327758789, 0.05967929458618164, 0.05942108917236328, 0.059463680267333986, 0.05932783889770508, 0.059670238494873046, 0.05943392181396484, 0.05954134368896485, 0.05956012725830078, 0.059463680267333986, 0.059485248565673825, 0.05960796737670898, 0.05954492950439453, 0.059787105560302735, 0.059568958282470705, 0.05960908889770508, 0.05976268768310547, 0.05991628646850586, 0.059715007781982424, 0.05957894515991211, 0.0595882568359375, 0.060268894195556644, 0.05975449752807617, 0.059694206237792966, 0.05961308670043945, 0.05967766571044922, 0.05971484756469726, 0.059665153503417966, 0.059639263153076175, 0.05975296020507812, 0.05969715118408203, 0.05964799880981445, 0.059731998443603516, 0.06026649475097656, 0.059757537841796875, 0.0597694091796875, 0.05982979202270508, 0.05970143890380859, 0.059681503295898435, 0.059676193237304685, 0.05970915222167969, 0.059738174438476566, 0.059998912811279295, 0.05997772979736328, 0.05976883316040039, 0.059807743072509766, 0.059966880798339846, 0.059857246398925784, 0.05999590301513672, 0.05999257659912109, 0.0599920654296875, 0.05997568130493164, 0.0600494384765625, 0.060016609191894534, 0.059756542205810545, 0.059908096313476565, 0.06019651031494141, 0.060072288513183594, 0.05974425506591797, 0.059829822540283205, 0.05988191986083984, 0.059844062805175784, 0.05997212982177735, 0.06016211318969727, 0.059686878204345706, 0.05937097549438477, 0.05931222534179687, 0.05946182250976562, 0.05946799850463867, 0.05981769561767578, 0.05942300796508789, 0.0594920654296875, 0.059447582244873044, 0.05942230224609375, 0.05961564636230469, 0.059469825744628904, 
0.05956185531616211, 0.05951919937133789, 0.059568031311035156, 0.05975244903564453, 0.059686912536621096, 0.059660289764404295, 0.059676673889160155, 0.05959385681152344, 0.05963776016235352, 0.059572639465332033, 0.05949488067626953, 0.059432193756103514, 0.059433727264404296, 0.05950054550170898, 0.059600704193115236, 0.05963702392578125, 0.05968783950805664, 0.05970473480224609, 0.05959270477294922, 0.05963193511962891, 0.05986681747436524, 0.059789920806884764, 0.05992652893066406, 0.05981824111938477, 0.05978291320800781, 0.05971353530883789, 0.05967871856689453, 0.05967814254760742, 0.059802177429199216, 0.05966563034057617, 0.05980649566650391, 0.05969311904907226, 0.05981587219238281, 0.060022785186767576, 0.059931934356689455, 0.05989638519287109, 0.059754112243652346, 0.05982262420654297, 0.059891712188720705, 0.05977907180786133, 0.059873279571533204, 0.05979052734375, 0.06003180694580078, 0.05977097702026367, 0.059840415954589846, 0.0597212142944336, 0.05986089706420898, 0.059687744140625, 0.05975769424438476, 0.05972588729858398, 0.060084030151367186, 0.05970851135253906, 0.059464607238769535, 0.059487648010253906, 0.05948886489868164, 0.05951216125488281, 0.05954627227783203, 0.059399585723876956, 0.059525760650634765, 0.059539039611816405, 0.059476222991943356, 0.05954060745239258, 0.059529216766357425, 0.05987225723266602, 0.05960815811157227, 0.05972796630859375, 0.0599375991821289, 0.06000844955444336, 0.05978521728515625, 0.05972172927856445, 0.05978112030029297, 0.05962131118774414, 0.05964191818237305, 0.05964761734008789, 0.05951667022705078, 0.05954947280883789, 0.059546047210693356, 0.05962998580932617, 0.05967843246459961, 0.059729503631591796, 0.05965689468383789, 0.0599733772277832, 0.0603875846862793, 0.05978112030029297, 0.05973606491088867, 0.05978316879272461, 0.05969062423706055, 0.05990028762817383, 0.059718910217285155, 0.05966310501098633, 0.05958467102050781, 0.059727710723876955, 0.05957632064819336, 0.059661823272705077, 0.059738624572753904, 0.059698238372802734, 0.059980735778808594, 0.05990524673461914, 0.0599150390625, 0.05985696029663086, 0.0597154541015625, 0.05985823822021484, 0.059916545867919925, 0.0598873291015625, 0.0600700798034668, 0.06040636825561523, 0.05989785766601562, 0.05997292709350586, 0.059877086639404296, 0.059808319091796874, 0.05980201721191406, 0.06004121780395508, 0.05993577575683594, 0.060243968963623044, 0.05971558380126953, 0.05948320007324219, 0.05952767944335938, 0.05944527816772461, 0.06004336166381836, 0.05946774291992187, 0.05939440155029297, 0.0594595832824707, 0.05962464141845703, 0.05956486511230469, 0.05964543914794922, 0.05962799835205078, 0.05975830459594727, 0.05976710510253906, 0.05967792129516602, 0.0598043212890625, 0.059911392211914063, 0.05988454437255859, 0.05978694534301758, 0.05996976089477539, 0.059614654541015624, 0.05948412704467773, 0.05959740829467773, 0.05965619277954102, 0.05957017517089844, 0.05979520034790039, 0.059715839385986326, 0.0595599365234375, 0.05964799880981445, 0.059658241271972653, 0.05974211120605469, 0.05975868988037109, 0.059979774475097655, 0.05989577484130859, 0.059850273132324217, 0.05986067199707031, 0.05975676727294922, 0.059695518493652344, 0.059635902404785154, 0.05988739013671875, 0.059813312530517575, 0.05960326385498047, 0.059816417694091795, 0.05978083038330078, 0.05985686492919922, 0.059840065002441406, 0.05985971069335937, 0.05995542526245117, 0.059954975128173826, 0.05997564697265625, 0.06003919982910156, 0.05976646423339844, 0.059869503021240236, 
0.059881153106689455, 0.059768928527832034, 0.05968217468261719, 0.0597471694946289, 0.05974835205078125, 0.05976678466796875, 0.05977088165283203, 0.05986716842651367, 0.059813697814941405, 0.060256542205810545, 0.059643905639648435, 0.05966761779785156, 0.05943996810913086, 0.05940019226074219, 0.05942691040039062, 0.05936528015136719, 0.059379711151123046, 0.05935932922363281, 0.059394081115722655, 0.05941644668579102, 0.05961068725585938, 0.059421119689941404, 0.059660289764404295, 0.05969900894165039, 0.0597977294921875, 0.059772865295410156, 0.059912223815917966, 0.059848705291748044, 0.05970534515380859, 0.059504863739013675, 0.05963478469848633, 0.05946755218505859, 0.05949257659912109, 0.059628223419189455, 0.0595333137512207, 0.05957017517089844, 0.0596049919128418, 0.05957820892333984, 0.0597751350402832, 0.06011084747314453, 0.05986304092407226, 0.06013132858276367, 0.05996255874633789, 0.05984543991088867, 0.059950206756591795, 0.05987145614624023, 0.05966096115112305, 0.05961321640014648, 0.05967244720458984, 0.05965427017211914, 0.059799518585205075, 0.05972124862670898, 0.059859390258789065, 0.05976038360595703, 0.05963174438476562, 0.05971574401855469, 0.05971148681640625, 0.059643905639648435, 0.059791358947753906, 0.05990115356445312, 0.06002355194091797, 0.05997091293334961, 0.060359359741210934, 0.05994496154785156, 0.059799774169921875, 0.059748126983642576, 0.059780319213867186, 0.05975529479980469, 0.0599733772277832, 0.059797630310058594, 0.059889087677001955, 0.05981254577636719]",tokens/s,16.73404571837661,, @@ -5845,7 +5845,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 30.12 MiB is free. Process 82148 has 14.71 GiB memory in use. Of the allocated memory 14.37 GiB is allocated by PyTorch, and 229.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 30.12 MiB is free. Process 75731 has 14.71 GiB memory in use. Of the allocated memory 14.37 GiB is allocated by PyTorch, and 229.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -6077,7 +6077,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 258.12 MiB is free. Process 85134 has 14.49 GiB memory in use. Of the allocated memory 14.32 GiB is allocated by PyTorch, and 53.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 258.12 MiB is free. Process 78829 has 14.49 GiB memory in use. Of the allocated memory 14.32 GiB is allocated by PyTorch, and 53.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -6173,7 +6173,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp0ak5otrg/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpwxqz5jt_/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,810.217472,14639.104,0.0,14243.856384,14221.3376,s,1,7.73187548828125,7.73187548828125,0.0,7.73187548828125,7.73187548828125,7.73187548828125,7.73187548828125,[7.73187548828125],,kWh,1.5289258250001583e-05,1.583704214532396e-06,5.663893419999809e-06,2.2536855884533787e-05,,MB,1176.8832,14737.670144,0.0,14329.839616,14290.688,s,10,2.162870071411133,0.2162870071411133,0.004696034653436737,0.2166063461303711,0.21995816955566405,0.22030833129882813,0.22058846069335938,"[0.20358029174804687, 0.21567996215820312, 0.21597410583496093, 0.21516217041015626, 0.21971200561523438, 0.2152815399169922, 0.21988035583496093, 0.21723858642578125, 0.21970256042480468, 0.2206584930419922]",tokens/s,1183.6124757738062,kWh,6.414262803894934e-06,7.073772449779575e-07,4.258982875782576e-06,1.1380622924655468e-05,tokens/kWh,22494375.017503712,MB,1197.019136,14752.350208,0.0,14344.51968,14290.69056,s,10,38.786872070312505,3.8786872070312506,0.006007349949207754,3.878911254882812,3.8850998779296875,3.8858206909179684,3.8863973413085935,"[3.86822705078125, 3.869866455078125, 3.8762998046875, 3.877141845703125, 3.875823974609375, 3.8806806640625, 3.884939697265625, 3.8824814453125, 3.88486962890625, 3.88654150390625]",tokens/s,16.24260906777792,kWh,0.0001135623148202716,1.2526194901041918e-05,7.550227537761758e-05,0.00020159078509893112,tokens/kWh,312514.2846637688,,s,630,38.78313149261476,0.06156052617875356,0.0005346627059583197,0.06147313499450684,0.06187085227966308,0.06209518413543701,0.0648411915588379,"[0.06455510711669922, 0.06257535934448243, 0.06161743927001953, 0.06133129501342773, 0.061139488220214845, 0.061219169616699216, 0.06158134460449219, 0.06133699035644531, 0.061130943298339846, 0.061174144744873045, 0.06128025436401367, 0.06136217498779297, 0.0612044792175293, 0.06106524658203125, 0.06133139038085938, 0.06137859344482422, 0.06139206314086914, 0.06144492721557617, 0.061488929748535155, 0.06133782577514649, 0.06118403244018555, 0.06113894271850586, 0.06117375946044922, 0.06125088119506836, 0.06095276641845703, 0.060787166595458984, 0.06112774276733399, 0.06120284652709961, 0.06129103851318359, 0.06122003173828125, 0.06128067016601563, 0.06118431854248047, 0.061415519714355465, 
0.061571071624755856, 0.06145539093017578, 0.061434879302978515, 0.06140860748291015, 0.06142851257324219, 0.061468513488769534, 0.06149478530883789, 0.06123344039916992, 0.06148284912109375, 0.0613359375, 0.06105878448486328, 0.061403423309326174, 0.061265918731689455, 0.06129375839233398, 0.061341663360595704, 0.06132003021240234, 0.06149529647827148, 0.06137855911254883, 0.06113075256347656, 0.06128844833374023, 0.06150656127929687, 0.06157619094848633, 0.061556190490722654, 0.06131974411010742, 0.06125129699707031, 0.06128873443603516, 0.06154441452026367, 0.06125721740722656, 0.061409248352050784, 0.061698497772216795, 0.06434092712402344, 0.062304031372070315, 0.06152624130249024, 0.061203712463378905, 0.06103731155395508, 0.06134912109375, 0.06124364852905274, 0.06118761444091797, 0.061062175750732424, 0.06103238296508789, 0.061246910095214845, 0.0609285774230957, 0.06118915176391602, 0.06125609588623047, 0.06131974411010742, 0.06176358413696289, 0.06177785491943359, 0.061806655883789065, 0.06161324691772461, 0.06151046371459961, 0.061158462524414064, 0.061487041473388675, 0.0612567024230957, 0.06126572799682617, 0.061120704650878904, 0.06131916809082031, 0.06121273422241211, 0.061240863800048825, 0.06153667068481445, 0.06138675308227539, 0.06132534408569336, 0.06155875015258789, 0.061679615020751956, 0.061949600219726564, 0.06196012878417969, 0.061442462921142575, 0.061431774139404295, 0.061354015350341795, 0.061317119598388675, 0.06128844833374023, 0.06114508819580078, 0.06104403305053711, 0.060979774475097656, 0.061040000915527345, 0.06113766479492187, 0.06111983871459961, 0.06120515060424805, 0.061298561096191403, 0.06165843200683594, 0.06163497543334961, 0.06179471969604492, 0.0616357421875, 0.06159600067138672, 0.06145280075073242, 0.06136217498779297, 0.061341697692871094, 0.06113455963134766, 0.06125187301635742, 0.06138880157470703, 0.06128787231445312, 0.06129439926147461, 0.06137420654296875, 0.06135903930664063, 0.06497280120849609, 0.06270124816894532, 0.0614475212097168, 0.06150243377685547, 0.06108918380737305, 0.06117161560058594, 0.06119625473022461, 0.06116835021972656, 0.06128752136230469, 0.061094814300537106, 0.06107046508789062, 0.06114355087280274, 0.061059455871582034, 0.06107340621948242, 0.0612086067199707, 0.06176716613769531, 0.062091136932373045, 0.06186044692993164, 0.062132225036621094, 0.06180835342407227, 0.06179459381103516, 0.06145347213745117, 0.06143881607055664, 0.06113894271850586, 0.06113203048706055, 0.061105983734130856, 0.06141753768920898, 0.061752193450927736, 0.06111641693115234, 0.06166732788085937, 0.061087745666503906, 0.06137247848510742, 0.06146451187133789, 0.06138265609741211, 0.061337406158447266, 0.06145248031616211, 0.06176358413696289, 0.061572128295898435, 0.061512321472167966, 0.06148745727539062, 0.06116556930541992, 0.06127734375, 0.06176387023925781, 0.0615810546875, 0.0616640625, 0.06151504135131836, 0.06169055938720703, 0.061642784118652344, 0.06146047973632812, 0.06162163162231445, 0.06157171249389649, 0.061626399993896484, 0.061961822509765625, 0.061585182189941405, 0.061354591369628904, 0.06123519897460938, 0.06117382431030274, 0.06122284698486328, 0.061427391052246094, 0.06141513442993164, 0.06147126388549805, 0.061599807739257814, 0.061499393463134766, 0.06485810852050782, 0.06280556869506836, 0.06165139389038086, 0.06140447998046875, 0.06123180770874023, 0.061483009338378906, 0.06133481597900391, 0.06139344024658203, 0.06123097610473633, 0.06165126419067383, 0.061233150482177735, 0.06119619369506836, 
0.061102176666259764, 0.06113663864135742, 0.061502784729003904, 0.06193657684326172, 0.06183939361572265, 0.061894622802734375, 0.061911041259765626, 0.06155820846557617, 0.06128662490844727, 0.06136441421508789, 0.06156508636474609, 0.061443775177001954, 0.06119456100463867, 0.06146662521362305, 0.061387966156005856, 0.06130771255493164, 0.06130207824707031, 0.06130505752563477, 0.06170057678222656, 0.061906017303466794, 0.061792415618896486, 0.061750049591064456, 0.06167958450317383, 0.0618535041809082, 0.061475006103515625, 0.06122905731201172, 0.061325183868408205, 0.06130476760864258, 0.06113299179077149, 0.06110620880126953, 0.06113481521606445, 0.06113894271850586, 0.061334945678710937, 0.06106534576416016, 0.06141331100463867, 0.06172079849243164, 0.06158367919921875, 0.06189056015014648, 0.06192079925537109, 0.061860321044921875, 0.06176540756225586, 0.06155699157714844, 0.06146214294433594, 0.061406623840332034, 0.06121696090698242, 0.06122918319702148, 0.06133414459228516, 0.061308895111083984, 0.06128236770629883, 0.06133695983886719, 0.061571678161621096, 0.06479977416992187, 0.0625940170288086, 0.06150147247314453, 0.061335521697998045, 0.06113689422607422, 0.06141929626464844, 0.061219039916992186, 0.061222942352294925, 0.0612856330871582, 0.06125641632080078, 0.06145769500732422, 0.061313758850097655, 0.06142761611938476, 0.06130239868164063, 0.0614354248046875, 0.06183417510986328, 0.06192127990722656, 0.06180454254150391, 0.06169715118408203, 0.06140607833862305, 0.06144316864013672, 0.06119247817993164, 0.06118390274047852, 0.06116985702514648, 0.061034271240234375, 0.061115135192871095, 0.061128097534179686, 0.06116364669799805, 0.061321727752685545, 0.06137200164794922, 0.061380992889404296, 0.06159769439697266, 0.06174720001220703, 0.06175539016723633, 0.06166678237915039, 0.06157561492919922, 0.06173295974731445, 0.061521953582763675, 0.06157104110717773, 0.061321247100830076, 0.061273887634277345, 0.061319103240966795, 0.061093441009521486, 0.06113542556762695, 0.061314849853515624, 0.061303009033203126, 0.06138044738769531, 0.06152582550048828, 0.06143600082397461, 0.06169843292236328, 0.061555774688720706, 0.0614901123046875, 0.06170217514038086, 0.06184483337402344, 0.06157513427734375, 0.061556640625, 0.06180326461791992, 0.061423614501953126, 0.06156224060058594, 0.06172713470458984, 0.06127017593383789, 0.061502655029296874, 0.06160406494140625, 0.06554598236083985, 0.06342863845825196, 0.06211376190185547, 0.06147715377807617, 0.06130265426635742, 0.061231006622314454, 0.06128271865844727, 0.06133964920043945, 0.06121065521240234, 0.06126793670654297, 0.061231136322021484, 0.061065185546875, 0.060947711944580076, 0.06101174545288086, 0.0613078727722168, 0.06163587188720703, 0.06171311950683594, 0.06195513534545898, 0.06190095901489258, 0.06173740768432617, 0.06152431869506836, 0.06144553756713867, 0.06122761535644531, 0.061284320831298825, 0.06124755096435547, 0.061269153594970704, 0.06145267105102539, 0.061560478210449215, 0.061413280487060545, 0.06125600051879883, 0.0614356803894043, 0.061494049072265626, 0.0617155532836914, 0.061578174591064454, 0.06181600189208984, 0.06171065521240234, 0.06176816177368164, 0.06145792007446289, 0.06157980728149414, 0.06180659103393555, 0.061689823150634766, 0.061394878387451175, 0.061454017639160155, 0.06149363327026367, 0.061400062561035154, 0.061321247100830076, 0.061468830108642576, 0.06176403045654297, 0.061731231689453124, 0.06167958450317383, 0.06171852874755859, 0.06180659103393555, 0.06156595230102539, 
0.06129971313476563, 0.061287742614746094, 0.061440704345703125, 0.06165673446655273, 0.06137216186523437, 0.061603839874267576, 0.061506145477294924, 0.061530113220214844, 0.06177382278442383, 0.06155632019042969, 0.06560559844970704, 0.06320329666137696, 0.062007328033447266, 0.061638656616210936, 0.06126387023925781, 0.061445568084716795, 0.061432384490966795, 0.06149324798583984, 0.061290496826171874, 0.06150348663330078, 0.06137036895751953, 0.06156412887573242, 0.06140396881103516, 0.06138876724243164, 0.06174518585205078, 0.06177382278442383, 0.062182910919189455, 0.062304737091064454, 0.061892608642578124, 0.06167552185058594, 0.0615546875, 0.06159564971923828, 0.06129401779174805, 0.06124604797363281, 0.061257694244384764, 0.06134486389160156, 0.061295520782470705, 0.061489151000976565, 0.0614093132019043, 0.06103241729736328, 0.0612720947265625, 0.0615230712890625, 0.061645278930664064, 0.06199318313598633, 0.06179024124145508, 0.061730209350585936, 0.061794784545898436, 0.061800704956054685, 0.061742271423339844, 0.06150537490844726, 0.06150783920288086, 0.06152265548706055, 0.061373950958251954, 0.06139136123657227, 0.06160179138183594, 0.061417217254638674, 0.06169830322265625, 0.06147622299194336, 0.06151628875732422, 0.06152764892578125, 0.06185219192504883, 0.06211174392700195, 0.061818878173828126, 0.061712383270263675, 0.06159097671508789, 0.06159622573852539, 0.061400192260742184, 0.061524063110351565, 0.061426464080810546, 0.061411136627197264, 0.06152211380004883, 0.06136835098266601, 0.06165654373168945, 0.06534742736816407, 0.06290099334716796, 0.06167705535888672, 0.06150502395629883, 0.0613869743347168, 0.06128924942016602, 0.061515777587890626, 0.061464576721191405, 0.06145024108886719, 0.061470718383789064, 0.061394622802734375, 0.06137887954711914, 0.06157721710205078, 0.06123110580444336, 0.0615464973449707, 0.061949600219726564, 0.06207113647460937, 0.06198409652709961, 0.061878944396972654, 0.0617938232421875, 0.06158982467651367, 0.06139644622802734, 0.061207168579101565, 0.06133107376098633, 0.06148912048339844, 0.061353759765625, 0.06118060684204102, 0.061532161712646485, 0.061650974273681644, 0.06144204711914063, 0.0618106575012207, 0.061738014221191406, 0.06177824020385742, 0.06173331069946289, 0.06177199935913086, 0.06186188888549805, 0.062000801086425784, 0.06152771377563476, 0.06146937561035156, 0.06128639984130859, 0.06111638259887695, 0.061408767700195314, 0.06132585525512695, 0.06141299057006836, 0.061480575561523435, 0.06132515335083008, 0.061440929412841794, 0.061443424224853514, 0.061750942230224606, 0.06185881423950195, 0.06175129699707031, 0.06155673599243164, 0.06183935928344726, 0.061868030548095705, 0.06157516860961914, 0.0614824333190918, 0.06139894485473633, 0.061303489685058596, 0.0614870719909668, 0.06132294464111328, 0.06129900741577148, 0.061265918731689455, 0.06146665573120117, 0.0649062728881836, 0.06303334426879882, 0.06179840087890625, 0.06136422348022461, 0.061351295471191405, 0.06111433410644531, 0.061507678985595705, 0.06155728149414062, 0.06154025650024414, 0.06163241577148437, 0.06170646286010742, 0.06116348648071289, 0.061192192077636716, 0.06120819091796875, 0.06150182342529297, 0.06216447830200195, 0.062063102722167966, 0.06198428726196289, 0.06195574569702148, 0.06160262298583984, 0.06147894287109375, 0.06144160079956055, 0.061604095458984376, 0.06153792190551758, 0.061680191040039065, 0.061360095977783205, 0.061306880950927733, 0.06150348663330078, 0.06137449645996094, 0.06153420639038086, 0.061582592010498045, 
0.06157583999633789, 0.061827167510986325, 0.06184307098388672, 0.06171990585327149, 0.06151663970947266, 0.06147884750366211, 0.06125593566894531, 0.06162633514404297, 0.061503040313720704, 0.061637054443359374, 0.061638656616210936, 0.06147686386108398, 0.06153823852539062, 0.061554206848144534, 0.06155094528198242, 0.0617760009765625, 0.06164691162109375, 0.061655040740966796, 0.06196428680419922, 0.061851646423339846, 0.06170009613037109, 0.06160105514526367, 0.06165167999267578, 0.061580543518066404, 0.06167350387573242, 0.06150201416015625, 0.06155059051513672, 0.06152207946777344, 0.061626014709472654, 0.06152816009521484, 0.06168787384033203, 0.061542625427246096, 0.06508134460449219, 0.0629227523803711, 0.06164067077636719, 0.06132499313354492, 0.061208927154541015, 0.06129199981689453, 0.06137500762939453, 0.0613642578125, 0.061423583984375, 0.061295967102050784, 0.061196094512939454, 0.06112956619262695, 0.06130278396606445, 0.061224960327148435, 0.06165507125854492, 0.06210355377197266, 0.06235123062133789, 0.062344993591308594, 0.06209977722167969, 0.061960193634033205, 0.06183695983886719, 0.06165055847167969, 0.06130467224121094, 0.06117875289916992, 0.06148303985595703, 0.06153740692138672, 0.06129135894775391, 0.061327392578125, 0.06129660797119141, 0.06133059310913086, 0.061730751037597655, 0.06169283294677735, 0.06190681457519531, 0.061914558410644534, 0.062111774444580076, 0.06208377456665039, 0.06176927947998047, 0.061488800048828125, 0.06176787185668945, 0.06163475036621094, 0.06146892929077148, 0.06158963012695313, 0.06142348861694336, 0.06163264083862305, 0.06146640014648438, 0.061266143798828124, 0.06144617462158203, 0.061431774139404295, 0.06168166351318359, 0.06182454299926758, 0.06192585754394531, 0.061917217254638675, 0.062098495483398436, 0.06185257720947265, 0.06182876968383789, 0.06162467193603516, 0.06157926559448242, 0.061669055938720706, 0.06150921630859375, 0.06137519836425781, 0.061869953155517576, 0.061580448150634765, 0.061496288299560546]",tokens/s,16.244175644248,, @@ -6284,7 +6284,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 132042 has 14.73 GiB memory in use. Of the allocated memory 14.61 GiB is allocated by PyTorch, and 3.97 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 125520 has 14.73 GiB memory in use. Of the allocated memory 14.61 GiB is allocated by PyTorch, and 3.97 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -6380,7 +6380,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpahm93z14/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpp6uf3f3h/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -6620,7 +6620,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 150.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 132.12 MiB is free. Process 49703 has 14.61 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 21.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. 
Tried to allocate 150.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 132.12 MiB is free. Process 43621 has 14.61 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 21.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -6792,7 +6792,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 42.12 MiB is free. Process 30242 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 1.64 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 42.12 MiB is free. Process 24753 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 1.64 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -6908,7 +6908,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 98.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 44.12 MiB is free. Process 134893 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 3.80 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 98.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 44.12 MiB is free. Process 128594 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 3.80 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -6953,7 +6953,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. 
GPU 0 has a total capacity of 14.74 GiB of which 12.12 MiB is free. Process 91247 has 14.73 GiB memory in use. Of the allocated memory 12.32 GiB is allocated by PyTorch, and 2.30 GiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 12.12 MiB is free. Process 84778 has 14.73 GiB memory in use. Of the allocated memory 12.32 GiB is allocated by PyTorch, and 2.30 GiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -7061,7 +7061,7 @@ ChildProcessError: Traceback (most recent call last): self.w1 = nn.Parameter(torch.empty(moe_num_experts * ffn_hidden_size, hidden_size)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.17 GiB is free. Process 120218 has 13.57 GiB memory in use. Of the allocated memory 13.45 GiB is allocated by PyTorch, and 1.36 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.17 GiB is free. Process 114010 has 13.57 GiB memory in use. Of the allocated memory 13.45 GiB is allocated by PyTorch, and 1.36 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -7096,7 +7096,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.16 GiB. GPU 0 has a total capacity of 14.74 GiB of which 774.12 MiB is free. Process 76268 has 13.98 GiB memory in use. Of the allocated memory 13.72 GiB is allocated by PyTorch, and 148.73 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.16 GiB. GPU 0 has a total capacity of 14.74 GiB of which 774.12 MiB is free. Process 69475 has 13.98 GiB memory in use. Of the allocated memory 13.72 GiB is allocated by PyTorch, and 148.73 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -7139,7 +7139,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 52.12 MiB is free. 
Process 94252 has 14.69 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 108.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 52.12 MiB is free. Process 87843 has 14.69 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 108.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -7245,7 +7245,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 182.12 MiB is free. Process 46814 has 14.56 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 13.08 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 182.12 MiB is free. Process 40754 has 14.56 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 13.08 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -7288,7 +7288,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 200.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 164.12 MiB is free. Process 54771 has 14.58 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 4.94 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 200.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 164.12 MiB is free. Process 48495 has 14.58 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 4.94 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -7394,7 +7394,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. 
GPU 0 has a total capacity of 14.74 GiB of which 52.12 MiB is free. Process 79237 has 14.69 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 108.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 52.12 MiB is free. Process 72707 has 14.69 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 108.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -7437,7 +7437,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 144.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 34.12 MiB is free. Process 149811 has 14.71 GiB memory in use. Of the allocated memory 14.59 GiB is allocated by PyTorch, and 1.69 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 144.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 34.12 MiB is free. Process 143288 has 14.71 GiB memory in use. Of the allocated memory 14.59 GiB is allocated by PyTorch, and 1.69 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -7543,7 +7543,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 258.12 MiB is free. Process 97467 has 14.49 GiB memory in use. Of the allocated memory 14.32 GiB is allocated by PyTorch, and 53.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 258.12 MiB is free. Process 90938 has 14.49 GiB memory in use. Of the allocated memory 14.32 GiB is allocated by PyTorch, and 53.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -7617,7 +7617,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: RecurrentGemmaForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpamtibqcw/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: RecurrentGemmaForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp90gvzlw9/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -7892,7 +7892,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 162.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 120.12 MiB is free. Process 137887 has 14.62 GiB memory in use. Of the allocated memory 14.51 GiB is allocated by PyTorch, and 2.29 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 162.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 120.12 MiB is free. Process 131382 has 14.62 GiB memory in use. Of the allocated memory 14.51 GiB is allocated by PyTorch, and 2.29 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -7925,7 +7925,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp_kz5vlu7/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp5wogzz0n/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -7970,7 +7970,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 24.12 MiB is free. Process 117249 has 14.71 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 141.44 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. 
Tried to allocate 32.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 24.12 MiB is free. Process 110979 has 14.71 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 141.44 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -8003,7 +8003,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpe7b9wzo9/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpt0smu4ul/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -8109,7 +8109,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 768.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 286.12 MiB is free. Process 88194 has 14.46 GiB memory in use. 
Of the allocated memory 14.30 GiB is allocated by PyTorch, and 41.77 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 768.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 286.12 MiB is free. Process 81768 has 14.46 GiB memory in use. Of the allocated memory 14.30 GiB is allocated by PyTorch, and 41.77 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -8142,7 +8142,7 @@ ChildProcessError: Traceback (most recent call last): cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 raise ValueError( -ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp8evz61ae/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new +ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpk57ipwkt/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -8248,7 +8248,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 30.12 MiB is free. Process 81412 has 14.71 GiB memory in use. Of the allocated memory 14.37 GiB is allocated by PyTorch, and 229.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 30.12 MiB is free. Process 74973 has 14.71 GiB memory in use. Of the allocated memory 14.37 GiB is allocated by PyTorch, and 229.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,838.36928,9637.39648,0.0,9242.148864,8603.568128,s,1,7.5771103515625,7.5771103515625,0.0,7.5771103515625,7.5771103515625,7.5771103515625,7.5771103515625,[7.5771103515625],,kWh,1.2212581470809407e-05,1.3398002122987354e-06,5.9325047460090374e-06,1.948488642911718e-05,,MB,1216.487424,9889.05472,0.0,9481.224192,8972.090368,s,10,1.0719497299194336,0.10719497299194336,0.008302012551692599,0.11059019088745117,0.11199024887084962,0.11226201972961426,0.11247943641662599,"[0.10413276672363281, 0.11084531402587891, 0.10690787506103516, 0.11083315277099609, 0.11034722900390626, 0.11118134307861328, 0.10990019226074219, 0.11253379058837891, 0.08333821105957032, 0.1119298553466797]",tokens/s,2388.171691775515,kWh,3.3949029875972787e-06,3.7433836348732785e-07,2.2527537401861285e-06,6.021995091270735e-06,tokens/kWh,42510828.408194534,MB,1244.884992,9893.249024,0.0,9485.418496,8972.092928,s,10,24.113931396484375,2.4113931396484376,0.0033347543767851566,2.411421142578125,2.4156163574218747,2.4166848876953124,2.4175397119140625,"[2.406288818359375, 2.40878857421875, 2.411273681640625, 2.406954345703125, 2.41148291015625, 2.412856201171875, 2.411359375, 2.41537890625, 2.411795166015625, 2.41775341796875]",tokens/s,26.12597629318333,kWh,7.044531578990048e-05,7.770005280472887e-06,4.676936881081512e-05,0.0001249846898811885,tokens/kWh,504061.7379607721,,s,630,24.110681304931674,0.038270922706240704,0.0008782322904293862,0.03810611152648926,0.03860705413818359,0.03883380527496338,0.04386638484954834,"[0.04344163131713867, 0.03990086364746094, 0.03855628967285156, 0.038123390197753904, 0.03779580688476562, 0.03776681518554687, 0.03764499282836914, 0.03774457550048828, 0.03763347244262695, 0.037787296295166015, 0.03779804611206055, 0.03770825576782227, 0.03776956939697266, 0.037895584106445314, 0.037704288482666014, 0.03771360015869141, 0.03773443222045898, 0.03780624008178711, 0.03800179290771484, 0.03775743865966797, 0.03782447814941406, 0.037833248138427734, 0.037770721435546876, 0.03780230331420899, 0.038133983612060544, 0.0386899528503418, 0.03869168090820312, 0.03866624069213867, 0.038406143188476564, 0.03820697784423828, 0.037917182922363284, 0.03773952102661133, 0.03792588806152344, 0.03813369750976563, 0.03826646423339844, 0.03801545715332031, 0.03798992156982422, 0.038090431213378906, 0.03812432098388672, 0.03811459350585938, 0.038187744140625, 0.03824025726318359, 0.03788185501098633, 0.03928473663330078, 0.03786124801635742, 0.03809084701538086, 0.03799168014526367, 0.03794585418701172, 0.038320415496826174, 0.03808870315551758, 0.038282783508300784, 0.03838000106811523, 0.03863347244262695, 0.03860889434814453, 0.03859782409667969, 
0.038529857635498044, 0.03821158218383789, 0.0379059829711914, 0.038162494659423826, 0.037953632354736325, 0.03805417633056641, 0.03805593490600586, 0.0380682258605957, 0.04390889739990234, 0.03971324920654297, 0.03872927856445312, 0.038076862335205075, 0.03775897598266602, 0.0377704963684082, 0.03778432083129883, 0.03791257476806641, 0.03793222427368164, 0.03827180862426758, 0.037850910186767575, 0.037694782257080076, 0.03772476959228516, 0.03781049728393555, 0.037781505584716796, 0.03811520004272461, 0.0379736328125, 0.03799091339111328, 0.037910526275634765, 0.037758430480957034, 0.037751327514648436, 0.03791820907592774, 0.03786393737792969, 0.03770556640625, 0.038459583282470705, 0.03836310577392578, 0.03826399993896484, 0.03842031860351563, 0.03825350570678711, 0.0383158073425293, 0.03794150543212891, 0.03809075164794922, 0.038069568634033206, 0.038202049255371094, 0.03831552124023437, 0.03814374542236328, 0.038228416442871095, 0.038031681060791016, 0.03806617736816406, 0.03789619064331055, 0.03790848159790039, 0.03806208038330078, 0.038324222564697266, 0.038340351104736325, 0.03784524917602539, 0.03795558547973633, 0.037969921112060545, 0.03797532653808594, 0.03799296188354492, 0.03820307159423828, 0.038447360992431644, 0.03876499176025391, 0.038854496002197265, 0.03846553421020508, 0.03823535919189453, 0.03832854461669922, 0.03810934448242188, 0.03809260940551758, 0.03854153442382813, 0.03883852767944336, 0.03807382583618164, 0.038261409759521484, 0.038088542938232425, 0.0468045768737793, 0.04062665557861328, 0.038690174102783206, 0.03782463836669922, 0.037985950469970706, 0.03812438583374023, 0.038061790466308594, 0.03796201705932617, 0.03796783828735351, 0.03765660858154297, 0.037697406768798826, 0.03789174270629883, 0.037742176055908204, 0.03782950210571289, 0.037904384613037106, 0.03765033721923828, 0.03793315124511719, 0.03793100738525391, 0.037720062255859374, 0.037787647247314454, 0.03810713577270508, 0.03825254440307617, 0.03788943862915039, 0.03799919891357422, 0.038076416015625, 0.03837055969238281, 0.038429439544677736, 0.03866995239257812, 0.038166175842285155, 0.03805667114257812, 0.03789158248901367, 0.03823462295532227, 0.03814153671264649, 0.03791689682006836, 0.03796604919433594, 0.03810464096069336, 0.03812598419189453, 0.03794112014770508, 0.03810508728027344, 0.03849843215942383, 0.03797401428222656, 0.03777536010742188, 0.03794729614257813, 0.03800870513916016, 0.037988319396972656, 0.0380951042175293, 0.03796713638305664, 0.03801971054077148, 0.038186496734619144, 0.03796847915649414, 0.03827302551269531, 0.03829555130004883, 0.03845939254760742, 0.03837747192382813, 0.038526912689208985, 0.038417793273925784, 0.038357505798339846, 0.03851887893676758, 0.03874211120605469, 0.038250495910644534, 0.03805593490600586, 0.03789139175415039, 0.03811196899414063, 0.04447507095336914, 0.03966265487670898, 0.03868764877319336, 0.03808585739135742, 0.03790099334716797, 0.03779593658447265, 0.03775827026367187, 0.03802758407592773, 0.03782060623168945, 0.03771104049682617, 0.0377017936706543, 0.03806499099731445, 0.037682464599609375, 0.03784777450561523, 0.03781756973266601, 0.037767967224121096, 0.03786051177978516, 0.03818083190917969, 0.038122367858886716, 0.03812102508544922, 0.03800928115844727, 0.03802659225463867, 0.03798492813110352, 0.037875614166259765, 0.0378488655090332, 0.03817824172973633, 0.038214271545410156, 0.038246654510498045, 0.03845465469360351, 0.038402687072753905, 0.0379266242980957, 0.03790415954589844, 0.037935169219970706, 
0.03810070419311523, 0.038034015655517575, 0.03796758270263672, 0.038230430603027346, 0.03840108871459961, 0.03806902313232422, 0.03790383911132812, 0.037986526489257814, 0.038088382720947264, 0.038091552734375, 0.03803327941894531, 0.03807859039306641, 0.03808870315551758, 0.03839091110229492, 0.03808345413208008, 0.03794124984741211, 0.03821353530883789, 0.03833206558227539, 0.03822022247314453, 0.038338558197021484, 0.03851651382446289, 0.03842816162109375, 0.03852975845336914, 0.03815628814697265, 0.037916576385498044, 0.03790652847290039, 0.038122943878173825, 0.03804339218139648, 0.03822041702270508, 0.038162624359130856, 0.04376230239868164, 0.03968368148803711, 0.0383656005859375, 0.0380549430847168, 0.03799043273925781, 0.03784758377075195, 0.03777753448486328, 0.03763024139404297, 0.03788780975341797, 0.03775279998779297, 0.03777763366699219, 0.03775897598266602, 0.0377262077331543, 0.037768447875976566, 0.03790719985961914, 0.03786038589477539, 0.037999584197998044, 0.03805388641357422, 0.0380682258605957, 0.03818086242675781, 0.03788595199584961, 0.037986305236816405, 0.0380948486328125, 0.038131614685058594, 0.038338558197021484, 0.03903881454467773, 0.03902899169921875, 0.038316032409667966, 0.03828902435302734, 0.038271358489990234, 0.038126655578613285, 0.038028224945068356, 0.03790777587890625, 0.038085311889648435, 0.03830108642578125, 0.03842108917236328, 0.03794739151000977, 0.038034622192382815, 0.03790111923217773, 0.037986305236816405, 0.038122718811035156, 0.03820131301879883, 0.0381407356262207, 0.03804569625854492, 0.03784662246704101, 0.03815670394897461, 0.03812761688232422, 0.038166080474853516, 0.037981697082519535, 0.038167488098144534, 0.038225887298583984, 0.03861507034301758, 0.0389769287109375, 0.0387625617980957, 0.038843936920166015, 0.03836419296264648, 0.038381534576416014, 0.03835903930664063, 0.03828531265258789, 0.0382479362487793, 0.03836886215209961, 0.03831286239624023, 0.03849132919311524, 0.044175838470458986, 0.03992620849609375, 0.03837958526611328, 0.03801417541503906, 0.03811203384399414, 0.037894142150878905, 0.03779302215576172, 0.03779865646362305, 0.0380579833984375, 0.03824156951904297, 0.037870304107666015, 0.03819472122192383, 0.03803388977050781, 0.0380682258605957, 0.038004737854003906, 0.03803340911865234, 0.0381781120300293, 0.038218433380126954, 0.038316032409667966, 0.03788595199584961, 0.03809049606323242, 0.038039806365966794, 0.038040576934814455, 0.037993183135986326, 0.03829792022705078, 0.0387454719543457, 0.03862710571289062, 0.03836191940307617, 0.038257823944091794, 0.038394718170166015, 0.038065376281738283, 0.03788438415527344, 0.038226238250732424, 0.037822463989257815, 0.03806367874145508, 0.03813011169433594, 0.038125568389892575, 0.038284481048583986, 0.03798099136352539, 0.038029312133789066, 0.0380211181640625, 0.038125568389892575, 0.0380313606262207, 0.03829759979248047, 0.03801456069946289, 0.03792057418823242, 0.038037216186523434, 0.038198143005371096, 0.038117374420166016, 0.03817062377929688, 0.03831193542480469, 0.038376670837402344, 0.03879935836791992, 0.03885862350463867, 0.038613918304443356, 0.038430721282958984, 0.038221759796142576, 0.03830585479736328, 0.038268638610839845, 0.03809462356567383, 0.03805644989013672, 0.038109184265136715, 0.038449153900146485, 0.04441084671020508, 0.03985408020019531, 0.03848134231567383, 0.037970497131347654, 0.0378342399597168, 0.03787417602539062, 0.038096736907958985, 0.03808201599121094, 0.037695648193359375, 0.037810592651367186, 0.03783283233642578, 
0.037770431518554685, 0.037826942443847655, 0.038334911346435546, 0.03800064086914062, 0.0383851203918457, 0.03796796798706055, 0.03775737762451172, 0.03802316665649414, 0.03806412887573242, 0.03793471908569336, 0.037889793395996095, 0.03784761428833008, 0.038039615631103516, 0.038141952514648435, 0.03843670272827148, 0.038469791412353516, 0.038502079010009765, 0.038326591491699216, 0.03824844741821289, 0.038182910919189454, 0.03808051300048828, 0.03824844741821289, 0.037961727142333986, 0.037918495178222655, 0.037991649627685545, 0.03834982299804687, 0.03807436752319336, 0.037904384613037106, 0.03811494445800781, 0.038349056243896486, 0.03792294311523438, 0.038299648284912106, 0.03787571334838867, 0.03802092742919922, 0.03821382522583008, 0.03803881454467774, 0.03793353652954102, 0.03806028747558594, 0.03834470367431641, 0.03837542343139649, 0.038335521697998046, 0.038585311889648435, 0.03833980941772461, 0.03839670562744141, 0.03845523071289063, 0.038413665771484376, 0.038447841644287106, 0.03828521728515625, 0.03837961578369141, 0.038563838958740236, 0.03826179122924805, 0.038388702392578126, 0.04575455856323242, 0.04010540771484375, 0.038629470825195314, 0.03806070327758789, 0.03787776184082031, 0.03807231903076172, 0.037748737335205076, 0.03782867050170898, 0.037957569122314454, 0.03783270263671875, 0.03788579177856445, 0.03774889755249024, 0.038072128295898434, 0.03791686248779297, 0.037793342590332034, 0.038154689788818356, 0.03767091369628906, 0.037867454528808596, 0.03785715103149414, 0.037873855590820314, 0.03808051300048828, 0.03801456069946289, 0.037891902923583985, 0.03821628952026367, 0.03853420639038086, 0.03838457489013672, 0.038438911437988284, 0.03874819183349609, 0.03888518524169922, 0.03860086441040039, 0.0381399040222168, 0.03827462387084961, 0.03827347183227539, 0.038352127075195315, 0.03808742523193359, 0.03794944000244141, 0.0380412483215332, 0.038037857055664065, 0.038166175842285155, 0.03801327896118164, 0.037969921112060545, 0.038133792877197266, 0.03827094268798828, 0.038250495910644534, 0.03799808120727539, 0.03804620742797851, 0.03798204803466797, 0.03801513671875, 0.038161792755126954, 0.03824294281005859, 0.03826483154296875, 0.038567615509033204, 0.038926559448242186, 0.03870115280151367, 0.03860684967041016, 0.038461505889892576, 0.038598590850830075, 0.03818038558959961, 0.038214111328125, 0.038422401428222654, 0.03829062271118164, 0.03818096160888672, 0.03869782257080078, 0.04184486389160156, 0.042041439056396485, 0.04006908798217773, 0.03896319961547851, 0.03871654510498047, 0.037931903839111325, 0.037713920593261716, 0.037705726623535156, 0.03765657424926758, 0.03774457550048828, 0.03789574432373047, 0.037943809509277344, 0.03804569625854492, 0.03800806427001953, 0.03826969528198242, 0.038258689880371094, 0.03802067184448242, 0.037939647674560546, 0.038055614471435545, 0.0380173454284668, 0.037976062774658204, 0.03828883361816406, 0.0379172477722168, 0.03796105575561524, 0.03795011138916016, 0.03790959930419922, 0.03780905532836914, 0.03790643310546875, 0.038160385131835936, 0.038238239288330075, 0.03848944091796875, 0.038179454803466795, 0.038174720764160154, 0.03806208038330078, 0.03812351989746094, 0.03814604949951172, 0.038258689880371094, 0.038063232421875, 0.03809769439697266, 0.03869091033935547, 0.0384266242980957, 0.038147167205810545, 0.03794348907470703, 0.03813040161132813, 0.03817244720458984, 0.03833391952514648, 0.03825945663452148, 0.03827916717529297, 0.038174720764160154, 0.03794944000244141, 0.038063934326171875, 
0.038295616149902345, 0.03825436782836914, 0.03814384078979492, 0.03833087921142578, 0.038319934844970704, 0.03822022247314453, 0.03812531280517578, 0.03801641464233398, 0.03783939361572266, 0.03833055877685547, 0.03829302215576172, 0.038231521606445315, 0.04493308639526367, 0.04011142349243164, 0.03869152069091797, 0.03813750457763672, 0.037970272064208985, 0.037720062255859374, 0.037774848937988284, 0.03792127990722656, 0.037891201019287106, 0.03780492782592773, 0.0379249267578125, 0.03816236877441406, 0.03813785552978516, 0.03807846450805664, 0.03804569625854492, 0.03799407958984375, 0.038080928802490234, 0.03804764938354492, 0.03792240142822265, 0.0382325439453125, 0.03834268951416016, 0.03805184173583984, 0.038093982696533205, 0.0380219841003418, 0.038365184783935545, 0.038481311798095705, 0.03863203048706055, 0.03865974426269531, 0.038702880859375, 0.03843337631225586, 0.038408161163330075, 0.038408191680908206, 0.03833446502685547, 0.03824025726318359, 0.03779135894775391, 0.0380863037109375, 0.03792300796508789, 0.03814217758178711, 0.038082111358642576, 0.03812838363647461, 0.038612991333007815, 0.038400001525878906, 0.03818905639648437, 0.038122943878173825, 0.03832889556884766, 0.03786751937866211, 0.037932289123535155, 0.037923583984375, 0.03830169677734375, 0.03853023910522461, 0.03845203018188476, 0.03862268829345703, 0.038468128204345704, 0.038828033447265625, 0.03868467330932617, 0.038542591094970706, 0.038437633514404296, 0.03843635177612305, 0.03851728057861328, 0.038481887817382814, 0.038356990814208985, 0.038288959503173826, 0.03814854431152344]",tokens/s,26.129498044136092,, @@ -8294,7 +8294,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 258.12 MiB is free. Process 84428 has 14.49 GiB memory in use. Of the allocated memory 14.32 GiB is allocated by PyTorch, and 53.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 258.12 MiB is free. Process 78069 has 14.49 GiB memory in use. Of the allocated memory 14.32 GiB is allocated by PyTorch, and 53.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.255552,3354.329088,0.0,2959.081472,2942.567424,s,1,7.56006298828125,7.56006298828125,0.0,7.56006298828125,7.56006298828125,7.56006298828125,7.56006298828125,[7.56006298828125],,kWh,1.030244897499036e-05,1.129170060222218e-06,4.934170613993261e-06,1.636578964920584e-05,,MB,1145.683968,3549.364224,0.0,3141.533696,3105.830912,s,10,0.3191483516693115,0.03191483516693115,0.001310640518279943,0.03153615951538086,0.0330703311920166,0.03414887790679931,0.03501171527862549,"[0.03522742462158203, 0.03179126358032226, 0.03283065414428711, 0.032280128479003904, 0.030215551376342773, 0.032129951477050785, 0.03128105545043945, 0.03119024085998535, 0.030974496841430665, 0.031227584838867187]",tokens/s,8021.348023920134,kWh,1.1245226313461203e-06,1.239511474870338e-07,7.47360640623088e-07,1.995834419456242e-06,tokens/kWh,128267153.57967736,MB,1174.114304,3591.307264,0.0,3183.476736,3163.048448,s,10,10.659780151367189,1.0659780151367189,0.013331280476035226,1.0704387817382812,1.0778973876953126,1.08138466796875,1.0841744921875,"[1.0745426025390625, 1.0615531005859375, 1.0421826171875, 1.0848719482421876, 1.068078125, 1.0727994384765625, 1.0771224365234375, 1.0770347900390624, 1.049612060546875, 1.0519830322265624]",tokens/s,59.1006560223663,kWh,3.100471784532199e-05,3.419352761774082e-06,2.0457878545777293e-05,5.488194915287337e-05,tokens/kWh,1147918.4134753277,,s,630,10.656761390686036,0.016915494270930215,0.00042222898674190197,0.016895071983337402,0.01727531833648682,0.01764526271820068,0.01838836977005005,"[0.017031999588012697, 0.01689187240600586, 0.016939008712768554, 0.01679155158996582, 0.016696416854858398, 0.01682115173339844, 0.01739129638671875, 0.017162559509277343, 0.017147296905517577, 0.016937503814697264, 0.017070144653320313, 0.017983488082885742, 0.02095235252380371, 0.01775689506530762, 0.016965631484985352, 0.016955392837524414, 0.017227264404296876, 0.01702348709106445, 0.0169881591796875, 0.016957439422607423, 0.016889184951782227, 0.016878240585327147, 0.016842752456665038, 0.0166046085357666, 0.01683888053894043, 0.017377376556396484, 0.017266944885253908, 0.017156095504760743, 0.01695052719116211, 0.017171199798583985, 0.01697532844543457, 0.016945695877075194, 0.0176843204498291, 0.01694326400756836, 0.016904096603393554, 0.01711503982543945, 0.017010816574096678, 0.016957504272460937, 0.016916479110717773, 0.016955007553100587, 0.016845184326171873, 0.017035263061523438, 0.016721920013427736, 0.016787296295166017, 0.016965791702270507, 0.017297407150268555, 0.017165472030639648, 0.01708937644958496, 0.016875360488891603, 0.016824480056762695, 0.016746271133422853, 0.01687126350402832, 0.016902528762817382, 
0.016987808227539064, 0.016943456649780274, 0.017315839767456053, 0.01680384063720703, 0.016701440811157226, 0.016682783126831056, 0.016670495986938476, 0.01677510452270508, 0.01669990348815918, 0.01651273536682129, 0.017988576889038085, 0.017946624755859376, 0.01725644874572754, 0.0167956485748291, 0.01685215950012207, 0.016728384017944336, 0.016623775482177736, 0.01680214309692383, 0.016930112838745116, 0.016910655975341797, 0.016795616149902343, 0.01679747200012207, 0.01675436782836914, 0.016701887130737305, 0.016646656036376953, 0.016478208541870116, 0.016604991912841798, 0.017174720764160156, 0.01886207962036133, 0.017231679916381835, 0.01680512046813965, 0.01665119934082031, 0.017086463928222655, 0.017059839248657227, 0.016961536407470702, 0.01713283157348633, 0.017989952087402342, 0.016932512283325197, 0.01689574432373047, 0.01678985595703125, 0.016951904296875, 0.0170098876953125, 0.016871648788452147, 0.017111648559570314, 0.016954816818237305, 0.01687126350402832, 0.016638368606567384, 0.016671072006225585, 0.01657423973083496, 0.01688598442077637, 0.016920576095581053, 0.01678950309753418, 0.016699392318725585, 0.016596511840820314, 0.016435007095336913, 0.01685068893432617, 0.017111967086791992, 0.01699865531921387, 0.016762624740600585, 0.016764928817749023, 0.016381952285766603, 0.016467967987060548, 0.016343040466308592, 0.01689116859436035, 0.01639049530029297, 0.016391551971435547, 0.016299007415771484, 0.016354719161987306, 0.01639894485473633, 0.01655388832092285, 0.016384096145629884, 0.016355327606201172, 0.01636147117614746, 0.0179303035736084, 0.018018367767333985, 0.017269216537475585, 0.016719871520996094, 0.01657379150390625, 0.016730783462524414, 0.0165086727142334, 0.01643129539489746, 0.016281663894653322, 0.016407808303833007, 0.016366304397583006, 0.016463584899902343, 0.0162860164642334, 0.01698585510253906, 0.01793811225891113, 0.017041984558105468, 0.016537471771240233, 0.01660531234741211, 0.01647955131530762, 0.016534208297729492, 0.01647760009765625, 0.016763359069824218, 0.016468095779418945, 0.016357248306274413, 0.016410751342773436, 0.016403743743896484, 0.016253120422363283, 0.016382495880126954, 0.016541696548461913, 0.0164270076751709, 0.01643267250061035, 0.016306528091430662, 0.016342239379882814, 0.016322687149047853, 0.016378463745117186, 0.01645136070251465, 0.016404895782470702, 0.016448511123657226, 0.016583423614501953, 0.0164453125, 0.01642464065551758, 0.01643769645690918, 0.01637196731567383, 0.01638809585571289, 0.0165086727142334, 0.01634124755859375, 0.01638185691833496, 0.016351295471191407, 0.0163656005859375, 0.016363519668579102, 0.01641267204284668, 0.01651257514953613, 0.01653376007080078, 0.016476255416870117, 0.016428672790527343, 0.01634761619567871, 0.016324607849121094, 0.016361215591430663, 0.016326911926269533, 0.01633273506164551, 0.016360992431640624, 0.016343584060668947, 0.016496639251708984, 0.018335296630859376, 0.018438688278198244, 0.01752467155456543, 0.01724617576599121, 0.01708255958557129, 0.01722480010986328, 0.01723075294494629, 0.01743052864074707, 0.01719705581665039, 0.017061567306518553, 0.017178304672241212, 0.01711497688293457, 0.01721833610534668, 0.01713148880004883, 0.017141792297363283, 0.017137887954711915, 0.01722956848144531, 0.01712950325012207, 0.017130783081054687, 0.017191455841064452, 0.017178815841674806, 0.017141759872436522, 0.01715171241760254, 0.017316064834594726, 0.017086528778076173, 0.017024223327636718, 0.017257247924804688, 0.01726851272583008, 0.017135839462280273, 
0.01721548843383789, 0.017154016494750977, 0.017057823181152343, 0.01739366340637207, 0.01702707290649414, 0.016873472213745116, 0.01707811164855957, 0.01718492889404297, 0.017286304473876954, 0.017215583801269533, 0.017222400665283202, 0.01722310447692871, 0.017140031814575196, 0.017149824142456055, 0.01716876792907715, 0.017093664169311525, 0.017052640914916994, 0.01729097557067871, 0.0172956485748291, 0.017164287567138673, 0.017303552627563477, 0.0172106876373291, 0.017265344619750978, 0.017307647705078123, 0.01737049674987793, 0.017054336547851563, 0.01708624076843262, 0.01720921516418457, 0.017102624893188475, 0.01696211242675781, 0.017127424240112304, 0.016998367309570314, 0.017274911880493165, 0.017039360046386717, 0.017170751571655273, 0.01673200035095215, 0.016605279922485353, 0.01669126319885254, 0.0170250244140625, 0.016852479934692383, 0.01690880012512207, 0.016876544952392578, 0.016458751678466797, 0.016684896469116212, 0.01751030349731445, 0.01722598457336426, 0.017125375747680666, 0.016920160293579102, 0.016773536682128908, 0.01683830451965332, 0.016691551208496094, 0.016715328216552736, 0.01683705520629883, 0.017154048919677735, 0.017069984436035156, 0.016922719955444337, 0.01678335952758789, 0.01700864028930664, 0.016981407165527342, 0.016906848907470705, 0.01681612777709961, 0.017027040481567383, 0.01686300849914551, 0.016851200103759765, 0.01681612777709961, 0.016738304138183592, 0.016705535888671876, 0.01738956832885742, 0.017147903442382813, 0.017077823638916016, 0.01698975944519043, 0.01712169647216797, 0.01698396873474121, 0.01690825653076172, 0.0169531192779541, 0.016826591491699218, 0.016830528259277344, 0.017127744674682616, 0.016948991775512696, 0.016841184616088866, 0.01680588722229004, 0.01679974365234375, 0.016893951416015626, 0.01682636833190918, 0.017073152542114257, 0.016902656555175782, 0.017076736450195314, 0.017651296615600585, 0.016974016189575194, 0.016832576751708985, 0.016795711517333986, 0.016959583282470703, 0.017210752487182616, 0.01776473617553711, 0.01699660873413086, 0.016945152282714843, 0.0168407039642334, 0.018430559158325196, 0.0183110408782959, 0.017455232620239257, 0.016942848205566408, 0.017125631332397463, 0.017031328201293945, 0.017835872650146484, 0.016857088088989256, 0.01737932777404785, 0.01706598472595215, 0.016832000732421876, 0.01668070411682129, 0.01708665657043457, 0.017023712158203124, 0.017157983779907227, 0.016951295852661134, 0.01669059181213379, 0.016921087265014647, 0.01686332893371582, 0.016715776443481444, 0.016725536346435546, 0.016894432067871095, 0.016875328063964842, 0.017042816162109373, 0.01701968002319336, 0.01682579231262207, 0.017637887954711915, 0.016781408309936522, 0.016815872192382814, 0.016896352767944336, 0.017034719467163086, 0.016662847518920897, 0.016908416748046873, 0.017126432418823244, 0.01703183937072754, 0.017060096740722657, 0.016948511123657226, 0.016827167510986327, 0.016855039596557618, 0.0166495361328125, 0.01683737564086914, 0.0169769287109375, 0.016907167434692384, 0.016699392318725585, 0.016893951416015626, 0.016860736846923827, 0.016871871948242186, 0.01701273536682129, 0.016928768157958983, 0.016997888565063478, 0.016957759857177734, 0.01680179214477539, 0.017274816513061522, 0.017395967483520507, 0.017278976440429687, 0.0169881591796875, 0.017075584411621093, 0.016921215057373047, 0.016873472213745116, 0.016914432525634765, 0.01700022315979004, 0.017090272903442384, 0.016957792282104492, 0.017672191619873046, 0.018103296279907227, 0.017461759567260742, 0.017084928512573243, 
0.016990207672119142, 0.016701440811157226, 0.016728000640869142, 0.01665439987182617, 0.01721507263183594, 0.017082784652709963, 0.01701251220703125, 0.016918752670288088, 0.016899999618530274, 0.016806175231933593, 0.016821855545043944, 0.016856672286987305, 0.016799840927124023, 0.016779808044433592, 0.017005760192871092, 0.01690707206726074, 0.01688150405883789, 0.01705999946594238, 0.016885759353637696, 0.016846847534179688, 0.01705504035949707, 0.01702364730834961, 0.017399839401245117, 0.01705369567871094, 0.017121280670166016, 0.017110591888427736, 0.017119680404663086, 0.01689571189880371, 0.01692086410522461, 0.019200000762939453, 0.01750822448730469, 0.017000127792358398, 0.01704185676574707, 0.01693657684326172, 0.01710323143005371, 0.016979232788085937, 0.01716092872619629, 0.017143808364868163, 0.016947200775146484, 0.016907327651977538, 0.016870336532592775, 0.016869375228881836, 0.01680780792236328, 0.017438848495483397, 0.017147903442382813, 0.017138816833496093, 0.017052543640136718, 0.016905567169189454, 0.01685161590576172, 0.016787296295166017, 0.01693302345275879, 0.016936063766479492, 0.017022911071777343, 0.01776313591003418, 0.017092607498168946, 0.017194303512573242, 0.017003328323364257, 0.017170143127441407, 0.01795305633544922, 0.017828832626342772, 0.01779097557067871, 0.017250303268432618, 0.017133567810058595, 0.017377279281616212, 0.01794047927856445, 0.017143327713012694, 0.016941535949707032, 0.017069503784179686, 0.016873023986816407, 0.01691267204284668, 0.016980703353881837, 0.017176576614379883, 0.016855039596557618, 0.016910335540771485, 0.01702911949157715, 0.016957439422607423, 0.016883295059204103, 0.016850719451904295, 0.016949888229370116, 0.01703856086730957, 0.016846879959106446, 0.017071968078613282, 0.017210271835327147, 0.017307647705078123, 0.017122880935668945, 0.017102848052978514, 0.01718726348876953, 0.01687318420410156, 0.016906496047973632, 0.016981504440307618, 0.017201663970947266, 0.01708598327636719, 0.01691257667541504, 0.017131839752197266, 0.017104000091552735, 0.0170578556060791, 0.016902080535888674, 0.016995199203491212, 0.017303552627563477, 0.01696767997741699, 0.016979936599731446, 0.01708812713623047, 0.017222047805786133, 0.017166080474853514, 0.017182592391967774, 0.01702911949157715, 0.0169781436920166, 0.016879167556762695, 0.016869951248168945, 0.016805728912353515, 0.017049472808837892, 0.0174902400970459, 0.017811264038085937, 0.01711123275756836, 0.017096607208251954, 0.016934112548828126, 0.01709129524230957, 0.01696169662475586, 0.017067743301391602, 0.01700182342529297, 0.0169051513671875, 0.016866687774658204, 0.019163232803344726, 0.017799072265625, 0.017244319915771484, 0.01669728088378906, 0.016480255126953124, 0.016453632354736326, 0.016504703521728517, 0.016608896255493163, 0.01659942436218262, 0.016492671966552734, 0.01639219284057617, 0.01640652847290039, 0.016430368423461916, 0.01635606384277344, 0.01654374313354492, 0.01642239952087402, 0.016810207366943358, 0.016607168197631837, 0.01649660873413086, 0.01666249656677246, 0.01671824073791504, 0.016508352279663085, 0.016453407287597657, 0.016501535415649415, 0.016571744918823243, 0.016437631607055664, 0.01643721580505371, 0.016569664001464843, 0.016703968048095704, 0.016475744247436523, 0.016669631958007813, 0.01789952087402344, 0.017153247833251953, 0.01656015968322754, 0.016595712661743166, 0.01685443115234375, 0.01656278419494629, 0.016500736236572267, 0.01645676803588867, 0.01632352066040039, 0.016465919494628906, 0.016359199523925783, 
0.016533727645874022, 0.01643929672241211, 0.01642073631286621, 0.01641484832763672, 0.016627712249755858, 0.01636966323852539, 0.016529407501220703, 0.016582656860351562, 0.016480255126953124, 0.016440959930419923, 0.016474496841430663, 0.016459775924682618, 0.017380447387695314, 0.01656515121459961, 0.016506879806518555, 0.01656399917602539, 0.01673561668395996, 0.016615776062011717, 0.016707071304321287, 0.016671743392944336, 0.01683612823486328, 0.01841004753112793, 0.01769808006286621, 0.01730838394165039, 0.0168222713470459, 0.016555200576782225, 0.016509759902954103, 0.016529375076293946, 0.01652720069885254, 0.01664224052429199, 0.01664156723022461, 0.016677343368530273, 0.0165928955078125, 0.016664575576782227, 0.016664575576782227, 0.016669824600219728, 0.01663680076599121, 0.016576351165771483, 0.01648361587524414, 0.016503488540649414, 0.01645136070251465, 0.016394655227661134, 0.01641164779663086, 0.01640732765197754, 0.016422719955444337, 0.016480672836303712, 0.016465919494628906, 0.01643110466003418, 0.016521215438842773, 0.01659859275817871, 0.016984031677246093, 0.016822399139404295, 0.01678505516052246, 0.01688137626647949, 0.01657881546020508, 0.016454368591308593, 0.016434431076049805, 0.016530176162719727, 0.0167587833404541, 0.016738304138183592, 0.01683456039428711, 0.01681612777709961, 0.01696870422363281, 0.01664102363586426, 0.016781312942504883, 0.016711679458618164, 0.016639999389648438, 0.016711679458618164, 0.016736255645751954, 0.016739936828613283, 0.01667487907409668, 0.01659529685974121, 0.016616800308227538, 0.016704160690307616, 0.016687103271484375, 0.016756479263305663, 0.01671603202819824, 0.01658576011657715, 0.016720863342285158, 0.016689151763916017, 0.016685056686401366, 0.016694911956787108, 0.0166808967590332, 0.01665273666381836]",tokens/s,59.11739757546015,, @@ -8573,7 +8573,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 150.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 132.12 MiB is free. Process 49018 has 14.61 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 21.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 150.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 132.12 MiB is free. Process 42871 has 14.61 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 21.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -8683,7 +8683,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 42.12 MiB is free. Process 29517 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 1.64 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 42.12 MiB is free. Process 23952 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 1.64 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -8792,7 +8792,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 12.12 MiB is free. Process 90460 has 14.73 GiB memory in use. 
Of the allocated memory 12.32 GiB is allocated by PyTorch, and 2.30 GiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 12.12 MiB is free. Process 84047 has 14.73 GiB memory in use. Of the allocated memory 12.32 GiB is allocated by PyTorch, and 2.30 GiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.214592,1326.383104,0.0,931.135488,917.648384,s,1,7.24930126953125,7.24930126953125,0.0,7.24930126953125,7.24930126953125,7.24930126953125,7.24930126953125,[7.24930126953125],,kWh,9.300030833340618e-06,1.0187963388527686e-06,3.1194469399947744e-06,1.343827411218816e-05,,MB,1148.239872,1458.50368,0.0,1050.673152,1018.330112,s,10,0.17473472023010256,0.017473472023010257,0.00032826587233633533,0.017637968063354492,0.017802262306213377,0.017810283184051513,0.01781669988632202,"[0.01692780876159668, 0.01697609519958496, 0.017071264266967773, 0.017644447326660158, 0.017800479888916015, 0.017631488800048827, 0.017719167709350586, 0.017500288009643556, 0.01781830406188965, 0.017645376205444336]",tokens/s,14650.780317894565,kWh,5.129729545540996e-07,5.6536604880475513e-08,3.414330216842004e-07,9.109425811187756e-07,tokens/kWh,281027591.97576773,MB,1176.51456,1475.280896,0.0,1067.450368,1033.282048,s,10,10.554966979980469,1.055496697998047,0.019562259450065147,1.0613449096679688,1.0746231079101562,1.076492706298828,1.0779883850097656,"[1.0216134643554688, 1.019472412109375, 1.0551478271484376, 1.0742076416015625, 1.071249755859375, 1.0658709716796875, 1.06161083984375, 1.046352783203125, 1.0783623046875, 1.0610789794921875]",tokens/s,59.68753869101784,kWh,2.952359973044379e-05,3.255993862737637e-06,1.4851884688515072e-05,4.7631478281696486e-05,tokens/kWh,1322654.7290305123,,s,630,10.548605572700493,0.016743818369365872,0.0004883465879551693,0.016839056015014647,0.017199705696105957,0.017319140911102295,0.017867099857330326,"[0.015735648155212403, 0.016029695510864257, 0.0160328311920166, 0.01604243278503418, 0.015974464416503905, 0.016023296356201172, 0.016036544799804688, 0.017674495697021484, 0.016321632385253908, 0.016054943084716798, 0.016043455123901366, 0.016112192153930664, 0.01601740837097168, 0.016089088439941408, 0.016088607788085938, 0.016128032684326173, 
0.01617145538330078, 0.016051200866699217, 0.01614463996887207, 0.016037664413452148, 0.016076959609985352, 0.01610016059875488, 0.016183296203613282, 0.01608697509765625, 0.016150592803955078, 0.016082239151000977, 0.01610406494140625, 0.016195648193359374, 0.016128000259399415, 0.016496639251708984, 0.01596726417541504, 0.016001535415649415, 0.016041439056396486, 0.016153600692749022, 0.01612953567504883, 0.016041696548461912, 0.016371679306030274, 0.016505727767944334, 0.016426944732666017, 0.016332799911499024, 0.016385791778564453, 0.016278976440429686, 0.016296768188476564, 0.01621401596069336, 0.016183296203613282, 0.016060415267944335, 0.01600223922729492, 0.015999808311462402, 0.016017055511474608, 0.016078624725341797, 0.016155199050903322, 0.015984095573425294, 0.01602409553527832, 0.016262208938598633, 0.016014272689819337, 0.01603379249572754, 0.01602681541442871, 0.016895872116088867, 0.0175031681060791, 0.01760665512084961, 0.016152576446533205, 0.016281183242797852, 0.01628758430480957, 0.015768608093261718, 0.015981535911560058, 0.015935487747192383, 0.01598591995239258, 0.01641753578186035, 0.016683008193969725, 0.016441408157348632, 0.016324256896972655, 0.016214303970336914, 0.016297536849975584, 0.016320415496826172, 0.016322816848754883, 0.016174976348876952, 0.016126495361328125, 0.016121248245239257, 0.016017887115478517, 0.016273344039916992, 0.016175167083740234, 0.016039039611816405, 0.01624153518676758, 0.016107519149780272, 0.016018463134765626, 0.01605116844177246, 0.016006816864013673, 0.016021856307983397, 0.017209344863891602, 0.01609884834289551, 0.016044448852539063, 0.01607686424255371, 0.016088096618652344, 0.01603843116760254, 0.01605062484741211, 0.01605788803100586, 0.016019264221191407, 0.016368127822875975, 0.016300191879272462, 0.016652288436889647, 0.016696672439575195, 0.01655695915222168, 0.016360864639282227, 0.01609539222717285, 0.016152767181396483, 0.016205087661743164, 0.015977184295654298, 0.016004831314086913, 0.015994303703308104, 0.016048896789550782, 0.016133567810058595, 0.016111679077148437, 0.016028255462646485, 0.016164735794067384, 0.016054048538208007, 0.016007104873657228, 0.01603830337524414, 0.016138240814208983, 0.016111616134643555, 0.016039743423461914, 0.01643846321105957, 0.016089696884155274, 0.01625904083251953, 0.01607651138305664, 0.016032480239868165, 0.016090208053588868, 0.0157260799407959, 0.015909055709838867, 0.016009536743164063, 0.016273408889770507, 0.016209312438964844, 0.016144256591796875, 0.016095968246459962, 0.0160296630859375, 0.016172607421875, 0.016134431838989258, 0.0161112003326416, 0.016074783325195314, 0.016102079391479493, 0.0160515193939209, 0.016048704147338867, 0.016150527954101563, 0.016072704315185548, 0.016225791931152343, 0.0160317440032959, 0.01606447982788086, 0.016022048950195312, 0.01619055938720703, 0.016560415267944335, 0.016674720764160156, 0.017380191802978517, 0.017051519393920897, 0.01705743980407715, 0.017076095581054686, 0.01695996856689453, 0.017227327346801758, 0.017111488342285156, 0.017641471862792968, 0.01706972885131836, 0.0172445125579834, 0.017063936233520507, 0.01726438331604004, 0.01700057601928711, 0.01697385597229004, 0.017112159729003908, 0.01703424072265625, 0.016903999328613282, 0.016742591857910157, 0.01728102493286133, 0.016887807846069337, 0.01721958351135254, 0.017137664794921875, 0.016859136581420898, 0.01696076774597168, 0.01703785514831543, 0.017029056549072264, 0.017166624069213866, 0.017133567810058595, 0.017176607131958007, 0.017237535476684572, 
0.017308095932006835, 0.01717612838745117, 0.017160640716552735, 0.01717625617980957, 0.016970048904418944, 0.01680179214477539, 0.017479679107666016, 0.017108991622924806, 0.01713961601257324, 0.01660412788391113, 0.01707414436340332, 0.01686960029602051, 0.016716960906982423, 0.0169007682800293, 0.016889856338500975, 0.016732160568237304, 0.01656559944152832, 0.01684342384338379, 0.016699392318725585, 0.01659699249267578, 0.016658143997192384, 0.01705603218078613, 0.016842815399169923, 0.01669865608215332, 0.016691583633422852, 0.016787839889526368, 0.01712268829345703, 0.01745952033996582, 0.01720137596130371, 0.017499391555786132, 0.019490848541259764, 0.018039199829101564, 0.017139936447143556, 0.016836511611938478, 0.01686137580871582, 0.016791263580322267, 0.016924224853515624, 0.01713385581970215, 0.017101247787475585, 0.016881023406982422, 0.017132192611694335, 0.017317855834960937, 0.016922399520874022, 0.01707766342163086, 0.016802623748779298, 0.017004512786865236, 0.01697932815551758, 0.016927391052246093, 0.016970848083496092, 0.017133600234985353, 0.017019392013549805, 0.01694963264465332, 0.01701888084411621, 0.01689616012573242, 0.016933984756469726, 0.017889919281005858, 0.016887935638427734, 0.017308832168579102, 0.017179519653320312, 0.017304832458496094, 0.01675075149536133, 0.017304128646850585, 0.017059743881225584, 0.017016927719116212, 0.017035263061523438, 0.016920576095581053, 0.01691788864135742, 0.017207679748535157, 0.01695155143737793, 0.01704960060119629, 0.017016000747680664, 0.016941343307495117, 0.01683008003234863, 0.016934688568115235, 0.017101343154907227, 0.017596063613891603, 0.017020448684692383, 0.01705068778991699, 0.01714156723022461, 0.017049407958984374, 0.017006175994873047, 0.01715827178955078, 0.01697849655151367, 0.0171560001373291, 0.017129472732543945, 0.017449184417724608, 0.0171428165435791, 0.01698508834838867, 0.017168127059936523, 0.01710470390319824, 0.01690812873840332, 0.017310047149658205, 0.016863231658935548, 0.016689151763916017, 0.016855039596557618, 0.017303552627563477, 0.01717043113708496, 0.017145856857299805, 0.01717180824279785, 0.017076896667480468, 0.016988256454467773, 0.01695120048522949, 0.01707241630554199, 0.016762239456176758, 0.01715439987182617, 0.016772512435913087, 0.016833120346069336, 0.017077312469482422, 0.016853952407836915, 0.01687548828125, 0.016947328567504884, 0.016959327697753906, 0.017223743438720702, 0.017158432006835936, 0.017413856506347657, 0.016990207672119142, 0.016986112594604492, 0.016949247360229493, 0.01661948776245117, 0.01665827178955078, 0.016977407455444335, 0.01674934387207031, 0.016746400833129883, 0.016785408020019533, 0.016855039596557618, 0.01681203269958496, 0.01682841682434082, 0.017004831314086914, 0.017100511550903322, 0.016942975997924804, 0.016630975723266602, 0.01633523178100586, 0.0169451847076416, 0.017103391647338866, 0.01703340721130371, 0.016616800308227538, 0.016629663467407227, 0.016738496780395507, 0.016527103424072265, 0.016677343368530273, 0.016856704711914063, 0.017048383712768556, 0.01727065658569336, 0.016943136215209962, 0.016651872634887696, 0.01661292839050293, 0.01673616027832031, 0.01655072021484375, 0.016924736022949218, 0.01669126319885254, 0.016363519668579102, 0.016007104873657228, 0.016673887252807617, 0.017052223205566406, 0.017199520111083985, 0.016957504272460937, 0.016935935974121095, 0.01691334342956543, 0.016670719146728515, 0.016515071868896485, 0.016594112396240233, 0.016783327102661134, 0.0168723201751709, 0.01683987236022949, 
0.016950368881225586, 0.016844480514526368, 0.016918527603149415, 0.01683046340942383, 0.016702911376953126, 0.016877695083618163, 0.01683705520629883, 0.016821535110473632, 0.017025760650634766, 0.01735856056213379, 0.017192960739135742, 0.017168031692504884, 0.016917119979858397, 0.017004032135009766, 0.016648544311523437, 0.01672412872314453, 0.016930240631103516, 0.01694927978515625, 0.01683305549621582, 0.01737107276916504, 0.01671993637084961, 0.016801055908203126, 0.01647177505493164, 0.01691766357421875, 0.016973087310791016, 0.016917055130004882, 0.016871423721313478, 0.01695088005065918, 0.019883712768554686, 0.017387840270996095, 0.017113664627075195, 0.017219423294067383, 0.017301055908203126, 0.016910783767700194, 0.016424800872802733, 0.016643360137939454, 0.016883392333984375, 0.017431936264038085, 0.017148672103881837, 0.016998176574707032, 0.016732160568237304, 0.01664009666442871, 0.017137664794921875, 0.016626848220825195, 0.016675615310668947, 0.016680288314819335, 0.016858112335205077, 0.01693462371826172, 0.016982015609741212, 0.016744447708129884, 0.016644096374511717, 0.01702911949157715, 0.01699260711669922, 0.016887584686279298, 0.017104448318481444, 0.016875839233398436, 0.016975360870361327, 0.016861919403076173, 0.01681545639038086, 0.01687366485595703, 0.01711568069458008, 0.016803455352783204, 0.016765024185180662, 0.01676255989074707, 0.01688960075378418, 0.016937311172485352, 0.016838239669799804, 0.016856767654418944, 0.016697568893432616, 0.016454368591308593, 0.016539264678955078, 0.01675916862487793, 0.016584447860717774, 0.01634124755859375, 0.016725120544433595, 0.01714441680908203, 0.017104639053344726, 0.016986879348754882, 0.016903968811035158, 0.016805919647216797, 0.01687548828125, 0.016955392837524414, 0.016846975326538085, 0.016885631561279296, 0.01697587203979492, 0.01686083221435547, 0.016890207290649415, 0.016873472213745116, 0.01659699249267578, 0.016670400619506837, 0.016777151107788085, 0.016510847091674805, 0.01657907295227051, 0.0163492488861084, 0.01676691246032715, 0.018147327423095702, 0.01682431983947754, 0.016661792755126952, 0.01697455978393555, 0.016766016006469726, 0.016810815811157228, 0.016449151992797853, 0.016224544525146486, 0.016248128890991212, 0.01618217658996582, 0.016162080764770506, 0.016253664016723634, 0.016476415634155275, 0.01689356803894043, 0.016647359848022462, 0.0163624324798584, 0.01617100715637207, 0.016174240112304686, 0.016190303802490234, 0.016154624938964843, 0.016670719146728515, 0.016717824935913086, 0.016862464904785157, 0.016742719650268554, 0.016726463317871094, 0.016447488784790038, 0.016321760177612304, 0.016083711624145507, 0.01606768035888672, 0.016177951812744142, 0.0162653751373291, 0.016338815689086916, 0.01617897605895996, 0.016385759353637695, 0.01608768081665039, 0.01601068878173828, 0.01624940872192383, 0.016044031143188475, 0.016022880554199218, 0.016165536880493166, 0.016166431427001953, 0.016058847427368163, 0.016106527328491212, 0.015979616165161133, 0.016150400161743163, 0.01598259162902832, 0.01642300796508789, 0.0165579833984375, 0.01741823959350586, 0.01635103988647461, 0.016734399795532227, 0.01706710433959961, 0.017089439392089845, 0.016941247940063478, 0.01781123161315918, 0.020127071380615234, 0.017105855941772462, 0.017173728942871093, 0.016986848831176758, 0.017149728775024416, 0.01705561637878418, 0.017591615676879883, 0.017134687423706055, 0.017103904724121093, 0.0170133113861084, 0.01682044792175293, 0.017222911834716796, 0.017117727279663087, 0.01699430465698242, 
0.01697590446472168, 0.01714787292480469, 0.01700399971008301, 0.01722217559814453, 0.01730294418334961, 0.017320192337036133, 0.017471839904785156, 0.017056896209716798, 0.01705459213256836, 0.01696767997741699, 0.01697567939758301, 0.017123199462890624, 0.017099071502685546, 0.017098751068115235, 0.01721139144897461, 0.017139360427856444, 0.01708847999572754, 0.017071935653686525, 0.017068607330322266, 0.017131519317626954, 0.017242271423339842, 0.01707811164855957, 0.017257984161376954, 0.01709926414489746, 0.01710652732849121, 0.01733286476135254, 0.017198879241943358, 0.017238016128540038, 0.01705513572692871, 0.01700271987915039, 0.016953344345092772, 0.017184160232543946, 0.017066976547241212, 0.017039039611816405, 0.017090879440307617, 0.0175916805267334, 0.016969600677490235, 0.016978015899658205, 0.01715852737426758, 0.01703753662109375, 0.017006784439086913, 0.01703327941894531, 0.01719043159484863, 0.017170400619506837, 0.017434944152832033, 0.017268735885620116, 0.017196928024291992, 0.017165760040283203, 0.017553951263427733, 0.01718492889404297, 0.01679302406311035, 0.016891456604003905, 0.016806848526000978, 0.01701091194152832, 0.017106880187988283, 0.01687558364868164, 0.01675257682800293, 0.016844736099243165, 0.017045408248901366, 0.0165513916015625, 0.016777759552001954, 0.017000064849853516, 0.01718729591369629, 0.0173156795501709, 0.017073568344116212, 0.01687558364868164, 0.016775775909423828, 0.01679545593261719, 0.01692185592651367, 0.017187776565551757, 0.01690118408203125, 0.01694611167907715, 0.016781343460083007, 0.016730112075805666, 0.016586143493652342, 0.016627328872680664, 0.016755264282226564, 0.016714111328125, 0.01661952018737793, 0.01662156867980957, 0.01682784080505371, 0.017351232528686523, 0.017133663177490235, 0.01688675117492676, 0.016792768478393554, 0.01682815933227539, 0.01820240020751953, 0.017045728683471678, 0.016780799865722656, 0.016765439987182617, 0.016857311248779296, 0.016879295349121092, 0.016852575302124022, 0.016908191680908204, 0.016750688552856444, 0.016695327758789062, 0.01670569610595703, 0.01668947219848633, 0.01635327911376953, 0.01613804817199707, 0.0166627197265625, 0.017133567810058595, 0.017024896621704103, 0.016902496337890625, 0.016841535568237306, 0.01692720031738281, 0.01693084716796875, 0.017205024719238283, 0.01684883117675781, 0.016841215133666994, 0.01676313591003418, 0.016742399215698242, 0.01658470344543457, 0.016695295333862305, 0.01664204788208008, 0.016582656860351562, 0.016711679458618164, 0.016514688491821288, 0.01621343994140625, 0.016183935165405273, 0.016951616287231446, 0.017260671615600586]",tokens/s,59.723533661209466,, @@ -8838,7 +8838,7 @@ ChildProcessError: Traceback (most recent call last): self.w1 = nn.Parameter(torch.empty(moe_num_experts * ffn_hidden_size, hidden_size)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.17 GiB is free. Process 119445 has 13.57 GiB memory in use. Of the allocated memory 13.45 GiB is allocated by PyTorch, and 1.36 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. 
Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.17 GiB is free. Process 113176 has 13.57 GiB memory in use. Of the allocated memory 13.45 GiB is allocated by PyTorch, and 1.36 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -8873,7 +8873,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.16 GiB. GPU 0 has a total capacity of 14.74 GiB of which 774.12 MiB is free. Process 75465 has 13.98 GiB memory in use. Of the allocated memory 13.72 GiB is allocated by PyTorch, and 148.73 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.16 GiB. GPU 0 has a total capacity of 14.74 GiB of which 774.12 MiB is free. Process 68736 has 13.98 GiB memory in use. Of the allocated memory 13.72 GiB is allocated by PyTorch, and 148.73 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -8916,7 +8916,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 52.12 MiB is free. Process 93529 has 14.69 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 108.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 52.12 MiB is free. Process 87023 has 14.69 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 108.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -8992,7 +8992,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 182.12 MiB is free. Process 46054 has 14.56 GiB memory in use. 
Of the allocated memory 14.43 GiB is allocated by PyTorch, and 13.08 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 182.12 MiB is free. Process 39986 has 14.56 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 13.08 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -9035,7 +9035,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 200.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 164.12 MiB is free. Process 54034 has 14.58 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 4.94 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 200.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 164.12 MiB is free. Process 47749 has 14.58 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 4.94 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,814.727168,806.289408,0.0,411.041792,391.374848,s,1,7.34471484375,7.34471484375,0.0,7.34471484375,7.34471484375,7.34471484375,7.34471484375,[7.34471484375],,kWh,4.979785358329991e-06,5.420543576206471e-07,1.008334140004119e-06,6.530173855954757e-06,,MB,1157.623808,881.78688,0.0,473.956352,454.832128,s,18,0.17947494411468504,0.009970830228593614,0.00041730724994131976,0.009976736068725586,0.010160755348205566,0.010392897748947142,0.011255180959701535,"[0.010007264137268067, 0.009718848228454589, 0.01008521556854248, 0.010202688217163086, 0.009753952026367188, 0.009642751693725585, 0.009979840278625488, 0.009712639808654786, 0.009546496391296387, 0.009600768089294434, 0.009583616256713867, 0.010026559829711915, 0.009973631858825684, 0.010042240142822265, 0.009878175735473633, 0.010106719970703126, 0.011470751762390137, 0.010142784118652343]",tokens/s,25674.89307619159,kWh,2.869601984267479e-07,3.164608360747545e-08,1.79733564523107e-07,4.983398465573304e-07,tokens/kWh,513705660.4414013,MB,1186.168832,909.049856,0.0,501.219328,454.834688,s,18,10.204578491210937,0.5669210272894964,0.010994312838018253,0.5672750854492188,0.5786530029296875,0.5795169067382813,0.5801510864257813,"[0.5803096313476562, 0.5649026489257812, 0.5646107788085938, 0.5665227661132812, 0.5680274047851562, 0.57097021484375, 0.5630609741210938, 0.5465955810546875, 0.5445765380859375, 0.546088134765625, 0.5648844604492187, 0.5793770141601563, 0.5783427124023437, 0.5777233276367187, 0.5779256591796875, 0.5652088012695312, 0.57047607421875, 0.5749757690429688]",tokens/s,111.12658900871787,kWh,1.611264126638789e-05,1.7769285780013281e-06,7.406182998032452e-06,2.529575284242167e-05,tokens/kWh,2490536.6680507436,,s,1134,10.194637586593622,0.008989980235091384,0.0002737373905640035,0.009007359981536865,0.00931346254348755,0.009394726514816284,0.009587025899887087,"[0.009422847747802734, 0.009308064460754394, 0.009404512405395507, 0.009428159713745117, 0.009292608261108398, 0.00938361644744873, 0.009435456275939941, 0.009379743576049804, 0.009396320343017578, 0.009295647621154784, 0.009268896102905273, 0.009192000389099121, 0.009263104438781738, 0.009144319534301757, 0.009259008407592773, 0.009332736015319825, 0.009464159965515136, 0.009314208030700684, 0.009199359893798827, 0.009211903572082519, 0.009334783554077148, 0.009252863883972168, 0.009203488349914551, 0.009160672187805176, 0.00925705623626709, 0.009261216163635254, 0.009248671531677246, 0.009336928367614745, 0.009347295761108399, 0.009195296287536622, 0.009383935928344727, 0.009297599792480469, 0.00915443229675293, 0.009131775856018067, 0.00902233600616455, 0.009092960357666015, 0.00908022403717041, 
0.009120320320129395, 0.009250240325927734, 0.009259584426879883, 0.009268608093261719, 0.009378432273864746, 0.009406463623046875, 0.009397695541381836, 0.009359840393066407, 0.009346367835998536, 0.00921455955505371, 0.009047391891479493, 0.008957056045532227, 0.008928383827209473, 0.008945247650146485, 0.009150527954101562, 0.008868800163269044, 0.008819968223571777, 0.008755776405334472, 0.008980287551879882, 0.009119808197021484, 0.00893779182434082, 0.009252863883972168, 0.009072575569152832, 0.009037887573242188, 0.00888764762878418, 0.008878751754760742, 0.00851417636871338, 0.008738816261291504, 0.008873984336853028, 0.008887583732604981, 0.008769439697265626, 0.008719200134277344, 0.008769503593444824, 0.00890675163269043, 0.008815711975097656, 0.008790528297424317, 0.008802720069885254, 0.008859199523925781, 0.008688063621520995, 0.00896992015838623, 0.009326911926269532, 0.009486335754394531, 0.009266655921936035, 0.00935968017578125, 0.00910051155090332, 0.00890777587890625, 0.00890006446838379, 0.008946208000183106, 0.008869888305664063, 0.008855839729309083, 0.008856448173522949, 0.008761823654174805, 0.008763039588928222, 0.00882051181793213, 0.008808480262756348, 0.008811424255371094, 0.008935423851013183, 0.009224287986755371, 0.009137503623962403, 0.009251392364501954, 0.009261152267456055, 0.009158464431762695, 0.0088472318649292, 0.008841440200805665, 0.008857407569885255, 0.008853280067443847, 0.00895631980895996, 0.00889241600036621, 0.008771871566772461, 0.008901663780212402, 0.00903222370147705, 0.009186944007873536, 0.008790559768676758, 0.008695808410644532, 0.008619647979736328, 0.008677696228027344, 0.008582655906677245, 0.009029472351074219, 0.009464832305908203, 0.009360383987426758, 0.009278335571289063, 0.009143808364868163, 0.00928598403930664, 0.008988672256469727, 0.009065567970275879, 0.009099712371826172, 0.00906611156463623, 0.009079551696777344, 0.009041248321533203, 0.009126879692077637, 0.009217568397521973, 0.009429696083068848, 0.009115424156188965, 0.009019583702087402, 0.00899401569366455, 0.008919648170471191, 0.008761343955993652, 0.008764863967895508, 0.008771295547485352, 0.008888319969177246, 0.008825695991516114, 0.008867839813232421, 0.008828831672668456, 0.008788064002990722, 0.008734047889709473, 0.008690336227416991, 0.008882176399230958, 0.008814592361450196, 0.008681471824645997, 0.008668928146362305, 0.008591775894165038, 0.008654687881469727, 0.008755200386047364, 0.009107711791992188, 0.009412351608276368, 0.009325823783874511, 0.009179648399353027, 0.009319680213928223, 0.009153023719787597, 0.009011712074279785, 0.008804351806640624, 0.008818752288818359, 0.008839167594909669, 0.008951744079589844, 0.008853471755981446, 0.00882652759552002, 0.008806912422180176, 0.008750975608825684, 0.008720383644104004, 0.00908083152770996, 0.008998175621032715, 0.008946399688720703, 0.008967840194702148, 0.009050463676452637, 0.0090862398147583, 0.0090447998046875, 0.008990912437438965, 0.009237567901611328, 0.009147040367126465, 0.00903708839416504, 0.008970303535461426, 0.009054688453674317, 0.009038335800170898, 0.009017024040222168, 0.009053631782531739, 0.00890937614440918, 0.008950048446655274, 0.00892467212677002, 0.008765151977539062, 0.00876540756225586, 0.00897439956665039, 0.009310751914978028, 0.009320096015930176, 0.009319135665893555, 0.009262944221496582, 0.009048128128051757, 0.008931008338928223, 0.009074175834655761, 0.00903446388244629, 0.008959296226501465, 0.008919520378112793, 0.008843487739562988, 0.008808639526367188, 
0.008832832336425782, 0.008928832054138183, 0.00927519989013672, 0.009138879776000976, 0.008984512329101562, 0.008951871871948243, 0.008849344253540038, 0.008914943695068359, 0.008779135704040528, 0.00875331211090088, 0.008791520118713378, 0.008985152244567871, 0.008927680015563965, 0.008838303565979004, 0.008712512016296386, 0.008669599533081055, 0.008783391952514649, 0.008824895858764649, 0.008748831748962403, 0.008648960113525391, 0.00857369613647461, 0.008547167778015136, 0.008759488105773925, 0.008836959838867188, 0.009403264045715332, 0.009566207885742188, 0.009387264251708985, 0.009312864303588866, 0.009058655738830567, 0.008939328193664551, 0.009005311965942384, 0.009016160011291505, 0.008985088348388673, 0.008960639953613281, 0.008843168258666993, 0.009076607704162598, 0.008971551895141601, 0.009207615852355957, 0.009349023818969727, 0.009083392143249512, 0.009055007934570312, 0.009034815788269044, 0.009033632278442384, 0.00913702392578125, 0.0091278076171875, 0.009023327827453614, 0.009005215644836426, 0.009001055717468261, 0.009027487754821777, 0.00901910400390625, 0.008923423767089845, 0.008877344131469726, 0.008674719810485839, 0.00886025619506836, 0.00876364803314209, 0.00872217559814453, 0.00871014404296875, 0.008661151885986328, 0.008626015663146972, 0.0086080961227417, 0.00921987247467041, 0.009463680267333985, 0.009400383949279785, 0.00933071994781494, 0.009267040252685547, 0.00900102424621582, 0.009107456207275391, 0.009108672142028809, 0.009007136344909667, 0.008989695549011231, 0.009013119697570801, 0.008863648414611817, 0.008843263626098634, 0.008810336112976075, 0.009181216239929199, 0.009477472305297852, 0.009083680152893066, 0.008939519882202148, 0.008826784133911133, 0.008931424140930176, 0.008957951545715333, 0.009009152412414552, 0.008838399887084961, 0.008782272338867187, 0.008895808219909668, 0.00899772834777832, 0.009034048080444336, 0.008853119850158691, 0.008792384147644043, 0.008717311859130859, 0.00887833595275879, 0.008987584114074708, 0.009283295631408691, 0.008988672256469727, 0.009211135864257812, 0.009054783821105958, 0.009128128051757813, 0.009234687805175782, 0.009381407737731934, 0.009455615997314454, 0.00926534366607666, 0.00926681613922119, 0.00925654411315918, 0.00926585578918457, 0.009109951972961427, 0.009011936187744141, 0.008954560279846192, 0.008942943572998047, 0.008971199989318848, 0.008996864318847657, 0.008897695541381836, 0.008835647583007812, 0.009015071868896484, 0.008942303657531738, 0.008836992263793945, 0.008502176284790039, 0.00880784034729004, 0.009112064361572265, 0.010286944389343262, 0.010262528419494628, 0.010143744468688964, 0.009098943710327148, 0.0089965763092041, 0.009091903686523437, 0.008937248229980469, 0.008914943695068359, 0.008955904006958008, 0.009111488342285156, 0.00916431999206543, 0.00895798397064209, 0.008710783958435058, 0.008710016250610351, 0.008727999687194824, 0.009170880317687988, 0.009484928131103515, 0.009469311714172363, 0.00938976001739502, 0.009464832305908203, 0.009243583679199219, 0.009184479713439941, 0.009045568466186524, 0.008847583770751953, 0.008765439987182617, 0.008784992218017578, 0.008778656005859375, 0.009676223754882812, 0.008823712348937989, 0.008779328346252441, 0.008757344245910645, 0.008964096069335938, 0.008978336334228516, 0.00904412841796875, 0.008837056159973145, 0.008766528129577636, 0.00904854393005371, 0.009265536308288575, 0.009134176254272462, 0.008972384452819825, 0.008789919853210449, 0.008705663681030273, 0.008704000473022461, 0.008914336204528809, 
0.008825632095336914, 0.00870969581604004, 0.009134112358093261, 0.009363840103149414, 0.009013471603393554, 0.00879747200012207, 0.008737504005432129, 0.008683072090148927, 0.008843711853027344, 0.008763615608215331, 0.008873760223388672, 0.00910927963256836, 0.009344896316528321, 0.009257311820983887, 0.009274623870849609, 0.009327391624450683, 0.009162752151489258, 0.009096416473388672, 0.009007583618164063, 0.008894335746765137, 0.009071040153503417, 0.008959456443786621, 0.008854047775268555, 0.008851455688476563, 0.008901984214782714, 0.008896448135375977, 0.008819616317749024, 0.00894979190826416, 0.009021023750305175, 0.008957119941711425, 0.008804800033569336, 0.008788800239562989, 0.00903551959991455, 0.00921737575531006, 0.009126655578613281, 0.008935327529907227, 0.008725760459899902, 0.00873532772064209, 0.008755647659301757, 0.008812159538269042, 0.008861215591430664, 0.008902432441711426, 0.00890713596343994, 0.009146783828735352, 0.009086879730224609, 0.008912863731384278, 0.008903967857360839, 0.008766528129577636, 0.008671008110046386, 0.00861184024810791, 0.00863980770111084, 0.008569631576538086, 0.00886070442199707, 0.009313216209411621, 0.009289664268493652, 0.009326047897338867, 0.009230879783630372, 0.009174495697021485, 0.009066975593566895, 0.00911571216583252, 0.009011167526245117, 0.00894547176361084, 0.008866016387939452, 0.009027584075927735, 0.009191424369812011, 0.008917056083679199, 0.009504575729370118, 0.008814944267272948, 0.009017024040222168, 0.008899968147277832, 0.008815327644348144, 0.008678943634033202, 0.008673184394836426, 0.008649215698242188, 0.008609184265136719, 0.008622752189636231, 0.008914943695068359, 0.008976736068725587, 0.008713983535766601, 0.0084399995803833, 0.008846783638000488, 0.008683199882507325, 0.00888492774963379, 0.009137248039245606, 0.008833600044250487, 0.008669535636901855, 0.00867033576965332, 0.0086496000289917, 0.00858521556854248, 0.008775679588317872, 0.009214303970336913, 0.009157535552978515, 0.009073408126831054, 0.008957951545715333, 0.008790016174316406, 0.008712191581726075, 0.008640512466430664, 0.008560640335083008, 0.008566783905029298, 0.008581119537353516, 0.008531968116760253, 0.008566816329956055, 0.008597472190856933, 0.008597503662109375, 0.008617919921875, 0.008689855575561524, 0.00869331169128418, 0.008634143829345704, 0.008651295661926269, 0.008622079849243165, 0.0086179838180542, 0.008585344314575196, 0.008595552444458008, 0.008572128295898438, 0.008567359924316406, 0.008527968406677246, 0.008525983810424805, 0.008588095664978028, 0.008575743675231933, 0.008616127967834473, 0.00857692813873291, 0.00858675193786621, 0.008563296318054199, 0.0086179838180542, 0.008631839752197265, 0.008663519859313965, 0.00860979175567627, 0.008643967628479004, 0.00870032024383545, 0.008775103569030761, 0.008703104019165039, 0.008633088111877442, 0.00859763240814209, 0.008582176208496094, 0.00859216022491455, 0.008590304374694824, 0.008623711585998535, 0.008564767837524414, 0.008644672393798828, 0.00858348846435547, 0.00861184024810791, 0.0086364164352417, 0.008398847579956055, 0.00860159969329834, 0.008550111770629882, 0.008577312469482422, 0.008558367729187012, 0.008532256126403808, 0.009459648132324219, 0.00900924777984619, 0.008589119911193847, 0.00858448028564453, 0.00859382438659668, 0.008636832237243652, 0.008598784446716308, 0.008643424034118653, 0.008590880393981934, 0.008595135688781739, 0.008642784118652344, 0.008550880432128906, 0.008589311599731446, 0.008612095832824707, 0.00861353588104248, 
0.008579520225524903, 0.008545184135437011, 0.008555264472961425, 0.00856287956237793, 0.00857596778869629, 0.0085696964263916, 0.008665056228637695, 0.008636608123779296, 0.00860979175567627, 0.008585056304931641, 0.008670495986938477, 0.00863920021057129, 0.00862822437286377, 0.008589311599731446, 0.008703680038452148, 0.008577343940734863, 0.00857907199859619, 0.008556096076965333, 0.008581503868103028, 0.008525888442993164, 0.008572064399719238, 0.008581983566284179, 0.00858521556854248, 0.008643808364868165, 0.008577759742736817, 0.00858937644958496, 0.008519359588623047, 0.00855686378479004, 0.00855395221710205, 0.00854032039642334, 0.008620736122131347, 0.00857260799407959, 0.008580767631530761, 0.00856611156463623, 0.008647808074951172, 0.008707967758178711, 0.00883670425415039, 0.00937548828125, 0.009038496017456054, 0.008652799606323243, 0.00865180778503418, 0.008678367614746094, 0.008376192092895508, 0.008861503601074218, 0.0086844482421875, 0.00865187168121338, 0.008612256050109863, 0.00864633560180664, 0.008617759704589844, 0.008598336219787598, 0.00860979175567627, 0.008586239814758301, 0.008668160438537598, 0.008673343658447265, 0.008638272285461426, 0.008638591766357421, 0.00876035213470459, 0.008713184356689453, 0.008666496276855468, 0.008595775604248047, 0.008659263610839844, 0.008584544181823731, 0.008516480445861816, 0.008599328041076661, 0.008568384170532227, 0.008576704025268554, 0.008551136016845703, 0.008550432205200196, 0.00856390380859375, 0.008579039573669433, 0.008586079597473144, 0.008553728103637696, 0.008575551986694336, 0.00854975986480713, 0.008563520431518555, 0.008598655700683593, 0.008561440467834473, 0.008597599983215331, 0.008559679985046387, 0.008812928199768066, 0.008642623901367188, 0.008591872215270996, 0.008665375709533692, 0.008644319534301758, 0.008584927558898926, 0.00858255958557129, 0.008607999801635742, 0.00854697608947754, 0.008582719802856445, 0.008526528358459472, 0.008595168113708496, 0.008566975593566895, 0.008633440017700195, 0.008639007568359376, 0.008658143997192383, 0.00862435245513916, 0.009238656044006349, 0.008744768142700195, 0.00947868824005127, 0.008918975830078125, 0.00959727954864502, 0.008677151679992676, 0.008705951690673829, 0.008650208473205566, 0.008610912322998047, 0.008331263542175293, 0.008650015830993653, 0.008618720054626465, 0.00861184024810791, 0.008541952133178711, 0.008634336471557617, 0.008565024375915527, 0.008632320404052735, 0.008566847801208496, 0.008578144073486327, 0.008632255554199218, 0.008622688293457031, 0.008595775604248047, 0.008675423622131348, 0.009055392265319824, 0.008632575988769531, 0.008565247535705567, 0.008553471565246582, 0.008535200119018555, 0.008619135856628417, 0.008602335929870605, 0.008598591804504395, 0.008813247680664063, 0.008614336013793945, 0.008636223793029784, 0.008753151893615722, 0.00876540756225586, 0.008884256362915038, 0.008919232368469239, 0.009077664375305175, 0.00897555160522461, 0.009048031806945802, 0.009080063819885255, 0.00912332820892334, 0.009152607917785644, 0.009193599700927734, 0.009163552284240723, 0.00924783992767334, 0.00936847972869873, 0.009426912307739259, 0.009250271797180175, 0.009281439781188965, 0.00919593620300293, 0.00928179168701172, 0.009256959915161133, 0.009115648269653321, 0.009066495895385742, 0.009183232307434081, 0.009204863548278808, 0.009538432121276856, 0.009218111991882324, 0.009287551879882812, 0.009266752243041992, 0.00923852825164795, 0.009458175659179687, 0.00924403190612793, 0.009149279594421387, 0.009277215957641602, 
0.009307711601257325, 0.009150912284851074, 0.009005375862121582, 0.009182656288146972, 0.00945321559906006, 0.008957440376281739, 0.00918393611907959, 0.009283391952514649, 0.009277440071105958, 0.009441280364990234, 0.009510911941528321, 0.009209407806396484, 0.00912656021118164, 0.009257920265197754, 0.009130111694335937, 0.009119872093200684, 0.009112159729003906, 0.009276576042175293, 0.00936569595336914, 0.00922486400604248, 0.009441280364990234, 0.009332032203674316, 0.009093855857849121, 0.009148256301879883, 0.009136159896850586, 0.009307552337646484, 0.009525952339172363, 0.009195520401000976, 0.00930611228942871, 0.009289376258850098, 0.009145824432373047, 0.009357536315917969, 0.009282015800476074, 0.009104864120483398, 0.009025792121887206, 0.008968671798706055, 0.008922783851623536, 0.00899728012084961, 0.009027520179748534, 0.009347071647644043, 0.009263104438781738, 0.009173055648803712, 0.009201375961303711, 0.009210047721862792, 0.00917033576965332, 0.009439231872558594, 0.00917363166809082, 0.009197567939758301, 0.009117695808410644, 0.009013152122497559, 0.008933216094970704, 0.00906265640258789, 0.009258111953735352, 0.009268095970153808, 0.009051872253417968, 0.009119392395019531, 0.009267871856689452, 0.009391424179077148, 0.009222816467285157, 0.009138175964355469, 0.009032928466796875, 0.008899359703063965, 0.0089619197845459, 0.00901318359375, 0.009321760177612305, 0.009202143669128419, 0.009134528160095214, 0.009093119621276855, 0.009160479545593262, 0.009395808219909667, 0.00939414405822754, 0.00942956829071045, 0.009278656005859375, 0.009175968170166016, 0.009295167922973633, 0.009155263900756836, 0.009117088317871093, 0.009155167579650878, 0.009135807991027832, 0.008974080085754394, 0.009061216354370117, 0.009042719841003417, 0.009159616470336914, 0.009211903572082519, 0.009179231643676757, 0.009021344184875489, 0.00921126365661621, 0.009240192413330078, 0.009115839958190918, 0.009140704154968262, 0.009046367645263672, 0.009009152412414552, 0.009046015739440917, 0.00902284812927246, 0.009222911834716798, 0.00928054428100586, 0.009145024299621583, 0.008966303825378419, 0.008888544082641602, 0.009129055976867676, 0.009749183654785156, 0.009058303833007812, 0.009029055595397949, 0.008978464126586915, 0.009183775901794434, 0.009389408111572265, 0.009273664474487305, 0.009257311820983887, 0.009101375579833985, 0.009131967544555664, 0.009244640350341797, 0.009254688262939454, 0.00913987159729004, 0.009032447814941407, 0.009056096076965331, 0.009089280128479003, 0.009209440231323243, 0.009207839965820313, 0.009100416183471679, 0.009217023849487305, 0.009158656120300293, 0.009101311683654785, 0.009410847663879394, 0.00915129566192627, 0.009130016326904297, 0.009076704025268554, 0.009030495643615723, 0.00920800018310547, 0.009336704254150391, 0.009328831672668457, 0.009301823616027832, 0.009228351593017579, 0.009265151977539063, 0.00925228786468506, 0.009192000389099121, 0.00925875186920166, 0.009253120422363282, 0.009245856285095215, 0.009231040000915527, 0.009146528244018555, 0.009101311683654785, 0.009125568389892579, 0.009016863822937012, 0.009034527778625488, 0.008976351737976075, 0.009101344108581542, 0.009154175758361817, 0.009046208381652833, 0.008980223655700684, 0.00907526397705078, 0.009183103561401368, 0.00923852825164795, 0.009213983535766601, 0.009176128387451172, 0.009247648239135741, 0.009352224349975586, 0.00919215965270996, 0.009021023750305175, 0.00894428825378418, 0.008900863647460937, 0.008962112426757813, 0.009155584335327148, 
0.009214655876159668, 0.009363615989685058, 0.009291616439819336, 0.00954918384552002, 0.009261695861816407, 0.009193632125854492, 0.009061504364013671, 0.009222816467285157, 0.009266528129577636, 0.009106143951416016, 0.009087072372436524, 0.009193375587463378, 0.009240575790405273, 0.009255135536193847, 0.009285280227661133, 0.009203840255737304, 0.009377792358398437, 0.009203712463378906, 0.009115455627441406, 0.009099136352539063, 0.009157024383544921, 0.009131551742553711, 0.009025919914245605, 0.009054207801818847, 0.009237919807434082, 0.009343839645385743, 0.009272704124450684, 0.009202048301696778, 0.009086976051330567, 0.008957951545715333, 0.00889583969116211, 0.008957951545715333, 0.00865328025817871, 0.009000960350036622, 0.00920576000213623, 0.009158143997192383, 0.009122271537780762, 0.008998944282531738, 0.009025535583496093, 0.009082015991210937, 0.009222208023071289, 0.008996831893920898, 0.009244768142700196, 0.009177824020385743, 0.009379839897155762, 0.009652223587036133, 0.009451519966125489, 0.009928704261779785, 0.009515263557434083, 0.009676223754882812, 0.00989568042755127, 0.009249183654785156, 0.009289024353027343, 0.00912179183959961, 0.009040736198425292, 0.008902655601501466, 0.008978464126586915, 0.008986207962036133, 0.009152671813964845, 0.00909670352935791, 0.009022175788879394, 0.008988096237182617, 0.009011775970458984, 0.009184288024902345, 0.009108448028564453, 0.009076800346374512, 0.009074624061584474, 0.008972064018249511, 0.00899839973449707, 0.008954591751098633, 0.009089216232299805, 0.00917689609527588, 0.00916703987121582, 0.009197183609008789, 0.009223936080932617, 0.009053855895996095, 0.0090283842086792, 0.008986528396606446, 0.008984031677246094, 0.009357760429382325, 0.00900268840789795, 0.009019519805908203, 0.008945343971252441, 0.009064352035522461, 0.009132896423339844, 0.00920195198059082, 0.009293760299682617, 0.009364831924438476, 0.009369983673095703, 0.009215999603271484, 0.009082176208496094, 0.00906719970703125, 0.009209856033325196, 0.009176480293273925, 0.009278047561645507, 0.009110048294067382, 0.009154335975646972, 0.008954079627990722, 0.008859647750854491, 0.0088722562789917, 0.009008831977844238, 0.009043264389038085, 0.008849920272827149, 0.008709792137145997, 0.00891977596282959, 0.009028639793395996, 0.00902019214630127, 0.009110527992248535, 0.009101344108581542, 0.008963040351867676, 0.008795807838439942, 0.008730976104736329, 0.008736767768859864, 0.008832032203674317, 0.008887264251708984, 0.008779104232788087, 0.00872105598449707, 0.009076064109802245, 0.00913475227355957, 0.008800224304199218, 0.008710176467895507, 0.008859647750854491, 0.008763392448425293, 0.008652480125427246, 0.008743231773376464, 0.008552063941955566, 0.008569215774536133, 0.008748448371887207, 0.009091839790344239, 0.009438464164733887, 0.009295583724975586, 0.009354111671447753, 0.009311264038085937, 0.009268192291259766, 0.009252896308898926, 0.009084511756896972, 0.008884639739990234, 0.008939488410949707, 0.008987775802612305, 0.008991583824157715, 0.00886406421661377, 0.008859359741210937, 0.008824288368225098, 0.008978976249694823, 0.009265151977539063, 0.00910147190093994, 0.009314144134521484, 0.008973983764648438, 0.00887782382965088, 0.008770367622375489, 0.008809696197509766, 0.008917280197143555, 0.00906668758392334, 0.009216095924377441, 0.009072256088256837, 0.008888704299926758, 0.008962047576904298, 0.00910540771484375, 0.010554143905639649, 0.009851903915405273, 0.009129983901977539, 0.009019455909729003, 
0.00912384033203125, 0.00912172794342041, 0.0090600004196167, 0.009004639625549317, 0.008944160461425781, 0.008859999656677246, 0.008738592147827148, 0.008697152137756348, 0.008683520317077637, 0.00878889560699463, 0.009366815567016601, 0.00941868782043457, 0.009364128112792969, 0.009420255661010742, 0.00931884765625, 0.009324511528015137, 0.009162303924560546, 0.009157183647155762, 0.009164959907531738, 0.009101152420043945, 0.009074048042297363, 0.009115519523620606, 0.008908576011657714, 0.008886431694030762, 0.00897862434387207, 0.009382399559020996, 0.009109791755676269, 0.008984416007995606, 0.008739904403686523, 0.008769536018371582, 0.008963007926940918, 0.009279616355895996, 0.009310432434082031, 0.009150112152099609, 0.009015104293823243, 0.009081024169921875, 0.009048064231872559, 0.008955904006958008, 0.00885865592956543, 0.008927424430847168, 0.008905856132507325, 0.008831999778747558, 0.00881935977935791, 0.009033727645874023, 0.008990495681762696, 0.00884115219116211, 0.00880668830871582, 0.008892640113830567, 0.008928863525390626, 0.008893792152404784, 0.00889737606048584, 0.008950816154479981, 0.00897532844543457, 0.008843263626098634, 0.008828448295593263, 0.008809056282043457, 0.008826751708984375, 0.008843263626098634, 0.009054400444030762, 0.009355775833129883, 0.009312735557556153, 0.00940236759185791, 0.009293824195861817, 0.009313568115234375, 0.009159296035766602, 0.009201760292053223, 0.00903551959991455, 0.009084671974182128, 0.00912656021118164, 0.008980511665344238, 0.009082688331604003, 0.008867456436157226, 0.008968576431274414, 0.009215935707092286, 0.009174400329589844, 0.00917363166809082, 0.009135199546813964, 0.009079775810241698, 0.009207807540893554, 0.009236543655395509, 0.009082816123962402, 0.008957311630249024, 0.008953503608703613, 0.009012191772460938, 0.008970239639282226, 0.009089088439941407, 0.009048031806945802, 0.009180928230285644, 0.009164992332458497, 0.009037535667419433, 0.009168607711791992, 0.00924947166442871, 0.009013152122497559, 0.00902137565612793, 0.00910547161102295, 0.009072287559509278, 0.008960576057434081, 0.009297663688659669, 0.009059647560119628, 0.008889056205749512, 0.008798208236694336, 0.008900351524353027, 0.009261311531066895, 0.009397567749023438, 0.00934291172027588, 0.009399040222167969, 0.009309503555297851, 0.009204128265380859, 0.009281760215759277, 0.009306367874145507, 0.009166432380676269, 0.009150752067565918, 0.00903264045715332, 0.008954879760742187, 0.00898252773284912, 0.008956064224243164, 0.008824671745300293, 0.00917244815826416, 0.009213824272155762, 0.009077216148376465, 0.008984319686889649, 0.008974944114685059]",tokens/s,111.23494978293853,, @@ -9079,7 +9079,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 52.12 MiB is free. Process 78441 has 14.69 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 108.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 52.12 MiB is free. Process 71980 has 14.69 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 108.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -9122,7 +9122,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 144.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 34.12 MiB is free. Process 149031 has 14.71 GiB memory in use. Of the allocated memory 14.59 GiB is allocated by PyTorch, and 1.69 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 144.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 34.12 MiB is free. Process 142457 has 14.71 GiB memory in use. Of the allocated memory 14.59 GiB is allocated by PyTorch, and 1.69 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,836.018176,4675.534848,0.0,4280.287232,4115.121152,s,1,8.0935927734375,8.0935927734375,0.0,8.0935927734375,8.0935927734375,8.0935927734375,8.0935927734375,[8.0935927734375],,kWh,1.0311723070882786e-05,1.130143968225839e-06,4.96389286000487e-06,1.6405759899113496e-05,,MB,1174.614016,4983.816192,0.0,4575.985664,4408.408064,s,10,0.4055550994873047,0.04055550994873047,0.0018906092084394035,0.04021809577941894,0.04169289970397949,0.04372158527374267,0.04534453372955322,"[0.04575027084350586, 0.04020310211181641, 0.040419647216796875, 0.03980409622192383, 0.04040787124633789, 0.03984611129760742, 0.04023308944702148, 0.039516319274902345, 0.04124208068847656, 0.03813251113891602]",tokens/s,6312.335865672272,kWh,1.4787861881734404e-06,1.6299069460646011e-07,9.893933279898385e-07,2.631170210769739e-06,tokens/kWh,97295111.86777543,MB,1203.552256,4983.816192,0.0,4575.985664,4408.410624,s,10,13.95576257324219,1.395576257324219,0.007290252780742853,1.3983818359375,1.4026697021484376,1.40456220703125,1.4060762109375,"[1.38884130859375, 1.4064547119140625, 1.39838720703125, 1.399352294921875, 1.39837646484375, 1.395798828125, 1.38310400390625, 1.38431884765625, 1.4022491455078125, 1.3988797607421875]",tokens/s,45.14264245279712,kWh,4.0314031168912836e-05,4.44631747783814e-06,2.6443989729811237e-05,7.120433837656222e-05,tokens/kWh,884777.5491828349,,s,630,13.953395492553726,0.022148246813577317,0.00035882815347562457,0.02210428810119629,0.022449370384216308,0.022625327491760255,0.023180296020507814,"[0.02265292739868164, 0.022458368301391602, 0.022116352081298828, 0.022278144836425783, 0.022360063552856444, 0.022321151733398437, 0.02226380729675293, 0.022046720504760742, 0.022244543075561524, 0.02204960060119629, 0.022237184524536133, 0.022209920883178712, 0.02222496032714844, 0.022226943969726562, 0.02204444885253906, 0.022170400619506835, 0.022382591247558595, 0.022267520904541014, 0.022343263626098633, 0.022112703323364256, 0.022094079971313477, 0.022316415786743163, 0.022466272354125977, 0.022445056915283205, 0.022089696884155272, 0.02208736038208008, 0.022132863998413087, 0.0219486083984375, 0.021932064056396486, 0.02191564750671387, 0.022009855270385743, 0.02188083267211914, 0.022001440048217774, 0.021975263595581055, 0.02200371170043945, 0.021939807891845704, 0.021969120025634767, 0.021907936096191405, 0.022162431716918944, 0.021955039978027342, 0.022098175048828127, 0.021875999450683595, 0.02200649642944336, 0.022117759704589842, 0.022196863174438478, 0.022147071838378905, 0.02216524887084961, 0.021815200805664063, 0.021801120758056642, 0.021684415817260744, 0.021716575622558593, 0.021722528457641603, 0.021619712829589844, 0.021985279083251954, 
0.02159539222717285, 0.021792863845825194, 0.021864511489868163, 0.021800928115844727, 0.021765888214111326, 0.021699455261230467, 0.021681247711181642, 0.021610719680786133, 0.021626815795898438, 0.022214208602905273, 0.022192960739135743, 0.02190889549255371, 0.022146879196166994, 0.021861120223999022, 0.021825536727905274, 0.02194384002685547, 0.02178233528137207, 0.02169923210144043, 0.021954559326171876, 0.022150943756103516, 0.022476287841796876, 0.022123008728027343, 0.02239094352722168, 0.02237392044067383, 0.02225974464416504, 0.02240768051147461, 0.022331552505493166, 0.022352863311767578, 0.022196992874145508, 0.02231513595581055, 0.022515520095825196, 0.022231231689453124, 0.022837247848510742, 0.022284095764160156, 0.022325439453125, 0.022168607711791993, 0.02208867263793945, 0.022433759689331055, 0.02254198455810547, 0.02267788887023926, 0.022437887191772463, 0.02244812774658203, 0.022417407989501953, 0.022109792709350585, 0.02219254493713379, 0.022196224212646484, 0.02241244888305664, 0.022254432678222656, 0.022194496154785158, 0.022791807174682616, 0.022763456344604492, 0.02220044708251953, 0.022527999877929687, 0.022257471084594728, 0.022603391647338867, 0.022628927230834962, 0.022449216842651366, 0.02219919967651367, 0.02423811149597168, 0.02319491195678711, 0.0222194881439209, 0.022235071182250977, 0.02227168083190918, 0.022226783752441408, 0.02242729568481445, 0.02226880073547363, 0.022152671813964842, 0.022128223419189453, 0.022122592926025392, 0.02219036865234375, 0.022274623870849608, 0.0221265926361084, 0.022955743789672852, 0.022495487213134765, 0.02235411262512207, 0.02233718490600586, 0.022134048461914062, 0.02235215950012207, 0.02209404754638672, 0.022074911117553712, 0.022256511688232422, 0.02217363166809082, 0.02226131248474121, 0.022092159271240235, 0.02210147285461426, 0.02214476776123047, 0.022090656280517578, 0.02216655921936035, 0.022069280624389648, 0.02239788818359375, 0.022607872009277344, 0.022380352020263672, 0.02234796714782715, 0.022147071838378905, 0.022273279190063475, 0.022620927810668944, 0.022237184524536133, 0.022084703445434572, 0.02207836723327637, 0.022306528091430664, 0.02234956741333008, 0.022004255294799803, 0.022070816040039062, 0.022214719772338867, 0.022249631881713867, 0.021958335876464844, 0.02227801513671875, 0.022790304183959963, 0.02210256004333496, 0.022212608337402344, 0.02211840057373047, 0.022161407470703123, 0.02230067253112793, 0.021988447189331056, 0.02203887939453125, 0.021936704635620117, 0.022228992462158204, 0.022093856811523437, 0.022091392517089845, 0.021924192428588868, 0.022145023345947267, 0.021914623260498048, 0.022135295867919923, 0.022516223907470705, 0.022202495574951173, 0.02207935905456543, 0.022138879776000975, 0.02202822494506836, 0.022083648681640623, 0.02217091178894043, 0.02201260757446289, 0.02203241539001465, 0.02202956771850586, 0.021977247238159178, 0.021924415588378907, 0.022384639739990234, 0.022173696517944336, 0.02276118469238281, 0.022388832092285156, 0.022042816162109374, 0.02222447967529297, 0.022032800674438476, 0.02247065544128418, 0.02209526443481445, 0.022046304702758788, 0.022004735946655272, 0.02225276756286621, 0.022020896911621093, 0.021859807968139647, 0.021769952774047852, 0.022174528121948242, 0.021968896865844727, 0.021977088928222657, 0.022476512908935545, 0.022429983139038087, 0.02226121520996094, 0.02219910430908203, 0.02221232032775879, 0.022001407623291017, 0.022300928115844727, 0.022155263900756835, 0.02197292709350586, 0.02204579162597656, 0.022340576171875, 
0.02244607925415039, 0.02197305679321289, 0.02193939208984375, 0.02189798355102539, 0.022087200164794922, 0.022317535400390626, 0.022386688232421875, 0.022001663208007814, 0.022437887191772463, 0.022466527938842774, 0.02238377571105957, 0.022278848648071288, 0.02201747131347656, 0.022186111450195313, 0.022233728408813477, 0.024420352935791017, 0.02215443229675293, 0.021907360076904296, 0.022097824096679687, 0.022471263885498048, 0.02209833526611328, 0.022478847503662108, 0.02235148811340332, 0.02224985694885254, 0.02223695945739746, 0.02208585548400879, 0.021927967071533203, 0.02222831916809082, 0.02212723159790039, 0.022094976425170897, 0.02211520004272461, 0.02207744026184082, 0.02208358383178711, 0.021825536727905274, 0.02224127960205078, 0.022147071838378905, 0.02266111946105957, 0.022267871856689454, 0.022077472686767578, 0.022180864334106445, 0.022019168853759766, 0.02209721565246582, 0.022114591598510744, 0.02209823989868164, 0.021999616622924805, 0.021958784103393556, 0.022101600646972655, 0.02198361587524414, 0.021899168014526366, 0.02199660873413086, 0.02198624038696289, 0.022955392837524412, 0.025219711303710937, 0.02224892807006836, 0.02210665512084961, 0.02208345603942871, 0.022214784622192382, 0.022169599533081053, 0.0220446720123291, 0.022003679275512694, 0.022013343811035157, 0.022015775680541992, 0.022168415069580078, 0.02205695915222168, 0.02176582336425781, 0.022004032135009767, 0.02287958335876465, 0.022302944183349608, 0.02210652732849121, 0.022126623153686523, 0.02214236831665039, 0.022057567596435547, 0.02209587287902832, 0.02208358383178711, 0.021893119812011717, 0.022255136489868165, 0.022125024795532227, 0.02214499282836914, 0.022173728942871094, 0.022066560745239258, 0.021936128616333008, 0.022063615798950196, 0.02314451217651367, 0.022220191955566407, 0.022147743225097657, 0.022246944427490235, 0.022104543685913088, 0.02239641571044922, 0.022266368865966796, 0.022054784774780272, 0.02197657585144043, 0.022050975799560547, 0.02203286361694336, 0.02230790328979492, 0.022092735290527344, 0.02200371170043945, 0.021759456634521484, 0.02286534309387207, 0.022354496002197265, 0.022920543670654298, 0.023920927047729492, 0.022344064712524415, 0.022279680252075194, 0.022114816665649413, 0.022081087112426758, 0.02207583999633789, 0.022041856765747072, 0.021721216201782228, 0.021979743957519532, 0.022044160842895507, 0.022157855987548828, 0.022300575256347658, 0.022467807769775392, 0.02224371147155762, 0.022094335556030274, 0.022091775894165038, 0.02204857635498047, 0.02233145523071289, 0.02224550437927246, 0.02212224006652832, 0.022069503784179687, 0.022219903945922853, 0.02267225646972656, 0.02209382438659668, 0.02209791946411133, 0.02211020851135254, 0.022171648025512695, 0.022589439392089843, 0.022351295471191406, 0.022153823852539063, 0.02206480026245117, 0.02210028839111328, 0.022345632553100587, 0.022088863372802733, 0.022074304580688476, 0.02187468719482422, 0.022122432708740234, 0.021950527191162108, 0.021807104110717773, 0.02168422317504883, 0.021946176528930664, 0.022071487426757814, 0.021727231979370116, 0.021753856658935547, 0.022136831283569337, 0.022475807189941407, 0.022106271743774414, 0.021881248474121092, 0.02194063949584961, 0.02214816093444824, 0.022157472610473634, 0.022190879821777344, 0.02186444854736328, 0.021999616622924805, 0.022095903396606446, 0.022104032516479494, 0.021796415328979492, 0.021633472442626953, 0.021941568374633787, 0.022097663879394533, 0.022413055419921876, 0.022702655792236327, 0.022215744018554688, 0.022393407821655272, 
0.021983135223388673, 0.022109760284423827, 0.022420352935791015, 0.022024192810058595, 0.02188287925720215, 0.021893119812011717, 0.021960704803466798, 0.022023616790771486, 0.02185273551940918, 0.021831424713134765, 0.02197491264343262, 0.022038911819458006, 0.022023359298706056, 0.02199193572998047, 0.022109983444213867, 0.022261632919311523, 0.022151872634887694, 0.02181100845336914, 0.021643423080444337, 0.021516000747680664, 0.02148748779296875, 0.02157814407348633, 0.021399551391601563, 0.021527999877929686, 0.02169913673400879, 0.021677152633666992, 0.021767072677612305, 0.021935840606689454, 0.021911840438842773, 0.021983232498168945, 0.022124544143676757, 0.02201580810546875, 0.021821632385253906, 0.0217509765625, 0.02183865547180176, 0.022749183654785156, 0.021745664596557617, 0.0216944637298584, 0.02175721549987793, 0.02197372817993164, 0.021790464401245116, 0.021740928649902343, 0.021808223724365236, 0.021792543411254882, 0.02185420799255371, 0.02175574493408203, 0.021702816009521484, 0.021829120635986327, 0.021846527099609374, 0.02174550437927246, 0.02152668762207031, 0.021618688583374023, 0.021932031631469725, 0.022018400192260743, 0.02196553611755371, 0.021905887603759767, 0.02292732810974121, 0.023066816329956056, 0.022391103744506837, 0.024497823715209963, 0.022268287658691405, 0.021917535781860353, 0.02193401527404785, 0.02182476806640625, 0.022027103424072266, 0.022128288269042968, 0.02196054458618164, 0.0218240966796875, 0.021904991149902343, 0.02193040084838867, 0.022007167816162108, 0.021792415618896485, 0.021776351928710937, 0.021895999908447265, 0.0220960636138916, 0.02186240005493164, 0.021778432846069336, 0.02168160057067871, 0.02223161506652832, 0.021812223434448243, 0.02182655906677246, 0.021938175201416017, 0.021977088928222657, 0.02212819290161133, 0.021956031799316406, 0.021969919204711915, 0.0219238395690918, 0.02207846450805664, 0.022145792007446288, 0.021777887344360352, 0.021979936599731444, 0.021780479431152345, 0.022159231185913085, 0.021931360244750977, 0.02187696075439453, 0.021791296005249025, 0.021910879135131837, 0.022008480072021483, 0.021704736709594726, 0.021678047180175783, 0.02209334373474121, 0.021869024276733397, 0.021898719787597658, 0.021963167190551757, 0.021907167434692384, 0.022018463134765624, 0.0221441593170166, 0.021826400756835937, 0.021841983795166015, 0.021935359954833984, 0.021893407821655272, 0.021764127731323243, 0.021703039169311523, 0.02173516845703125, 0.021833984375, 0.022009855270385743, 0.02224051284790039, 0.022037248611450195, 0.02202009582519531, 0.021827583312988282, 0.02209382438659668, 0.021747711181640626, 0.021984256744384766, 0.02233907127380371, 0.02180339241027832, 0.02180726432800293, 0.021835136413574218, 0.021851743698120117, 0.02184239959716797, 0.02196329689025879, 0.022013952255249023, 0.02207139205932617, 0.02195027160644531, 0.02209187126159668, 0.021962751388549806, 0.022005184173583985, 0.02206979179382324, 0.021921087265014648, 0.021682912826538087, 0.021587968826293946, 0.021702495574951172, 0.021874528884887695, 0.02190745544433594, 0.02185843276977539, 0.021823680877685547, 0.021884288787841797, 0.021966880798339843, 0.02204323196411133, 0.022189760208129884, 0.02222876739501953, 0.022569503784179688, 0.022642688751220705, 0.022420799255371094, 0.02242953681945801, 0.02245075225830078, 0.022410560607910156, 0.022309856414794924, 0.022280191421508787, 0.02229212760925293, 0.02250992012023926, 0.022355520248413085, 0.02242195129394531, 0.022523551940917968, 0.022483295440673828, 
0.02248294448852539, 0.022347776412963868, 0.022433792114257813, 0.022771711349487304, 0.022540288925170897, 0.023017471313476562, 0.025333696365356446, 0.02252364730834961, 0.022434112548828124, 0.022222848892211915, 0.02234377670288086, 0.022306175231933595, 0.02213532829284668, 0.022494207382202147, 0.0222873592376709, 0.022571008682250978, 0.02227609634399414, 0.02231430435180664, 0.022229696273803713, 0.02226585578918457, 0.022278144836425783, 0.02290278434753418, 0.02237811279296875, 0.022718175888061524, 0.02257695960998535, 0.02224006462097168, 0.022392736434936524, 0.022156991958618165, 0.022278560638427734, 0.0221265926361084, 0.021946367263793946, 0.022128639221191407, 0.02232035255432129, 0.02207619285583496, 0.02211020851135254, 0.02225904083251953, 0.022198944091796874, 0.022195711135864257, 0.022424064636230468, 0.02232249641418457, 0.02231091117858887, 0.022221504211425783, 0.02234880065917969, 0.02225868797302246, 0.022220800399780274, 0.022032384872436524, 0.0220897274017334, 0.022347583770751953, 0.022028255462646484, 0.021968704223632812, 0.021718496322631835, 0.02202239990234375, 0.022022848129272462, 0.021807104110717773, 0.02240716743469238, 0.022345727920532226, 0.022360063552856444, 0.022169599533081053, 0.022109344482421876, 0.022214847564697264, 0.022245759963989257, 0.022308416366577148, 0.02216009521484375, 0.02210358428955078, 0.022251840591430663, 0.02206096076965332, 0.02216886329650879, 0.022059999465942382, 0.02201190376281738, 0.02204876708984375, 0.022486272811889647, 0.02236288070678711, 0.022103071212768555, 0.02207423973083496, 0.022132831573486327, 0.021972864151000977, 0.02225369644165039, 0.022024192810058595, 0.022251487731933594, 0.02222012710571289, 0.022184608459472656, 0.022147296905517578, 0.022095680236816406, 0.02214240074157715]",tokens/s,45.150300536969816,, @@ -9166,7 +9166,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 258.12 MiB is free. Process 96743 has 14.49 GiB memory in use. Of the allocated memory 14.32 GiB is allocated by PyTorch, and 53.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 258.12 MiB is free. Process 90198 has 14.49 GiB memory in use. Of the allocated memory 14.32 GiB is allocated by PyTorch, and 53.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -9417,7 +9417,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 24.12 MiB is free. Process 116609 has 14.71 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 141.44 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 24.12 MiB is free. Process 110170 has 14.71 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 141.44 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -9526,7 +9526,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 768.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 286.12 MiB is free. 
Process 87410 has 14.46 GiB memory in use. Of the allocated memory 14.30 GiB is allocated by PyTorch, and 41.77 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 768.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 286.12 MiB is free. Process 81049 has 14.46 GiB memory in use. Of the allocated memory 14.30 GiB is allocated by PyTorch, and 41.77 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -9635,7 +9635,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 30.12 MiB is free. Process 81793 has 14.71 GiB memory in use. Of the allocated memory 14.37 GiB is allocated by PyTorch, and 229.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 30.12 MiB is free. Process 75376 has 14.71 GiB memory in use. Of the allocated memory 14.37 GiB is allocated by PyTorch, and 229.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,837.615616,9637.39648,0.0,9242.148864,8603.568128,s,1,7.7355849609375,7.7355849609375,0.0,7.7355849609375,7.7355849609375,7.7355849609375,7.7355849609375,[7.7355849609375],,kWh,1.466184626665381e-05,1.4242632171994635e-06,7.069727877992427e-06,2.31558373618457e-05,,MB,1139.339264,9886.957568,0.0,9481.224192,8972.090368,s,10,6.982478088378905,0.6982478088378905,0.0036517305287179706,0.6994198913574219,0.7011476623535157,0.7023103729248047,0.7032405413818359,"[0.69003857421875, 0.697864501953125, 0.6987088012695313, 0.6942640380859375, 0.6964507446289062, 0.700400634765625, 0.7002574462890625, 0.7034730834960937, 0.7001309814453125, 0.7008892822265625]",tokens/s,366.6320133908713,kWh,2.0463702589164413e-05,2.256788519096522e-06,1.354058490653324e-05,3.6261076014794174e-05,tokens/kWh,7059911.843089114,MB,1161.076736,9891.151872,0.0,9485.418496,8972.092928,s,10,24.507166992187496,2.45071669921875,0.0028596785383194555,2.45123291015625,2.453633666992187,2.4545537719726562,2.455289855957031,"[2.450625244140625, 2.447038330078125, 2.445943115234375, 2.45255517578125, 2.448777099609375, 2.44886376953125, 2.451840576171875, 2.45342919921875, 2.45262060546875, 2.455473876953125]",tokens/s,25.706765706572046,kWh,7.17809645554217e-05,7.917776365660735e-06,4.777901970466671e-05,0.00012747776062574915,tokens/kWh,494203.84928910236,,s,630,24.50396546554564,0.03889518327864389,0.0005780801186598636,0.03882415962219238,0.039298535919189456,0.03955597248077392,0.0418481328201294,"[0.04240982437133789, 0.03919449615478516, 0.038448894500732425, 0.03843920135498047, 0.038400318145751955, 0.03821561431884766, 0.038188255310058594, 0.03821200180053711, 0.038166912078857425, 0.03850604629516602, 0.03846115112304688, 0.0384453125, 0.038309761047363285, 0.03864358520507812, 0.03840892791748047, 0.03843600082397461, 0.03828412628173828, 0.03838771057128906, 0.03832371139526367, 0.038511104583740234, 0.041850879669189454, 0.03854441452026367, 0.038468257904052734, 0.03840646362304687, 0.03894681549072266, 0.039301185607910155, 0.03922323226928711, 0.03886297607421875, 0.03875212860107422, 0.03869465637207031, 0.03862963104248047, 0.03875859069824219, 0.038529983520507814, 0.038953857421875, 0.03902822494506836, 0.03879542541503906, 0.03858003234863281, 0.03874256134033203, 0.03874611282348633, 0.03896115112304688, 0.03889126586914062, 0.03896275329589844, 0.03880422210693359, 0.03891299057006836, 0.03892937469482422, 0.03910041427612305, 0.038809600830078124, 0.038768638610839845, 0.039041023254394534, 0.03917004776000976, 0.03905331039428711, 0.039137279510498044, 0.03910041427612305, 0.039375873565673826, 0.03939430236816406, 0.03921820831298828, 
0.03953142547607422, 0.03900831985473633, 0.03985203170776367, 0.038983680725097655, 0.03911398315429687, 0.03894553756713867, 0.039024063110351566, 0.04228028869628906, 0.039467681884765626, 0.03859199905395508, 0.03863798522949219, 0.038174110412597655, 0.03832048034667969, 0.0382081298828125, 0.0384983024597168, 0.038449153900146485, 0.03842067337036133, 0.03831788635253906, 0.03850239944458008, 0.03843699264526367, 0.038504318237304686, 0.03838137435913086, 0.038328033447265625, 0.03826736068725586, 0.03842639923095703, 0.04023945617675781, 0.03832819366455078, 0.03829945755004883, 0.03852921676635742, 0.03851059341430664, 0.03851676940917969, 0.03917820739746094, 0.039392799377441404, 0.039156192779541014, 0.038991870880126955, 0.03890176010131836, 0.03886489486694336, 0.03877814483642578, 0.03855228805541992, 0.03853094482421875, 0.038678657531738284, 0.038649406433105465, 0.03873427200317383, 0.03861420822143555, 0.03869164657592773, 0.03888947296142578, 0.03896934509277344, 0.038778881072998046, 0.03892950439453125, 0.03887401580810547, 0.03883974456787109, 0.038812095642089844, 0.03884431838989258, 0.0386992301940918, 0.03865190505981445, 0.03917004776000976, 0.03892428970336914, 0.039000064849853515, 0.03933388900756836, 0.03938508987426758, 0.03971072006225586, 0.038909217834472654, 0.03890454483032227, 0.038979167938232424, 0.03910083389282227, 0.03894236755371094, 0.03895449447631836, 0.03881260681152344, 0.038983585357666016, 0.03899955368041992, 0.04168294525146484, 0.03928044891357422, 0.03848211288452148, 0.038397823333740234, 0.03837145614624023, 0.03829900741577148, 0.03815078353881836, 0.03844230270385742, 0.038287487030029294, 0.038359264373779296, 0.03845119857788086, 0.038693248748779295, 0.03854131317138672, 0.03895817565917969, 0.03849305725097656, 0.038330368041992184, 0.03844707107543945, 0.03850841522216797, 0.03833414459228516, 0.03849264144897461, 0.03854950332641602, 0.03890892791748047, 0.03855683135986328, 0.03847971343994141, 0.03858432006835937, 0.03910438537597656, 0.03901401519775391, 0.03887363052368164, 0.0387806396484375, 0.038785057067871095, 0.03871120071411133, 0.038760353088378906, 0.03867075347900391, 0.03887011337280273, 0.038924320220947266, 0.03873891067504883, 0.03874332809448242, 0.03887776184082031, 0.03867654418945313, 0.038662143707275394, 0.038950912475585936, 0.03869900894165039, 0.03893155288696289, 0.039447456359863284, 0.03983321762084961, 0.038766975402832034, 0.03877478408813476, 0.038715328216552734, 0.0390423698425293, 0.03913324737548828, 0.03901830291748047, 0.03903084945678711, 0.038886207580566406, 0.03893068695068359, 0.038843711853027346, 0.03899951934814453, 0.03906835174560547, 0.039080223083496096, 0.03912908935546875, 0.03902873611450195, 0.03894428634643555, 0.0389964485168457, 0.03914547348022461, 0.04183116912841797, 0.03911676788330078, 0.03839340972900391, 0.03831145477294922, 0.03812035369873047, 0.03829350280761719, 0.03823782348632813, 0.03829983901977539, 0.03849849700927734, 0.03841212844848633, 0.038434814453125, 0.03860617446899414, 0.03827590560913086, 0.04026163101196289, 0.03870719909667969, 0.03842876815795898, 0.03836710357666016, 0.038593727111816405, 0.03867939376831055, 0.03879731369018555, 0.03861836624145508, 0.03873276901245117, 0.0387968635559082, 0.03864303970336914, 0.03889849472045898, 0.03930492782592773, 0.039372638702392576, 0.03893503952026367, 0.038752254486083985, 0.04029439926147461, 0.038621185302734375, 0.0385986557006836, 0.038757633209228516, 0.03874819183349609, 
0.03879600143432617, 0.04150476837158203, 0.038416385650634766, 0.0385269775390625, 0.038637569427490234, 0.038823486328125, 0.03871334457397461, 0.03867078399658203, 0.039019519805908204, 0.038816959381103515, 0.03877225494384766, 0.038856990814208986, 0.03897919845581055, 0.03913356781005859, 0.039144927978515626, 0.039119392395019534, 0.03903692626953125, 0.03902246475219726, 0.03912511825561524, 0.03929449462890625, 0.03948191833496094, 0.039147422790527346, 0.03908758544921875, 0.03906000137329101, 0.03908403015136719, 0.03907139205932617, 0.039184703826904296, 0.03899955368041992, 0.03895286560058594, 0.04184140777587891, 0.039497695922851565, 0.03907993698120117, 0.03835811233520508, 0.03825551986694336, 0.03832831954956055, 0.038371326446533204, 0.03828700637817383, 0.038516670227050784, 0.03856835174560547, 0.03844255828857422, 0.03854380798339844, 0.03834864044189453, 0.03848195266723633, 0.03841443252563476, 0.03829558563232422, 0.03884425735473633, 0.03888336181640625, 0.03840134429931641, 0.03855238342285156, 0.03867776107788086, 0.038615806579589844, 0.03846758270263672, 0.03863935852050781, 0.039032161712646486, 0.039371681213378903, 0.039139328002929685, 0.03901440048217773, 0.03904092788696289, 0.03867043304443359, 0.038752254486083985, 0.03890585708618164, 0.03881369781494141, 0.038803455352783206, 0.03864371109008789, 0.0388455696105957, 0.038817790985107424, 0.038806400299072265, 0.03878092956542969, 0.03868832015991211, 0.03870550537109375, 0.03929916763305664, 0.03864780807495117, 0.038788223266601564, 0.038824832916259766, 0.03894268798828125, 0.039077919006347654, 0.03896105575561523, 0.039172191619873044, 0.03911884689331055, 0.03901235198974609, 0.03905535888671875, 0.03905535888671875, 0.03911475372314453, 0.039180286407470705, 0.03903078460693359, 0.03909632110595703, 0.03928387069702149, 0.039023456573486326, 0.038905406951904295, 0.03894931030273437, 0.0391657600402832, 0.03923747253417969, 0.04187583923339844, 0.039359935760498045, 0.03847574234008789, 0.038427169799804685, 0.038367584228515626, 0.03836928176879883, 0.038255615234375, 0.038691841125488284, 0.03841999816894531, 0.03863619232177734, 0.03843462371826172, 0.03851878356933594, 0.03831193542480469, 0.03847568130493164, 0.038506591796875, 0.038338207244873045, 0.03842287826538086, 0.038561790466308594, 0.038413665771484376, 0.03881337738037109, 0.038652671813964846, 0.03866236877441406, 0.0385167350769043, 0.03869081497192383, 0.038834175109863284, 0.038940673828125, 0.0388485107421875, 0.03907583999633789, 0.03894883346557617, 0.039006240844726564, 0.03888483047485351, 0.038725536346435545, 0.038574718475341795, 0.03904512023925781, 0.03900201416015625, 0.03889564895629883, 0.038723777770996094, 0.03876236724853516, 0.03863347244262695, 0.03871091079711914, 0.03869529724121094, 0.039021568298339845, 0.03878806304931641, 0.038819103240966796, 0.038808319091796876, 0.039018497467041016, 0.03906355285644531, 0.03907174301147461, 0.039257377624511716, 0.039254753112792966, 0.039005470275878903, 0.03901103973388672, 0.03900572967529297, 0.03913679885864258, 0.039078208923339845, 0.03920550537109375, 0.03904716873168945, 0.03918048095703125, 0.03927366256713867, 0.03907648086547852, 0.03912089538574219, 0.039569408416748046, 0.039122943878173826, 0.041544288635253904, 0.03916595077514649, 0.03852313613891602, 0.03836883163452148, 0.03838969421386719, 0.03837974548339844, 0.038141632080078126, 0.0385043830871582, 0.0383944320678711, 0.03843609619140625, 0.03854217529296875, 0.03851590347290039, 
0.03836521530151367, 0.03840252685546875, 0.03826921463012695, 0.03835644912719727, 0.03848988723754883, 0.03864451217651367, 0.03867824172973633, 0.038508033752441405, 0.03879919815063477, 0.03901740646362305, 0.03867232131958008, 0.03867388916015625, 0.039014209747314454, 0.03933795166015625, 0.03914956665039063, 0.04085228729248047, 0.03866009521484375, 0.038813793182373046, 0.03863951873779297, 0.038596607208251955, 0.03867427062988281, 0.03875446319580078, 0.03879683303833008, 0.03904710388183594, 0.038707744598388674, 0.038742015838623044, 0.03867443084716797, 0.03867567825317383, 0.03883087921142578, 0.03879116821289062, 0.03914547348022461, 0.03884236907958984, 0.038793216705322264, 0.03902054214477539, 0.03889273452758789, 0.038832416534423826, 0.03905795288085938, 0.0396308479309082, 0.042305057525634765, 0.039413951873779295, 0.03916009521484375, 0.039174144744873046, 0.0390709114074707, 0.039008544921875, 0.039019039154052734, 0.03905875015258789, 0.038918846130371096, 0.03890924835205078, 0.038892032623291016, 0.03895856094360352, 0.03892707061767578, 0.042289119720458984, 0.039257984161376956, 0.038289535522460935, 0.03848396682739258, 0.038345951080322266, 0.03889641571044922, 0.038098846435546875, 0.03801094436645508, 0.0381495361328125, 0.038375518798828126, 0.03826742553710937, 0.03844095993041992, 0.03830579376220703, 0.03846553421020508, 0.038539134979248046, 0.038324352264404296, 0.03925196838378906, 0.03841024017333984, 0.03835299301147461, 0.038743968963623046, 0.0384983024597168, 0.03860070419311523, 0.03845487976074219, 0.038650272369384765, 0.04048691177368164, 0.03898323059082031, 0.03893088150024414, 0.03907993698120117, 0.039239646911621094, 0.03926339340209961, 0.038836544036865234, 0.038591041564941406, 0.03861913681030273, 0.038733631134033206, 0.03861318588256836, 0.03873382568359375, 0.03885670471191406, 0.038760448455810545, 0.038916095733642575, 0.03890995025634766, 0.038874336242675785, 0.03903116989135742, 0.0387977294921875, 0.03885055923461914, 0.03893990325927734, 0.039088897705078125, 0.03906889724731445, 0.03907254409790039, 0.039448673248291016, 0.03914947128295899, 0.03933139038085937, 0.03974803161621094, 0.0392806396484375, 0.039298465728759766, 0.039199329376220705, 0.03995340728759766, 0.039610721588134765, 0.039182815551757816, 0.03916204833984375, 0.03927040100097656, 0.03917571258544922, 0.03916233444213867, 0.03933763122558594, 0.04176278305053711, 0.03926428985595703, 0.03845248031616211, 0.038432960510253904, 0.03846611022949219, 0.03868057632446289, 0.03832831954956055, 0.038388832092285156, 0.03837948989868164, 0.03873174285888672, 0.03843376159667969, 0.038569984436035154, 0.03841203308105469, 0.03857209777832031, 0.03845465469360351, 0.03844384002685547, 0.03842832183837891, 0.03875875091552734, 0.038645759582519534, 0.03867647933959961, 0.03896115112304688, 0.038943870544433594, 0.03871587371826172, 0.03870966339111328, 0.03891404724121094, 0.03907174301147461, 0.03901808166503906, 0.039010719299316404, 0.038828033447265625, 0.03888127899169922, 0.03886016082763672, 0.03882611083984375, 0.03859711837768555, 0.038950912475585936, 0.039024639129638675, 0.03890998458862305, 0.038760257720947267, 0.03876422500610351, 0.038902240753173827, 0.03875743865966797, 0.03901126480102539, 0.03870719909667969, 0.03909222412109375, 0.03882150268554688, 0.03886262512207031, 0.03905187225341797, 0.03906150436401367, 0.03932070541381836, 0.03965574264526367, 0.039745471954345704, 0.039712928771972654, 0.039465152740478515, 0.039112991333007815, 
0.03900774383544922, 0.03898121643066406, 0.03892496109008789, 0.03925404739379883, 0.03916003036499023, 0.0391736946105957, 0.039389537811279296, 0.038991966247558595, 0.039061054229736325, 0.03904739379882813, 0.042189983367919924, 0.03953955078125, 0.03873708724975586, 0.03855974578857422, 0.03835481643676758, 0.03852134323120117, 0.038801151275634764, 0.038230270385742185, 0.03829542541503906, 0.03843260955810547, 0.038470497131347654, 0.038594017028808596, 0.03839215850830078, 0.038856769561767576, 0.038499679565429684, 0.03836937713623047, 0.03883875274658203, 0.03846358489990234, 0.03857408142089844, 0.03860595321655273, 0.038614974975585935, 0.03986928176879883, 0.0385081901550293, 0.03874246215820312, 0.03907353591918945, 0.03920111846923828, 0.03945257568359375, 0.039479263305664064, 0.03925747299194336, 0.03923830413818359, 0.03870505523681641, 0.038638847351074215, 0.03867324829101562, 0.038727294921875, 0.03866457748413086, 0.03866419219970703, 0.0389939193725586, 0.03876051330566406, 0.0386682243347168, 0.03870719909667969, 0.03889152145385742, 0.04039475250244141, 0.0388853759765625, 0.03886284637451172, 0.038776641845703126, 0.03885689544677735, 0.038950912475585936, 0.03937497711181641, 0.03889753723144531, 0.03926015853881836, 0.03914080047607422, 0.03937068939208985, 0.0393939208984375, 0.03947315216064453, 0.039103488922119144, 0.039136257171630856, 0.03924991989135742, 0.03964313507080078, 0.03922534561157227, 0.03917619323730469, 0.038957054138183594, 0.03916799926757813, 0.038960289001464844]",tokens/s,25.710124383166697,, @@ -9681,7 +9681,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 258.12 MiB is free. Process 84785 has 14.49 GiB memory in use. Of the allocated memory 14.32 GiB is allocated by PyTorch, and 53.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 258.12 MiB is free. Process 78444 has 14.49 GiB memory in use. Of the allocated memory 14.32 GiB is allocated by PyTorch, and 53.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,816.2304,3354.329088,0.0,2959.081472,2942.567424,s,1,7.491158203125,7.491158203125,0.0,7.491158203125,7.491158203125,7.491158203125,7.491158203125,[7.491158203125],,kWh,1.0103033966659798e-05,1.1042954326628756e-06,3.3333359999998535e-06,1.4540665399322528e-05,,MB,1107.894272,3547.267072,0.0,3141.533696,3105.830912,s,10,2.592166152954102,0.2592166152954101,0.002362535045213905,0.26002906799316405,0.2611373199462891,0.26149653167724607,0.2617839010620117,"[0.25391856384277345, 0.25702288818359376, 0.2618557434082031, 0.2610574951171875, 0.2597601623535156, 0.259385498046875, 0.2569664611816406, 0.26088134765625, 0.26102001953125, 0.2602979736328125]",tokens/s,987.5910142112442,kWh,7.643231509936003e-06,8.42910826341281e-07,5.049056745794699e-06,1.353519908207198e-05,tokens/kWh,18913648.661369473,MB,1129.177088,3589.210112,0.0,3183.476736,3163.057152,s,10,11.573644653320313,1.1573644653320314,0.012999661095040372,1.1591688232421875,1.171823876953125,1.1718432128906249,1.171858681640625,"[1.1349251708984376, 1.157430419921875, 1.171819580078125, 1.16227685546875, 1.170641357421875, 1.1583616943359376, 1.1599759521484374, 1.1343333740234376, 1.171862548828125, 1.1520177001953125]",tokens/s,54.43401960844391,kWh,3.340594418672954e-05,3.684344683805461e-06,2.217671503480601e-05,5.926700390534102e-05,tokens/kWh,1062986.077390063,,s,630,11.570615110397329,0.018366055730789425,0.00036591905731029006,0.01838521671295166,0.01866156406402588,0.018822656536102295,0.019628646717071544,"[0.019042463302612306, 0.01836031913757324, 0.01830499267578125, 0.0184421443939209, 0.018424192428588868, 0.01829449653625488, 0.018257120132446288, 0.018156320571899413, 0.018214912414550782, 0.018104320526123048, 0.01807910346984863, 0.018102432250976564, 0.018143711090087892, 0.01790755271911621, 0.01811043167114258, 0.018022592544555665, 0.017938432693481447, 0.01787446403503418, 0.017916000366210938, 0.017910144805908204, 0.01790540885925293, 0.01791756820678711, 0.017947263717651367, 0.01801935958862305, 0.01811324882507324, 0.01823904037475586, 0.018235424041748046, 0.018119071960449217, 0.017932640075683594, 0.01790483283996582, 0.017777376174926758, 0.018077695846557617, 0.01789548873901367, 0.017817535400390626, 0.018032447814941406, 0.01786092758178711, 0.018283775329589844, 0.018236032485961916, 0.017978975296020508, 0.017807775497436524, 0.01796505546569824, 0.018069055557250975, 0.017938880920410155, 0.017820991516113282, 0.01771779251098633, 0.01772764778137207, 0.017755903244018555, 0.017724832534790038, 0.017830560684204102, 0.01794476890563965, 0.017924095153808595, 0.0180633602142334, 0.01803398323059082, 0.018307775497436524, 
0.017885183334350584, 0.017766271591186523, 0.017846399307250977, 0.017735679626464843, 0.017821695327758787, 0.017844224929809572, 0.017846271514892577, 0.017692256927490234, 0.017715616226196287, 0.018753568649291993, 0.01827987289428711, 0.01861894416809082, 0.01860767936706543, 0.01799622344970703, 0.01802239990234375, 0.017897472381591797, 0.018244735717773436, 0.018377599716186525, 0.018298784255981446, 0.018563167572021484, 0.01846067237854004, 0.01862841606140137, 0.01852191925048828, 0.018616479873657228, 0.019132640838623045, 0.01857472038269043, 0.018590047836303712, 0.018362655639648437, 0.018466623306274414, 0.01822972869873047, 0.018202335357666015, 0.018193536758422852, 0.018361215591430665, 0.018420799255371094, 0.018432607650756837, 0.018398719787597655, 0.018540992736816406, 0.018415935516357423, 0.018357471466064455, 0.018239904403686523, 0.01835212707519531, 0.01821129608154297, 0.018447679519653322, 0.018277055740356447, 0.01850531196594238, 0.018547103881835936, 0.018513568878173826, 0.01842620849609375, 0.018391040802001952, 0.018096128463745118, 0.018147327423095702, 0.018128448486328125, 0.017983488082885742, 0.017975839614868164, 0.018061216354370118, 0.01816985511779785, 0.018534400939941405, 0.01827030372619629, 0.018267967224121093, 0.018118751525878905, 0.018316703796386717, 0.01836911964416504, 0.018263168334960937, 0.018295679092407226, 0.018195743560791015, 0.01836310386657715, 0.018679584503173828, 0.018495296478271483, 0.01849920082092285, 0.01835296058654785, 0.018407392501831054, 0.018617664337158203, 0.018977664947509767, 0.01842585563659668, 0.01863199996948242, 0.01872550392150879, 0.018660959243774415, 0.018445856094360353, 0.018371519088745118, 0.01845814323425293, 0.01851644706726074, 0.01852592086791992, 0.018432287216186522, 0.018499584197998048, 0.01827020835876465, 0.01845417594909668, 0.018589408874511718, 0.020296319961547852, 0.018468288421630858, 0.018520639419555663, 0.018448383331298827, 0.0188723201751709, 0.01883942413330078, 0.01887808036804199, 0.018550527572631835, 0.018292800903320312, 0.018526912689208985, 0.01870751953125, 0.018760639190673827, 0.01858355140686035, 0.01848320007324219, 0.01841152000427246, 0.01862041664123535, 0.018668991088867187, 0.018606655120849608, 0.018442176818847657, 0.01848531150817871, 0.018354175567626953, 0.018464799880981445, 0.018673631668090822, 0.01845020866394043, 0.018643167495727538, 0.018517311096191407, 0.018490047454833985, 0.018589696884155273, 0.018746912002563478, 0.018659807205200194, 0.018564895629882814, 0.019021472930908202, 0.01849158477783203, 0.01832383918762207, 0.019398656845092774, 0.018646240234375, 0.018524959564208986, 0.018452159881591795, 0.018476863861083985, 0.01843667221069336, 0.018437055587768553, 0.01850060844421387, 0.018656959533691408, 0.018554719924926757, 0.018518495559692382, 0.01842134475708008, 0.018454559326171877, 0.01854502487182617, 0.01898748779296875, 0.01831465530395508, 0.01843084716796875, 0.01834569549560547, 0.018262304306030274, 0.018667520523071288, 0.018528255462646484, 0.018464767456054687, 0.018549983978271484, 0.018475296020507813, 0.018487039566040038, 0.018509727478027344, 0.018385759353637697, 0.018321407318115233, 0.018323455810546875, 0.018282400131225587, 0.018275840759277344, 0.01862883186340332, 0.01838528060913086, 0.01845849609375, 0.018361600875854492, 0.01828748893737793, 0.01862041664123535, 0.018540191650390624, 0.018477407455444336, 0.018470624923706054, 0.018391328811645506, 0.0185031681060791, 0.01838515281677246, 
0.018352575302124023, 0.018351295471191405, 0.018362079620361328, 0.018502527236938477, 0.01837059211730957, 0.01831078338623047, 0.0184036808013916, 0.018307104110717773, 0.018464767456054687, 0.018485248565673826, 0.018613344192504884, 0.018480031967163087, 0.01839468765258789, 0.018385343551635742, 0.018267711639404296, 0.018248191833496095, 0.01829875183105469, 0.01852422332763672, 0.018501216888427735, 0.018561279296875, 0.01858780860900879, 0.018509952545166016, 0.01857472038269043, 0.018446975708007813, 0.01824732780456543, 0.018339456558227538, 0.018562847137451172, 0.018637279510498046, 0.018485599517822266, 0.018296096801757814, 0.018459392547607423, 0.018411487579345704, 0.018601984024047852, 0.018497535705566406, 0.019444000244140624, 0.01851798439025879, 0.018350080490112306, 0.018497535705566406, 0.01854182434082031, 0.018313983917236328, 0.018036319732666017, 0.018487615585327147, 0.018700351715087892, 0.0186265926361084, 0.018485248565673826, 0.018339839935302735, 0.018298879623413086, 0.018343936920166014, 0.01884320068359375, 0.021878528594970702, 0.01910223960876465, 0.018391199111938476, 0.018306175231933595, 0.018385440826416015, 0.018406848907470703, 0.018444448471069335, 0.018238208770751954, 0.01816160011291504, 0.018354240417480468, 0.01863862419128418, 0.0186144962310791, 0.018505727767944336, 0.018526111602783203, 0.018530559539794923, 0.018302175521850587, 0.01843222427368164, 0.018288415908813478, 0.018571264266967775, 0.018363008499145506, 0.018339839935302735, 0.01839427185058594, 0.01830179214477539, 0.01836358451843262, 0.01827734375, 0.018232799530029296, 0.01799942398071289, 0.018305856704711913, 0.018720640182495116, 0.01849888038635254, 0.018596511840820312, 0.01831747245788574, 0.018255872726440428, 0.018374656677246092, 0.018593631744384765, 0.019454111099243165, 0.018333311080932616, 0.018205055236816405, 0.01838809585571289, 0.018322303771972658, 0.018411104202270507, 0.02052751922607422, 0.020413984298706056, 0.01836079978942871, 0.018224672317504884, 0.01853228759765625, 0.018788896560668945, 0.018505727767944336, 0.01930905532836914, 0.018534175872802733, 0.01869238471984863, 0.018627904891967775, 0.018438848495483398, 0.01846067237854004, 0.018415615081787108, 0.018350048065185545, 0.01837171173095703, 0.018489343643188477, 0.018232000350952147, 0.01805948829650879, 0.018159616470336915, 0.018274303436279296, 0.01821059226989746, 0.01802579116821289, 0.0181844482421875, 0.01802511978149414, 0.01799577522277832, 0.018100223541259765, 0.0188272647857666, 0.01862403106689453, 0.01832803153991699, 0.018466144561767577, 0.01845305633544922, 0.018346080780029295, 0.01827769660949707, 0.018459327697753908, 0.01829680061340332, 0.01822313690185547, 0.018208736419677733, 0.018706464767456056, 0.018149375915527344, 0.017920000076293945, 0.018251775741577148, 0.018324575424194335, 0.018092960357666017, 0.01799081611633301, 0.01868067169189453, 0.018679359436035155, 0.01841596794128418, 0.018301023483276366, 0.01823315238952637, 0.018159807205200194, 0.01814873504638672, 0.01828438377380371, 0.01860585594177246, 0.018397184371948243, 0.018389055252075195, 0.018400127410888673, 0.018439807891845704, 0.018366783142089844, 0.018437503814697265, 0.018399744033813475, 0.018290943145751953, 0.01842790412902832, 0.018538463592529298, 0.018685983657836913, 0.018671615600585938, 0.018511199951171876, 0.01880950355529785, 0.01843596839904785, 0.0184586238861084, 0.01918976020812988, 0.018634752273559572, 0.018579456329345705, 0.018605535507202148, 
0.018591999053955078, 0.018409631729125978, 0.018386592864990236, 0.018292671203613282, 0.01850339126586914, 0.018397216796875, 0.018489952087402343, 0.01849718475341797, 0.018430496215820314, 0.018323392868041993, 0.018409408569335938, 0.018434175491333006, 0.018298336029052734, 0.018311616897583007, 0.018255392074584962, 0.018534400939941405, 0.01845510482788086, 0.018315263748168945, 0.018487295150756835, 0.01845452880859375, 0.01826767921447754, 0.018452384948730468, 0.018260543823242187, 0.018546367645263673, 0.018382783889770507, 0.018680160522460937, 0.018450464248657227, 0.01835612869262695, 0.018667007446289064, 0.018256128311157225, 0.01845180892944336, 0.018276704788208007, 0.018393760681152345, 0.01830019187927246, 0.018598623275756836, 0.01845625686645508, 0.018633024215698242, 0.01845846366882324, 0.018409151077270508, 0.018391519546508788, 0.018288639068603514, 0.01816166305541992, 0.018440000534057616, 0.01828883171081543, 0.018359487533569335, 0.018494144439697265, 0.01823139190673828, 0.018257951736450194, 0.018069503784179687, 0.01821900749206543, 0.018501119613647463, 0.018438432693481447, 0.018378976821899415, 0.018468320846557616, 0.01806153678894043, 0.018140832901000978, 0.018532991409301758, 0.01826806449890137, 0.018147455215454102, 0.018881759643554687, 0.018193183898925783, 0.018026208877563475, 0.017942815780639648, 0.01794767951965332, 0.017914623260498048, 0.01784649658203125, 0.017958911895751953, 0.01791328048706055, 0.01780588722229004, 0.017952127456665037, 0.017985439300537108, 0.017879776000976563, 0.017870847702026366, 0.017757568359375, 0.017737855911254884, 0.017850879669189454, 0.017756160736083985, 0.017698816299438477, 0.017752128601074217, 0.017880159378051756, 0.017840991973876952, 0.017987583160400392, 0.01795686340332031, 0.018095392227172852, 0.017965791702270507, 0.01801603126525879, 0.017860416412353516, 0.01787487983703613, 0.01782831954956055, 0.017874752044677734, 0.017833759307861328, 0.01780940818786621, 0.017811168670654298, 0.017853120803833007, 0.018271615982055664, 0.017862783432006837, 0.017779199600219727, 0.017893375396728514, 0.017838048934936523, 0.018616352081298828, 0.017977344512939454, 0.017778688430786133, 0.017880607604980468, 0.018086368560791016, 0.018020191192626954, 0.017946752548217773, 0.018068960189819336, 0.017898048400878906, 0.017837503433227538, 0.017797760009765625, 0.017874591827392577, 0.017844224929809572, 0.0179931526184082, 0.017858528137207032, 0.018099071502685547, 0.01788876724243164, 0.01828096008300781, 0.01885798454284668, 0.018491167068481446, 0.018548959732055663, 0.019050495147705078, 0.018513343811035157, 0.0191362247467041, 0.01850192070007324, 0.018522111892700196, 0.01847500801086426, 0.018773792266845703, 0.01969993591308594, 0.020068351745605468, 0.01838057518005371, 0.018460895538330076, 0.01841766357421875, 0.018722623825073243, 0.018651296615600586, 0.018711904525756835, 0.018463424682617188, 0.018406496047973633, 0.01837148857116699, 0.018655231475830078, 0.018558048248291017, 0.018688896179199218, 0.01840480041503906, 0.018535007476806642, 0.018667327880859376, 0.018558464050292968, 0.018342592239379882, 0.018448383331298827, 0.01858121681213379, 0.018474720001220704, 0.01864147186279297, 0.018817024230957033, 0.018400415420532227, 0.018483999252319337, 0.018851903915405272, 0.01901705551147461, 0.01849616050720215, 0.018757631301879883, 0.01883456039428711, 0.018652032852172852, 0.018683904647827147, 0.01869331169128418, 0.018473472595214844, 0.018624832153320312, 
0.018469087600708006, 0.0186345272064209, 0.018871871948242188, 0.01842118453979492, 0.01838387107849121, 0.01846886444091797, 0.01839923286437988, 0.018547775268554688, 0.01848201560974121, 0.018597984313964845, 0.018364032745361327, 0.018212287902832032, 0.01813190460205078, 0.018522111892700196, 0.01869593620300293, 0.018658784866333007, 0.018496288299560546, 0.01846272087097168, 0.018476800918579103, 0.018425952911376952, 0.018315391540527345, 0.018413183212280273, 0.019150848388671874, 0.01862041664123535, 0.018515968322753908, 0.018480703353881835, 0.018506175994873048, 0.01822425651550293, 0.018183040618896484, 0.018092031478881835, 0.01803638458251953, 0.01822960090637207, 0.018288639068603514, 0.01832111930847168, 0.018357984542846678, 0.018182527542114257, 0.018198720932006834, 0.018592992782592774, 0.01856105613708496, 0.01827302360534668, 0.019986431121826173, 0.018534400939941405, 0.018386943817138672, 0.01846067237854004, 0.018449951171875, 0.018217439651489257, 0.018054239273071288, 0.018031520843505858, 0.018033727645874024, 0.018011072158813476, 0.018120704650878908, 0.018182144165039063, 0.018096128463745118, 0.018128896713256838, 0.017919647216796876, 0.01848512077331543, 0.018495071411132814, 0.01829158401489258, 0.018335744857788085, 0.01845180892944336, 0.018164384841918946, 0.018201759338378906, 0.01823174476623535, 0.018198623657226562, 0.017982879638671876, 0.01789139175415039, 0.01803664016723633, 0.017882047653198244, 0.01792527961730957, 0.01786556816101074, 0.018120704650878908, 0.018124799728393554, 0.017960960388183594, 0.01781273651123047, 0.018192352294921874, 0.01858639907836914, 0.018356224060058594, 0.01845043182373047, 0.018343936920166014, 0.018388992309570314, 0.018276031494140626, 0.018383167266845704, 0.018312416076660155, 0.0182906551361084, 0.018258752822875975]",tokens/s,54.448272109050016,, @@ -9960,7 +9960,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 150.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 132.12 MiB is free. Process 49376 has 14.61 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 21.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 150.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 132.12 MiB is free. Process 43266 has 14.61 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 21.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -10070,7 +10070,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 42.12 MiB is free. Process 29881 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 1.64 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 42.12 MiB is free. Process 24319 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 1.64 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -10179,7 +10179,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 12.12 MiB is free. Process 90813 has 14.73 GiB memory in use. 
Of the allocated memory 12.32 GiB is allocated by PyTorch, and 2.30 GiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 12.12 MiB is free. Process 84413 has 14.73 GiB memory in use. Of the allocated memory 12.32 GiB is allocated by PyTorch, and 2.30 GiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.054848,1326.383104,0.0,931.135488,917.648384,s,1,7.24853369140625,7.24853369140625,0.0,7.24853369140625,7.24853369140625,7.24853369140625,7.24853369140625,[7.24853369140625],,kWh,9.548473929161597e-06,1.0427212493425665e-06,4.293058989995879e-06,1.4884254168500042e-05,,MB,1105.32608,1456.406528,0.0,1050.673152,1018.330112,s,10,0.6607934722900392,0.06607934722900391,0.001316728408823133,0.06577188873291015,0.06745689849853516,0.06821476974487305,0.06882106674194335,"[0.06897264099121093, 0.0657655029296875, 0.06409379577636719, 0.06572882843017579, 0.06587734222412109, 0.06715283203125, 0.06728848266601563, 0.064994140625, 0.06514163208007813, 0.06577827453613282]",tokens/s,3874.1302802645587,kWh,2.2091043958334123e-06,2.4347515192221887e-07,1.460139012721776e-06,3.912718560477407e-06,tokens/kWh,65427654.97776165,MB,1127.124992,1473.183744,0.0,1067.450368,1033.282048,s,10,11.373588623046876,1.1373588623046875,0.018928872592497294,1.143686767578125,1.158686865234375,1.1600309326171876,1.1611061865234376,"[1.161375, 1.15838818359375, 1.14709521484375, 1.1516285400390625, 1.153206298828125, 1.1402783203125, 1.10854150390625, 1.1196385498046875, 1.11889013671875, 1.114546875]",tokens/s,55.391488199546735,kWh,3.215545112375109e-05,3.5464884643910856e-06,1.5936513793477674e-05,5.163845338161984e-05,tokens/kWh,1220021.0477725924,,s,630,11.3675133228302,0.01804367194100032,0.0004886498748275712,0.018087935447692872,0.018502759170532224,0.01859877986907959,0.01939240159988404,"[0.018602048873901367, 0.01836595153808594, 0.018436063766479494, 0.018344032287597657, 0.018258527755737306, 0.01824291229248047, 0.01822313690185547, 0.01821571159362793, 0.018474176406860353, 0.01845030403137207, 0.018693023681640625, 0.018503679275512695, 0.01834156799316406, 0.018278112411499025, 0.018426464080810546, 0.018339136123657226, 0.018713279724121092, 0.018345951080322265, 
0.018307167053222655, 0.01839052772521973, 0.01839689636230469, 0.018373600006103517, 0.01833679962158203, 0.01827299118041992, 0.01840127944946289, 0.018490720748901367, 0.018332319259643556, 0.018313472747802734, 0.01859584045410156, 0.018231039047241212, 0.018224512100219727, 0.018334335327148437, 0.018231296539306642, 0.018925695419311522, 0.018544511795043947, 0.01851571273803711, 0.01840563201904297, 0.018528127670288087, 0.01841574478149414, 0.01842585563659668, 0.018329599380493163, 0.018364416122436524, 0.019517696380615235, 0.01843120002746582, 0.018443967819213865, 0.0187544002532959, 0.018328960418701173, 0.018360960006713868, 0.018145280838012694, 0.018983104705810546, 0.018695775985717773, 0.018256095886230467, 0.018345983505249023, 0.018284543991088868, 0.01828659248352051, 0.018501632690429686, 0.0181711368560791, 0.018123519897460937, 0.018309120178222657, 0.018300928115844727, 0.018783584594726562, 0.018365087509155272, 0.018380416870117187, 0.018537567138671874, 0.018504159927368164, 0.018549280166625978, 0.018623712539672852, 0.01839583969116211, 0.019228128433227538, 0.020404767990112305, 0.018491392135620118, 0.018616031646728516, 0.018491680145263673, 0.018509952545166016, 0.01833888053894043, 0.018274112701416014, 0.01834623908996582, 0.01821891212463379, 0.01858236885070801, 0.01846681594848633, 0.01826201629638672, 0.018351776123046875, 0.018395456314086914, 0.018472991943359374, 0.018345504760742187, 0.018198335647583008, 0.018132768630981445, 0.017873056411743166, 0.018178335189819338, 0.01828316879272461, 0.018507551193237305, 0.018448640823364258, 0.01831500816345215, 0.0190928955078125, 0.018149215698242186, 0.018524448394775392, 0.018030656814575195, 0.018165151596069337, 0.018405567169189452, 0.018248512268066407, 0.018253311157226563, 0.018196672439575196, 0.01806982421875, 0.01813043212890625, 0.017992191314697266, 0.0184237117767334, 0.018450527191162108, 0.01842492866516113, 0.01839606475830078, 0.018404832839965822, 0.018065471649169922, 0.018008544921875, 0.018055200576782227, 0.01806035232543945, 0.01815750312805176, 0.018205280303955077, 0.01822774314880371, 0.01811622428894043, 0.01825404739379883, 0.018325504302978517, 0.018348031997680665, 0.018665184020996095, 0.018236928939819336, 0.01837148857116699, 0.018499456405639648, 0.018391040802001952, 0.01832476806640625, 0.018405664443969728, 0.018299232482910155, 0.01825391960144043, 0.01830611228942871, 0.018119232177734375, 0.017961536407470703, 0.01797715187072754, 0.01803264045715332, 0.01816761589050293, 0.018270399093627928, 0.01835811233520508, 0.01821126365661621, 0.018275360107421874, 0.01820128059387207, 0.018135040283203126, 0.018426048278808595, 0.0184237117767334, 0.019324832916259766, 0.018457599639892578, 0.018535423278808593, 0.018289920806884765, 0.017920896530151366, 0.018113920211791992, 0.01800595283508301, 0.0184102725982666, 0.018552608489990234, 0.018280448913574218, 0.01841971206665039, 0.018200128555297852, 0.018384992599487306, 0.018127199172973632, 0.01810371208190918, 0.017928319931030272, 0.018102975845336915, 0.018106271743774414, 0.017757728576660158, 0.017502080917358397, 0.018143711090087892, 0.01843731117248535, 0.018502656936645507, 0.018238271713256836, 0.018144287109375, 0.018056991577148438, 0.01793667221069336, 0.018529535293579102, 0.017931135177612304, 0.017847808837890625, 0.018087711334228516, 0.017950687408447265, 0.018184736251831056, 0.01821286392211914, 0.01818828773498535, 0.018397184371948243, 0.018085887908935547, 0.0179814395904541, 
0.01852822494506836, 0.018233375549316408, 0.018313215255737304, 0.017970848083496093, 0.018266815185546875, 0.0178449592590332, 0.017841184616088867, 0.018251775741577148, 0.018103647232055663, 0.018148000717163087, 0.017978944778442384, 0.018205120086669923, 0.018142847061157225, 0.0183855037689209, 0.019420000076293947, 0.01832441520690918, 0.018442176818847657, 0.018222463607788085, 0.018020832061767578, 0.018223072052001955, 0.018020511627197266, 0.018534496307373048, 0.018228607177734377, 0.01834409523010254, 0.018298847198486328, 0.018164319992065428, 0.018437536239624023, 0.01828096008300781, 0.018295936584472657, 0.01809702491760254, 0.018120447158813478, 0.018286144256591797, 0.018243839263916015, 0.018163839340209962, 0.01805958366394043, 0.01799734306335449, 0.01784009552001953, 0.018194944381713866, 0.01821392059326172, 0.018659872055053713, 0.018347904205322264, 0.018702911376953124, 0.018316640853881835, 0.01809270477294922, 0.018299104690551758, 0.018269983291625977, 0.018524160385131837, 0.018271392822265625, 0.018383808135986328, 0.018132896423339845, 0.017944639205932617, 0.01784419250488281, 0.018128543853759765, 0.018186208724975585, 0.01803228759765625, 0.018426080703735352, 0.01831164741516113, 0.018203935623168944, 0.018208511352539064, 0.018209184646606445, 0.018217536926269533, 0.017903615951538086, 0.01846428871154785, 0.01873673629760742, 0.018449407577514648, 0.018370431900024416, 0.018280031204223633, 0.018104736328125, 0.018309120178222657, 0.018927104949951173, 0.0182457275390625, 0.018450464248657227, 0.018698400497436523, 0.018311071395874023, 0.01810188865661621, 0.017799360275268555, 0.017690303802490235, 0.018329919815063475, 0.018507680892944335, 0.01839321517944336, 0.018307039260864258, 0.018308448791503906, 0.018172576904296876, 0.018300384521484376, 0.018313760757446288, 0.018118656158447266, 0.018096128463745118, 0.018062976837158202, 0.018320991516113282, 0.018471616744995117, 0.018317407608032226, 0.018436031341552736, 0.018286048889160158, 0.01812950325012207, 0.018257919311523436, 0.018544544219970704, 0.018512256622314455, 0.018417375564575195, 0.018251136779785158, 0.018045215606689452, 0.018035295486450196, 0.01819241523742676, 0.018130176544189452, 0.01831545639038086, 0.018499872207641602, 0.018318912506103516, 0.018329599380493163, 0.018298431396484374, 0.01827315139770508, 0.018126848220825196, 0.018591360092163087, 0.018057760238647462, 0.017848031997680664, 0.018159263610839842, 0.01857174491882324, 0.018435903549194336, 0.018415456771850587, 0.01839344024658203, 0.018255168914794923, 0.01822585678100586, 0.01922649574279785, 0.018708608627319337, 0.018305023193359374, 0.018187711715698242, 0.01836031913757324, 0.018277952194213867, 0.018109439849853515, 0.018284799575805664, 0.018140224456787108, 0.01817865562438965, 0.018294591903686524, 0.01814556884765625, 0.018601184844970704, 0.01856492805480957, 0.018177824020385744, 0.018069856643676756, 0.01821273612976074, 0.018489343643188477, 0.018249631881713867, 0.01846895980834961, 0.018286720275878906, 0.018195743560791015, 0.018244192123413085, 0.01839923286437988, 0.018274303436279296, 0.018341888427734376, 0.018507360458374023, 0.018532960891723634, 0.01878611183166504, 0.02018284797668457, 0.018112703323364256, 0.01820057678222656, 0.01817190361022949, 0.018309343338012696, 0.01808380889892578, 0.018263872146606446, 0.01829043197631836, 0.01812879943847656, 0.018049375534057617, 0.018296831130981444, 0.018077695846557617, 0.017874399185180665, 0.018088159561157228, 
0.018441984176635742, 0.01851375961303711, 0.023320735931396483, 0.01804319953918457, 0.01761859130859375, 0.017739616394042968, 0.017676544189453126, 0.01774608039855957, 0.018483072280883788, 0.017612800598144532, 0.017564352035522462, 0.01760220718383789, 0.017729663848876955, 0.017915903091430666, 0.018087167739868164, 0.017988351821899413, 0.01759436798095703, 0.01763759994506836, 0.017624927520751954, 0.01747551918029785, 0.017548799514770508, 0.01741423988342285, 0.017392032623291014, 0.01751795196533203, 0.017545087814331055, 0.017449728012084963, 0.01739072036743164, 0.01744985580444336, 0.017315200805664063, 0.0174881591796875, 0.017439071655273437, 0.01792745590209961, 0.017510143280029297, 0.018091936111450196, 0.017770015716552734, 0.01772172737121582, 0.01758969688415527, 0.017731679916381835, 0.01771404838562012, 0.01783488082885742, 0.017795711517333983, 0.017840415954589843, 0.018069759368896484, 0.017423744201660155, 0.017448448181152345, 0.01730191993713379, 0.017428064346313478, 0.017398591995239257, 0.01769683265686035, 0.017327871322631836, 0.017377536773681642, 0.017440479278564455, 0.01744540786743164, 0.017411840438842772, 0.017630912780761718, 0.017655136108398438, 0.017818592071533204, 0.017680383682250975, 0.017537023544311522, 0.01749545669555664, 0.017599071502685547, 0.018300832748413084, 0.017415456771850586, 0.01753152084350586, 0.017335903167724608, 0.017446624755859376, 0.0174553279876709, 0.01749260711669922, 0.01756777572631836, 0.018521663665771484, 0.017494112014770507, 0.01761110305786133, 0.017811456680297853, 0.01792201614379883, 0.01772265625, 0.017702816009521484, 0.017543327331542968, 0.017402559280395507, 0.01736000061035156, 0.01736342430114746, 0.017349023818969727, 0.017287168502807617, 0.017297407150268555, 0.017494016647338868, 0.01728339195251465, 0.017450815200805665, 0.017456480026245117, 0.01728156852722168, 0.01740595245361328, 0.017708959579467772, 0.017415584564208983, 0.01854879951477051, 0.017461824417114257, 0.01737481689453125, 0.017507904052734374, 0.017447872161865233, 0.017247871398925783, 0.01726268768310547, 0.017420576095581054, 0.01731279945373535, 0.017401056289672853, 0.017981184005737304, 0.0175710391998291, 0.017330976486206056, 0.017487871170043946, 0.017476640701293945, 0.017689119338989256, 0.01747603225708008, 0.017484960556030275, 0.01746614456176758, 0.017461280822753906, 0.017868831634521486, 0.017969120025634767, 0.02079350471496582, 0.018648895263671875, 0.017985599517822266, 0.017680383682250975, 0.017411775588989258, 0.017582399368286133, 0.01802239990234375, 0.017630495071411133, 0.017949344635009766, 0.017629247665405273, 0.017514432907104492, 0.017762208938598634, 0.01782707214355469, 0.017906591415405272, 0.017696767807006835, 0.017616287231445312, 0.017547168731689454, 0.018108383178710937, 0.01999331283569336, 0.01765376091003418, 0.017557504653930665, 0.01755945587158203, 0.017801023483276366, 0.017637088775634767, 0.017638015747070312, 0.01749100875854492, 0.017578367233276368, 0.017654144287109375, 0.017601728439331055, 0.017603519439697266, 0.017477344512939454, 0.017469728469848633, 0.01746086311340332, 0.017596960067749023, 0.01790755271911621, 0.01789743995666504, 0.01787718391418457, 0.017915903091430666, 0.017829727172851563, 0.017753503799438478, 0.017830751419067384, 0.017673471450805663, 0.017689088821411132, 0.017702911376953127, 0.017903167724609374, 0.018074047088623046, 0.017911104202270507, 0.017998559951782227, 0.017673696517944336, 0.017389184951782228, 0.017359552383422853, 
0.01735055923461914, 0.01751219177246094, 0.017731456756591796, 0.018528959274291993, 0.01789673614501953, 0.017691360473632813, 0.01770086479187012, 0.01821468734741211, 0.017905439376831055, 0.017641183853149413, 0.01768726348876953, 0.017543167114257813, 0.017561279296875, 0.017733407974243165, 0.017960704803466798, 0.017903743743896486, 0.017914079666137697, 0.017827648162841797, 0.01804147148132324, 0.01772115135192871, 0.017637727737426757, 0.01756991958618164, 0.017615840911865233, 0.01762326431274414, 0.017822240829467772, 0.017873184204101562, 0.01782086372375488, 0.017714879989624024, 0.017674495697021484, 0.01761075210571289, 0.018047584533691406, 0.01782508850097656, 0.01776710319519043, 0.017827520370483397, 0.01782406425476074, 0.01798684883117676, 0.017783424377441407, 0.017854560852050783, 0.017834047317504882, 0.0176661434173584, 0.01764726448059082, 0.017645055770874024, 0.017521631240844725, 0.01752444839477539, 0.01758950424194336, 0.017572608947753907, 0.017613983154296874, 0.01789014434814453, 0.01771660804748535, 0.017483488082885742, 0.017656736373901367, 0.017628543853759764, 0.017766496658325196, 0.017629728317260743, 0.017671455383300783, 0.017918399810791016, 0.017917535781860353, 0.01805564880371094, 0.018364864349365233, 0.018130943298339842, 0.01801625633239746, 0.017974592208862303, 0.017605056762695314, 0.017854719161987304, 0.01759846305847168, 0.017757728576660158, 0.01793276786804199, 0.01787254333496094, 0.017768064498901368, 0.0176790714263916, 0.017541120529174805, 0.01760870361328125, 0.01790732765197754, 0.017666528701782227, 0.01757379150390625, 0.017600223541259764, 0.017612991333007814, 0.017760351181030275, 0.017522079467773437, 0.017424991607666016, 0.01745305633544922, 0.017502208709716797, 0.01778483200073242, 0.01775619125366211, 0.017782272338867186, 0.01877244758605957, 0.018358272552490236, 0.017922048568725587, 0.017746143341064453, 0.017565216064453125, 0.01750383949279785, 0.017511072158813475, 0.01744076728820801, 0.017622432708740234, 0.017857280731201172, 0.017608543395996094, 0.01753001594543457, 0.01749206352233887, 0.017695487976074217, 0.017648704528808595, 0.017670719146728516, 0.017641183853149413, 0.017678112030029298, 0.01777324867248535, 0.017604799270629884, 0.017543167114257813, 0.01736832046508789, 0.017274784088134765, 0.01750511932373047, 0.017573888778686524, 0.017704959869384765, 0.017737728118896484, 0.018524160385131837, 0.01763737678527832, 0.017358175277709963, 0.017361568450927733, 0.01739731216430664, 0.017336736679077147, 0.017293312072753905, 0.01738140869140625, 0.017378623962402345]",tokens/s,55.42109185258005,, @@ -10225,7 +10225,7 @@ ChildProcessError: Traceback (most recent call last): self.w1 = nn.Parameter(torch.empty(moe_num_experts * ffn_hidden_size, hidden_size)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.17 GiB is free. Process 119873 has 13.57 GiB memory in use. Of the allocated memory 13.45 GiB is allocated by PyTorch, and 1.36 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. 
GPU 0 has a total capacity of 14.74 GiB of which 1.17 GiB is free. Process 113572 has 13.57 GiB memory in use. Of the allocated memory 13.45 GiB is allocated by PyTorch, and 1.36 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -10260,7 +10260,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.16 GiB. GPU 0 has a total capacity of 14.74 GiB of which 774.12 MiB is free. Process 75914 has 13.98 GiB memory in use. Of the allocated memory 13.72 GiB is allocated by PyTorch, and 148.73 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.16 GiB. GPU 0 has a total capacity of 14.74 GiB of which 774.12 MiB is free. Process 69100 has 13.98 GiB memory in use. Of the allocated memory 13.72 GiB is allocated by PyTorch, and 148.73 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -10303,7 +10303,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 52.12 MiB is free. Process 93893 has 14.69 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 108.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 52.12 MiB is free. Process 87456 has 14.69 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 108.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -10379,7 +10379,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 182.12 MiB is free. 
Process 46369 has 14.56 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 13.08 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 182.12 MiB is free. Process 40377 has 14.56 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 13.08 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -10422,7 +10422,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 200.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 164.12 MiB is free. Process 54387 has 14.58 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 4.94 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 200.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 164.12 MiB is free. Process 48100 has 14.58 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 4.94 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,814.706688,806.289408,0.0,411.041792,391.374848,s,1,7.4192998046875,7.4192998046875,0.0,7.4192998046875,7.4192998046875,7.4192998046875,7.4192998046875,[7.4192998046875],,kWh,5.054497087508025e-06,5.503927119673102e-07,2.0302794020016224e-06,7.635169201476957e-06,,MB,1098.924032,879.689728,0.0,473.956352,454.832128,s,16,0.37611609649658206,0.02350725603103638,0.0003895896859630637,0.0234224796295166,0.02354478359222412,0.023951807975769044,0.024774489402770997,"[0.024980159759521486, 0.023404767990112305, 0.023301408767700194, 0.02340281677246094, 0.023271360397338868, 0.023257152557373047, 0.02345427131652832, 0.023463552474975585, 0.023609024047851562, 0.023480543136596678, 0.023447519302368165, 0.02347452735900879, 0.023395904541015623, 0.02337260818481445, 0.023360288619995118, 0.0234401912689209]",tokens/s,10890.254467046512,kWh,8.355347409166522e-07,9.214411109693993e-08,5.548179041714452e-07,1.4824967561850374e-06,tokens/kWh,172681659.45858395,MB,1120.452608,906.952704,0.0,501.219328,454.834688,s,16,9.710575134277343,0.606910945892334,0.012089048943782812,0.6111069030761719,0.6191018981933594,0.6193922882080078,0.6196740142822266,"[0.6129658203125, 0.6042952880859375, 0.5861619262695312, 0.5873612060546874, 0.59046728515625, 0.5870277709960937, 0.6189288940429688, 0.6136454467773438, 0.61927490234375, 0.6182135620117187, 0.6197444458007813, 0.610319091796875, 0.6175580444335937, 0.6102855224609375, 0.6118947143554687, 0.6024312133789063]",tokens/s,103.80435618502786,kWh,1.734154936766225e-05,1.912482562088808e-06,7.800629956652067e-06,2.7054661886403127e-05,tokens/kWh,2328619.010820532,,s,1008,9.702061608314505,0.009625061119359637,0.0002930521366950281,0.009635200023651123,0.009933778953552247,0.009996521377563477,0.010454401950836181,"[0.009595935821533202, 0.009617247581481934, 0.009658559799194336, 0.00968832015991211, 0.009486175537109376, 0.009703424453735352, 0.009759455680847169, 0.0095, 0.009595552444458007, 0.009773056030273437, 0.009921759605407714, 0.010906399726867676, 0.009795583724975587, 0.009715200424194336, 0.009690752029418945, 0.009709823608398437, 0.009646719932556152, 0.00959488010406494, 0.009988096237182617, 0.009772576332092286, 0.009902560234069824, 0.009748895645141602, 0.00976956844329834, 0.00978006362915039, 0.00966051197052002, 0.009490495681762695, 0.009498335838317872, 0.009705471992492675, 0.01026857566833496, 0.00993727970123291, 0.009926655769348144, 0.009846783638000489, 0.0104017915725708, 0.009987808227539063, 0.0098155517578125, 0.009814335823059081, 0.009681568145751954, 0.009592160224914551, 0.00963811206817627, 0.009708864212036133, 0.009450464248657227, 
0.009678815841674805, 0.009659744262695313, 0.00963651180267334, 0.009541824340820312, 0.009458623886108398, 0.009355456352233886, 0.009453472137451171, 0.009627519607543945, 0.009857952117919922, 0.009644351959228515, 0.009650079727172852, 0.009551199913024902, 0.009541728019714356, 0.009773407936096191, 0.009838144302368164, 0.009813983917236329, 0.009648608207702638, 0.009670656204223632, 0.00979270362854004, 0.009621855735778808, 0.009470432281494141, 0.009422975540161133, 0.010043392181396485, 0.010067392349243164, 0.011911935806274414, 0.010933183670043945, 0.009993120193481446, 0.009969823837280274, 0.009827263832092285, 0.00975171184539795, 0.009776288032531739, 0.00984547233581543, 0.010139488220214843, 0.009764415740966797, 0.009422944068908692, 0.009446880340576173, 0.009726847648620605, 0.009952832221984863, 0.009535807609558106, 0.009451647758483887, 0.009485312461853027, 0.009582719802856444, 0.009597824096679688, 0.009531295776367188, 0.009828096389770507, 0.009483712196350098, 0.009544608116149902, 0.009797504425048828, 0.010219967842102051, 0.009595711708068848, 0.009472895622253417, 0.009584799766540527, 0.009500384330749512, 0.009672351837158203, 0.009814687728881836, 0.009842592239379883, 0.009693375587463379, 0.00961731243133545, 0.009320608139038086, 0.009229920387268066, 0.009223872184753418, 0.009292160034179688, 0.009307456016540528, 0.009231040000915527, 0.00930406379699707, 0.009259008407592773, 0.009224287986755371, 0.009274911880493165, 0.009378175735473632, 0.00924403190612793, 0.009240703582763672, 0.009250880241394043, 0.009322943687438965, 0.009514816284179687, 0.009324735641479492, 0.009257216453552247, 0.009189151763916015, 0.009220064163208008, 0.00919753646850586, 0.00925209617614746, 0.009265952110290528, 0.009208928108215332, 0.009225055694580079, 0.009318016052246093, 0.00932476806640625, 0.009152511596679687, 0.00943887996673584, 0.009678879737854003, 0.009345439910888672, 0.009829888343811035, 0.009355584144592284, 0.009468031883239745, 0.009420160293579102, 0.00928553581237793, 0.009267328262329101, 0.009265376091003417, 0.009291423797607421, 0.009310912132263184, 0.009414112091064453, 0.009246560096740722, 0.009224896430969238, 0.00928767967224121, 0.009243935585021972, 0.009230719566345216, 0.00921225643157959, 0.009205375671386719, 0.009164575576782227, 0.009260640144348145, 0.009251775741577149, 0.009193023681640626, 0.009236639976501464, 0.009228416442871094, 0.009269472122192382, 0.009305343627929688, 0.00928223991394043, 0.009220319747924805, 0.009203136444091797, 0.009217568397521973, 0.009234335899353028, 0.009264191627502442, 0.009242591857910156, 0.009243616104125977, 0.009192416191101074, 0.009349087715148925, 0.009387392044067383, 0.009248671531677246, 0.00938595199584961, 0.009239583969116212, 0.00923209571838379, 0.009193120002746582, 0.009314432144165039, 0.009232640266418457, 0.009236767768859863, 0.009205471992492676, 0.009342144012451172, 0.009321311950683593, 0.009342368125915528, 0.009325119972229004, 0.009268320083618165, 0.00927836799621582, 0.009270912170410156, 0.009334976196289063, 0.00920524787902832, 0.009288415908813477, 0.009228256225585938, 0.009314432144165039, 0.009615232467651367, 0.009349151611328125, 0.00907529640197754, 0.00935321617126465, 0.00931430435180664, 0.00943824005126953, 0.009460543632507324, 0.009474047660827637, 0.009586784362792969, 0.009400383949279785, 0.009326592445373535, 0.00935529613494873, 0.009220352172851562, 0.009365216255187988, 0.009666560173034668, 0.009269536018371582, 
0.009219807624816894, 0.00932249641418457, 0.009393407821655273, 0.009364224433898926, 0.009260607719421386, 0.009214400291442871, 0.00923363208770752, 0.009272095680236816, 0.009271295547485351, 0.009254560470581055, 0.009283935546875, 0.009248767852783203, 0.00923472023010254, 0.009413536071777345, 0.009356096267700195, 0.009383328437805176, 0.009321056365966796, 0.009390080451965332, 0.00929587173461914, 0.009464960098266601, 0.009462656021118164, 0.009324543952941895, 0.009312255859375, 0.009346847534179687, 0.009400511741638183, 0.009407999992370606, 0.009204256057739257, 0.00920684814453125, 0.009233344078063964, 0.009265279769897461, 0.009559103965759277, 0.009300800323486328, 0.009259008407592773, 0.009242624282836913, 0.009342880249023437, 0.00933897590637207, 0.00926028823852539, 0.009255680084228515, 0.009183232307434081, 0.009153792381286621, 0.00925158405303955, 0.009328767776489258, 0.009185248374938965, 0.009264960289001464, 0.009277536392211913, 0.009252863883972168, 0.009274944305419922, 0.009251423835754394, 0.009246560096740722, 0.00908675193786621, 0.009285887718200684, 0.009311679840087891, 0.009428576469421386, 0.009465888023376464, 0.009413311958312989, 0.009361408233642577, 0.009326432228088379, 0.009369759559631348, 0.009272704124450684, 0.009273152351379394, 0.009345536231994628, 0.009443648338317871, 0.009682304382324218, 0.00957913589477539, 0.010866687774658204, 0.009437184333801269, 0.009347200393676758, 0.009295519828796386, 0.009285856246948242, 0.009312447547912597, 0.009291487693786621, 0.009273440361022948, 0.009254816055297852, 0.009268671989440917, 0.009275808334350585, 0.009394432067871094, 0.009377792358398437, 0.00941004753112793, 0.009274080276489257, 0.009271072387695313, 0.009348575592041015, 0.009357119560241699, 0.009322976112365723, 0.009248640060424805, 0.009269856452941894, 0.009375519752502441, 0.009332991600036622, 0.009227999687194824, 0.009578495979309083, 0.009266528129577636, 0.009265312194824219, 0.00928159999847412, 0.009498944282531738, 0.009390239715576171, 0.009215999603271484, 0.009289728164672852, 0.009399616241455078, 0.009382335662841797, 0.00927667236328125, 0.009245408058166505, 0.009297696113586426, 0.009263615608215332, 0.00931222438812256, 0.009436927795410156, 0.009330400466918945, 0.009327168464660645, 0.00943446445465088, 0.009468255996704101, 0.009410016059875488, 0.009308671951293946, 0.009249247550964356, 0.009307200431823731, 0.00907852840423584, 0.009291775703430176, 0.009310272216796875, 0.009390015602111817, 0.009408415794372559, 0.009343071937561035, 0.009314240455627442, 0.009257023811340331, 0.009233951568603516, 0.009255647659301758, 0.009381728172302246, 0.009220000267028808, 0.009281536102294922, 0.009381631851196288, 0.00947430419921875, 0.009414752006530762, 0.009379712104797364, 0.009289759635925293, 0.009265215873718261, 0.009271231651306153, 0.009453568458557129, 0.00934716796875, 0.009275296211242675, 0.009248767852783203, 0.009281215667724609, 0.009312576293945312, 0.009227392196655273, 0.009233344078063964, 0.00923641586303711, 0.009242624282836913, 0.009367168426513672, 0.009244832038879394, 0.009305855751037598, 0.009247200012207031, 0.009320575714111328, 0.009295743942260742, 0.009340928077697755, 0.009285568237304687, 0.009265215873718261, 0.009234560012817384, 0.0092542724609375, 0.009236991882324219, 0.009303487777709962, 0.009245247840881347, 0.009240575790405273, 0.009344351768493653, 0.009362015724182129, 0.009381695747375489, 0.0092674560546875, 0.009273344039916993, 0.009262432098388671, 
0.009212575912475585, 0.009254912376403808, 0.009352383613586425, 0.009234880447387695, 0.009349504470825195, 0.009393407821655273, 0.009487104415893555, 0.00940771198272705, 0.00938649559020996, 0.009382143974304199, 0.00945132827758789, 0.00941267204284668, 0.009422719955444336, 0.009874688148498536, 0.00984233570098877, 0.009905055999755859, 0.010100799560546875, 0.009820159912109374, 0.009754624366760254, 0.009803808212280274, 0.009934720039367675, 0.00997590446472168, 0.009852928161621094, 0.00994099235534668, 0.009751872062683105, 0.010789567947387696, 0.00984607982635498, 0.009918656349182129, 0.00986128044128418, 0.009813952445983886, 0.00966697597503662, 0.009576671600341797, 0.009762656211853028, 0.009985343933105469, 0.00990998363494873, 0.0099334716796875, 0.009839872360229492, 0.009741120338439942, 0.009863360404968262, 0.009777152061462402, 0.009538911819458007, 0.009591456413269042, 0.009760800361633301, 0.010008543968200683, 0.009789440155029297, 0.009641119956970215, 0.009715968132019043, 0.009969728469848633, 0.009904671669006347, 0.009881407737731933, 0.009738528251647949, 0.009743616104125977, 0.009793984413146972, 0.009873439788818359, 0.00992240047454834, 0.00973964786529541, 0.009845120429992676, 0.009763423919677734, 0.009775103569030762, 0.009598591804504394, 0.009484671592712402, 0.009584799766540527, 0.009719776153564454, 0.009797504425048828, 0.009814016342163086, 0.009918463706970216, 0.010140735626220703, 0.009993151664733886, 0.009967231750488281, 0.009863264083862304, 0.009818400382995605, 0.009803744316101074, 0.009582624435424805, 0.009481760025024414, 0.009559647560119629, 0.01043827247619629, 0.010006143569946288, 0.009634559631347656, 0.009580863952636718, 0.009482943534851074, 0.009728223800659179, 0.00961616039276123, 0.009576448440551758, 0.009748576164245605, 0.009754528045654296, 0.00963321590423584, 0.009611840248107911, 0.009512639999389649, 0.009462240219116212, 0.009446368217468262, 0.009718655586242676, 0.009745951652526855, 0.009664511680603028, 0.009598464012145995, 0.009546719551086425, 0.009498016357421875, 0.009451583862304687, 0.009390368461608886, 0.009633440017700196, 0.009744768142700195, 0.009699040412902833, 0.00971622371673584, 0.009573504447937012, 0.009616448402404785, 0.009702336311340333, 0.009892736434936523, 0.009828319549560547, 0.009772928237915039, 0.009695455551147461, 0.009557791709899902, 0.009494848251342773, 0.0095283203125, 0.009503583908081054, 0.009992192268371582, 0.010205183982849121, 0.009803775787353516, 0.009901247978210449, 0.009673184394836425, 0.009715776443481445, 0.00973142433166504, 0.009672991752624512, 0.009713727951049805, 0.00971014404296875, 0.009891712188720703, 0.009902432441711425, 0.009868255615234375, 0.009923328399658203, 0.010332256317138673, 0.009916095733642579, 0.009941087722778321, 0.009920255661010742, 0.009767583847045899, 0.009804736137390137, 0.00978384017944336, 0.009766752243041993, 0.009777664184570312, 0.009646080017089843, 0.009854975700378419, 0.009486047744750977, 0.009804575920104981, 0.009672703742980958, 0.009702912330627441, 0.009640064239501953, 0.009677184104919434, 0.009594911575317383, 0.009574496269226074, 0.009545599937438965, 0.009637344360351562, 0.009893471717834473, 0.009973888397216796, 0.010060832023620606, 0.009762751579284667, 0.00976041603088379, 0.009698559761047363, 0.009523743629455567, 0.009604736328125, 0.009975584030151367, 0.00992972755432129, 0.009787391662597657, 0.009901760101318359, 0.009875776290893555, 0.009764863967895507, 0.009877504348754883, 
0.00976416015625, 0.009962176322937012, 0.009623295783996582, 0.009592991828918457, 0.009551551818847656, 0.009655839920043945, 0.010060223579406738, 0.010168191909790038, 0.010455615997314453, 0.010293184280395508, 0.009933216094970703, 0.011004896163940429, 0.010186944007873535, 0.009890303611755372, 0.009822208404541016, 0.009773216247558593, 0.009983839988708497, 0.00994649600982666, 0.009958271980285644, 0.010020223617553711, 0.00994326400756836, 0.00986672019958496, 0.009668831825256347, 0.009726431846618653, 0.00963584041595459, 0.009600031852722167, 0.009851263999938966, 0.009757280349731445, 0.009805824279785156, 0.009811327934265137, 0.009731840133666993, 0.009710399627685547, 0.009834560394287109, 0.009859071731567384, 0.009672703742980958, 0.009719807624816895, 0.009620800018310546, 0.009536479949951172, 0.009924927711486816, 0.00989132785797119, 0.009902239799499512, 0.00986736011505127, 0.00986956787109375, 0.009840640068054199, 0.009651616096496582, 0.009636704444885253, 0.009799424171447755, 0.009844736099243164, 0.009897664070129394, 0.009847040176391602, 0.009802016258239746, 0.009680864334106445, 0.009557248115539551, 0.009607744216918946, 0.009569791793823243, 0.009705984115600585, 0.009799679756164551, 0.009885696411132813, 0.009969663619995118, 0.01028502368927002, 0.009883968353271485, 0.00982755184173584, 0.00987609577178955, 0.009803647994995118, 0.009815520286560058, 0.009794079780578613, 0.009692768096923828, 0.009879263877868652, 0.009917375564575195, 0.009786463737487794, 0.009730367660522462, 0.009704031944274903, 0.009760607719421386, 0.009733375549316406, 0.009673376083374024, 0.00970137596130371, 0.009671680450439453, 0.009750911712646484, 0.009882240295410157, 0.009891839981079101, 0.009926655769348144, 0.009930751800537109, 0.009780799865722656, 0.009773504257202149, 0.009940896034240723, 0.009931967735290528, 0.010032032012939453, 0.009950976371765136, 0.009988351821899414, 0.009924192428588868, 0.009750304222106933, 0.00972662353515625, 0.00976252841949463, 0.009756192207336426, 0.009783295631408692, 0.009796319961547851, 0.0097260160446167, 0.009672479629516601, 0.009625151634216308, 0.009663071632385254, 0.0096278076171875, 0.009656864166259766, 0.00974396800994873, 0.009816415786743164, 0.009833951950073243, 0.009826592445373535, 0.010085856437683106, 0.01001683235168457, 0.01003600025177002, 0.009943231582641602, 0.009804896354675293, 0.009706208229064942, 0.009964768409729003, 0.009718560218811035, 0.009781248092651367, 0.009865216255187988, 0.010059935569763184, 0.00986511993408203, 0.009869248390197753, 0.009765983581542969, 0.00969820785522461, 0.009807871818542481, 0.009645407676696777, 0.009732768058776856, 0.009911616325378418, 0.009956031799316407, 0.009838591575622559, 0.009805824279785156, 0.009906175613403321, 0.009829631805419922, 0.009919327735900879, 0.009916319847106933, 0.009902400016784668, 0.00982323169708252, 0.010006239891052246, 0.00984499168395996, 0.009917344093322754, 0.010024352073669434, 0.010055839538574219, 0.009873824119567871, 0.009860992431640625, 0.009678815841674805, 0.00971782398223877, 0.009768992424011231, 0.009739263534545899, 0.009640128135681152, 0.009645888328552246, 0.00961622428894043, 0.009850784301757813, 0.009716992378234863, 0.009522080421447754, 0.009629695892333985, 0.009953503608703613, 0.009828415870666504, 0.009950559616088867, 0.009920895576477051, 0.009965567588806153, 0.009808992385864258, 0.00967356777191162, 0.00958620834350586, 0.009799391746520995, 0.00973087978363037, 0.009928383827209473, 
0.009846847534179687, 0.009527296066284179, 0.009790656089782714, 0.009670559883117675, 0.00948691177368164, 0.009461088180541992, 0.009458527565002442, 0.009591967582702636, 0.009903103828430175, 0.00997920036315918, 0.00986736011505127, 0.009792384147644042, 0.009727456092834473, 0.009768704414367675, 0.009619487762451171, 0.009566720008850099, 0.009587776184082032, 0.009583519935607911, 0.009502655982971191, 0.009680959701538086, 0.009758720397949219, 0.009752415657043456, 0.009748127937316895, 0.009568767547607422, 0.00955388832092285, 0.009469440460205078, 0.009428768157958985, 0.009650943756103515, 0.00972544002532959, 0.009794048309326172, 0.009953280448913575, 0.00999833583831787, 0.009938943862915038, 0.009938336372375489, 0.009729824066162109, 0.009919296264648438, 0.009755680084228516, 0.009671648025512696, 0.009631232261657715, 0.00961996841430664, 0.009662464141845703, 0.009583776473999023, 0.009472064018249512, 0.00951968002319336, 0.009677023887634277, 0.00996771240234375, 0.009801631927490234, 0.009822400093078613, 0.009930527687072754, 0.009678879737854003, 0.009709024429321289, 0.00978764820098877, 0.00963817596435547, 0.009778656005859376, 0.009871904373168944, 0.009703424453735352, 0.009547776222229003, 0.00951523208618164, 0.009572064399719239, 0.009545536041259765, 0.009475520133972168, 0.009401151657104492, 0.00942841625213623, 0.009470399856567382, 0.009504704475402833, 0.009680191993713378, 0.009623807907104492, 0.00953331184387207, 0.009396415710449219, 0.009484671592712402, 0.009615360260009765, 0.009778656005859376, 0.009747039794921876, 0.009885631561279296, 0.00993449592590332, 0.009886207580566407, 0.00998969554901123, 0.00991875171661377, 0.00992204761505127, 0.00982476806640625, 0.00984438419342041, 0.009713919639587402, 0.00964031982421875, 0.009664223670959473, 0.009707103729248047, 0.009703200340270996, 0.009585472106933593, 0.009570112228393554, 0.00941875171661377, 0.009395936012268066, 0.009357600212097169, 0.0097259521484375, 0.009965567588806153, 0.009844127655029298, 0.009667167663574219, 0.009570143699645995, 0.009584223747253418, 0.009840224266052246, 0.011205632209777832, 0.009805215835571288, 0.011686464309692383, 0.009758720397949219, 0.009872384071350097, 0.009802111625671387, 0.009834527969360352, 0.009747039794921876, 0.009660415649414063, 0.009620896339416504, 0.009603967666625976, 0.009686752319335938, 0.009595295906066895, 0.009614687919616699, 0.00988595199584961, 0.009885919570922852, 0.009971487998962402, 0.010090496063232422, 0.009975808143615723, 0.010016768455505372, 0.009874848365783692, 0.009917023658752442, 0.010260479927062988, 0.00986678409576416, 0.009841312408447265, 0.009613056182861329, 0.009680031776428222, 0.009558943748474122, 0.009566240310668946, 0.009944640159606933, 0.009962719917297363, 0.009788928031921386, 0.009909407615661621, 0.009777536392211914, 0.010347071647644044, 0.00962342357635498, 0.009500800132751465, 0.009459487915039063, 0.009586784362792969, 0.00988806438446045, 0.009510368347167968, 0.009646400451660157, 0.010149920463562011, 0.009768768310546875, 0.009682432174682617, 0.009644736289978027, 0.009652223587036133, 0.009596896171569825, 0.009519136428833008, 0.009637887954711915, 0.009711615562438965, 0.00940886402130127, 0.009385631561279298, 0.009439455986022948, 0.009330207824707032, 0.009674592018127441, 0.01010934352874756, 0.01005894374847412, 0.009902912139892578, 0.009905535697937012, 0.009790080070495605, 0.009715104103088379, 0.00973862361907959, 0.009885408401489259, 0.009767104148864746, 
0.00968735980987549, 0.009541631698608399, 0.009695072174072266, 0.009814175605773925, 0.009621503829956055, 0.009669983863830566, 0.009956000328063964, 0.0097892484664917, 0.00976095962524414, 0.009565183639526367, 0.009593855857849122, 0.009506272315979005, 0.009421343803405762, 0.009439359664916993, 0.009432064056396485, 0.009431936264038087, 0.009459263801574707, 0.009688544273376464, 0.009898943901062012, 0.009838624000549316, 0.009644031524658203, 0.009453536033630371, 0.00957033634185791, 0.009560288429260254, 0.009435071945190429, 0.009424736022949218, 0.009457759857177735, 0.009454079627990723, 0.009742688179016114, 0.009935968399047852, 0.010724287986755371, 0.009986240386962891, 0.010014495849609374, 0.009878815650939942, 0.009883808135986329, 0.009866175651550292, 0.009821824073791504, 0.009651424407958985, 0.009535296440124512, 0.009546719551086425, 0.009489855766296386, 0.009504735946655274, 0.009435744285583495, 0.009771103858947755, 0.009936800003051758, 0.00972390365600586, 0.00961740779876709, 0.009775103569030762, 0.00970137596130371, 0.009811712265014648, 0.009906432151794434, 0.009761055946350098, 0.009721920013427735, 0.009657088279724122, 0.009710623741149902, 0.009738304138183594, 0.00953286361694336, 0.010322367668151856, 0.010501919746398925, 0.009730400085449219, 0.009576224327087402, 0.009659711837768554, 0.009697919845581054, 0.009689184188842773, 0.00962281608581543, 0.009476832389831543, 0.009308223724365234, 0.009374688148498535, 0.009348064422607423, 0.00959488010406494, 0.009645503997802734, 0.009455679893493652, 0.009574399948120118, 0.00942131233215332, 0.009461503982543945, 0.009394432067871094, 0.009488384246826171, 0.009897983551025391, 0.01001471996307373, 0.010024959564208985, 0.009777376174926758, 0.009917471885681152, 0.009753279685974121, 0.00965180778503418, 0.009503328323364257, 0.009557184219360351, 0.009501376152038575, 0.009440704345703125, 0.00946233558654785, 0.009644031524658203, 0.009865280151367187, 0.009760767936706542, 0.00949836826324463, 0.009394432067871094, 0.009359519958496093, 0.009307871818542481, 0.009514880180358887, 0.009690943717956544, 0.009639424324035644, 0.00956281566619873, 0.009689599990844726, 0.009675935745239258, 0.009586496353149414, 0.009454367637634277, 0.009360639572143555, 0.009288448333740235, 0.00930406379699707, 0.009449472427368164, 0.00929587173461914, 0.009260319709777831, 0.009263808250427247, 0.009195008277893067, 0.009209568023681641, 0.009429823875427247, 0.009452735900878906, 0.009394847869873046, 0.009441439628601073, 0.009596927642822266, 0.00962559986114502, 0.009453696250915527, 0.00934825611114502, 0.00930844783782959, 0.009316576004028321, 0.009537376403808594, 0.009916319847106933, 0.00980016040802002, 0.009843839645385741, 0.009824959754943848, 0.009953472137451172, 0.009904128074645996, 0.009957375526428223, 0.009776224136352539, 0.009560928344726563, 0.009496640205383301, 0.009430399894714355, 0.009455615997314454, 0.009442079544067382, 0.009514847755432128, 0.009721952438354492, 0.009608192443847656, 0.00938486385345459, 0.009518624305725098, 0.00965231990814209, 0.009873791694641114, 0.009743616104125977, 0.00960588836669922, 0.009569952011108399, 0.009538047790527344, 0.009674592018127441, 0.009702848434448243, 0.009605695724487304, 0.009518912315368652, 0.00969536018371582]",tokens/s,103.89544415345289,, @@ -10466,7 +10466,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 52.12 MiB is free. Process 78822 has 14.69 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 108.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 52.12 MiB is free. Process 72348 has 14.69 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 108.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -10509,7 +10509,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 144.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 34.12 MiB is free. Process 149443 has 14.71 GiB memory in use. Of the allocated memory 14.59 GiB is allocated by PyTorch, and 1.69 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 144.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 34.12 MiB is free. Process 142896 has 14.71 GiB memory in use. Of the allocated memory 14.59 GiB is allocated by PyTorch, and 1.69 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,836.452352,4675.534848,0.0,4280.287232,4115.121152,s,1,7.65303857421875,7.65303857421875,0.0,7.65303857421875,7.65303857421875,7.65303857421875,7.65303857421875,[7.65303857421875],,kWh,1.0351801950006727e-05,1.1340980008129043e-06,4.7408371259966e-06,1.622673707681623e-05,,MB,1142.31296,4981.71904,0.0,4575.985664,4408.408064,s,10,3.030411315917968,0.3030411315917969,0.002580261041039593,0.3030240173339844,0.3054149444580078,0.3060226119995117,0.30650874603271483,"[0.2980228271484375, 0.29910711669921874, 0.30268963623046874, 0.30315701293945313, 0.3052799072265625, 0.3048192138671875, 0.3028910217285156, 0.30517254638671876, 0.30264175415039063, 0.3066302795410156]",tokens/s,844.7698127818426,kWh,9.061008412627508e-06,9.98694762992155e-07,5.996208500666343e-06,1.6055911676286006e-05,tokens/kWh,15944283.025553923,MB,1163.50976,4981.71904,0.0,4575.985664,4408.410624,s,10,14.811874389648438,1.4811874389648438,0.010192541895537985,1.4821577758789062,1.491000354003906,1.492011590576172,1.4928205798339844,"[1.458283935546875, 1.4831822509765624, 1.4930228271484376, 1.48113330078125, 1.489197265625, 1.478267822265625, 1.4705389404296876, 1.490775634765625, 1.4900311279296874, 1.4774412841796876]",tokens/s,42.533441982217155,kWh,4.301731266653926e-05,4.745208429294184e-06,2.843334682073334e-05,7.619586791656678e-05,tokens/kWh,826816.4891695172,,s,630,14.80945721435548,0.02350707494342138,0.0003652562417825801,0.02352883243560791,0.023837715148925784,0.024032551956176755,0.024826040382385256,"[0.024179264068603514, 0.02332838439941406, 0.02325542449951172, 0.023571935653686524, 0.023126623153686524, 0.022810495376586915, 0.02295327949523926, 0.022874719619750978, 0.02343731117248535, 0.023152799606323243, 0.023086463928222656, 0.023060895919799804, 0.022964096069335936, 0.022962528228759764, 0.022966272354125978, 0.022945535659790038, 0.02320518493652344, 0.02302867126464844, 0.02311577606201172, 0.023164735794067384, 0.02349056053161621, 0.02337606430053711, 0.02351286315917969, 0.023074464797973634, 0.023984703063964843, 0.022965503692626954, 0.0229117431640625, 0.022939104080200196, 0.022780384063720703, 0.02317888069152832, 0.022918912887573244, 0.02284547233581543, 0.022947872161865234, 0.02283955192565918, 0.022862207412719725, 0.022763519287109374, 0.022816064834594727, 0.02278223991394043, 0.023113536834716796, 0.022927967071533203, 0.02285971260070801, 0.022884416580200195, 0.024978752136230468, 0.024314559936523438, 0.023011007308959962, 0.023246335983276366, 0.02304083251953125, 0.02294169616699219, 0.02297622489929199, 0.02327516746520996, 0.02326927947998047, 0.0232138557434082, 0.023081920623779298, 0.02295199966430664, 
0.023014495849609375, 0.02323747253417969, 0.023005247116088867, 0.022987775802612305, 0.02320649528503418, 0.023129600524902344, 0.023249759674072265, 0.023013376235961915, 0.022957727432250975, 0.02403536033630371, 0.023434751510620116, 0.023245279312133788, 0.0235479679107666, 0.024457151412963868, 0.02391766357421875, 0.02341731262207031, 0.023945087432861327, 0.023502399444580078, 0.02369843292236328, 0.023676288604736327, 0.02342732810974121, 0.02323289680480957, 0.023358463287353515, 0.023931327819824218, 0.02350761604309082, 0.023482271194458008, 0.02353561592102051, 0.023469823837280274, 0.023475456237792968, 0.02337436866760254, 0.023605728149414064, 0.02333286476135254, 0.023357376098632813, 0.023224384307861327, 0.02347804832458496, 0.02313852882385254, 0.023324544906616212, 0.023433055877685547, 0.02327071952819824, 0.02337276840209961, 0.023993728637695312, 0.023318656921386717, 0.023343616485595704, 0.023050111770629884, 0.0231200008392334, 0.023093183517456053, 0.023076927185058594, 0.023128320693969726, 0.02336105537414551, 0.023158496856689453, 0.024087039947509766, 0.02369740867614746, 0.02366854476928711, 0.023543231964111327, 0.023788032531738282, 0.023597055435180665, 0.023731584548950194, 0.023657344818115236, 0.023812095642089845, 0.023607295989990236, 0.023692607879638672, 0.02377801513671875, 0.023883520126342775, 0.023617759704589843, 0.023711103439331055, 0.023794303894042967, 0.02367283248901367, 0.023450944900512697, 0.023521280288696288, 0.023560895919799804, 0.023623392105102538, 0.023568672180175783, 0.02430156707763672, 0.02373222351074219, 0.023830528259277343, 0.023736320495605468, 0.023558143615722657, 0.02396272087097168, 0.023570655822753906, 0.023548608779907228, 0.02373129653930664, 0.02364009666442871, 0.02370035171508789, 0.02362892723083496, 0.02355289649963379, 0.023558143615722657, 0.023578624725341796, 0.023613439559936524, 0.02361657524108887, 0.02374732780456543, 0.02379385566711426, 0.02372812843322754, 0.023512895584106446, 0.023521472930908203, 0.023777280807495117, 0.025438207626342774, 0.023703136444091798, 0.02349507141113281, 0.02356153678894043, 0.023567039489746092, 0.023493824005126954, 0.023429759979248045, 0.023705791473388672, 0.023538976669311523, 0.02402911949157715, 0.023810527801513673, 0.023572799682617187, 0.02347417640686035, 0.02349056053161621, 0.02355948829650879, 0.02335136032104492, 0.023652416229248047, 0.0238209285736084, 0.02361529541015625, 0.02406617546081543, 0.024272159576416017, 0.024326879501342772, 0.023631872177124022, 0.023412704467773438, 0.023238687515258788, 0.023769088745117187, 0.023404544830322265, 0.02347007942199707, 0.02389401626586914, 0.023455743789672853, 0.023642112731933593, 0.02343731117248535, 0.023690591812133788, 0.02358937644958496, 0.023570816040039064, 0.023779104232788086, 0.02357596778869629, 0.024078943252563476, 0.023686336517333983, 0.023548736572265624, 0.024302879333496095, 0.0236430721282959, 0.023540767669677734, 0.023364416122436525, 0.023197696685791015, 0.023150047302246093, 0.02304368019104004, 0.023753664016723634, 0.023439359664916993, 0.02346931266784668, 0.023785280227661132, 0.02355295944213867, 0.023442880630493164, 0.023685440063476563, 0.024180992126464844, 0.02349875259399414, 0.02360425567626953, 0.023598047256469728, 0.023395999908447266, 0.02345404815673828, 0.023414783477783203, 0.023502975463867187, 0.023393728256225585, 0.02328767967224121, 0.02324127960205078, 0.02355606460571289, 0.023672447204589844, 0.023694847106933595, 0.023565216064453123, 
0.023793664932250977, 0.02368694305419922, 0.023747840881347657, 0.02361356735229492, 0.023458656311035157, 0.023431167602539063, 0.023473567962646484, 0.02335804748535156, 0.023377311706542968, 0.023306848526000977, 0.023453567504882814, 0.023160959243774416, 0.023787071228027343, 0.023583168029785155, 0.023513088226318358, 0.023525375366210938, 0.023814079284667968, 0.02359225654602051, 0.023369951248168944, 0.023286304473876952, 0.023134208679199218, 0.02326323127746582, 0.023321983337402343, 0.023325311660766603, 0.02327347183227539, 0.02329395294189453, 0.023178752899169923, 0.02317568016052246, 0.023928831100463867, 0.02393907165527344, 0.023721055984497072, 0.023638015747070314, 0.02356425666809082, 0.023342016220092774, 0.025831424713134765, 0.023721887588500978, 0.0236046085357666, 0.023611839294433595, 0.023316768646240233, 0.023382015228271484, 0.023488512039184572, 0.02386124801635742, 0.023400447845458985, 0.023513088226318358, 0.023562175750732422, 0.02355558395385742, 0.023674720764160155, 0.023710399627685546, 0.02361142349243164, 0.023628992080688478, 0.023724863052368164, 0.023851007461547852, 0.02351513671875, 0.023566335678100587, 0.023569728851318358, 0.023458175659179688, 0.023562496185302734, 0.02359916877746582, 0.023357440948486328, 0.023615039825439454, 0.02359856033325195, 0.023700447082519532, 0.023582719802856447, 0.02348646354675293, 0.02349260711669922, 0.023721120834350587, 0.023495616912841796, 0.02343107223510742, 0.023459840774536132, 0.023379968643188476, 0.02353936004638672, 0.023435615539550782, 0.023619583129882812, 0.023319679260253905, 0.023900159835815428, 0.02398044776916504, 0.023576320648193358, 0.02442313575744629, 0.02351628875732422, 0.023556991577148436, 0.023638015747070314, 0.023642112731933593, 0.02405116844177246, 0.023553823471069334, 0.02335001564025879, 0.023465120315551757, 0.023391103744506835, 0.02346544075012207, 0.023545503616333008, 0.023515775680541993, 0.023990495681762695, 0.02370150375366211, 0.02369254493713379, 0.023599872589111327, 0.023695232391357422, 0.02352681541442871, 0.023613216400146485, 0.02434454345703125, 0.023666336059570314, 0.023396703720092775, 0.023341056823730468, 0.023412736892700195, 0.023330816268920897, 0.023379968643188476, 0.02332467269897461, 0.023430624008178712, 0.023685663223266602, 0.023452991485595702, 0.023552032470703126, 0.02374518394470215, 0.023538848876953126, 0.023513023376464843, 0.023753440856933594, 0.023427263259887695, 0.023392255783081056, 0.023533344268798828, 0.02328335952758789, 0.02318547248840332, 0.023097856521606445, 0.023435327529907228, 0.02374239921569824, 0.023386112213134767, 0.02349260711669922, 0.02345952033996582, 0.023737855911254883, 0.023669567108154297, 0.023530879974365235, 0.02348451232910156, 0.023444000244140624, 0.02351923179626465, 0.02361100769042969, 0.023677312850952148, 0.023586816787719726, 0.023395519256591796, 0.023919424057006835, 0.023500320434570312, 0.023755231857299806, 0.023317792892456054, 0.02369340705871582, 0.025008031845092774, 0.023364320755004882, 0.023582719802856447, 0.02351103973388672, 0.023239967346191406, 0.023140703201293945, 0.023365535736083985, 0.024134111404418946, 0.023357440948486328, 0.023257087707519532, 0.023230464935302734, 0.023150592803955077, 0.023173120498657225, 0.023085216522216796, 0.023066463470458983, 0.022986751556396484, 0.02304204750061035, 0.023078399658203123, 0.02300979232788086, 0.02308095932006836, 0.023035903930664063, 0.024086143493652342, 0.02361587142944336, 0.02334867286682129, 
0.023151168823242186, 0.02297225570678711, 0.022929567337036133, 0.022986560821533202, 0.02316716766357422, 0.023166976928710937, 0.022994047164916993, 0.022838144302368163, 0.023007232666015624, 0.023027711868286133, 0.02325299263000488, 0.024681631088256835, 0.022922079086303712, 0.02310553550720215, 0.02289651107788086, 0.02310544013977051, 0.02300499153137207, 0.02368889617919922, 0.023000799179077148, 0.02292799949645996, 0.02284172821044922, 0.022769472122192384, 0.022839679718017578, 0.02281667137145996, 0.02285763168334961, 0.02290483283996582, 0.02292857551574707, 0.02302239990234375, 0.02296118354797363, 0.022874143600463866, 0.022854591369628908, 0.022844831466674806, 0.02282966423034668, 0.022916479110717772, 0.022880064010620118, 0.02298476791381836, 0.023325439453125, 0.023514528274536133, 0.023757024765014647, 0.02361382484436035, 0.02384022331237793, 0.023582752227783204, 0.023685632705688478, 0.023541759490966797, 0.02352742385864258, 0.023564287185668945, 0.025159616470336914, 0.024561376571655275, 0.02389027214050293, 0.02356617546081543, 0.02382035255432129, 0.02366454315185547, 0.023980224609375, 0.0235284481048584, 0.02371686363220215, 0.023527584075927734, 0.023736127853393553, 0.02367695999145508, 0.023696895599365234, 0.023783071517944336, 0.024363231658935548, 0.023821823120117186, 0.02373023986816406, 0.023821023941040038, 0.02371753692626953, 0.02376406478881836, 0.023622207641601563, 0.02389139175415039, 0.023638111114501953, 0.023595903396606447, 0.02348646354675293, 0.023637088775634765, 0.023548416137695313, 0.023691680908203124, 0.02383635139465332, 0.023651775360107423, 0.02378432083129883, 0.023602848052978517, 0.023853471755981445, 0.02371945571899414, 0.02358844757080078, 0.023946048736572266, 0.023836288452148437, 0.02375433540344238, 0.02369820785522461, 0.02404159927368164, 0.023909631729125976, 0.023734912872314454, 0.023791616439819335, 0.02362739181518555, 0.023517568588256835, 0.023578624725341796, 0.023517183303833008, 0.023740415573120118, 0.023608608245849608, 0.02367148780822754, 0.023701536178588868, 0.023754751205444336, 0.023582719802856447, 0.02360099220275879, 0.023549823760986327, 0.023707935333251953, 0.023563840866088866, 0.02372243118286133, 0.023609344482421874, 0.02352921676635742, 0.023581247329711914, 0.023511903762817383, 0.023356256484985353, 0.023278656005859374, 0.02328428840637207, 0.0238022403717041, 0.02360438346862793, 0.02350166320800781, 0.023476224899291992, 0.023459840774536132, 0.023385087966918947, 0.023770111083984375, 0.02351420783996582, 0.023833471298217772, 0.023539743423461913, 0.023586240768432618, 0.023394271850585936, 0.02433932876586914, 0.02374358367919922, 0.02349910354614258, 0.023532096862792968, 0.023459840774536132, 0.02337295913696289, 0.023837535858154298, 0.02351513671875, 0.023503936767578126, 0.023350208282470704, 0.023502559661865235, 0.024280416488647462, 0.024840192794799806, 0.024897823333740233, 0.023560863494873047, 0.02346700859069824, 0.023286367416381838, 0.023815967559814452, 0.023839328765869142, 0.023596832275390625, 0.02350105667114258, 0.023760255813598634, 0.02357062339782715, 0.023536064147949218, 0.023443456649780273, 0.023582719802856447, 0.023640064239501952, 0.023649503707885742, 0.02366761589050293, 0.02366041564941406, 0.02368297576904297, 0.023451648712158202, 0.023681119918823244, 0.024008319854736327, 0.02358310317993164, 0.023379648208618164, 0.023437631607055663, 0.023465375900268554, 0.023538272857666017, 0.023570432662963867, 0.023584800720214842, 
0.02341036796569824, 0.023509151458740236, 0.02374790382385254, 0.023690048217773436, 0.02360051155090332, 0.0235784969329834, 0.02354979133605957, 0.023499616622924803, 0.023737951278686522, 0.023480800628662108, 0.02360121536254883, 0.02350796890258789, 0.02357734489440918, 0.023474367141723632, 0.023621631622314454, 0.023601152420043944, 0.023551616668701172, 0.02374835205078125, 0.023620223999023436, 0.02377654457092285, 0.023673599243164062, 0.023571807861328124, 0.0242587833404541, 0.02387353515625, 0.02352742385864258, 0.02353984069824219, 0.02375433540344238, 0.02374684715270996, 0.02353971290588379, 0.023556095123291015, 0.023582176208496095, 0.023566047668457032, 0.02340108871459961, 0.02341231918334961, 0.02359328079223633, 0.023549280166625976, 0.023749696731567384, 0.02352921676635742, 0.023634048461914064, 0.023614463806152345, 0.023577600479125976, 0.023496511459350587, 0.023660127639770507, 0.023634271621704103, 0.023431615829467775, 0.023521087646484376, 0.02326937675476074, 0.023581823348999022, 0.02360204887390137, 0.023846912384033202, 0.023967744827270508, 0.023348608016967774, 0.02336319923400879, 0.023318656921386717, 0.022993183135986327, 0.022948448181152343, 0.023076864242553712, 0.023250944137573244, 0.023510208129882814, 0.023992671966552734, 0.023560672760009765, 0.023645503997802735, 0.023485088348388673, 0.023425056457519532, 0.02351820755004883, 0.02350979232788086, 0.02392460823059082, 0.024791391372680664, 0.02327756881713867, 0.02331222343444824, 0.023272991180419922, 0.023020160675048827, 0.022982656478881838, 0.022880159378051757, 0.022990943908691407, 0.022978143692016603, 0.023241119384765627, 0.02336128044128418, 0.023355648040771483, 0.023216127395629883, 0.023003135681152344, 0.022994623184204102, 0.022875808715820314, 0.02285430335998535, 0.022939647674560547]",tokens/s,42.54038422078784,, @@ -10553,7 +10553,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 258.12 MiB is free. Process 97106 has 14.49 GiB memory in use. Of the allocated memory 14.32 GiB is allocated by PyTorch, and 53.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 258.12 MiB is free. Process 90557 has 14.49 GiB memory in use. Of the allocated memory 14.32 GiB is allocated by PyTorch, and 53.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -10804,7 +10804,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 24.12 MiB is free. Process 116928 has 14.71 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 141.44 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 24.12 MiB is free. Process 110622 has 14.71 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 141.44 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -10913,7 +10913,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 768.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 286.12 MiB is free. 
Process 87842 has 14.46 GiB memory in use. Of the allocated memory 14.30 GiB is allocated by PyTorch, and 41.77 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 768.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 286.12 MiB is free. Process 81410 has 14.46 GiB memory in use. Of the allocated memory 14.30 GiB is allocated by PyTorch, and 41.77 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -11022,7 +11022,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 536.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 452.12 MiB is free. Process 81054 has 14.30 GiB memory in use. Of the allocated memory 14.18 GiB is allocated by PyTorch, and 1.57 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 536.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 452.12 MiB is free. Process 74613 has 14.30 GiB memory in use. Of the allocated memory 14.18 GiB is allocated by PyTorch, and 1.57 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -11065,7 +11065,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 26.12 MiB is free. Process 72544 has 14.71 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 47.00 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 26.12 MiB is free. Process 65814 has 14.71 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 47.00 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,835.82976,3354.329088,0.0,2959.081472,2957.493248,s,1,7.60441064453125,7.60441064453125,0.0,7.60441064453125,7.60441064453125,7.60441064453125,7.60441064453125,[7.60441064453125],,kWh,1.005276507085379e-05,1.1015301663254118e-06,4.153892212005461e-06,1.5308187449184663e-05,,MB,1128.615936,3423.535104,0.0,3017.801728,2552.885248,s,10,0.5859242820739747,0.058592428207397476,0.002362301742255165,0.05809035110473633,0.05996758232116699,0.06253839855194092,0.06459505153656006,"[0.06510921478271485, 0.0585645751953125, 0.05581372833251953, 0.05712700653076172, 0.05939628982543945, 0.058537055969238284, 0.057635486602783205, 0.05768281555175781, 0.05849788665771485, 0.05756022262573242]",tokens/s,4369.1652288900905,kWh,2.154151239307669e-06,2.3743067485282434e-07,1.4369149567204602e-06,3.828496870880954e-06,tokens/kWh,66866973.8108192,MB,1138.286592,3423.535104,0.0,3017.801728,2552.887808,s,10,12.852337768554687,1.285233776855469,0.012224236546737058,1.289853271484375,1.2956299560546873,1.2969684204101561,1.298039191894531,"[1.298306884765625, 1.29533251953125, 1.294235595703125, 1.293533203125, 1.2904178466796874, 1.2892886962890624, 1.275995849609375, 1.2716734619140626, 1.2858623046875, 1.25769140625]",tokens/s,49.01831957306602,kWh,3.715348410902399e-05,4.097759460257526e-06,2.1655742406280632e-05,6.290698597556217e-05,tokens/kWh,1001478.5961049536,,s,630,12.84996529006958,0.020396770301697745,0.00037085017510198466,0.020403215408325195,0.020714876365661623,0.020847481441497805,0.02165227920532227,"[0.021066112518310545, 0.02083488082885742, 0.02062531280517578, 0.02076054382324219, 0.020461727142333984, 0.02046553611755371, 0.020426752090454102, 0.020465215682983397, 0.02067840003967285, 0.02053590393066406, 0.020598527908325195, 0.020515167236328125, 0.020353023529052734, 0.02046953582763672, 0.020455135345458984, 0.020515327453613282, 0.020817920684814452, 0.020729312896728514, 0.02049430465698242, 0.020668800354003907, 0.020670047760009767, 0.020471839904785155, 0.02052355194091797, 0.020469696044921874, 0.020738143920898438, 0.020580352783203124, 0.020473407745361327, 0.020440927505493166, 0.020510751724243163, 0.020568639755249023, 0.0203505916595459, 0.02026697540283203, 0.020344831466674804, 0.02058608055114746, 0.02115839958190918, 0.02113260841369629, 0.022361087799072265, 0.02051584053039551, 0.020597152709960938, 0.020817695617675783, 0.021066560745239257, 0.0205614070892334, 0.02032614326477051, 0.02053606414794922, 0.020586496353149415, 0.02038374328613281, 0.020475839614868162, 0.02048543930053711, 0.020498592376708983, 0.020699743270874024, 0.020553728103637696, 0.02041651153564453, 0.020514463424682616, 
0.020670816421508788, 0.020668415069580077, 0.020525056838989256, 0.020358400344848634, 0.02049305534362793, 0.020557823181152343, 0.020657760620117187, 0.020686464309692384, 0.020398880004882814, 0.020418560028076172, 0.02129913520812988, 0.02044937515258789, 0.02044313621520996, 0.020438335418701173, 0.020499135971069334, 0.020596736907958983, 0.020344831466674804, 0.020529151916503906, 0.020545536041259766, 0.02068070411682129, 0.020444320678710937, 0.020466527938842773, 0.020559871673583984, 0.020563968658447264, 0.020537343978881836, 0.020682752609252928, 0.02056188774108887, 0.020637727737426757, 0.021139455795288087, 0.020524543762207033, 0.02073855972290039, 0.02047385597229004, 0.0210984001159668, 0.020659360885620117, 0.02035807991027832, 0.02040403175354004, 0.02050886344909668, 0.020812864303588866, 0.02082419204711914, 0.020410879135131836, 0.020457855224609377, 0.02056185531616211, 0.020596736907958983, 0.020531200408935548, 0.02075823974609375, 0.020402368545532228, 0.020547679901123047, 0.02059587287902832, 0.020553728103637696, 0.02042678451538086, 0.020369535446166993, 0.0203885440826416, 0.020549631118774413, 0.02048793601989746, 0.020549888610839843, 0.020465375900268555, 0.020435136795043947, 0.020608768463134766, 0.020525407791137696, 0.020568063735961914, 0.020458656311035155, 0.0205296630859375, 0.020626943588256837, 0.02035593605041504, 0.02057027244567871, 0.020567264556884766, 0.020607616424560545, 0.020518911361694335, 0.02039193534851074, 0.020479999542236327, 0.020534751892089843, 0.020465503692626952, 0.02038240051269531, 0.02088243293762207, 0.020664575576782227, 0.02041484832763672, 0.020543872833251955, 0.020746240615844725, 0.020398080825805662, 0.020849727630615236, 0.02044380760192871, 0.020334880828857423, 0.020512767791748047, 0.020389888763427736, 0.021529727935791016, 0.020369760513305663, 0.020412960052490235, 0.020612096786499022, 0.020242719650268554, 0.020423391342163085, 0.02060633659362793, 0.020472448348999025, 0.020578304290771485, 0.020781248092651368, 0.020649791717529297, 0.02050048065185547, 0.02042265510559082, 0.020436607360839843, 0.020599168777465822, 0.020567039489746093, 0.02037820816040039, 0.02026460838317871, 0.02044326400756836, 0.02032633590698242, 0.020087135314941405, 0.02034320068359375, 0.020228031158447266, 0.02033203125, 0.020213760375976563, 0.021139968872070314, 0.020799488067626954, 0.020573631286621093, 0.020531776428222657, 0.020448671340942384, 0.02056867218017578, 0.020701183319091796, 0.02047385597229004, 0.02041609573364258, 0.020422624588012694, 0.020314815521240235, 0.02025651168823242, 0.02041651153564453, 0.020320287704467775, 0.0203504638671875, 0.020083168029785155, 0.020351999282836913, 0.02077289581298828, 0.020705408096313476, 0.020458335876464843, 0.020699392318725585, 0.022826751708984374, 0.020645696640014647, 0.02057360076904297, 0.020406911849975586, 0.020424032211303712, 0.020321088790893553, 0.02088960075378418, 0.020375520706176757, 0.020336639404296874, 0.020813535690307618, 0.020738336563110353, 0.02062745666503906, 0.02045283126831055, 0.020380191802978516, 0.020395488739013673, 0.02068675231933594, 0.020378240585327147, 0.02040415954589844, 0.020387903213500976, 0.02037881660461426, 0.020322240829467774, 0.02026723289489746, 0.020404064178466796, 0.020703712463378907, 0.020490591049194335, 0.020297727584838866, 0.02039948844909668, 0.020417152404785158, 0.020733951568603515, 0.020858816146850586, 0.020471872329711913, 0.02045747184753418, 0.02034876823425293, 0.020319807052612306, 
0.02026691246032715, 0.02046636772155762, 0.02065203285217285, 0.020466815948486327, 0.020539264678955078, 0.020494335174560546, 0.02041548728942871, 0.020412479400634766, 0.02055776023864746, 0.0204466552734375, 0.02082259178161621, 0.020447231292724608, 0.020410367965698242, 0.020369407653808593, 0.02038374328613281, 0.020798847198486327, 0.020638336181640626, 0.0203855037689209, 0.020336927413940428, 0.020356832504272462, 0.02049849510192871, 0.020762847900390624, 0.020410367965698242, 0.0204977912902832, 0.020458112716674803, 0.020467039108276366, 0.020286111831665038, 0.020400224685668947, 0.020483999252319335, 0.020504159927368162, 0.020430816650390624, 0.020422975540161134, 0.020510719299316405, 0.021272031784057618, 0.022427616119384767, 0.020874624252319337, 0.02043996810913086, 0.020322303771972656, 0.020555776596069338, 0.020563968658447264, 0.020361215591430663, 0.02037555122375488, 0.02037555122375488, 0.020426816940307617, 0.020391744613647463, 0.02035036849975586, 0.020415199279785155, 0.02046883201599121, 0.020540447235107423, 0.020559743881225587, 0.020361120223999024, 0.02067875289916992, 0.020927743911743166, 0.02074025535583496, 0.020564479827880858, 0.02065417671203613, 0.020415552139282228, 0.020495296478271485, 0.02044313621520996, 0.020551263809204103, 0.020318784713745118, 0.020387104034423828, 0.020437568664550782, 0.020215808868408205, 0.020375455856323242, 0.020284799575805663, 0.020392671585083007, 0.020576255798339844, 0.020168319702148437, 0.02047433662414551, 0.020367040634155273, 0.020367584228515624, 0.020745311737060547, 0.020517791748046875, 0.02040233612060547, 0.02052079963684082, 0.020455423355102538, 0.020440927505493166, 0.020528703689575194, 0.020400480270385744, 0.020340959548950197, 0.02033433532714844, 0.020318496704101564, 0.021116895675659178, 0.020486175537109377, 0.02063564872741699, 0.020516511917114257, 0.020412832260131835, 0.020492223739624022, 0.020350976943969725, 0.020206975936889648, 0.020854560852050782, 0.020509536743164063, 0.02059676742553711, 0.02047792053222656, 0.020379520416259764, 0.020434240341186523, 0.020489023208618163, 0.02088640022277832, 0.020379520416259764, 0.020236415863037108, 0.020536352157592773, 0.020687135696411132, 0.020585119247436525, 0.020477983474731447, 0.020431968688964845, 0.020341663360595702, 0.02047385597229004, 0.020628639221191406, 0.020433536529541017, 0.020408384323120116, 0.02032451248168945, 0.020545440673828123, 0.02067465591430664, 0.020461856842041017, 0.02064067268371582, 0.020521631240844728, 0.02047369575500488, 0.020277568817138672, 0.020316160202026368, 0.020928512573242186, 0.020516159057617188, 0.020480287551879882, 0.020394176483154298, 0.020347103118896485, 0.02036636734008789, 0.02031622314453125, 0.0204705924987793, 0.0208155517578125, 0.020304288864135742, 0.020307968139648438, 0.020391424179077147, 0.02025507164001465, 0.020297887802124024, 0.020365312576293947, 0.020338687896728515, 0.020197376251220703, 0.02053638458251953, 0.020701887130737305, 0.020522335052490234, 0.020585023880004882, 0.020396095275878906, 0.0204716796875, 0.02042255973815918, 0.02035353660583496, 0.02069708824157715, 0.020436992645263673, 0.020387840270996094, 0.02026905632019043, 0.020415552139282228, 0.02056604766845703, 0.020157344818115236, 0.02027724838256836, 0.020641183853149413, 0.02080214309692383, 0.020518911361694335, 0.0205897274017334, 0.02038256072998047, 0.02049238395690918, 0.020385696411132814, 0.02022604751586914, 0.02081875228881836, 0.020418336868286133, 0.02046175956726074, 
0.020223648071289062, 0.02026473617553711, 0.02034262466430664, 0.020339424133300782, 0.02002943992614746, 0.020116607666015626, 0.01990950393676758, 0.020244768142700195, 0.020332256317138673, 0.02036735916137695, 0.020353023529052734, 0.020308063507080077, 0.020321279525756835, 0.020239263534545898, 0.020141439437866213, 0.02008127975463867, 0.020015104293823242, 0.019920896530151368, 0.019986080169677733, 0.020010879516601562, 0.01982512092590332, 0.021702335357666015, 0.02112544059753418, 0.020215808868408205, 0.020246143341064452, 0.02060736083984375, 0.02007859230041504, 0.020531200408935548, 0.02005731201171875, 0.020046144485473632, 0.019919008255004884, 0.020033855438232422, 0.02020966339111328, 0.020714559555053712, 0.020298688888549805, 0.020106592178344727, 0.02007107162475586, 0.020136959075927736, 0.020173824310302735, 0.02011039924621582, 0.020013311386108398, 0.02024323272705078, 0.020260768890380858, 0.020298912048339845, 0.020278112411499023, 0.020484256744384765, 0.020156160354614257, 0.020084320068359376, 0.020166976928710938, 0.019982143402099608, 0.020182432174682616, 0.020291872024536133, 0.020503231048583984, 0.020256767272949217, 0.020186656951904296, 0.020084768295288085, 0.020695232391357423, 0.02007046318054199, 0.019983936309814453, 0.020088544845581056, 0.020717727661132813, 0.020153375625610353, 0.02018604850769043, 0.020221439361572266, 0.0199616641998291, 0.019862016677856444, 0.020035776138305664, 0.020262912750244142, 0.020312000274658203, 0.020231231689453125, 0.020261215209960937, 0.02011408042907715, 0.019976192474365235, 0.019995744705200196, 0.02014668846130371, 0.020264991760253905, 0.01996633529663086, 0.01989017677307129, 0.02005638313293457, 0.02026652717590332, 0.019949216842651368, 0.019876224517822266, 0.01999488067626953, 0.019996543884277344, 0.019900224685668946, 0.019879552841186525, 0.019929664611816406, 0.01988096046447754, 0.019993024826049803, 0.020518943786621092, 0.019972192764282228, 0.02005990409851074, 0.019971839904785155, 0.02012460708618164, 0.02036729621887207, 0.02080508804321289, 0.02080419158935547, 0.020658079147338866, 0.020574304580688478, 0.020661279678344725, 0.02052102470397949, 0.020721759796142578, 0.02085487937927246, 0.020675296783447265, 0.02048409652709961, 0.020155967712402342, 0.02002579116821289, 0.02020672035217285, 0.01999875259399414, 0.019850080490112304, 0.01988582420349121, 0.019799808502197265, 0.0199869441986084, 0.019927040100097656, 0.019869632720947265, 0.020035648345947267, 0.020189184188842774, 0.020275136947631837, 0.020230207443237305, 0.0202128963470459, 0.020095775604248047, 0.020451391220092773, 0.020184320449829103, 0.021155391693115234, 0.02065043258666992, 0.02047529602050781, 0.020656864166259767, 0.020289535522460937, 0.020059808731079102, 0.020040031433105468, 0.02002707290649414, 0.020166976928710938, 0.020197376251220703, 0.020105215072631837, 0.020153440475463868, 0.02010745620727539, 0.02019606399536133, 0.0200392951965332, 0.020093311309814454, 0.020641759872436525, 0.021180448532104493, 0.020551679611206054, 0.020414464950561522, 0.02025494384765625, 0.020300575256347656, 0.020220928192138672, 0.019963903427124022, 0.01983692741394043, 0.019812351226806642, 0.020319520950317384, 0.022896383285522463, 0.020020191192626952, 0.020242240905761717, 0.020376928329467775, 0.02020796775817871, 0.020017759323120117, 0.020375200271606445, 0.020583776473999022, 0.020418495178222657, 0.02068070411682129, 0.021214176177978515, 0.02055276870727539, 0.020251583099365235, 0.020213056564331054, 
0.021889759063720704, 0.023609024047851562, 0.020303808212280273, 0.02025904083251953, 0.020203039169311522, 0.020273632049560546, 0.020357248306274413, 0.020189184188842774, 0.019868736267089845, 0.020318464279174806, 0.020626111984252928, 0.02025267219543457, 0.0202259521484375, 0.0200479679107666, 0.01987993621826172, 0.019869695663452147, 0.019916479110717773, 0.0198590087890625, 0.020060928344726562, 0.020844736099243165, 0.020325439453125, 0.020484384536743165, 0.020815040588378905, 0.020240703582763673, 0.020048383712768555, 0.020106719970703124, 0.01999523162841797, 0.019959487915039063, 0.01983513641357422, 0.019748544692993163, 0.019869888305664062, 0.020027103424072264, 0.0198756160736084, 0.01991468811035156, 0.019755712509155275, 0.01980191993713379, 0.01981273651123047, 0.019928159713745116, 0.0199769287109375, 0.01981439971923828, 0.019974143981933593, 0.01995110321044922, 0.01984355163574219, 0.019891616821289062, 0.020498176574707032, 0.019849279403686523, 0.019995456695556642, 0.01989414405822754, 0.020402399063110352, 0.01985526466369629, 0.01974675178527832, 0.020035648345947267, 0.02002124786376953, 0.019998655319213868, 0.019843135833740234, 0.01981955146789551, 0.02006524848937988, 0.020015104293823242, 0.020245983123779298, 0.019849760055541992, 0.01994710350036621, 0.019967744827270508, 0.02000543975830078, 0.01996735954284668, 0.019847904205322266, 0.019860992431640623, 0.019906335830688477, 0.019872095108032225, 0.019827072143554687, 0.019736576080322265, 0.01998361587524414, 0.019966720581054687, 0.01983283233642578, 0.019861631393432617, 0.019925952911376953, 0.01978191947937012, 0.01995779228210449, 0.020001312255859376, 0.019867551803588866, 0.019775680541992188, 0.020377023696899414, 0.01984979248046875, 0.020045440673828126, 0.01998476791381836, 0.019994623184204103]",tokens/s,49.027369784948945,, @@ -11110,7 +11110,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 142.12 MiB is free. Process 84001 has 14.60 GiB memory in use. Of the allocated memory 14.48 GiB is allocated by PyTorch, and 1.53 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 142.12 MiB is free. Process 77698 has 14.60 GiB memory in use. Of the allocated memory 14.48 GiB is allocated by PyTorch, and 1.53 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.08352,6174.998528,0.0,5779.750912,5773.960192,s,1,7.7395419921875,7.7395419921875,0.0,7.7395419921875,7.7395419921875,7.7395419921875,7.7395419921875,[7.7395419921875],,kWh,9.994903391674369e-06,1.0953041804092718e-06,3.4397249740053537e-06,1.4529932546088994e-05,,MB,1095.151616,6491.66848,0.0,6085.935104,6038.345728,s,10,2.1448580932617185,0.2144858093261719,0.0026452678682763126,0.21521057891845702,0.21694376525878906,0.21700848846435547,0.2170602670288086,"[0.2076636505126953, 0.21692938232421874, 0.21582981872558593, 0.2149775390625, 0.21544361877441406, 0.21384474182128907, 0.2147804412841797, 0.21227523803710938, 0.21707321166992188, 0.21604045104980468]",tokens/s,1193.5521552882635,kWh,6.232499810460658e-06,6.873281304016718e-07,4.159891034765951e-06,1.107971897562828e-05,tokens/kWh,23105279.16485205,MB,1099.776,6512.64,0.0,6106.906624,6086.544896,s,10,16.34459362792969,1.6344593627929687,0.005604136420545625,1.6363401489257812,1.6394989379882812,1.6408669006347656,1.641961270751953,"[1.6247216796875, 1.636993408203125, 1.6390897216796876, 1.6391949462890625, 1.6359698486328125, 1.634955322265625, 1.627091064453125, 1.62763232421875, 1.63671044921875, 1.64223486328125]",tokens/s,38.54485552479287,kWh,4.784965462412298e-05,5.277586740818806e-06,3.1805276626234714e-05,8.493251799117652e-05,tokens/kWh,741765.3625498888,,s,630,16.341073549270607,0.02593821198296925,0.0004003684958193192,0.02587648010253906,0.02615531234741211,0.02629103021621704,0.028321831398010255,"[0.028504127502441405, 0.026898399353027343, 0.026089408874511718, 0.02574118423461914, 0.025610015869140624, 0.02552422332763672, 0.025497119903564455, 0.02549238395690918, 0.025530527114868164, 0.02555001640319824, 0.02552511978149414, 0.025526016235351563, 0.025544704437255858, 0.02552217674255371, 0.02555084800720215, 0.025491455078125, 0.025591455459594726, 0.025532768249511718, 0.025536512374877928, 0.025540607452392578, 0.02569148826599121, 0.025616992950439454, 0.02561840057373047, 0.025754751205444334, 0.0256212158203125, 0.025647424697875978, 0.025589696884155272, 0.025595327377319337, 0.025629247665405273, 0.0256856632232666, 0.025683935165405274, 0.025722848892211915, 0.025692575454711913, 0.025634815216064453, 0.025727136611938477, 0.02568284797668457, 0.025804864883422853, 0.025985919952392578, 0.026102975845336916, 0.02615113639831543, 0.026013376235961914, 0.026021024703979493, 0.025937536239624023, 0.02591961669921875, 0.025876352310180664, 0.02587392044067383, 0.025856767654418945, 0.02578761672973633, 0.025791135787963868, 0.02573311996459961, 0.025728607177734376, 0.025786783218383787, 0.025790464401245116, 0.025784320831298828, 
0.025776128768920898, 0.025827392578125, 0.025849023818969728, 0.025817951202392577, 0.025812671661376952, 0.025790847778320313, 0.025710432052612305, 0.025745407104492187, 0.025769279479980468, 0.028258304595947265, 0.026853376388549805, 0.026139808654785157, 0.025758560180664063, 0.02566124725341797, 0.025710527420043944, 0.025781856536865235, 0.025791135787963868, 0.02577987289428711, 0.025882976531982422, 0.025825279235839844, 0.02570444869995117, 0.02587238311767578, 0.025820608139038085, 0.025772127151489257, 0.025956832885742187, 0.02571628761291504, 0.0257043514251709, 0.025907264709472657, 0.025809375762939454, 0.0258306884765625, 0.025844448089599608, 0.02579462432861328, 0.025888032913208008, 0.025910112380981447, 0.02596147155761719, 0.02587116813659668, 0.025858047485351563, 0.025937824249267577, 0.025958719253540038, 0.025874208450317383, 0.02575564765930176, 0.025968639373779297, 0.02588073539733887, 0.025882463455200195, 0.025862144470214843, 0.02609891128540039, 0.026186527252197264, 0.026232831954956053, 0.026042272567749023, 0.02619196891784668, 0.025995647430419922, 0.02600297546386719, 0.025996992111206055, 0.02592767906188965, 0.025993824005126953, 0.02593168067932129, 0.026070943832397463, 0.02613212776184082, 0.025948511123657227, 0.026040319442749024, 0.02595430374145508, 0.02598422431945801, 0.026015680313110353, 0.026076095581054688, 0.025995168685913086, 0.026041343688964845, 0.026024959564208985, 0.02612838363647461, 0.02586537551879883, 0.02601046371459961, 0.026011648178100585, 0.025948160171508788, 0.028299264907836914, 0.027129056930541993, 0.026391040802001952, 0.026112287521362305, 0.02572457695007324, 0.025762144088745116, 0.025656320571899413, 0.025699392318725586, 0.025771520614624024, 0.025801151275634766, 0.025634815216064453, 0.025831424713134765, 0.025807008743286133, 0.02568191909790039, 0.02580463981628418, 0.02586400032043457, 0.025733312606811522, 0.025832639694213868, 0.02566022491455078, 0.02572287940979004, 0.02587238311767578, 0.025968608856201173, 0.02602191925048828, 0.025835519790649415, 0.025784320831298828, 0.025738815307617187, 0.025850303649902345, 0.026075263977050782, 0.02616511917114258, 0.025911296844482422, 0.025979936599731444, 0.025959455490112304, 0.025847679138183595, 0.025878591537475584, 0.026015743255615235, 0.026089471817016603, 0.026183135986328127, 0.026545888900756837, 0.02611078453063965, 0.02634880065917969, 0.026229503631591797, 0.026226688385009765, 0.02604252815246582, 0.02597052764892578, 0.0260316162109375, 0.026122560501098634, 0.025904895782470704, 0.02593631935119629, 0.026060991287231446, 0.026068864822387697, 0.025902496337890626, 0.02609724807739258, 0.026115007400512695, 0.02601945686340332, 0.02591097640991211, 0.025891519546508788, 0.02591744041442871, 0.025957759857177735, 0.026071680068969726, 0.026137599945068358, 0.025967872619628907, 0.025921279907226563, 0.026077280044555663, 0.02840553665161133, 0.027213951110839844, 0.02630441665649414, 0.026099903106689453, 0.02587353515625, 0.02575961685180664, 0.0259102725982666, 0.025827520370483397, 0.025784128189086913, 0.02561561584472656, 0.025879295349121093, 0.025785472869873045, 0.02571558380126953, 0.02593388748168945, 0.025875680923461913, 0.025754335403442383, 0.025845760345458983, 0.025822751998901366, 0.02577574348449707, 0.025911584854125976, 0.02576646423339844, 0.02595756721496582, 0.025946975708007813, 0.02580886459350586, 0.026021600723266602, 0.025749439239501952, 0.025960800170898437, 0.02575334358215332, 0.025772287368774415, 
0.025951616287231444, 0.025858688354492187, 0.025964544296264647, 0.025861568450927734, 0.02592972755432129, 0.025958335876464844, 0.025971328735351563, 0.026123743057250976, 0.026065439224243165, 0.026187776565551758, 0.02625846481323242, 0.026104799270629885, 0.026171072006225586, 0.02627382469177246, 0.026079519271850586, 0.026087104797363283, 0.026245664596557618, 0.026100576400756834, 0.026006080627441405, 0.025921920776367187, 0.026044639587402343, 0.02590492820739746, 0.026089471817016603, 0.025990848541259767, 0.02596281623840332, 0.026066144943237304, 0.025803552627563477, 0.025836736679077148, 0.02607823944091797, 0.025966367721557616, 0.02588572883605957, 0.026189823150634766, 0.025929952621459963, 0.02612505531311035, 0.0286167049407959, 0.027272960662841798, 0.026245376586914063, 0.02609561538696289, 0.025882623672485353, 0.02592064094543457, 0.025670175552368165, 0.025641311645507814, 0.025651391983032228, 0.025636512756347655, 0.025614688873291016, 0.025890623092651367, 0.025790111541748047, 0.025695615768432618, 0.025656095504760744, 0.025692352294921873, 0.02572902488708496, 0.02593507194519043, 0.026022111892700196, 0.025846336364746095, 0.025710048675537108, 0.025600223541259765, 0.025647424697875978, 0.02583296012878418, 0.025762304306030274, 0.02592767906188965, 0.025724159240722657, 0.025682687759399414, 0.025806848526000976, 0.02580611228942871, 0.02602057647705078, 0.025757696151733397, 0.025997312545776367, 0.02575564765930176, 0.025806848526000976, 0.025958208084106444, 0.026136831283569337, 0.026128320693969725, 0.02615500831604004, 0.02612428855895996, 0.02615910339355469, 0.026038528442382813, 0.02606787109375, 0.026158048629760743, 0.02597260856628418, 0.02589695930480957, 0.025932992935180664, 0.02603091239929199, 0.026149120330810547, 0.02593142318725586, 0.02588035202026367, 0.025922143936157226, 0.02586595153808594, 0.02587388801574707, 0.025873023986816405, 0.02586614418029785, 0.026094911575317382, 0.02614143943786621, 0.026158912658691406, 0.02587660789489746, 0.025918527603149413, 0.025977792739868163, 0.0259051513671875, 0.028317695617675782, 0.02716806411743164, 0.02651215934753418, 0.026007455825805666, 0.025974239349365234, 0.02576643180847168, 0.02564240074157715, 0.0257030086517334, 0.0256777286529541, 0.025659488677978515, 0.0257475528717041, 0.02586614418029785, 0.0256777286529541, 0.025908319473266602, 0.025847999572753907, 0.025786176681518554, 0.025965503692626953, 0.025910655975341798, 0.025733823776245116, 0.025941951751708985, 0.025769567489624022, 0.02566806411743164, 0.025636863708496094, 0.025860095977783205, 0.026001407623291017, 0.02590105628967285, 0.026011743545532227, 0.02604003143310547, 0.025884000778198243, 0.025789024353027344, 0.02601907157897949, 0.025882783889770507, 0.025807712554931642, 0.02579654312133789, 0.025901119232177736, 0.02584351921081543, 0.025964736938476562, 0.026060800552368164, 0.026232831954956053, 0.0260928955078125, 0.026402496337890626, 0.026239967346191405, 0.025970111846923827, 0.02584783935546875, 0.025794944763183593, 0.025792863845825194, 0.02580851173400879, 0.02585737609863281, 0.02587923240661621, 0.025862144470214843, 0.02590086364746094, 0.02597702407836914, 0.025931167602539062, 0.02600956726074219, 0.02582966423034668, 0.025795072555541993, 0.025796607971191408, 0.025855552673339843, 0.02577043151855469, 0.025816543579101563, 0.025844255447387696, 0.025849119186401367, 0.025848543167114258, 0.028672000885009766, 0.027183103561401366, 0.026251264572143555, 0.02586604881286621, 
0.025708736419677733, 0.02574950408935547, 0.02556620788574219, 0.02550886344909668, 0.025683967590332032, 0.025622528076171876, 0.025734975814819337, 0.025660608291625978, 0.025647647857666017, 0.025546592712402345, 0.02551849555969238, 0.02553059196472168, 0.025544704437255858, 0.025561088562011718, 0.025613920211791992, 0.025638431549072267, 0.025668479919433593, 0.025665536880493164, 0.025634111404418944, 0.025596128463745118, 0.025610719680786133, 0.025747583389282225, 0.02568383979797363, 0.025675775527954102, 0.02558118438720703, 0.02560576057434082, 0.025622720718383788, 0.025607744216918946, 0.025689088821411132, 0.025708511352539064, 0.025778207778930664, 0.02576383972167969, 0.026013696670532226, 0.02612633514404297, 0.02600137519836426, 0.025987104415893556, 0.026062847137451172, 0.02599068832397461, 0.025960735321044922, 0.025888160705566408, 0.02579862403869629, 0.025823328018188478, 0.025795263290405275, 0.025769983291625977, 0.025759487152099608, 0.025751583099365233, 0.02582966423034668, 0.025830911636352538, 0.026020191192626954, 0.02585523223876953, 0.025770143508911134, 0.025720863342285155, 0.02580768013000488, 0.025728864669799803, 0.02577952003479004, 0.025787103652954103, 0.02580672073364258, 0.02580454444885254, 0.025872768402099608, 0.028495744705200197, 0.027072288513183593, 0.026278240203857422, 0.02599443244934082, 0.025718687057495117, 0.025629600524902343, 0.025589792251586915, 0.025525535583496094, 0.025537216186523437, 0.02551535987854004, 0.02561724853515625, 0.0256135368347168, 0.02559619140625, 0.025547071456909178, 0.025600000381469725, 0.02564233589172363, 0.0256529598236084, 0.025649728775024413, 0.025663616180419922, 0.02569651222229004, 0.025657344818115234, 0.0256646728515625, 0.025614368438720704, 0.025671552658081055, 0.02563987159729004, 0.025645055770874024, 0.025638912200927736, 0.025716703414916994, 0.02572496032714844, 0.025741119384765625, 0.025744640350341796, 0.025713600158691407, 0.025774080276489256, 0.025694208145141603, 0.02570240020751953, 0.025734495162963868, 0.02592425537109375, 0.02604035186767578, 0.026072256088256834, 0.026082080841064455, 0.025980768203735353, 0.025905311584472655, 0.025993215560913087, 0.02590105628967285, 0.025832511901855468, 0.02584419250488281, 0.025744863510131836, 0.025789440155029295, 0.025778175354003906, 0.025826719284057616, 0.025789024353027344, 0.025852928161621092, 0.02584582328796387, 0.02581190490722656, 0.025829376220703124, 0.025792512893676758, 0.025781696319580077, 0.025999807357788087, 0.025897087097167967, 0.025841312408447267, 0.025860448837280274, 0.025802751541137696, 0.025827327728271485, 0.028323520660400392, 0.027041791915893554, 0.026205184936523438, 0.025771936416625976, 0.025613983154296874, 0.025598207473754884, 0.025546335220336915, 0.025534975051879884, 0.025536672592163086, 0.025793567657470703, 0.025671968460083006, 0.02557792091369629, 0.025574880599975584, 0.0256231689453125, 0.02572902488708496, 0.025783903121948244, 0.025860511779785156, 0.025847583770751952, 0.02596441650390625, 0.025874784469604492, 0.025960447311401368, 0.025960447311401368, 0.025784320831298828, 0.025927072525024415, 0.025969247817993164, 0.02581667137145996, 0.025909664154052735, 0.0260032958984375, 0.026056863784790038, 0.025993215560913087, 0.025960447311401368, 0.025917600631713868, 0.02595737648010254, 0.025731231689453123, 0.025838272094726562, 0.025939968109130858, 0.026060895919799806, 0.026015647888183592, 0.026161151885986327, 0.026101024627685546, 0.026034271240234375, 
0.026212991714477538, 0.026013696670532226, 0.026281984329223632, 0.025974079132080077, 0.02599740791320801, 0.02590166473388672, 0.02613603210449219, 0.026298431396484374, 0.026027904510498048, 0.02613279914855957, 0.025962783813476564, 0.02590105628967285, 0.02592576026916504, 0.026054399490356445, 0.025946239471435546, 0.026108928680419922, 0.026035200119018553, 0.02595756721496582, 0.026013792037963866, 0.025859807968139647, 0.0261146240234375, 0.025868255615234374, 0.028612672805786134, 0.027265024185180665, 0.02637151908874512, 0.026057279586791993, 0.025996959686279297, 0.025694784164428712, 0.02592131233215332, 0.025842784881591797, 0.02577440071105957, 0.025743967056274415, 0.025887807846069335, 0.02587676811218262, 0.0259072322845459, 0.025735807418823243, 0.02571820831298828, 0.025987232208251953, 0.025881216049194335, 0.0259434871673584, 0.025950559616088865, 0.025857152938842772, 0.026002304077148437, 0.026009599685668947, 0.02599920082092285, 0.02601308822631836, 0.025946495056152343, 0.026027616500854493, 0.0260053768157959, 0.02588083267211914, 0.02580956840515137, 0.0258602237701416, 0.025824256896972656, 0.025835712432861327, 0.026052480697631837, 0.02590755271911621, 0.025847871780395507, 0.0259465274810791, 0.02638047981262207, 0.026145792007446288, 0.02649078369140625, 0.026422176361083984, 0.026275840759277344, 0.026229856491088867, 0.026230752944946287, 0.02623583984375, 0.025980287551879883, 0.02598361587524414, 0.026034175872802736, 0.02609971237182617, 0.025837568283081053, 0.026021888732910156, 0.026193920135498046, 0.02589004707336426, 0.02596735954284668, 0.025970239639282227, 0.02589753532409668, 0.026238847732543945, 0.02634774398803711, 0.026011423110961916, 0.02611814308166504, 0.026003456115722655, 0.025941984176635742, 0.025982208251953125, 0.02591209602355957]",tokens/s,38.5531585853562,, @@ -11387,7 +11387,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 400.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 312.12 MiB is free. Process 48663 has 14.43 GiB memory in use. Of the allocated memory 14.31 GiB is allocated by PyTorch, and 12.96 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 400.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 312.12 MiB is free. Process 42552 has 14.43 GiB memory in use. Of the allocated memory 14.31 GiB is allocated by PyTorch, and 12.96 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -11496,7 +11496,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 14.12 MiB is free. Process 59018 has 14.72 GiB memory in use. Of the allocated memory 14.61 GiB is allocated by PyTorch, and 4.70 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 14.12 MiB is free. Process 52581 has 14.72 GiB memory in use. Of the allocated memory 14.61 GiB is allocated by PyTorch, and 4.70 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -11539,7 +11539,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 560.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 29086 has 14.73 GiB memory in use. 
Of the allocated memory 14.62 GiB is allocated by PyTorch, and 1.67 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 560.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 23523 has 14.73 GiB memory in use. Of the allocated memory 14.62 GiB is allocated by PyTorch, and 1.67 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -11652,7 +11652,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 12.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 90099 has 14.73 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 14.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 12.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 83659 has 14.73 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 14.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,814.235648,2127.495168,0.0,1732.247552,1728.316416,s,1,7.494126953125,7.494126953125,0.0,7.494126953125,7.494126953125,7.494126953125,7.494126953125,[7.494126953125],,kWh,9.68461827082289e-06,1.0611642310189389e-06,4.302503442007999e-06,1.5048285943849828e-05,,MB,1107.70176,2328.82176,0.0,1923.088384,1891.2,s,11,0.4685140800476074,0.042592189095237036,0.003926996687913523,0.04137561416625977,0.04316851043701172,0.04874103927612305,0.05319906234741212,"[0.05431356811523438, 0.03801161575317383, 0.040621376037597655, 0.04316851043701172, 0.04131808090209961, 0.04131238555908203, 0.04130863952636719, 0.04286995315551758, 0.041873439788818356, 0.042340896606445313, 0.04137561416625977]",tokens/s,6010.491722498193,kWh,1.7661555776105405e-06,1.940468376263582e-07,1.16911405443373e-06,3.1293164696706284e-06,tokens/kWh,81807002.41767012,MB,1117.642752,2328.82176,0.0,1923.088384,1895.80032,s,11,10.273246093749998,0.9339314630681816,0.004601488466255629,0.93250439453125,0.939381103515625,0.9407118835449219,0.9417765075683594,"[0.9289093627929688, 0.93250439453125, 0.939381103515625, 0.9280496826171875, 0.9309269409179688, 0.9381678466796874, 0.9389963989257812, 0.9330540771484375, 0.9307843627929687, 0.9304292602539063, 0.9420426635742187]",tokens/s,67.45677010712369,kWh,2.6926773923902357e-05,2.9703234128768476e-06,1.599403098920306e-05,4.589112832598225e-05,tokens/kWh,1372814.3608169074,,s,693,10.270677665710455,0.014820602692222868,0.00037578824727001534,0.014731040000915527,0.015048230171203613,0.015191097450256347,0.016418733444213875,"[0.015498559951782226, 0.015063743591308593, 0.014892288208007812, 0.01481833553314209, 0.01478012752532959, 0.014735360145568848, 0.014867487907409669, 0.014663968086242676, 0.014787263870239258, 0.014648703575134278, 0.014684639930725097, 0.014583135604858398, 0.01581753635406494, 0.015015935897827149, 0.014782464027404785, 0.01469644832611084, 0.01491763210296631, 0.014620672225952149, 0.014581952095031738, 0.014794495582580567, 0.014727231979370117, 0.014692352294921876, 0.014645376205444337, 0.014677248001098632, 0.014613375663757325, 0.014599295616149902, 0.014619263648986816, 0.014577664375305176, 0.014755328178405762, 0.014710432052612304, 0.014722111701965333, 0.014655263900756835, 0.01466534423828125, 0.014629247665405274, 0.014617664337158203, 0.01479526424407959, 0.01461683177947998, 0.014622912406921387, 0.014985216140747071, 0.014940159797668457, 0.014796799659729003, 0.014663680076599121, 0.014673439979553223, 0.01463548755645752, 0.014587840080261231, 0.014778431892395019, 0.014650752067565918, 0.014575231552124023, 0.01461350440979004, 0.014645248413085938, 0.014620672225952149, 
0.01459404754638672, 0.014607839584350586, 0.01471951961517334, 0.014577504158020019, 0.01457577610015869, 0.014611935615539551, 0.014742048263549804, 0.01476198387145996, 0.014876319885253906, 0.014836064338684082, 0.014735360145568848, 0.014689599990844727, 0.014716352462768555, 0.014578240394592285, 0.014592000007629394, 0.014694399833679199, 0.014569472312927247, 0.014691328048706055, 0.014739520072937011, 0.01469644832611084, 0.01472003173828125, 0.014605600357055664, 0.01851798439025879, 0.014973600387573243, 0.014905344009399414, 0.014911487579345703, 0.01552128028869629, 0.014874464035034179, 0.014854016304016114, 0.014725919723510741, 0.014745183944702148, 0.014657952308654786, 0.014601728439331055, 0.014766592025756836, 0.014609472274780273, 0.014563679695129395, 0.014537311553955079, 0.014645248413085938, 0.014650495529174804, 0.014691200256347656, 0.014681568145751953, 0.014647839546203613, 0.014704416275024414, 0.014622943878173828, 0.014581567764282226, 0.014588095664978028, 0.014667967796325683, 0.014759743690490722, 0.01464748764038086, 0.014624575614929199, 0.014622079849243163, 0.014627455711364746, 0.014720128059387208, 0.014791551589965821, 0.014905344009399414, 0.014962688446044922, 0.014825247764587402, 0.014799072265625, 0.014699935913085938, 0.01470524787902832, 0.014561280250549317, 0.014733311653137206, 0.014657535552978516, 0.01470464038848877, 0.01463100814819336, 0.014720864295959472, 0.014832799911499023, 0.014930848121643067, 0.014774463653564452, 0.014648960113525391, 0.01515443229675293, 0.015434687614440917, 0.01466163158416748, 0.014608384132385254, 0.014684160232543946, 0.014941887855529785, 0.01491811180114746, 0.014723072052001953, 0.014741344451904297, 0.014659487724304199, 0.014606592178344726, 0.014649344444274901, 0.014655488014221191, 0.014751744270324708, 0.01489305591583252, 0.014804991722106933, 0.01467801570892334, 0.014683520317077637, 0.014592639923095703, 0.01461017608642578, 0.014699775695800782, 0.014664287567138673, 0.01495081615447998, 0.01474732780456543, 0.014873920440673828, 0.014982144355773925, 0.01477222442626953, 0.015073280334472656, 0.014868415832519532, 0.015034432411193848, 0.014731264114379883, 0.01475984001159668, 0.014876607894897462, 0.014890368461608888, 0.014884703636169434, 0.014758848190307617, 0.014720416069030762, 0.014807135581970214, 0.014834176063537598, 0.014928895950317383, 0.01477734375, 0.015099040031433105, 0.014801440238952636, 0.014649632453918458, 0.01473459243774414, 0.014773247718811035, 0.014685055732727052, 0.014818207740783691, 0.014790656089782715, 0.014638976097106933, 0.01463923168182373, 0.014618623733520507, 0.014606240272521973, 0.014977375984191894, 0.017809152603149414, 0.01989017677307129, 0.015123647689819336, 0.014771007537841796, 0.014790656089782715, 0.014600192070007324, 0.014711039543151855, 0.014597887992858887, 0.014559231758117675, 0.014872575759887695, 0.014771360397338867, 0.014676992416381835, 0.014686047554016113, 0.014897151947021485, 0.015046336174011231, 0.014784000396728515, 0.014740480422973632, 0.014780223846435547, 0.01460223960876465, 0.015560704231262207, 0.01657823944091797, 0.014774592399597167, 0.014700511932373046, 0.014555040359497071, 0.014641280174255372, 0.014798272132873535, 0.014623104095458984, 0.014600383758544921, 0.014612480163574219, 0.0145830078125, 0.014584128379821776, 0.014715007781982422, 0.014640959739685058, 0.014842399597167969, 0.01477129554748535, 0.01472383975982666, 0.014670207977294921, 0.01468393611907959, 0.014548992156982422, 
0.014618111610412597, 0.01464575958251953, 0.014619903564453125, 0.014664447784423829, 0.014673919677734374, 0.014796640396118164, 0.01468227195739746, 0.014929920196533204, 0.014632960319519044, 0.014648991584777832, 0.01456982421875, 0.014573568344116212, 0.014663680076599121, 0.014632896423339843, 0.014594112396240234, 0.014582880020141601, 0.014637984275817872, 0.014655488014221191, 0.01465664005279541, 0.015078399658203125, 0.014714431762695313, 0.014611935615539551, 0.014667648315429688, 0.01458790397644043, 0.014695327758789062, 0.014727456092834473, 0.014665504455566407, 0.014702591896057129, 0.014730879783630371, 0.014680447578430176, 0.014589952468872071, 0.014643487930297851, 0.014649056434631347, 0.014727168083190918, 0.01469644832611084, 0.014682111740112304, 0.014706687927246094, 0.014624544143676758, 0.014831616401672363, 0.014730208396911621, 0.014741503715515136, 0.014724767684936524, 0.014689888000488281, 0.014727295875549317, 0.014957183837890626, 0.01470809555053711, 0.014700672149658204, 0.01476863956451416, 0.01479916763305664, 0.01523475170135498, 0.014870176315307617, 0.014926176071166992, 0.014736800193786622, 0.014673983573913574, 0.014704895973205566, 0.014723360061645508, 0.014720128059387208, 0.015069664001464844, 0.014917759895324708, 0.015108384132385255, 0.014791999816894531, 0.014798912048339843, 0.014836352348327636, 0.014687487602233887, 0.014852864265441895, 0.01480294418334961, 0.014663680076599121, 0.014628864288330079, 0.014628288269042968, 0.014692416191101074, 0.014713376045227052, 0.01487664031982422, 0.014839936256408691, 0.014665599822998046, 0.014694399833679199, 0.014665727615356445, 0.014691840171813965, 0.014782976150512696, 0.01481935977935791, 0.014659487724304199, 0.014710335731506348, 0.014707200050354004, 0.01459404754638672, 0.01465334415435791, 0.01511843204498291, 0.01482652759552002, 0.014750656127929687, 0.01485580825805664, 0.014780832290649413, 0.014679231643676759, 0.0147542724609375, 0.014711135864257813, 0.014652576446533203, 0.014715423583984375, 0.014698816299438476, 0.014684160232543946, 0.014696576118469239, 0.014636672019958497, 0.014678272247314453, 0.014796192169189454, 0.015120991706848145, 0.01546656036376953, 0.015187359809875489, 0.0149302396774292, 0.015048447608947755, 0.01488700771331787, 0.014885343551635743, 0.014739456176757813, 0.014681568145751953, 0.01460860824584961, 0.014584128379821776, 0.014897151947021485, 0.014731072425842285, 0.01477552032470703, 0.014667872428894044, 0.0147010555267334, 0.01490777587890625, 0.014693440437316895, 0.014705120086669922, 0.014797280311584473, 0.014809087753295898, 0.014917183876037597, 0.015276288032531738, 0.01499289608001709, 0.014787263870239258, 0.015539551734924316, 0.01778755187988281, 0.015519743919372558, 0.014776320457458495, 0.014895071983337402, 0.014782496452331543, 0.014692352294921876, 0.014671872138977051, 0.014749695777893066, 0.01472697639465332, 0.014683903694152832, 0.014746047973632813, 0.01519820785522461, 0.01469983959197998, 0.014809632301330566, 0.014779840469360352, 0.01479139232635498, 0.014702591896057129, 0.014673919677734374, 0.014656671524047852, 0.01471564769744873, 0.01472707176208496, 0.014672063827514648, 0.015119744300842284, 0.015082112312316895, 0.014788607597351074, 0.014712575912475587, 0.014852352142333984, 0.014710559844970703, 0.014702176094055176, 0.015079104423522949, 0.015038463592529297, 0.014687359809875488, 0.014681920051574707, 0.014794015884399414, 0.01468489646911621, 0.014658687591552734, 0.014814080238342285, 
0.014856191635131836, 0.014937984466552734, 0.014757375717163086, 0.014722751617431641, 0.015066047668457032, 0.014750847816467285, 0.01461952018737793, 0.01459404754638672, 0.014735360145568848, 0.014730527877807617, 0.014848608016967774, 0.015125696182250976, 0.014913951873779297, 0.014783007621765137, 0.01498259162902832, 0.014770751953125, 0.015138815879821778, 0.015023551940917968, 0.016580863952636717, 0.016404863357543945, 0.015110079765319823, 0.014892095565795899, 0.01483193588256836, 0.015051103591918946, 0.015292703628540039, 0.014703840255737304, 0.014749567985534668, 0.014644000053405761, 0.014759455680847168, 0.014762592315673829, 0.014968832015991211, 0.015050751686096191, 0.01470620822906494, 0.014612544059753417, 0.014690367698669434, 0.014712608337402344, 0.014612192153930665, 0.014715744018554687, 0.014743552207946778, 0.014728287696838378, 0.014719903945922852, 0.01491977596282959, 0.01506704044342041, 0.014935839653015136, 0.015369536399841309, 0.014918496131896972, 0.014718463897705078, 0.015339776039123535, 0.014767999649047852, 0.014821696281433105, 0.015124608039855957, 0.015025152206420898, 0.01480191993713379, 0.01478649616241455, 0.014687616348266602, 0.014731040000915527, 0.014723999977111817, 0.014757696151733399, 0.01494035243988037, 0.014726719856262206, 0.014757951736450196, 0.014858176231384277, 0.014704768180847167, 0.014727487564086914, 0.014962656021118164, 0.01484553623199463, 0.014786975860595703, 0.01475699234008789, 0.014899488449096679, 0.014897279739379883, 0.014789088249206543, 0.014858240127563477, 0.014880767822265625, 0.014774271965026856, 0.014749695777893066, 0.014723072052001953, 0.014702079772949218, 0.014688480377197265, 0.014767904281616211, 0.014966336250305176, 0.014889920234680176, 0.015042112350463868, 0.01531481647491455, 0.014983327865600585, 0.01514742374420166, 0.014841856002807617, 0.014904895782470702, 0.014888863563537597, 0.015196703910827636, 0.014999551773071289, 0.01470464038848877, 0.014673727989196778, 0.01463929557800293, 0.014675840377807618, 0.014787903785705566, 0.014777152061462403, 0.014993535995483398, 0.014759807586669922, 0.014551039695739745, 0.01469587230682373, 0.014703167915344238, 0.01485209560394287, 0.015103039741516113, 0.014960672378540039, 0.015010592460632324, 0.0147640323638916, 0.014640607833862306, 0.014639776229858398, 0.014710783958435059, 0.014773856163024903, 0.014629280090332031, 0.014735360145568848, 0.014667776107788086, 0.014784511566162109, 0.01493507194519043, 0.01462166404724121, 0.014671872138977051, 0.014796799659729003, 0.014661408424377442, 0.014702560424804688, 0.014624128341674805, 0.0148090238571167, 0.014683072090148925, 0.014669280052185058, 0.014708959579467773, 0.014761311531066895, 0.014689472198486328, 0.014790111541748047, 0.014873120307922363, 0.014700544357299805, 0.0147640323638916, 0.01468825626373291, 0.014669568061828613, 0.014659839630126953, 0.014712832450866698, 0.014643199920654297, 0.014722623825073243, 0.014659551620483399, 0.014628735542297364, 0.014715167999267578, 0.014686528205871583, 0.014632543563842774, 0.014666144371032714, 0.014654687881469727, 0.014635807991027832, 0.014630911827087402, 0.014721023559570312, 0.015084575653076172, 0.014773216247558594, 0.01469155216217041, 0.01477507209777832, 0.01466982364654541, 0.014735360145568848, 0.014698495864868164, 0.01527990436553955, 0.014732576370239258, 0.014726079940795899, 0.014654687881469727, 0.014756159782409668, 0.01479043197631836, 0.014762687683105468, 0.014680064201354981, 0.0148602876663208, 
0.014804991722106933, 0.014725119590759277, 0.014796480178833008, 0.014729087829589843, 0.014690752029418946, 0.014825471878051758, 0.014854080200195312, 0.014673536300659179, 0.01470736026763916, 0.014780032157897948, 0.014764287948608398, 0.014671775817871094, 0.014731264114379883, 0.014790656089782715, 0.01472111988067627, 0.014710304260253906, 0.014829952239990235, 0.014716320037841797, 0.014742112159729004, 0.014753791809082031, 0.014892831802368164, 0.015126272201538086, 0.015077823638916016, 0.015091744422912598, 0.01495257568359375, 0.014948224067687988, 0.014907391548156738, 0.014872544288635254, 0.014878848075866698, 0.014788736343383789, 0.014872447967529297, 0.014742752075195313, 0.014779168128967285, 0.01460204792022705, 0.01468230438232422, 0.014610431671142577, 0.014695712089538574, 0.014678688049316406, 0.014669343948364258, 0.014643551826477051, 0.014600383758544921, 0.014827520370483398, 0.01459609603881836, 0.014698016166687011, 0.014633440017700195, 0.014583807945251465, 0.01464089584350586, 0.014704895973205566, 0.015078623771667481, 0.01504736042022705, 0.014864480018615723, 0.014800127983093261, 0.01464361572265625, 0.014682463645935059, 0.014636863708496093, 0.014825823783874512, 0.014767264366149902, 0.014744256019592285, 0.014841407775878905, 0.014785120010375977, 0.014831456184387207, 0.014732640266418457, 0.01470736026763916, 0.014714879989624024, 0.014730239868164062, 0.014707615852355957, 0.014675264358520507, 0.014635807991027832, 0.014733311653137206, 0.014774271965026856, 0.014855487823486328, 0.014703295707702636, 0.014728192329406739, 0.014655872344970703, 0.014623231887817383, 0.014604415893554688, 0.01467369556427002, 0.014705056190490723, 0.014767935752868652, 0.01473737621307373, 0.014743583679199219, 0.015027392387390136, 0.014891712188720704, 0.015210304260253906, 0.01563811206817627, 0.014886943817138672, 0.014805439949035645, 0.014782719612121582, 0.014687295913696288, 0.014713983535766601, 0.015320480346679688, 0.015183775901794434, 0.015125184059143067, 0.01496678352355957, 0.01495587158203125, 0.014879391670227051, 0.014899200439453125, 0.015009792327880859, 0.017104352951049805, 0.015395232200622559, 0.014792192459106445, 0.014868800163269042, 0.014770496368408203, 0.015015935897827149, 0.015674816131591798, 0.016312351226806642, 0.014946368217468262, 0.015055328369140626, 0.014984736442565918, 0.015106528282165528, 0.014891008377075195, 0.014927871704101562, 0.01556275177001953, 0.015281279563903809, 0.015017120361328126, 0.014738176345825195, 0.01463593578338623, 0.014647359848022462, 0.015914912223815917, 0.014717023849487304, 0.01547379207611084, 0.014780608177185059, 0.01478112030029297, 0.014693408012390137, 0.014664640426635742, 0.014716959953308106, 0.014716927528381347, 0.014733311653137206, 0.014640159606933593, 0.01467619228363037, 0.014680031776428223, 0.014713184356689453, 0.014993856430053711, 0.014791680335998534, 0.01481328010559082, 0.0146844482421875, 0.014867391586303711, 0.014716480255126953, 0.014591808319091798, 0.01465775966644287, 0.014661120414733888, 0.014701151847839355, 0.01466982364654541, 0.014695615768432617, 0.014711935997009278, 0.014687007904052734, 0.015067968368530273, 0.014706080436706542, 0.01479695987701416, 0.014688799858093262, 0.014696703910827637, 0.01465727996826172, 0.014972800254821778]",tokens/s,67.47363928221026,, @@ -11698,7 +11698,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 192.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 190.12 MiB is free. Process 119093 has 14.55 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 1.55 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 192.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 190.12 MiB is free. Process 112845 has 14.55 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 1.55 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -11741,7 +11741,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 172.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 80.12 MiB is free. Process 75088 has 14.66 GiB memory in use. Of the allocated memory 14.55 GiB is allocated by PyTorch, and 791.00 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 172.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 80.12 MiB is free. Process 68311 has 14.66 GiB memory in use. Of the allocated memory 14.55 GiB is allocated by PyTorch, and 791.00 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -11784,7 +11784,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 268.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 180.12 MiB is free. Process 93145 has 14.56 GiB memory in use. Of the allocated memory 14.45 GiB is allocated by PyTorch, and 1.06 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 268.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 180.12 MiB is free. Process 86691 has 14.56 GiB memory in use. Of the allocated memory 14.45 GiB is allocated by PyTorch, and 1.06 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -11860,7 +11860,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 500.12 MiB is free. Process 45695 has 14.25 GiB memory in use. 
Of the allocated memory 14.13 GiB is allocated by PyTorch, and 8.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 500.12 MiB is free. Process 39609 has 14.25 GiB memory in use. Of the allocated memory 14.13 GiB is allocated by PyTorch, and 8.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -11903,7 +11903,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 400.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 316.12 MiB is free. Process 53677 has 14.43 GiB memory in use. Of the allocated memory 14.30 GiB is allocated by PyTorch, and 13.04 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 400.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 316.12 MiB is free. Process 47375 has 14.43 GiB memory in use. Of the allocated memory 14.30 GiB is allocated by PyTorch, and 13.04 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,811.966464,1129.250816,0.0,734.0032,709.336064,s,1,7.4012548828125,7.4012548828125,0.0,7.4012548828125,7.4012548828125,7.4012548828125,7.4012548828125,[7.4012548828125],,kWh,5.018227141677774e-06,5.464397925686503e-07,1.0041674700089764e-06,6.568834404255401e-06,,MB,1117.376512,1276.051456,0.0,870.31808,809.960448,s,19,0.2845489587783813,0.01497626098833586,0.000721354646664666,0.01482044792175293,0.014997305488586425,0.01537096910476684,0.017479082736968996,"[0.018006111145019533, 0.014694175720214843, 0.014658592224121093, 0.014844799995422363, 0.014743328094482422, 0.014737407684326171, 0.01477455997467041, 0.014862208366394044, 0.01482044792175293, 0.014907584190368653, 0.01497708797454834, 0.014729791641235352, 0.015078175544738769, 0.014839903831481933, 0.014872063636779785, 0.014734848022460938, 0.014711680412292481, 0.01483948802947998, 0.014716704368591309]",tokens/s,17093.719199964766,kWh,5.896171635690091e-07,6.502351689316556e-08,3.9073656399191194e-07,1.0453772444540865e-06,tokens/kWh,244887672.2332783,MB,1127.317504,1309.605888,0.0,905.969664,809.963008,s,19,9.883693817138672,0.5201944114283512,0.007087630848022105,0.52034814453125,0.5285178955078126,0.5290438171386719,0.5304746325683594,"[0.5003568115234375, 0.5235473022460938, 0.5206721801757812, 0.518958984375, 0.5252100219726562, 0.5308323364257812, 0.527358642578125, 0.5236458740234375, 0.5288450927734375, 0.5238328247070313, 0.5284360961914063, 0.5189959106445312, 0.5198388061523438, 0.5151084594726563, 0.52034814453125, 0.5106662902832031, 0.5172429809570313, 0.5129843139648438, 0.516812744140625]",tokens/s,121.10856752000551,kWh,1.4459999236431325e-05,1.594679690931747e-06,7.439825256008322e-06,2.3494504183371386e-05,tokens/kWh,2681478.166480707,,s,1197,9.87563265323639,0.008250319676889214,0.0002649751535341334,0.008231072425842286,0.008551398658752441,0.008621772575378418,0.008864796257019043,"[0.007893343925476075, 0.007861472129821778, 0.007827648162841797, 0.0078115520477294925, 0.007755648136138916, 0.007863840103149415, 0.007809216022491455, 0.007858719825744629, 0.007843423843383789, 0.007976895809173584, 0.007849631786346435, 0.007805344104766846, 0.007824992179870606, 0.007926464080810547, 0.007839263916015625, 0.007803487777709961, 0.007813119888305664, 0.007804927825927735, 0.0079584641456604, 0.007798208236694336, 0.007791232109069824, 0.00779859209060669, 0.007766208171844482, 0.0077844481468200685, 0.00777785587310791, 0.007813600063323975, 0.007848991870880126, 0.007807936191558838, 0.007755360126495361, 0.00779699182510376, 0.007785920143127441, 0.0077749438285827635, 0.0077844481468200685, 0.007790048122406006, 0.0078032960891723634, 
0.007833727836608887, 0.007806015968322754, 0.007857279777526855, 0.007810880184173584, 0.007821568012237549, 0.007825151920318603, 0.007868319988250732, 0.007917664051055907, 0.007870463848114014, 0.007855519771575928, 0.007848896026611329, 0.007898784160614014, 0.008034175872802734, 0.008136832237243652, 0.008252575874328613, 0.008133472442626953, 0.00814675235748291, 0.008493247985839844, 0.008154208183288575, 0.008208992004394532, 0.008403264045715331, 0.008435711860656739, 0.008345600128173827, 0.008331263542175293, 0.00829644775390625, 0.008318976402282715, 0.008258975982666016, 0.008069727897644043, 0.007967040061950684, 0.008228927612304688, 0.008381855964660645, 0.008514080047607422, 0.008398847579956055, 0.008409088134765624, 0.008484383583068848, 0.008565216064453126, 0.008537343978881836, 0.008313599586486817, 0.008328895568847656, 0.008457728385925293, 0.008162112236022949, 0.008193792343139649, 0.008510784149169922, 0.008345919609069824, 0.008247072219848633, 0.008231391906738281, 0.00831116771697998, 0.008439647674560546, 0.008395071983337402, 0.008331104278564453, 0.008305952072143555, 0.00849782371520996, 0.008475744247436523, 0.008368800163269043, 0.008397120475769043, 0.008384511947631837, 0.008136704444885253, 0.008265855789184571, 0.008190848350524903, 0.008176639556884765, 0.008426912307739258, 0.008436320304870605, 0.008615936279296875, 0.008404255867004395, 0.008436127662658692, 0.008482687950134278, 0.008280447959899902, 0.008165184020996094, 0.008310720443725586, 0.008370112419128418, 0.008553888320922852, 0.008442848205566407, 0.008437760353088379, 0.008281696319580078, 0.008396448135375977, 0.0084933443069458, 0.008259679794311523, 0.008149408340454101, 0.008048895835876466, 0.008050399780273438, 0.007923295974731445, 0.008352160453796387, 0.007975264072418213, 0.0080480318069458, 0.008184063911437988, 0.008363519668579102, 0.0083189115524292, 0.008245823860168457, 0.008046048164367676, 0.007946591854095459, 0.007879039764404298, 0.007845503807067871, 0.007882847785949706, 0.007893856048583985, 0.007968128204345703, 0.008161439895629882, 0.008217056274414063, 0.008203519821166991, 0.008290495872497559, 0.008127391815185547, 0.00802569580078125, 0.008048992156982421, 0.008435423851013184, 0.008085344314575195, 0.008283712387084962, 0.00825551986694336, 0.008249216079711914, 0.008295104026794434, 0.008364031791687012, 0.008305983543395996, 0.008198431968688965, 0.008036767959594727, 0.007948287963867188, 0.008017056465148926, 0.007942048072814942, 0.007928768157958984, 0.00800767993927002, 0.00848796844482422, 0.008551360130310059, 0.00869379234313965, 0.008538111686706543, 0.008468416213989258, 0.0085665283203125, 0.008523872375488281, 0.008462559700012208, 0.008455360412597656, 0.008371007919311524, 0.008291680335998535, 0.0082008638381958, 0.008112159729003907, 0.008010880470275879, 0.008014687538146973, 0.008093695640563964, 0.008437760353088379, 0.008528127670288085, 0.008328960418701173, 0.008343551635742187, 0.008365504264831543, 0.008278592109680176, 0.008408512115478516, 0.008274496078491211, 0.008278016090393067, 0.008184831619262695, 0.008113375663757324, 0.008284031867980957, 0.008564640045166015, 0.008697855949401855, 0.00841318416595459, 0.008391776084899903, 0.0085097599029541, 0.008290528297424316, 0.00822105598449707, 0.008324831962585448, 0.008116064071655273, 0.00847436809539795, 0.008275967597961426, 0.00808291244506836, 0.008046751976013183, 0.008059264183044434, 0.008085503578186035, 0.007964672088623047, 0.007993343830108643, 
0.008001055717468262, 0.008038368225097656, 0.008157695770263672, 0.008253439903259278, 0.008138751983642578, 0.008196096420288086, 0.008193087577819825, 0.00810694408416748, 0.008030207633972167, 0.007966720104217529, 0.00798467206954956, 0.007971360206604003, 0.008165311813354493, 0.008267200469970704, 0.008436287879943848, 0.008556223869323731, 0.008446271896362306, 0.00830835247039795, 0.008552831649780273, 0.008658304214477539, 0.00851417636871338, 0.008351743698120117, 0.008291647911071778, 0.008297183990478515, 0.00825712013244629, 0.008360320091247559, 0.008189824104309082, 0.008130496025085449, 0.008180224418640136, 0.008197823524475097, 0.008269824028015137, 0.008309920310974122, 0.008174528121948242, 0.008161375999450684, 0.008107487678527831, 0.008431743621826172, 0.008214752197265624, 0.00818825626373291, 0.00803395175933838, 0.00808140754699707, 0.008119808197021485, 0.008130559921264649, 0.00838912010192871, 0.008478719711303711, 0.008484864234924316, 0.008355839729309082, 0.008289695739746094, 0.008280672073364258, 0.008333312034606934, 0.008519680023193359, 0.00829212760925293, 0.00808777618408203, 0.00809779167175293, 0.008170783996582031, 0.008368864059448242, 0.00807372760772705, 0.008327168464660644, 0.008491007804870606, 0.008148991584777832, 0.008132287979125977, 0.008038080215454102, 0.00799014377593994, 0.008025856018066406, 0.008228863716125488, 0.008476256370544433, 0.00838419246673584, 0.008294976234436036, 0.008198304176330566, 0.008206175804138184, 0.00829974365234375, 0.008547264099121094, 0.008525504112243652, 0.008403264045715331, 0.008376607894897461, 0.008361408233642578, 0.008531455993652343, 0.008562975883483887, 0.00856713581085205, 0.00840886402130127, 0.008364671707153321, 0.008375200271606445, 0.008358495712280273, 0.008333567619323731, 0.008259455680847167, 0.008468544006347656, 0.008330656051635741, 0.008266400337219239, 0.008178943634033204, 0.00832579231262207, 0.008216671943664551, 0.00854412841796875, 0.008306112289428711, 0.008290719985961915, 0.008437952041625977, 0.00832316780090332, 0.008240480422973633, 0.00841590404510498, 0.008347935676574707, 0.008347040176391601, 0.00865328025817871, 0.008306528091430664, 0.008287232398986816, 0.008217599868774414, 0.008336735725402831, 0.008370528221130371, 0.008427712440490722, 0.008331392288208008, 0.008293888092041016, 0.00841983985900879, 0.008529919624328614, 0.008481056213378906, 0.008519200325012207, 0.008364224433898925, 0.008295680046081543, 0.008223487854003906, 0.008154111862182617, 0.00812339210510254, 0.008159135818481445, 0.008236448287963867, 0.008600128173828126, 0.008493056297302246, 0.008465503692626953, 0.008273088455200195, 0.008203007698059083, 0.008550463676452636, 0.008588191986083984, 0.010002752304077149, 0.00867296028137207, 0.008240287780761718, 0.008157471656799316, 0.008440320014953612, 0.008505279541015626, 0.008552576065063476, 0.008591168403625489, 0.008485055923461915, 0.008316448211669922, 0.008450528144836426, 0.008347647666931152, 0.00828006362915039, 0.008299903869628906, 0.008428319931030274, 0.008572256088256836, 0.008470623970031739, 0.00844979190826416, 0.008491616249084472, 0.008536383628845215, 0.008495903968811036, 0.00866198444366455, 0.00853219223022461, 0.008431391716003418, 0.008480832099914551, 0.008421119689941406, 0.008550592422485351, 0.008783552169799804, 0.008365440368652343, 0.008335871696472168, 0.00822316837310791, 0.008155136108398438, 0.008136704444885253, 0.008103167533874512, 0.008094464302062988, 0.008187904357910156, 0.008322079658508301, 
0.00836297607421875, 0.008487135887145995, 0.008410016059875488, 0.008301440238952637, 0.008353792190551757, 0.008359456062316894, 0.008275456428527832, 0.00817046356201172, 0.008167136192321777, 0.008337696075439454, 0.008477760314941405, 0.008567744255065918, 0.008835071563720704, 0.008524160385131836, 0.00831436824798584, 0.008167360305786133, 0.008154560089111328, 0.008239647865295411, 0.007981023788452148, 0.00805337619781494, 0.008075424194335937, 0.007984352111816407, 0.0080250244140625, 0.008019807815551758, 0.008181759834289551, 0.008272928237915039, 0.008317503929138183, 0.008543935775756836, 0.008490719795227052, 0.008501279830932617, 0.008325471878051757, 0.008342144012451171, 0.008417280197143554, 0.008378368377685547, 0.008416864395141601, 0.008936863899230957, 0.008348671913146973, 0.008507391929626466, 0.008493056297302246, 0.008456000328063965, 0.008521984100341797, 0.008660927772521972, 0.008773856163024903, 0.008524736404418946, 0.008498016357421876, 0.008807583808898926, 0.008489824295043946, 0.008476287841796874, 0.008496671676635743, 0.008440192222595216, 0.008616543769836426, 0.008331135749816894, 0.008194047927856446, 0.008173567771911621, 0.00827625560760498, 0.008230112075805663, 0.008634880065917968, 0.008355392456054687, 0.00826358413696289, 0.008108575820922852, 0.008154751777648926, 0.008067456245422364, 0.008034272193908691, 0.00803228759765625, 0.008056832313537597, 0.008468480110168456, 0.008479935646057129, 0.008289216041564941, 0.00835142421722412, 0.008232640266418458, 0.008229120254516602, 0.008286687850952148, 0.008476448059082032, 0.008456192016601562, 0.008443903923034669, 0.00850483226776123, 0.008499520301818848, 0.008613311767578125, 0.008526080131530762, 0.008360447883605958, 0.008357888221740722, 0.008232928276062012, 0.0083853759765625, 0.008403008460998534, 0.008356863975524903, 0.008250368118286134, 0.008292351722717285, 0.008486559867858887, 0.008591135978698731, 0.00847110366821289, 0.008386560440063476, 0.008230912208557128, 0.008144895553588867, 0.008285696029663087, 0.008130144119262696, 0.00812662410736084, 0.008098591804504394, 0.008072383880615234, 0.00818665599822998, 0.008347455978393554, 0.008509056091308594, 0.008597824096679687, 0.008417535781860351, 0.008417280197143554, 0.008645888328552245, 0.008354559898376464, 0.008396127700805663, 0.00839033603668213, 0.008549344062805176, 0.008593119621276856, 0.00849948787689209, 0.00844934368133545, 0.008407744407653808, 0.008566783905029298, 0.008455231666564941, 0.008331775665283203, 0.008417599678039551, 0.008460384368896484, 0.008302623748779298, 0.008165375709533691, 0.008308159828186034, 0.008319135665893554, 0.008290975570678712, 0.008187647819519042, 0.008077312469482421, 0.008122400283813476, 0.00819315242767334, 0.00810649585723877, 0.008220352172851563, 0.00801587200164795, 0.008063167572021484, 0.0080164155960083, 0.008062911987304687, 0.007990816116333007, 0.007985631942749023, 0.008064000129699708, 0.00842240047454834, 0.008576319694519044, 0.00855519962310791, 0.008228992462158203, 0.008252511978149414, 0.008172287940979003, 0.008324735641479493, 0.008303071975708008, 0.008181599617004395, 0.008525376319885253, 0.008513728141784667, 0.0084235200881958, 0.008639840126037598, 0.008487903594970703, 0.008484864234924316, 0.008605695724487305, 0.008460288047790527, 0.008486847877502441, 0.008362048149108887, 0.008273920059204102, 0.00830463981628418, 0.008208383560180664, 0.008187456130981445, 0.008864128112792968, 0.00864998435974121, 0.00861676788330078, 0.009211999893188477, 
0.008236960411071777, 0.008302592277526855, 0.008456192016601562, 0.008475872039794921, 0.00861673641204834, 0.00847606372833252, 0.008650367736816406, 0.008737983703613282, 0.008650015830993653, 0.008571264266967773, 0.008437888145446777, 0.008568256378173829, 0.008350111961364747, 0.008177824020385742, 0.008120223999023438, 0.00813417625427246, 0.008196576118469238, 0.008304991722106934, 0.008441216468811035, 0.008232895851135253, 0.00831942367553711, 0.00852121639251709, 0.008184384346008301, 0.008292287826538085, 0.008134655952453614, 0.008112128257751466, 0.008132448196411133, 0.008179295539855956, 0.008274496078491211, 0.008304320335388183, 0.008236800193786621, 0.008333632469177246, 0.008198399543762207, 0.008365823745727538, 0.00832921600341797, 0.008423616409301758, 0.008585056304931641, 0.008304032325744629, 0.008250176429748535, 0.008218175888061523, 0.008159680366516113, 0.008155136108398438, 0.008089599609375, 0.008546303749084473, 0.008078528404235839, 0.008096927642822266, 0.008194496154785156, 0.007995007991790771, 0.008026495933532714, 0.008001440048217774, 0.008232864379882812, 0.00834598445892334, 0.008450048446655273, 0.008591391563415528, 0.008654815673828125, 0.008589311599731446, 0.008564736366271973, 0.008566271781921387, 0.008468159675598145, 0.008812383651733398, 0.008422368049621582, 0.008356127738952637, 0.008275679588317871, 0.008263360023498536, 0.008216896057128907, 0.00828758430480957, 0.00841983985900879, 0.008331423759460449, 0.00828611183166504, 0.008232416152954102, 0.008276063919067383, 0.00833795166015625, 0.008241151809692383, 0.008034303665161132, 0.007924799919128417, 0.007915552139282227, 0.008047136306762695, 0.008191519737243653, 0.008090047836303712, 0.008049056053161622, 0.008033760070800781, 0.008239359855651855, 0.008652928352355958, 0.008412991523742676, 0.008470175743103027, 0.008372063636779786, 0.008317184448242188, 0.00832380771636963, 0.008177536010742188, 0.008122367858886719, 0.008002943992614746, 0.007998079776763915, 0.007978367805480956, 0.008300224304199218, 0.008360256195068359, 0.0081844482421875, 0.00820019245147705, 0.009277440071105958, 0.008279871940612794, 0.00808569622039795, 0.008163328170776368, 0.008333312034606934, 0.008546431541442872, 0.008609663963317872, 0.008595680236816406, 0.008615488052368165, 0.008722559928894043, 0.008186847686767577, 0.008368127822875977, 0.00819200038909912, 0.008048192024230957, 0.008068896293640137, 0.008684255599975587, 0.008110015869140626, 0.008671232223510742, 0.008306112289428711, 0.009097151756286621, 0.008798848152160645, 0.008244288444519044, 0.00821446418762207, 0.008372575759887695, 0.008743583679199219, 0.008548352241516113, 0.008304032325744629, 0.008849856376647949, 0.008466591835021972, 0.008455391883850097, 0.008193920135498047, 0.008168352127075196, 0.008011743545532227, 0.008216608047485351, 0.00807913589477539, 0.008062687873840332, 0.008203071594238281, 0.008142144203186034, 0.008132991790771484, 0.008484864234924316, 0.00833743953704834, 0.008362175941467286, 0.008283967971801759, 0.008486880302429198, 0.008244447708129883, 0.008339327812194824, 0.008360447883605958, 0.008481184005737304, 0.008665087699890137, 0.008570879936218261, 0.00840294361114502, 0.008417280197143554, 0.008826848030090331, 0.0086212158203125, 0.008470720291137695, 0.008880831718444825, 0.011236672401428222, 0.00931059169769287, 0.008454463958740234, 0.008196096420288086, 0.00810153579711914, 0.008171456336975098, 0.008034111976623536, 0.007997727870941163, 0.008010047912597656, 
0.00807919979095459, 0.00840719985961914, 0.008341504096984862, 0.008046208381652832, 0.007915616035461426, 0.007931968212127686, 0.00785814380645752, 0.00784819221496582, 0.00840499210357666, 0.008392543792724609, 0.008231072425842286, 0.008404352188110352, 0.00820844841003418, 0.008217151641845704, 0.008535264015197754, 0.008381216049194336, 0.008258624076843262, 0.008239680290222168, 0.008194432258605958, 0.008219903945922851, 0.008194815635681153, 0.008148991584777832, 0.007991551876068115, 0.007947264194488525, 0.007922080039978028, 0.007887167930603027, 0.007928224086761474, 0.00800921630859375, 0.008417407989501953, 0.008666624069213867, 0.008603424072265624, 0.008616095542907714, 0.00857363224029541, 0.008460160255432128, 0.008380415916442872, 0.00828166389465332, 0.008448543548583985, 0.008463616371154786, 0.008170080184936524, 0.008065088272094727, 0.007980576038360596, 0.007960608005523681, 0.007971263885498047, 0.008167424201965333, 0.00831488037109375, 0.008226816177368163, 0.008178688049316407, 0.008449024200439453, 0.008407039642333984, 0.008070624351501465, 0.008073760032653808, 0.008359935760498047, 0.008291711807250976, 0.00817625617980957, 0.008065376281738281, 0.008107680320739746, 0.00825260829925537, 0.008116479873657226, 0.008038975715637208, 0.008048992156982421, 0.007956128120422363, 0.008148063659667968, 0.008534879684448242, 0.008304703712463378, 0.008199551582336425, 0.008147295951843261, 0.008075263977050781, 0.008067487716674804, 0.008081119537353515, 0.008183296203613282, 0.00852560043334961, 0.008423359870910644, 0.008521920204162597, 0.008351552009582519, 0.008386655807495117, 0.008320704460144043, 0.008333632469177246, 0.008140800476074218, 0.008011775970458984, 0.008249343872070313, 0.007929855823516846, 0.007964672088623047, 0.007979008197784423, 0.00820633602142334, 0.008935423851013183, 0.00862003231048584, 0.008289823532104492, 0.008305215835571289, 0.0084683837890625, 0.00825654411315918, 0.008097920417785644, 0.008061216354370118, 0.008040384292602539, 0.008004511833190918, 0.008230624198913574, 0.008390175819396972, 0.008348128318786622, 0.008115424156188964, 0.008161312103271484, 0.008471296310424805, 0.008167327880859375, 0.008062080383300781, 0.008191136360168457, 0.008146719932556152, 0.008073247909545899, 0.00833676815032959, 0.008116352081298827, 0.007987711906433105, 0.007921664237976075, 0.007921664237976075, 0.008173824310302735, 0.008551456451416016, 0.008560832023620606, 0.008646719932556153, 0.00862399959564209, 0.008491616249084472, 0.00848908805847168, 0.008255359649658203, 0.00830787181854248, 0.008190784454345703, 0.008233023643493651, 0.008042624473571777, 0.00812399959564209, 0.008027711868286132, 0.007998144149780274, 0.007976960182189942, 0.008002911567687988, 0.00812713623046875, 0.008550016403198243, 0.00856112003326416, 0.00820576000213623, 0.008137184143066406, 0.008199423789978027, 0.008426112174987793, 0.00812880039215088, 0.008356063842773438, 0.008142848014831543, 0.008099136352539062, 0.00818239974975586, 0.00812992000579834, 0.008083744049072266, 0.008203840255737305, 0.008388928413391113, 0.008256031990051269, 0.008214112281799316, 0.008565152168273926, 0.008335200309753418, 0.008150527954101563, 0.008063936233520507, 0.008044256210327148, 0.007970464229583741, 0.007915872097015381, 0.00790502405166626, 0.008302783966064453, 0.008859231948852539, 0.008552927970886231, 0.008525216102600097, 0.008458847999572755, 0.008341407775878907, 0.008337504386901855, 0.00820019245147705, 0.008064800262451172, 0.00802019214630127, 
0.008025728225708007, 0.008016256332397461, 0.007983104228973388, 0.007948287963867188, 0.007999743938446045, 0.007988096237182617, 0.008270272254943848, 0.008196543693542481, 0.007996992111206054, 0.00811411190032959, 0.008144895553588867, 0.00809177589416504, 0.008313216209411622, 0.008396544456481934, 0.008177472114562987, 0.00800812816619873, 0.007985151767730713, 0.007974080085754395, 0.008003487586975097, 0.007978208065032959, 0.007951648235321044, 0.008077247619628906, 0.00804911994934082, 0.00791756820678711, 0.007942143917083741, 0.008171520233154296, 0.008089632034301757, 0.007984384059906006, 0.008079551696777343, 0.008093695640563964, 0.008130208015441894, 0.008411328315734863, 0.008708800315856934, 0.008506367683410645, 0.008454143524169922, 0.00865884780883789, 0.008448127746582032, 0.00828822422027588, 0.00819200038909912, 0.008239104270935058, 0.008218624114990235, 0.008334495544433594, 0.008170175552368163, 0.008070816040039062, 0.008052255630493164, 0.008004608154296875, 0.007892960071563721, 0.00820633602142334, 0.008373503684997558, 0.008169568061828614, 0.008089599609375, 0.008493696212768555, 0.00831491184234619, 0.008093695640563964, 0.008099840164184571, 0.008072511672973633, 0.008121024131774903, 0.008099519729614257, 0.007995359897613526, 0.007963263988494873, 0.008063839912414552, 0.008090208053588867, 0.008007807731628418, 0.008288000106811523, 0.008364447593688965, 0.008232704162597656, 0.008270079612731934, 0.008054783821105957, 0.007943967819213867, 0.00787663984298706, 0.007902751922607422, 0.007883423805236817, 0.007914783954620362, 0.008302335739135741, 0.008564864158630371, 0.008680224418640137, 0.008549951553344726, 0.008594143867492675, 0.008519455909729004, 0.008575072288513183, 0.008613759994506837, 0.00842959976196289, 0.008267775535583496, 0.008349920272827148, 0.008248448371887207, 0.008104063987731934, 0.008280608177185058, 0.008378656387329101, 0.00829206371307373, 0.008140800476074218, 0.008396800041198731, 0.008746623992919922, 0.008175775527954102, 0.008378591537475586, 0.008978431701660156, 0.008488960266113281, 0.007985151767730713, 0.008249279975891113, 0.008201727867126465, 0.008133184432983399, 0.008124416351318359, 0.008108256340026855, 0.008061984062194823, 0.008010496139526367, 0.007982624053955079, 0.007936480045318604, 0.007929408073425293, 0.007891200065612792, 0.007823552131652832, 0.008043999671936036, 0.008509984016418457, 0.008179455757141114, 0.008226207733154297, 0.008029024124145507, 0.008134655952453614, 0.008047840118408204, 0.008176416397094726, 0.008130559921264649, 0.008011199951171876, 0.007973440170288086, 0.007966495990753174, 0.008197759628295899, 0.008629983901977539, 0.008559488296508789, 0.008609919548034669, 0.008513216018676759, 0.008305952072143555, 0.008380319595336914, 0.008340352058410645, 0.008226271629333495, 0.008026080131530762, 0.0080513916015625, 0.007966720104217529, 0.007896992206573486, 0.008033632278442383, 0.008243295669555664, 0.008204959869384765, 0.008069120407104492, 0.008114175796508789, 0.008463744163513184, 0.008304448127746581, 0.008116448402404785, 0.007993408203125, 0.008055359840393066, 0.008008735656738281, 0.00798528003692627, 0.007956384181976318, 0.007905759811401368, 0.007999584197998047, 0.008100064277648925, 0.008003583908081055, 0.007946368217468261, 0.007911424160003662, 0.008095711708068847, 0.007929887771606445, 0.007823359966278077, 0.007791711807250977, 0.007845920085906982, 0.007817440032958985, 0.008020832061767577, 0.008016448020935058, 0.008191264152526856, 
0.008037088394165039, 0.007976960182189942, 0.00798089599609375, 0.007878431797027588, 0.007895423889160156, 0.007921664237976075, 0.008054783821105957, 0.008519680023193359, 0.008683775901794434, 0.008574720382690429, 0.00854422378540039, 0.008513567924499511, 0.008310784339904785, 0.00821350383758545, 0.008184831619262695, 0.008077280044555664, 0.00793398380279541, 0.00801091194152832, 0.008084320068359374, 0.00799948787689209, 0.007918784141540527, 0.007942751884460449, 0.008067584037780762, 0.00801529598236084, 0.009736063957214356, 0.010015135765075683, 0.008671232223510742, 0.008746272087097167, 0.008314784049987794, 0.008313023567199707, 0.00828604793548584, 0.008320863723754884, 0.00812947177886963, 0.008040224075317383, 0.008100159645080566, 0.008004960060119628, 0.008047264099121093, 0.008218015670776367, 0.00814095973968506, 0.008052767753601075, 0.007964735984802246, 0.007909887790679932, 0.007898880004882812, 0.007847936153411865, 0.00786787223815918, 0.008053088188171386, 0.008134016036987305, 0.008071904182434081, 0.008062047958374024, 0.007986239910125733, 0.008013216018676758, 0.007948639869689942, 0.00790118408203125, 0.007892288208007813, 0.007910272121429444, 0.007943999767303467, 0.008232224464416503, 0.00857795238494873, 0.0085731840133667, 0.008666303634643555, 0.008530495643615722, 0.00810086441040039, 0.008224767684936523, 0.008120256423950195, 0.008083104133605957, 0.0080797758102417, 0.008028287887573243, 0.00818284797668457, 0.008061984062194823, 0.008080256462097169, 0.008491359710693359, 0.008476799964904785, 0.00821292781829834, 0.008077312469482421, 0.008114175796508789, 0.008019935607910157, 0.008079392433166504, 0.007921664237976075, 0.007870816230773925, 0.007877823829650878, 0.007954271793365478, 0.00816316795349121, 0.0081081600189209, 0.00799180793762207, 0.007944352149963379, 0.007925439834594726, 0.007937471866607666, 0.00790822410583496, 0.008116543769836426, 0.008072319984436036, 0.008022591590881348, 0.007999167919158936, 0.00786198377609253, 0.007845536231994629, 0.007854047775268555, 0.008104415893554687, 0.008836768150329589, 0.00863702392578125, 0.00854860782623291, 0.00856454372406006, 0.008382528305053712, 0.008247424125671386, 0.008433664321899414, 0.00832431983947754, 0.008137503623962402, 0.008054847717285157, 0.008040063858032227, 0.007966432094573974, 0.007985023975372315, 0.00820911979675293, 0.007979008197784423, 0.008433759689331055, 0.008546015739440919, 0.008310815811157227, 0.008165696144104003, 0.00819388771057129, 0.008173567771911621, 0.008044544219970704, 0.00827187156677246, 0.008140800476074218, 0.007962175846099854, 0.007987552165985108, 0.008020064353942872, 0.008130559921264649, 0.007943999767303467, 0.008036607742309571, 0.007968512058258057, 0.007925280094146728, 0.00794159984588623, 0.008189151763916016, 0.008091423988342285, 0.008025631904602051, 0.007999328136444092, 0.007933631896972656, 0.008051648139953613, 0.00819974422454834, 0.008421152114868165, 0.008501343727111817, 0.0084137601852417, 0.00841318416595459, 0.008475872039794921, 0.008506048202514648, 0.008560480117797852, 0.008425727844238281, 0.008361087799072266, 0.008130496025085449, 0.008211392402648926, 0.008130304336547852, 0.00799564790725708, 0.007981056213378907, 0.007982624053955079, 0.008571136474609374, 0.008288479804992676, 0.008104127883911133, 0.008101471900939941, 0.008231167793273925, 0.008155103683471679, 0.008447999954223634, 0.008294400215148925, 0.008199839591979981, 0.008261088371276856, 0.008442239761352539, 0.008205120086669921, 
0.008139552116394043, 0.008012096405029296, 0.007983712196350098, 0.008044544219970704, 0.008105376243591308, 0.007918176174163818, 0.008075136184692383, 0.008065152168273925, 0.008039423942565918, 0.007978303909301758, 0.008029888153076172, 0.008065024375915527, 0.007975135803222656, 0.007892159938812256, 0.008155743598937988, 0.007886847972869874, 0.007906911849975586, 0.00841801643371582, 0.008842944145202637, 0.00868556785583496, 0.00871014404296875, 0.00861184024810791, 0.0085032958984375, 0.008278016090393067]",tokens/s,121.20742458031037,, @@ -11947,7 +11947,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 268.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 180.12 MiB is free. Process 78114 has 14.56 GiB memory in use. Of the allocated memory 14.45 GiB is allocated by PyTorch, and 1.06 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 268.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 180.12 MiB is free. Process 71581 has 14.56 GiB memory in use. Of the allocated memory 14.45 GiB is allocated by PyTorch, and 1.06 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): @@ -11990,7 +11990,7 @@ ChildProcessError: Traceback (most recent call last): self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) -torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 166.12 MiB is free. Process 148670 has 14.58 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 1.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 166.12 MiB is free. Process 142026 has 14.58 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 1.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,835.735552,8760.786944,0.0,8365.539328,8230.228992,s,1,7.5355458984375,7.5355458984375,0.0,7.5355458984375,7.5355458984375,7.5355458984375,7.5355458984375,[7.5355458984375],,kWh,1.1459933887461678e-05,1.2341689698212937e-06,3.4725027780041495e-06,1.616660563528712e-05,,MB,1149.796352,8951.627776,0.0,8545.8944,8499.295232,s,10,2.679052764892578,0.2679052764892578,0.010860641831617654,0.2712129821777344,0.27328599548339844,0.2738173904418945,0.2742425064086914,"[0.2356790771484375, 0.27014712524414064, 0.27174835205078124, 0.27090145874023436, 0.26872930908203124, 0.2731679077148437, 0.2715245056152344, 0.2729480285644531, 0.27434878540039065, 0.26985821533203125]",tokens/s,955.5616199678128,kWh,7.3679221284370055e-06,8.121571587166401e-07,4.902892811200002e-06,1.3082972098353649e-05,tokens/kWh,19567419.243538313,MB,1154.797568,8953.724928,0.0,8547.991552,8499.297792,s,10,18.936921508789062,1.8936921508789062,0.005111415589837481,1.89559033203125,1.8990034301757812,1.8996725524902343,1.900207850341797,"[1.8848922119140625, 1.8963909912109376, 1.894943603515625, 1.892427734375, 1.885875732421875, 1.898854736328125, 1.8976529541015625, 1.896237060546875, 1.8893048095703124, 1.9003416748046875]",tokens/s,33.26834299374386,kWh,5.593598999989695e-05,6.169981117973483e-06,3.6891668402201307e-05,9.899763952007175e-05,tokens/kWh,636378.8096909802,,s,630,18.934200822830196,0.030054287020365396,0.000419725913338335,0.029986000061035155,0.030328757858276368,0.03050526542663574,0.0327036884689331,"[0.03242416000366211, 0.030667360305786134, 0.030141727447509765, 0.02985203170776367, 0.029776224136352537, 0.02967897605895996, 0.02964748764038086, 0.02971238327026367, 0.029749248504638674, 0.02972390365600586, 0.029741855621337892, 0.029601503372192382, 0.02968310356140137, 0.029616992950439455, 0.029657087326049804, 0.02959974479675293, 0.02976518440246582, 0.029729215621948243, 0.029764671325683594, 0.02981091117858887, 0.029688543319702148, 0.029853696823120116, 0.02996428871154785, 0.02980454444885254, 0.029784063339233398, 0.02978816032409668, 0.029814016342163085, 0.029776639938354492, 0.0298024959564209, 
0.029878271102905272, 0.029820928573608397, 0.029859840393066408, 0.03020595169067383, 0.03010527992248535, 0.030054719924926757, 0.02996553611755371, 0.030034719467163087, 0.02991923141479492, 0.029914335250854494, 0.02989481544494629, 0.02987446403503418, 0.029851999282836914, 0.029911039352416992, 0.030033119201660158, 0.030029951095581056, 0.029911712646484376, 0.029937664031982423, 0.029884416580200194, 0.029866016387939454, 0.0299233283996582, 0.029976287841796876, 0.02992767906188965, 0.030025279998779297, 0.029931968688964843, 0.02997248077392578, 0.02997452735900879, 0.029962175369262694, 0.029883615493774413, 0.029870559692382812, 0.029886848449707033, 0.02982809638977051, 0.029907264709472657, 0.029999807357788087, 0.03290230560302734, 0.03099091148376465, 0.03013043212890625, 0.02987932777404785, 0.029702239990234375, 0.029717536926269533, 0.029621728897094725, 0.029688192367553712, 0.029620223999023438, 0.029624319076538085, 0.02974652862548828, 0.029745311737060548, 0.029775455474853517, 0.029754495620727538, 0.02989641571044922, 0.029706144332885744, 0.02975257682800293, 0.029741376876831056, 0.029698047637939453, 0.029844127655029296, 0.02996832084655762, 0.030048255920410157, 0.03000934410095215, 0.030011392593383788, 0.030007167816162108, 0.029967487335205076, 0.030071807861328126, 0.030019584655761718, 0.029916351318359374, 0.030004032135009767, 0.03016703987121582, 0.03041279983520508, 0.030543872833251953, 0.030498559951782227, 0.03054751968383789, 0.03041299247741699, 0.030374176025390626, 0.030316768646240236, 0.03024460792541504, 0.029997312545776367, 0.03013222312927246, 0.030242816925048828, 0.02996643257141113, 0.03014851188659668, 0.030123008728027343, 0.030043136596679686, 0.029932863235473634, 0.03013497543334961, 0.030253055572509766, 0.029961536407470703, 0.030270015716552735, 0.03019379234313965, 0.030119935989379884, 0.030228479385375977, 0.03021993637084961, 0.03014486312866211, 0.03008064079284668, 0.03000556755065918, 0.029921279907226563, 0.03004195213317871, 0.0303287353515625, 0.030200128555297853, 0.030318431854248047, 0.03271088027954101, 0.030930816650390627, 0.030321407318115234, 0.029898752212524415, 0.02976483154296875, 0.02975823974609375, 0.029944896697998047, 0.0296847038269043, 0.02999465560913086, 0.03007689666748047, 0.0297903995513916, 0.030206111907958983, 0.029803871154785156, 0.029818559646606447, 0.029751903533935548, 0.029932960510253907, 0.029704704284667968, 0.029819488525390625, 0.029714303970336912, 0.02979840087890625, 0.02978201675415039, 0.030007295608520508, 0.029859840393066408, 0.029837312698364257, 0.02976153564453125, 0.029734912872314452, 0.030007295608520508, 0.029826400756835937, 0.03021683120727539, 0.02993769645690918, 0.03018060874938965, 0.030399168014526367, 0.030398527145385743, 0.030492671966552733, 0.030305791854858398, 0.030429695129394533, 0.030385215759277343, 0.030120351791381835, 0.030062911987304687, 0.029926847457885743, 0.030183263778686523, 0.03015776062011719, 0.030044160842895507, 0.030097408294677733, 0.030070783615112305, 0.030005247116088866, 0.02993152046203613, 0.02992265510559082, 0.030134944915771483, 0.030225791931152345, 0.030245216369628906, 0.030140703201293945, 0.030291967391967774, 0.030013439178466796, 0.029976160049438476, 0.030105472564697266, 0.02993404769897461, 0.030019039154052733, 0.030037631988525392, 0.029876319885253907, 0.02994470405578613, 0.030137760162353516, 0.030101503372192383, 0.032686080932617184, 0.03074835205078125, 0.03022265625, 0.029915391921997072, 
0.029895679473876953, 0.029819648742675783, 0.029845312118530275, 0.029944255828857423, 0.029928895950317384, 0.02967977523803711, 0.029718687057495117, 0.0296342716217041, 0.03002191925048828, 0.02975948715209961, 0.02978358459472656, 0.029948383331298827, 0.029847103118896483, 0.029849151611328124, 0.03005939292907715, 0.029984256744384766, 0.029870464324951173, 0.030093439102172853, 0.029841407775878907, 0.02986400032043457, 0.030054336547851564, 0.029869504928588867, 0.029872703552246093, 0.029870080947875976, 0.02983526420593262, 0.030052352905273437, 0.030082944869995118, 0.030328960418701173, 0.03038617515563965, 0.03034217643737793, 0.030172096252441407, 0.030150047302246095, 0.03005504035949707, 0.030192703247070313, 0.03009836769104004, 0.030039552688598634, 0.030101024627685546, 0.02992438316345215, 0.029999040603637697, 0.030089216232299806, 0.030052352905273437, 0.02998240089416504, 0.02999942398071289, 0.02992505645751953, 0.029970048904418945, 0.02995065689086914, 0.0299005126953125, 0.029832576751708983, 0.030040512084960936, 0.030007360458374023, 0.03006096076965332, 0.02997452735900879, 0.029969919204711915, 0.029929983139038087, 0.030109695434570313, 0.030003200531005858, 0.029990943908691406, 0.03002774429321289, 0.02998681640625, 0.032603233337402344, 0.030684064865112305, 0.03004140853881836, 0.029840063095092774, 0.02968329620361328, 0.029661600112915038, 0.02966281509399414, 0.02962499237060547, 0.029615583419799803, 0.02959347152709961, 0.02964521598815918, 0.029741056442260744, 0.029663103103637695, 0.029699935913085937, 0.029720863342285155, 0.029724128723144533, 0.02969215965270996, 0.02972217559814453, 0.029764320373535155, 0.029749248504638674, 0.029683391571044923, 0.029750783920288085, 0.02972329521179199, 0.02967302322387695, 0.029723039627075197, 0.029726911544799804, 0.029710336685180663, 0.029766975402832033, 0.02976633644104004, 0.029838560104370117, 0.030062400817871093, 0.03022332763671875, 0.030457632064819336, 0.030341344833374022, 0.03026460838317871, 0.03008995246887207, 0.03002572822570801, 0.030078975677490235, 0.029988224029541016, 0.029864576339721678, 0.029861440658569337, 0.02985004806518555, 0.02983103942871094, 0.029830400466918944, 0.02986073684692383, 0.029867488861083983, 0.029866527557373047, 0.029865983963012696, 0.029999103546142578, 0.029935039520263673, 0.02998963165283203, 0.029988319396972656, 0.029974880218505858, 0.02994175910949707, 0.029963903427124024, 0.029892192840576173, 0.029883167266845704, 0.030040063858032227, 0.029994047164916993, 0.03010825538635254, 0.030009183883666992, 0.030086816787719725, 0.03007369613647461, 0.03281955337524414, 0.03074662399291992, 0.030135583877563477, 0.029820959091186525, 0.02976838493347168, 0.029669376373291017, 0.029577215194702147, 0.029666784286499024, 0.02970889663696289, 0.029832927703857422, 0.030029504776000977, 0.029959903717041016, 0.030077760696411132, 0.030029823303222656, 0.029988704681396486, 0.030019744873046875, 0.02986537551879883, 0.03053219223022461, 0.030080320358276368, 0.029802431106567384, 0.0299400634765625, 0.03007263946533203, 0.029846176147460938, 0.030010911941528322, 0.03023094367980957, 0.029868032455444334, 0.03013327980041504, 0.02998179244995117, 0.030080896377563476, 0.030143680572509764, 0.03019830322265625, 0.030246463775634766, 0.030637983322143555, 0.030320671081542967, 0.03040336036682129, 0.03040870475769043, 0.030295040130615233, 0.030166015625, 0.030208000183105467, 0.030064640045166017, 0.03004787254333496, 0.029991296768188475, 
0.030097408294677733, 0.030220287322998047, 0.030228479385375977, 0.030211231231689454, 0.03019206428527832, 0.03019817543029785, 0.030150400161743165, 0.030161151885986327, 0.030173248291015625, 0.03008505630493164, 0.030076927185058593, 0.030192640304565428, 0.03001241683959961, 0.03015065574645996, 0.03026915168762207, 0.03019411277770996, 0.03006857681274414, 0.029982591629028322, 0.030068864822387697, 0.030271488189697264, 0.03012777519226074, 0.03287305450439453, 0.0310064640045166, 0.030153087615966797, 0.030005247116088866, 0.029834400177001952, 0.029749536514282228, 0.029909568786621092, 0.02979430389404297, 0.029931167602539062, 0.029920927047729494, 0.029889215469360353, 0.030170591354370117, 0.029907487869262697, 0.02982649612426758, 0.029801023483276366, 0.029863199234008788, 0.029909727096557617, 0.029963327407836915, 0.029929695129394532, 0.03004899215698242, 0.029968095779418946, 0.029905055999755858, 0.029780096054077148, 0.029824447631835938, 0.02997920036315918, 0.029880319595336914, 0.03091654396057129, 0.030005216598510742, 0.02981488037109375, 0.03011686325073242, 0.030108671188354492, 0.0304202880859375, 0.03053228759765625, 0.03054128074645996, 0.030340639114379883, 0.03015920066833496, 0.030106239318847657, 0.030010976791381837, 0.02999545669555664, 0.029995008468627928, 0.030051776885986328, 0.029990976333618163, 0.03012444877624512, 0.029941408157348633, 0.030095008850097655, 0.03025315284729004, 0.03013907241821289, 0.02993561553955078, 0.030212095260620117, 0.030166816711425782, 0.029878496170043945, 0.03001651191711426, 0.030071456909179686, 0.030226783752441408, 0.030082975387573242, 0.030163040161132814, 0.030067840576171876, 0.029979103088378905, 0.030257568359375, 0.030275583267211914, 0.030216192245483397, 0.03019161605834961, 0.03012403106689453, 0.03308038330078125, 0.03097078323364258, 0.03037593650817871, 0.029882368087768556, 0.029797439575195313, 0.03003049659729004, 0.030408992767333984, 0.029917184829711913, 0.029773279190063475, 0.029830911636352538, 0.02995631980895996, 0.030059072494506837, 0.029949951171875, 0.030040063858032227, 0.029937599182128908, 0.02988425636291504, 0.02980067253112793, 0.030151775360107422, 0.02994883155822754, 0.029895967483520507, 0.029985504150390627, 0.0301527042388916, 0.029839359283447265, 0.030089216232299806, 0.030035968780517577, 0.02997657585144043, 0.029869407653808595, 0.029860511779785156, 0.029824256896972656, 0.029934335708618164, 0.03021004867553711, 0.030216192245483397, 0.03031804847717285, 0.030484672546386718, 0.030242399215698244, 0.030431999206542968, 0.030390335083007813, 0.030268928527832032, 0.030029951095581056, 0.029960512161254883, 0.030228479385375977, 0.029945951461791992, 0.029902624130249023, 0.029919359207153322, 0.029949951171875, 0.029965599060058593, 0.02992201614379883, 0.030099456787109374, 0.030189535140991212, 0.029957792282104493, 0.03001910400390625, 0.029997919082641603, 0.02994528007507324, 0.029986495971679687, 0.030022527694702147, 0.029952096939086913, 0.0299703369140625, 0.029988000869750977, 0.029958976745605468, 0.0299704647064209, 0.030044000625610353, 0.03005766487121582, 0.030186464309692383, 0.03293356704711914, 0.031051231384277345, 0.030247711181640626, 0.029863967895507812, 0.029722015380859376, 0.02969251251220703, 0.029677568435668947, 0.029628416061401368, 0.029765695571899415, 0.02968364715576172, 0.029885759353637697, 0.029809343338012696, 0.029818431854248047, 0.029802944183349608, 0.029773567199707033, 0.029773759841918945, 0.029729087829589843, 
0.02975103950500488, 0.029899072647094727, 0.02981216049194336, 0.029796512603759765, 0.02971683120727539, 0.029829120635986327, 0.02978611183166504, 0.029841312408447264, 0.029800287246704103, 0.029804319381713868, 0.029765504837036133, 0.029790815353393556, 0.029845504760742186, 0.029930559158325196, 0.030208959579467773, 0.030510751724243165, 0.03034761619567871, 0.030334304809570313, 0.030233247756958008, 0.030117151260375976, 0.03006057548522949, 0.030079679489135744, 0.029976415634155273, 0.02997395133972168, 0.029997791290283203, 0.029951007843017578, 0.029905311584472655, 0.02990342330932617, 0.029892032623291015, 0.029827648162841797, 0.029855743408203125, 0.03000044822692871, 0.0298187198638916, 0.029948415756225585, 0.02991958427429199, 0.02997039985656738, 0.029968416213989258, 0.029906944274902345, 0.02991823959350586, 0.029927839279174806, 0.029976543426513673, 0.030095840454101564, 0.029976703643798827, 0.030035295486450196, 0.030077472686767578, 0.030111295700073242, 0.03284787368774414, 0.030930944442749023, 0.030246496200561523, 0.029925119400024413, 0.029825696945190428, 0.02974048042297363, 0.029772287368774415, 0.029750879287719727, 0.02976358413696289, 0.029780288696289063, 0.029792192459106446, 0.029762943267822264, 0.029709152221679688, 0.02976972770690918, 0.02976563262939453, 0.02976972770690918, 0.02977996826171875, 0.029838399887084963, 0.029847679138183595, 0.029764608383178712, 0.02987615966796875, 0.029941631317138673, 0.03006073570251465, 0.0300664005279541, 0.030038112640380858, 0.030145856857299806, 0.03008787155151367, 0.03019161605834961, 0.030012639999389648, 0.030761760711669923, 0.030089216232299806, 0.03032035255432129, 0.030791967391967774, 0.030568384170532228, 0.030522687911987305, 0.030348255157470704, 0.030376928329467773, 0.030362432479858398, 0.030300159454345704, 0.030242816925048828, 0.0303636474609375, 0.030300159454345704, 0.029994848251342774, 0.030375648498535156, 0.03017568016052246, 0.030074527740478516, 0.030249311447143556, 0.030121984481811522, 0.030176448822021484, 0.030276416778564453, 0.03013222312927246, 0.030121376037597656, 0.03034396743774414, 0.03032806396484375, 0.03026531219482422, 0.030003583908081055, 0.030066911697387694, 0.03020185661315918, 0.030023263931274413, 0.030156383514404295, 0.03029203224182129, 0.03025177574157715, 0.0302259521484375]",tokens/s,33.273123375789275,,
@@ -12034,7 +12034,7 @@ ChildProcessError: Traceback (most recent call last):
 self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs))
 File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__
 return func(*args, **kwargs)
-torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 142.12 MiB is free. Process 96346 has 14.60 GiB memory in use. Of the allocated memory 14.48 GiB is allocated by PyTorch, and 1.53 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
+torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 142.12 MiB is free. Process 89848 has 14.60 GiB memory in use. Of the allocated memory 14.48 GiB is allocated by PyTorch, and 1.53 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last):
@@ -12185,7 +12185,7 @@ ChildProcessError: Traceback (most recent call last):
 self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs))
 File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__
 return func(*args, **kwargs)
-torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 64.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 30.12 MiB is free. Process 26601 has 14.71 GiB memory in use. Of the allocated memory 14.51 GiB is allocated by PyTorch, and 85.33 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
+torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 64.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 30.12 MiB is free. Process 20933 has 14.71 GiB memory in use. Of the allocated memory 14.51 GiB is allocated by PyTorch, and 85.33 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last):
@@ -12327,7 +12327,7 @@ ChildProcessError: Traceback (most recent call last):
 self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs))
 File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__
 return func(*args, **kwargs)
-torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 224.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 170.12 MiB is free. Process 116216 has 14.57 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 1.46 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
+torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 224.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 170.12 MiB is free. Process 109722 has 14.57 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 1.46 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last):
@@ -12436,7 +12436,7 @@ ChildProcessError: Traceback (most recent call last):
 self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs))
 File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__
 return func(*args, **kwargs)
-torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.50 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.30 GiB is free. Process 87038 has 13.44 GiB memory in use. Of the allocated memory 13.33 GiB is allocated by PyTorch, and 1.86 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
+torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.50 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.30 GiB is free. Process 80720 has 13.44 GiB memory in use. Of the allocated memory 13.33 GiB is allocated by PyTorch, and 1.86 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last):