diff --git "a/perf-df-bnb-1xT4.csv" "b/perf-df-bnb-1xT4.csv" --- "a/perf-df-bnb-1xT4.csv" +++ "b/perf-df-bnb-1xT4.csv" @@ -95,7 +95,7 @@ torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. G ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1033.510912,1613.692928,0.0,1218.445312,1206.173696,s,1,9.1831015625,9.1831015625,0.0,9.1831015625,9.1831015625,9.1831015625,9.1831015625,[9.1831015625],,kWh,6.382324057082845e-05,7.032595791535425e-06,2.3776685687981747e-05,9.463252205034562e-05,,MB,1275.94496,1911.488512,0.0,1503.657984,1463.228416,s,10,1.9248706817626953,0.19248706817626954,0.0011163075534855215,0.19229170989990235,0.19386943054199218,0.19389991760253908,0.19392430725097656,"[0.19239820861816406, 0.19070640563964844, 0.1913957061767578, 0.1932269744873047, 0.19386265563964844, 0.19218521118164061, 0.1920773468017578, 0.1912587127685547, 0.19382905578613283, 0.19393040466308595]",tokens/s,1329.9594742934557,kWh,5.888236950166705e-06,6.493722790158906e-07,3.927919809000313e-06,1.0465529038182906e-05,tokens/kWh,24461257.43533825,MB,1293.959168,1911.488512,0.0,1503.657984,1463.230976,s,10,19.02480383300781,1.9024803833007808,0.010420759895332518,1.9060280151367186,1.91608125,1.916356787109375,1.9165772167968749,"[1.91602001953125, 1.907142822265625, 1.90618115234375, 1.8814990234375, 1.91663232421875, 1.894983154296875, 1.9058748779296875, 1.8914891357421875, 1.8981241455078126, 1.906857177734375]",tokens/s,33.114664704555715,kWh,5.504454917149943e-05,6.071256374725795e-06,3.197691447039852e-05,9.309272001662375e-05,tokens/kWh,676744.6475809276,,s,630,19.021390398025527,0.030192683171469073,0.0006366162060178063,0.030160079956054686,0.030726429557800295,0.03112152633666992,0.03256156150817872,"[0.03147190475463867, 0.030670848846435547, 0.03013222312927246, 0.03253247833251953, 0.031373311996459964, 0.030482080459594725, 0.030052703857421877, 0.03110403251647949, 0.03020899200439453, 0.030287872314453124, 0.030420799255371094, 0.030612960815429687, 0.03062652778625488, 0.030502912521362304, 0.030594335556030274, 0.030865280151367188, 0.030816703796386718, 0.0306911678314209, 0.03205366516113281, 0.03079311943054199, 0.030898944854736328, 0.0305860481262207, 0.030468927383422852, 0.030503999710083007, 0.03061199951171875, 0.03057411193847656, 0.030427520751953124, 0.030842527389526368, 0.03051532745361328, 0.03050569534301758, 0.030340576171875, 0.030360095977783202, 0.03041279983520508, 0.030324735641479493, 0.030393632888793945, 0.030544416427612305, 0.030306495666503907, 0.03021824073791504, 0.029959552764892577, 0.02966387176513672, 0.030558048248291017, 0.030363487243652343, 0.030124256134033203, 0.030185567855834962, 0.03015872001647949, 0.030052480697631837, 0.03001046371459961, 0.02985580825805664, 0.029909343719482423, 
0.03005900764465332, 0.03001747131347656, 0.02985580825805664, 0.02969183921813965, 0.02971564865112305, 0.029700479507446288, 0.03053366470336914, 0.029576992034912108, 0.029780288696289063, 0.03010345649719238, 0.030109535217285155, 0.03006224060058594, 0.03016783905029297, 0.03027168083190918, 0.03059030342102051, 0.029733535766601562, 0.029560287475585936, 0.02962486457824707, 0.02972585678100586, 0.029810592651367186, 0.029748416900634764, 0.030097152709960936, 0.03027078437805176, 0.03016160011291504, 0.029750463485717773, 0.03001545524597168, 0.029565759658813476, 0.029846656799316407, 0.030018367767333985, 0.030181024551391603, 0.030147008895874024, 0.030040063858032227, 0.029724672317504884, 0.029503488540649415, 0.02959974479675293, 0.029951391220092775, 0.030121919631958007, 0.029962656021118163, 0.030168544769287108, 0.030272287368774416, 0.030215648651123046, 0.030045824050903322, 0.030067615509033203, 0.03006185531616211, 0.030034656524658202, 0.0300214729309082, 0.030553375244140625, 0.03001638412475586, 0.030294015884399415, 0.0302608642578125, 0.03039244842529297, 0.030893951416015624, 0.03100454330444336, 0.03044937515258789, 0.03053443145751953, 0.030562271118164064, 0.030666784286499025, 0.031053823471069338, 0.030410463333129883, 0.030341279983520507, 0.030695552825927733, 0.03063155174255371, 0.030656896591186523, 0.030756864547729492, 0.03167027282714844, 0.030574464797973634, 0.03072012710571289, 0.03052463912963867, 0.030728992462158204, 0.031059455871582032, 0.030797536849975587, 0.030519872665405273, 0.0305828800201416, 0.030630016326904298, 0.030220287322998047, 0.030007295608520508, 0.030038015365600586, 0.0328135986328125, 0.03022233581542969, 0.02997983932495117, 0.030430015563964845, 0.03312844848632813, 0.03140950393676758, 0.030319263458251953, 0.030099456787109374, 0.029825023651123047, 0.02958950424194336, 0.029716480255126954, 0.03003523254394531, 0.02948579216003418, 0.02978553581237793, 0.029833408355712892, 0.029782400131225586, 0.029634559631347656, 0.029586559295654298, 0.02984659194946289, 0.03013612747192383, 0.03012531280517578, 0.030110176086425782, 0.03011612892150879, 0.03003392028808594, 0.029754623413085938, 0.02961859130859375, 0.031641120910644534, 0.03035219192504883, 0.029744384765625, 0.0300053768157959, 0.029936384201049805, 0.02981670379638672, 0.029741056442260744, 0.029849311828613282, 0.030029727935791017, 0.029796415328979493, 0.029763391494750976, 0.029919647216796876, 0.030354816436767577, 0.030196319580078124, 0.02982310485839844, 0.02981260871887207, 0.029968095779418946, 0.03043951988220215, 0.029894975662231444, 0.02999622344970703, 0.029813568115234376, 0.030148672103881835, 0.030187456130981446, 0.030225887298583984, 0.03003878402709961, 0.03015452766418457, 0.03017932891845703, 0.030459552764892577, 0.030379648208618163, 0.030444255828857424, 0.030461759567260743, 0.030400608062744142, 0.03146275138854981, 0.032117694854736326, 0.030817888259887696, 0.03131619262695313, 0.03060918426513672, 0.031135839462280275, 0.03052947235107422, 0.030322751998901366, 0.030492671966552733, 0.030754175186157227, 0.03046272087097168, 0.033539104461669925, 0.030597984313964845, 0.03172761535644531, 0.03074662399291992, 0.031956703186035156, 0.031353120803833005, 0.030352928161621093, 0.0298951358795166, 0.02976153564453125, 0.031053823471069338, 0.029845279693603517, 0.02982464027404785, 0.029753952026367186, 0.029446144104003907, 0.029144512176513673, 0.029117151260375975, 0.029109407424926757, 0.02908844757080078, 0.029038591384887694, 
0.029066911697387697, 0.029027999877929686, 0.02897737693786621, 0.02913942337036133, 0.029352960586547853, 0.029248287200927734, 0.03015497589111328, 0.030650367736816408, 0.030121984481811522, 0.02994611167907715, 0.02981452751159668, 0.029454336166381836, 0.029177087783813477, 0.029346559524536135, 0.029274112701416017, 0.029747200012207032, 0.029337600708007814, 0.02939673614501953, 0.029191936492919922, 0.02941798400878906, 0.029132352828979493, 0.029137344360351564, 0.029148160934448244, 0.029645471572875975, 0.030146848678588866, 0.03027078437805176, 0.0303721923828125, 0.03037161636352539, 0.029995647430419922, 0.030144512176513674, 0.029380607604980468, 0.02953990364074707, 0.02925312042236328, 0.029225536346435547, 0.02908812713623047, 0.029171072006225585, 0.02915977668762207, 0.029172000885009767, 0.03607721710205078, 0.031161344528198243, 0.030402559280395508, 0.030726144790649414, 0.030373888015747072, 0.03018547248840332, 0.031059839248657226, 0.0296646728515625, 0.030405344009399413, 0.030150432586669922, 0.030270944595336913, 0.03034601593017578, 0.03042505645751953, 0.03062918472290039, 0.03074892807006836, 0.030724544525146485, 0.030691551208496093, 0.030666528701782228, 0.030697471618652345, 0.030633983612060548, 0.030451711654663087, 0.030605024337768554, 0.030593311309814453, 0.03044175910949707, 0.030468864440917967, 0.03056662368774414, 0.030532352447509764, 0.03060940742492676, 0.03096985626220703, 0.03058483123779297, 0.03120742416381836, 0.030673952102661134, 0.030624736785888673, 0.030513151168823242, 0.0304167366027832, 0.0302675838470459, 0.029951423645019532, 0.030043872833251953, 0.0298536319732666, 0.029813631057739258, 0.029740991592407225, 0.030238143920898436, 0.029778560638427733, 0.029693952560424806, 0.030902271270751954, 0.030255104064941408, 0.030251007080078125, 0.03014633560180664, 0.030308223724365233, 0.029878591537475584, 0.029650976181030273, 0.029572479248046873, 0.02969254493713379, 0.02955580711364746, 0.030058496475219725, 0.029608991622924803, 0.030372831344604494, 0.031525152206420895, 0.030296607971191405, 0.029773920059204102, 0.030176799774169923, 0.02967190361022949, 0.029900800704956054, 0.030562559127807618, 0.02984934425354004, 0.029457887649536132, 0.029769535064697265, 0.029928159713745118, 0.030232288360595702, 0.029323551177978517, 0.029793664932250975, 0.02940787124633789, 0.029423519134521483, 0.029452384948730467, 0.030261247634887696, 0.0305930233001709, 0.030457855224609375, 0.03030966377258301, 0.03032143974304199, 0.029996992111206055, 0.0295731201171875, 0.029466623306274413, 0.029237152099609375, 0.0294421443939209, 0.02977791976928711, 0.029549760818481444, 0.029331584930419922, 0.029870784759521485, 0.030295391082763672, 0.029911712646484376, 0.03063596725463867, 0.03024492835998535, 0.03029769515991211, 0.03010806465148926, 0.030234624862670898, 0.03023244857788086, 0.03024028778076172, 0.03069615936279297, 0.030556032180786133, 0.03025833511352539, 0.030364511489868164, 0.03032268714904785, 0.030289920806884765, 0.0300579833984375, 0.03014713668823242, 0.030193599700927734, 0.030340896606445313, 0.030306528091430664, 0.030437376022338865, 0.030220287322998047, 0.030422336578369142, 0.030309055328369142, 0.030322656631469727, 0.030199840545654298, 0.03033420753479004, 0.03025814437866211, 0.030395296096801756, 0.030511615753173828, 0.030287456512451173, 0.02955753517150879, 0.030259199142456054, 0.03011337661743164, 0.030341535568237304, 0.029748319625854492, 0.03011062431335449, 0.02978201675415039, 
0.030707231521606447, 0.02985603141784668, 0.030085823059082032, 0.029706239700317383, 0.029693952560424806, 0.02991923141479492, 0.02981180763244629, 0.029794912338256836, 0.029935039520263673, 0.030290815353393556, 0.029920448303222658, 0.029885248184204103, 0.030122112274169922, 0.029937088012695314, 0.02961862373352051, 0.02953625679016113, 0.029621984481811522, 0.029270303726196288, 0.029302783966064453, 0.02951372718811035, 0.029501279830932616, 0.029347999572753906, 0.030097183227539064, 0.02998089599609375, 0.030412607192993164, 0.03051247978210449, 0.030390911102294922, 0.030271455764770507, 0.030892288208007813, 0.030330495834350588, 0.030062976837158202, 0.030293088912963867, 0.030241695404052735, 0.03014588737487793, 0.030069408416748048, 0.030640127182006836, 0.030138368606567382, 0.02999091148376465, 0.030154367446899415, 0.030277055740356447, 0.029973440170288086, 0.02996544075012207, 0.03018227195739746, 0.03034316825866699, 0.030504543304443358, 0.030414848327636718, 0.030081600189208985, 0.029887712478637696, 0.029780607223510742, 0.029892608642578124, 0.03032678413391113, 0.030405792236328125, 0.03063075256347656, 0.03067193603515625, 0.0313306884765625, 0.03149676895141602, 0.030713855743408205, 0.03196723175048828, 0.030928415298461916, 0.030880224227905272, 0.03334944152832031, 0.030935232162475585, 0.030488576889038086, 0.031073888778686522, 0.030202272415161133, 0.03042508888244629, 0.030476287841796876, 0.030324575424194335, 0.03048464012145996, 0.031204479217529297, 0.030380287170410157, 0.030325376510620117, 0.030095392227172852, 0.030054527282714842, 0.03214102554321289, 0.030268831253051756, 0.029962976455688475, 0.02958470344543457, 0.02954924774169922, 0.029471872329711914, 0.029723007202148436, 0.02965318489074707, 0.029503807067871094, 0.029632511138916014, 0.02959974479675293, 0.0297042236328125, 0.029775840759277344, 0.029822975158691405, 0.02982707214355469, 0.02958745574951172, 0.02966067123413086, 0.029731327056884766, 0.029639904022216796, 0.03023072052001953, 0.02969455909729004, 0.029650943756103516, 0.029388799667358398, 0.029288448333740235, 0.029300159454345703, 0.02973548889160156, 0.030031871795654298, 0.029841407775878907, 0.030092832565307617, 0.030171104431152344, 0.03017900848388672, 0.030012224197387697, 0.03137273597717285, 0.03243244934082031, 0.029967744827270507, 0.02961417579650879, 0.02957391929626465, 0.029663232803344725, 0.02960588836669922, 0.029711456298828126, 0.03009222412109375, 0.029931488037109374, 0.029850976943969727, 0.030073503494262695, 0.030000768661499023, 0.03001350402832031, 0.02994550323486328, 0.029940383911132813, 0.029755392074584962, 0.029646848678588866, 0.03025263977050781, 0.030159263610839843, 0.030766304016113282, 0.030427520751953124, 0.03034169578552246, 0.030438655853271483, 0.03043609619140625, 0.030310144424438478, 0.030457151412963866, 0.03025948715209961, 0.03032534408569336, 0.030351295471191406, 0.030457984924316405, 0.030357183456420897, 0.030318912506103517, 0.030263296127319338, 0.030308351516723633, 0.03027939224243164, 0.030592607498168944, 0.030286495208740234, 0.03029609680175781, 0.03030191993713379, 0.030442848205566406, 0.03040287971496582, 0.030372480392456054, 0.03034854316711426, 0.030376096725463868, 0.0326907844543457, 0.030641216278076172, 0.029911231994628907, 0.029751903533935548, 0.0295118408203125, 0.029531776428222658, 0.029945663452148438, 0.029612607955932617, 0.029659135818481445, 0.030253055572509766, 0.02998681640625, 0.029995008468627928, 0.030125280380249024, 
0.030415647506713866, 0.02988595199584961, 0.03000275230407715, 0.029937728881835938, 0.030157247543334962, 0.029712831497192383, 0.029604032516479493, 0.02938230323791504, 0.02953353691101074, 0.02948080062866211, 0.03003443145751953, 0.029743263244628906, 0.029941856384277345, 0.03022870445251465, 0.030332927703857423, 0.030121984481811522, 0.0301496639251709, 0.029737951278686524, 0.029608991622924803, 0.029509695053100585, 0.029567903518676757, 0.02953625679016113, 0.029878047943115233, 0.030072256088256834, 0.030046367645263673, 0.03093286323547363, 0.030395584106445314, 0.030153663635253906, 0.03016089630126953, 0.030114015579223632, 0.029929248809814454, 0.029468704223632812, 0.029554176330566406, 0.029436447143554687, 0.029460416793823243, 0.029993120193481444, 0.030185312271118165, 0.030063711166381835, 0.030452543258666993, 0.030347360610961913, 0.030359424591064454, 0.030214271545410155, 0.030494527816772463, 0.03057254409790039, 0.03004640007019043, 0.030040063858032227, 0.030523359298706056, 0.030780736923217773, 0.030468671798706055, 0.030453439712524413, 0.030441951751708985, 0.030451711654663087, 0.030410751342773438, 0.03057459259033203, 0.030395967483520508, 0.030413248062133788, 0.030379167556762697, 0.030392768859863282, 0.030478559494018554, 0.030447391510009764, 0.030369407653808595, 0.030289983749389647, 0.03035580825805664, 0.030615936279296874, 0.030416576385498047, 0.03034876823425293, 0.031877983093261716, 0.032573440551757815, 0.030557439804077147, 0.03061222457885742, 0.031016960144042968, 0.030611455917358397, 0.030390207290649413, 0.03079583930969238, 0.030473312377929686, 0.030247840881347656, 0.030003200531005858, 0.029865983963012696, 0.029671424865722655, 0.029715904235839842, 0.02972060775756836, 0.029571647644042968, 0.02958460807800293, 0.029520639419555662, 0.029822399139404297, 0.02959008026123047, 0.029546432495117188, 0.029445375442504883]",tokens/s,33.120607212046714,,, -4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): +4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -121,17 +121,17 @@ ChildProcessError: Traceback (most recent call last): self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: DeciLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,824.107008,554.631168,0.0,159.383552,142.313472,s,1,8.16002978515625,8.16002978515625,0.0,8.16002978515625,8.16002978515625,8.16002978515625,8.16002978515625,[8.16002978515625],,kWh,2.1358915637529205e-05,2.3453210161104453e-06,8.320006655987422e-06,3.2024243309627075e-05,,MB,1142.02624,628.031488,0.0,220.20096,185.324544,s,18,0.20789641571044926,0.011549800872802735,0.00010180969120244798,0.011525103569030762,0.011686764812469483,0.01177346272468567,0.011777770700454711,"[0.011513312339782715, 0.011650015830993652, 0.011535327911376953, 0.011778847694396972, 0.01148038387298584, 0.011480416297912598, 0.011419615745544433, 0.011519935607910156, 0.01162441635131836, 0.011513759613037109, 0.011450624465942383, 0.011772512435913086, 0.011460927963256836, 0.011565407752990723, 0.011571999549865722, 0.011421536445617676, 0.011530271530151367, 
0.011607104301452637]",tokens/s,22164.884297081193,kWh,3.491230954794669e-07,3.850182628413345e-08,1.9269818711959725e-07,5.803231088831976e-07,tokens/kWh,441133561.7715776,MB,1153.683456,632.225792,0.0,224.395264,185.327104,s,18,10.093456604003906,0.5607475891113282,0.0033757969675585003,0.5597496948242187,0.5635010986328125,0.5675152709960938,0.5706674096679688,"[0.5616351318359375, 0.5596442260742187, 0.5714554443359375, 0.5620497436523437, 0.5597603759765625, 0.559658935546875, 0.5604142456054687, 0.5586361083984375, 0.558799560546875, 0.55605615234375, 0.5620787353515625, 0.5601438598632813, 0.5668199462890625, 0.5591299438476562, 0.5607298583984375, 0.5581077880859375, 0.5585975341796875, 0.559739013671875]",tokens/s,112.35001491462904,kWh,1.649760427512293e-05,1.8194083952481014e-06,6.559899807548376e-06,2.4876912477919406e-05,tokens/kWh,2532468.6114451867,,s,1134,10.083655643463134,0.008892112560373136,0.00027634942801051267,0.00885923194885254,0.009003254222869874,0.009087467432022095,0.009776603565216065,"[0.008827327728271484, 0.00877177619934082, 0.008989760398864746, 0.009033984184265136, 0.008794655799865722, 0.008816479682922363, 0.008876352310180665, 0.008869888305664063, 0.008881343841552734, 0.008792767524719238, 0.008884160041809083, 0.008835264205932617, 0.008729920387268067, 0.008841119766235351, 0.00900380802154541, 0.008854592323303222, 0.008901568412780761, 0.008850751876831054, 0.008829376220703126, 0.009147647857666016, 0.00880742359161377, 0.00877126407623291, 0.008735039710998534, 0.008696000099182128, 0.008687423706054688, 0.00863385581970215, 0.008667648315429688, 0.008566783905029298, 0.008638463973999023, 0.009177087783813476, 0.008740927696228027, 0.008839103698730469, 0.009045951843261719, 0.008806464195251464, 0.008869888305664063, 0.009034815788269044, 0.008768447875976563, 0.00900607967376709, 0.00897107219696045, 0.008800224304199218, 0.00881715202331543, 0.008860992431640626, 0.008790528297424317, 0.009363360404968261, 0.009117695808410644, 0.009712639808654786, 0.009779359817504883, 0.009066656112670898, 0.008972991943359375, 0.00894927978515625, 0.008993375778198242, 0.008892288208007812, 0.008949312210083007, 0.00884342384338379, 0.008926912307739258, 0.008872511863708497, 0.008917247772216797, 0.008847135543823242, 0.00900432014465332, 0.008907711982727051, 0.00882256031036377, 0.008921088218688965, 0.008963232040405273, 0.008923359870910644, 0.00890345573425293, 0.00894156837463379, 0.00889241600036621, 0.008781824111938476, 0.00889241600036621, 0.009076607704162598, 0.008878208160400391, 0.008916576385498047, 0.00904643154144287, 0.00897433567047119, 0.008921088218688965, 0.010473471641540527, 0.008947711944580078, 0.008829119682312012, 0.008828512191772461, 0.00885372829437256, 0.008855551719665527, 0.008859647750854491, 0.00886905574798584, 0.008876864433288573, 0.008878016471862793, 0.008851327896118165, 0.008784064292907715, 0.008843551635742188, 0.00886684799194336, 0.00877184009552002, 0.008739263534545898, 0.00877184009552002, 0.008840000152587891, 0.008836031913757324, 0.008847135543823242, 0.008880640029907226, 0.008998623847961426, 0.008953599929809571, 0.008887968063354491, 0.008864224433898926, 0.00876966381072998, 0.00881868839263916, 0.008720383644104004, 0.008717823982238769, 0.008655136108398437, 0.008669343948364258, 0.008658335685729981, 0.008698080062866211, 0.009064895629882812, 0.008689663887023925, 0.008834303855895996, 0.008755968093872071, 0.008759008407592774, 0.008804191589355468, 0.008732928276062011, 
0.00879190444946289, 0.008806719779968261, 0.008810527801513672, 0.00890067195892334, 0.008879648208618165, 0.008845408439636231, 0.008947903633117676, 0.008852864265441894, 0.008858048439025878, 0.008844736099243165, 0.00905673599243164, 0.008944607734680176, 0.008925439834594727, 0.009542880058288574, 0.008918784141540527, 0.008882687568664551, 0.00898691177368164, 0.008865792274475098, 0.008889535903930663, 0.008882975578308106, 0.008945440292358399, 0.008851712226867675, 0.00890060806274414, 0.008814592361450196, 0.008795455932617187, 0.00886240005493164, 0.008929023742675781, 0.008859135627746583, 0.008981247901916505, 0.01564243221282959, 0.008941760063171387, 0.00898252773284912, 0.009140255928039551, 0.0088406400680542, 0.00885750389099121, 0.008892959594726562, 0.008814080238342285, 0.008845824241638184, 0.008867936134338379, 0.008824576377868652, 0.008841183662414551, 0.008964384078979492, 0.00883407974243164, 0.008981472015380859, 0.008882176399230958, 0.008962047576904298, 0.008861696243286133, 0.008931679725646973, 0.008810144424438476, 0.00881884765625, 0.008891712188720704, 0.008826848030090331, 0.009091551780700684, 0.009545696258544923, 0.008978303909301757, 0.008984895706176757, 0.009049663543701172, 0.009135583877563476, 0.008991488456726074, 0.009088319778442383, 0.009071136474609375, 0.009021311759948731, 0.008978879928588868, 0.00930406379699707, 0.009080991744995118, 0.009072480201721191, 0.008873984336853028, 0.008962112426757813, 0.008892352104187012, 0.008900128364562988, 0.0088536958694458, 0.008841024398803712, 0.009269887924194337, 0.008907903671264648, 0.008943615913391113, 0.008919039726257324, 0.008972352027893066, 0.008912832260131835, 0.008937024116516113, 0.008884736061096191, 0.008996064186096192, 0.008946399688720703, 0.00901529598236084, 0.009771007537841797, 0.011583456039428711, 0.009238559722900391, 0.008893664360046388, 0.008876607894897462, 0.008851200103759766, 0.008853983879089356, 0.008824831962585449, 0.009244928359985352, 0.00882256031036377, 0.008754783630371094, 0.00873305606842041, 0.009052160263061524, 0.008803999900817872, 0.008765791893005371, 0.008787039756774903, 0.008747296333312988, 0.008759103775024413, 0.008888863563537597, 0.008784128189086913, 0.008765119552612305, 0.008917344093322755, 0.008839167594909669, 0.00910758399963379, 0.009039104461669922, 0.00901091194152832, 0.008965056419372558, 0.008880096435546875, 0.008847359657287598, 0.009020832061767578, 0.008855263710021972, 0.008776576042175292, 0.008904704093933105, 0.008880000114440918, 0.009015423774719238, 0.008990240097045899, 0.009058783531188965, 0.008992192268371581, 0.008770112037658692, 0.008802304267883301, 0.008654623985290528, 0.008637727737426758, 0.008632384300231933, 0.008718815803527832, 0.008596192359924317, 0.008613632202148438, 0.00864230442047119, 0.008566975593566895, 0.008601632118225098, 0.008706015586853027, 0.00903987216949463, 0.008644607543945313, 0.008755040168762208, 0.00872704029083252, 0.008990431785583496, 0.008894463539123536, 0.008890368461608887, 0.008816639900207519, 0.008843296051025391, 0.00880793571472168, 0.008843744277954101, 0.00882688045501709, 0.00881868839263916, 0.008865792274475098, 0.008790016174316406, 0.008843296051025391, 0.008754976272583008, 0.008857791900634765, 0.008730624198913574, 0.008810720443725585, 0.008734496116638183, 0.008731679916381836, 0.008725472450256348, 0.008817888259887696, 0.008794912338256836, 0.008800607681274414, 0.008943264007568359, 0.00895792007446289, 0.009414688110351562, 0.008965888023376466, 
0.009216256141662597, 0.00927945613861084, 0.009127967834472657, 0.009128191947937011, 0.009047200202941894, 0.008927840232849121, 0.008790016174316406, 0.009003007888793945, 0.008970239639282226, 0.008945664405822755, 0.009082079887390137, 0.00897103977203369, 0.008902303695678711, 0.008871711730957032, 0.009026111602783203, 0.008838879585266112, 0.008675104141235351, 0.008798815727233887, 0.008785344123840333, 0.008810303688049316, 0.008805055618286133, 0.008773599624633789, 0.008855551719665527, 0.008751104354858399, 0.008803808212280273, 0.008859423637390137, 0.008760064125061036, 0.008898783683776855, 0.008922431945800781, 0.008839008331298828, 0.00889731216430664, 0.008777567863464356, 0.008803359985351563, 0.00882540798187256, 0.008745375633239747, 0.00871628761291504, 0.008738816261291504, 0.008753439903259278, 0.009354751586914062, 0.008805791854858398, 0.009441375732421875, 0.0089303035736084, 0.009033439636230468, 0.008884063720703125, 0.008892576217651367, 0.00885366439819336, 0.008826623916625977, 0.008822431564331054, 0.008878527641296386, 0.008859904289245605, 0.008840991973876952, 0.00884937572479248, 0.008822848320007324, 0.008826175689697265, 0.008810336112976075, 0.008857824325561524, 0.00889913558959961, 0.00886963176727295, 0.008868127822875977, 0.008960127830505371, 0.008856896400451661, 0.008800992012023926, 0.008933247566223145, 0.00894108772277832, 0.008915295600891113, 0.009019647598266602, 0.008843071937561035, 0.009033727645874023, 0.009057696342468263, 0.008942399978637695, 0.008988063812255859, 0.008950143814086915, 0.008949695587158203, 0.0089682559967041, 0.008946751594543457, 0.008973504066467285, 0.008947296142578125, 0.008927392005920411, 0.008841216087341308, 0.00875449562072754, 0.008785759925842285, 0.008739839553833008, 0.00876527976989746, 0.00869164752960205, 0.00879417610168457, 0.008816096305847168, 0.008792767524719238, 0.008768511772155761, 0.008774208068847657, 0.00879967975616455, 0.008784735679626465, 0.008750111579895019, 0.008845888137817382, 0.008726335525512696, 0.00882534408569336, 0.00875539207458496, 0.008730560302734376, 0.008805631637573242, 0.008881152153015137, 0.008762944221496581, 0.008781824111938476, 0.008816736221313477, 0.008816543579101563, 0.008964096069335938, 0.008833024024963379, 0.008870207786560058, 0.00882470417022705, 0.00885331153869629, 0.008822784423828126, 0.009063776016235352, 0.00885212802886963, 0.008962047576904298, 0.008943072319030762, 0.008886048316955567, 0.008929471969604492, 0.008898880004882813, 0.00895740795135498, 0.008868767738342285, 0.008843135833740235, 0.008888480186462402, 0.009006943702697754, 0.008996864318847657, 0.008945247650146485, 0.008948479652404785, 0.009033087730407716, 0.008877759933471679, 0.008829536437988282, 0.00882688045501709, 0.008742143630981445, 0.00882259178161621, 0.008837663650512695, 0.008807007789611816, 0.008851231575012207, 0.008806431770324707, 0.00889241600036621, 0.0088056640625, 0.008794783592224121, 0.008788064002990722, 0.008822751998901366, 0.008876031875610351, 0.008882176399230958, 0.009013248443603515, 0.00885756778717041, 0.008829312324523926, 0.0087807035446167, 0.008901375770568848, 0.00882688045501709, 0.008787967681884766, 0.008830656051635742, 0.009017663955688477, 0.009046015739440917, 0.008798272132873534, 0.009377440452575684, 0.009037983894348145, 0.008846847534179688, 0.008891008377075195, 0.008884223937988281, 0.008809696197509766, 0.0088307523727417, 0.008782848358154297, 0.0090316801071167, 0.008933664321899414, 0.008892127990722656, 
0.008880127906799316, 0.008865023612976074, 0.008858367919921875, 0.008848992347717285, 0.008860063552856446, 0.008855487823486328, 0.008915007591247559, 0.008814208030700684, 0.008803008079528808, 0.00877667236328125, 0.00886678409576416, 0.008859392166137695, 0.008979999542236328, 0.008874272346496582, 0.0088536958694458, 0.008996864318847657, 0.008930591583251954, 0.008913311958312988, 0.008812959671020509, 0.008824735641479493, 0.008816639900207519, 0.008822400093078614, 0.008834815979003907, 0.008777471542358398, 0.008803135871887207, 0.008816703796386719, 0.008827103614807128, 0.00881436824798584, 0.008751456260681151, 0.008793760299682617, 0.008722432136535644, 0.008806400299072266, 0.008835071563720704, 0.008831328392028808, 0.008854559898376465, 0.008728799819946289, 0.008886688232421875, 0.008867136001586914, 0.008954400062561036, 0.008923295974731446, 0.008923135757446288, 0.008882176399230958, 0.008880127906799316, 0.008884287834167481, 0.008819840431213378, 0.008980319976806641, 0.00883187198638916, 0.008836576461791992, 0.008835712432861327, 0.00880844783782959, 0.00892518424987793, 0.008855263710021972, 0.008912384033203125, 0.008897215843200683, 0.008822879791259765, 0.00883523178100586, 0.008930879592895508, 0.008825119972229003, 0.008970239639282226, 0.008828927993774414, 0.008879903793334962, 0.008900832176208497, 0.00891808032989502, 0.00899350357055664, 0.008906304359436035, 0.008827648162841796, 0.008885951995849609, 0.008921088218688965, 0.008865311622619628, 0.008815072059631347, 0.008840736389160156, 0.008843232154846191, 0.008816991806030273, 0.008838399887084961, 0.00878275203704834, 0.008769536018371582, 0.008765439987182617, 0.008830207824707031, 0.008827327728271484, 0.008775296211242676, 0.008790719985961913, 0.008804512023925782, 0.008855392456054687, 0.008845312118530273, 0.008820735931396484, 0.008801695823669433, 0.008860095977783203, 0.00881065559387207, 0.008733856201171874, 0.008751520156860352, 0.008747776031494141, 0.008746432304382325, 0.00874726390838623, 0.008765439987182617, 0.008765439987182617, 0.008773632049560547, 0.008869888305664063, 0.009381600379943848, 0.008839232444763184, 0.008732895851135253, 0.009042176246643066, 0.008697600364685058, 0.008697855949401855, 0.008732864379882813, 0.008706944465637207, 0.008804384231567383, 0.008892576217651367, 0.009902848243713378, 0.00893727970123291, 0.008773695945739747, 0.00888435173034668, 0.008863391876220704, 0.008838720321655273, 0.008897024154663086, 0.00892460823059082, 0.00887440013885498, 0.008853119850158691, 0.00886070442199707, 0.008898336410522461, 0.008964384078979492, 0.008924896240234376, 0.008889439582824708, 0.008935808181762695, 0.008935711860656738, 0.00889030361175537, 0.008878399848937988, 0.008893952369689942, 0.008876383781433106, 0.008824480056762696, 0.008828927993774414, 0.00879372787475586, 0.008812640190124512, 0.00882697582244873, 0.009654175758361817, 0.008836000442504884, 0.008877311706542969, 0.008890848159790039, 0.008837120056152344, 0.008845024108886719, 0.008800224304199218, 0.008824895858764649, 0.008923392295837402, 0.008871935844421386, 0.008851455688476563, 0.00880844783782959, 0.008849056243896484, 0.008798144340515136, 0.00889510440826416, 0.008784735679626465, 0.00875820827484131, 0.008759296417236329, 0.008685248374938965, 0.008675904273986816, 0.008654591560363769, 0.008642239570617676, 0.008563008308410645, 0.008574591636657716, 0.008611264228820801, 0.008561599731445312, 0.008588543891906738, 0.00865187168121338, 0.008712127685546875, 
0.008658368110656739, 0.008655136108398437, 0.0086364164352417, 0.008722496032714844, 0.008745247840881348, 0.00877952003479004, 0.009174943923950196, 0.008886272430419923, 0.008904704093933105, 0.008889535903930663, 0.00885638427734375, 0.008840928077697754, 0.008956255912780762, 0.008970175743103027, 0.008830975532531739, 0.00883407974243164, 0.008775775909423827, 0.008915840148925781, 0.008893728256225586, 0.008901023864746093, 0.008967488288879395, 0.008889344215393067, 0.008828031539916991, 0.008864319801330567, 0.00886195182800293, 0.008783935546875, 0.008846336364746094, 0.008839167594909669, 0.008792063713073731, 0.008781375885009766, 0.008868288040161133, 0.008781824111938476, 0.008811903953552245, 0.008880191802978516, 0.010013504028320313, 0.0090764799118042, 0.008869791984558105, 0.00885155200958252, 0.00896985626220703, 0.0088307523727417, 0.008839776039123535, 0.008865792274475098, 0.008890368461608887, 0.008853631973266602, 0.008830016136169433, 0.009368288040161132, 0.00883516788482666, 0.008863743782043456, 0.008859647750854491, 0.008914943695068359, 0.008871935844421386, 0.008955007553100585, 0.00888044834136963, 0.00879379177093506, 0.008827775955200194, 0.008833024024963379, 0.00937382411956787, 0.008836159706115723, 0.008903103828430176, 0.008845696449279785, 0.00885331153869629, 0.00876972770690918, 0.008732768058776855, 0.008755104064941406, 0.008815839767456054, 0.008774335861206056, 0.008748127937316894, 0.008746047973632813, 0.008850879669189453, 0.0089169282913208, 0.009007679939270019, 0.008895551681518555, 0.008997983932495117, 0.008939359664916992, 0.008804351806640624, 0.008865280151367188, 0.008886783599853516, 0.008782943725585938, 0.008885472297668458, 0.00883619213104248, 0.008804960250854492, 0.008890368461608887, 0.008799712181091308, 0.008917247772216797, 0.008876128196716309, 0.008791359901428222, 0.008860671997070312, 0.009241855621337891, 0.008879872322082519, 0.009955424308776856, 0.008900927543640138, 0.008900287628173829, 0.008841407775878907, 0.00884921646118164, 0.00890595245361328, 0.008810591697692872, 0.00888697624206543, 0.008863743782043456, 0.008838239669799805, 0.008931391716003418, 0.00882534408569336, 0.008855392456054687, 0.008829216003417968, 0.00878764820098877, 0.008999456405639648, 0.00881049633026123, 0.008799615859985352, 0.008773887634277344, 0.008810879707336426, 0.008748064041137696, 0.008784319877624512, 0.008806015968322754, 0.008786687850952149, 0.008855936050415038, 0.008893631935119628, 0.00881276798248291, 0.008849791526794434, 0.008828096389770508, 0.008818976402282715, 0.00881436824798584, 0.008860416412353515, 0.008894368171691895, 0.008775936126708985, 0.009041664123535156, 0.008962143898010254, 0.008859647750854491, 0.009093119621276855, 0.008970335960388183, 0.008937376022338867, 0.008902655601501466, 0.008928863525390626, 0.008958111763000489, 0.00895030403137207, 0.008988384246826173, 0.010232000350952149, 0.008892576217651367, 0.008858976364135742, 0.008874303817749023, 0.008875424385070801, 0.008856160163879395, 0.008924511909484864, 0.008839839935302735, 0.008869888305664063, 0.008817791938781738, 0.008727423667907715, 0.008751104354858399, 0.008724160194396972, 0.008778335571289063, 0.008918399810791016, 0.008838656425476075, 0.008837984085083008, 0.008832703590393067, 0.008816767692565917, 0.00887168025970459, 0.008837727546691895, 0.009317631721496583, 0.008844160079956054, 0.009297792434692382, 0.009973759651184083, 0.008919039726257324, 0.009072768211364745, 0.01081926441192627, 0.008917183876037597, 
0.008890463829040527, 0.008869791984558105, 0.009002304077148438, 0.009071295738220214, 0.008887840270996093, 0.008884703636169434, 0.008898752212524414, 0.00892848014831543, 0.008890975952148437, 0.008955295562744141, 0.008892576217651367, 0.00892899227142334, 0.008872672080993653, 0.008902655601501466, 0.008980480194091797, 0.008855551719665527, 0.008841055870056153, 0.008861215591430664, 0.008888352394104003, 0.008923744201660156, 0.008828927993774414, 0.008857407569885255, 0.00878611183166504, 0.008794143676757812, 0.00881065559387207, 0.008836000442504884, 0.008846591949462891, 0.008909791946411132, 0.008900544166564942, 0.008886303901672363, 0.008938207626342773, 0.008948927879333496, 0.008903488159179688, 0.008890399932861328, 0.00885961627960205, 0.008855551719665527, 0.008884032249450684, 0.009906368255615234, 0.009743647575378418, 0.00913379192352295, 0.008881152153015137, 0.008935423851013183, 0.00899891185760498, 0.008962207794189454, 0.008900447845458985, 0.008872096061706542, 0.008898207664489746, 0.00886188793182373, 0.008828960418701171, 0.008913920402526856, 0.008891039848327637, 0.008870207786560058, 0.008839167594909669, 0.008871647834777832, 0.008903648376464844, 0.008906399726867676, 0.008870112419128417, 0.008886240005493163, 0.008818016052246094, 0.008794528007507324, 0.008869824409484863, 0.00886832046508789, 0.00924687957763672, 0.008782784461975097, 0.008842144012451172, 0.008867679595947265, 0.008872096061706542, 0.008788127899169922, 0.008833120346069336, 0.01009228801727295, 0.009058303833007812, 0.009010687828063964, 0.008984959602355957, 0.008883551597595215, 0.008890527725219727, 0.008862336158752442, 0.008931584358215332, 0.008836864471435547, 0.008878080368041993, 0.008847359657287598, 0.008769536018371582, 0.008867072105407716, 0.008775487899780273, 0.008724639892578125, 0.008780575752258301, 0.00879798412322998, 0.00892848014831543, 0.008897472381591796, 0.008821855545043946, 0.008842368125915528, 0.008841055870056153, 0.008812543869018554, 0.008824992179870605, 0.00873356819152832, 0.008737248420715332, 0.008867584228515624, 0.008801152229309082, 0.008851327896118165, 0.00886070442199707, 0.008749631881713868, 0.008811008453369141, 0.008753055572509766, 0.008822400093078614, 0.009271424293518066, 0.008763296127319336, 0.008755552291870116, 0.008720576286315918, 0.008716095924377441, 0.008766752243041992, 0.008706720352172851, 0.008763456344604492, 0.008740415573120117, 0.008831551551818848, 0.008763263702392578, 0.0089268798828125, 0.00890294361114502, 0.008881504058837891, 0.008790271759033203, 0.008802623748779296, 0.008810175895690917, 0.008839391708374023, 0.008922495841979981, 0.008874496459960938, 0.008927359580993652, 0.009014880180358887, 0.008992768287658692, 0.00888259220123291, 0.00889241600036621, 0.008988415718078614, 0.008900863647460937, 0.008935711860656738, 0.008903679847717285, 0.008944095611572266, 0.008917247772216797, 0.00887564754486084, 0.00885366439819336, 0.0089169921875, 0.009298144340515136, 0.008929408073425293, 0.00894105625152588, 0.009101056098937988, 0.008953472137451172, 0.008964991569519043, 0.00886188793182373, 0.008853440284729003, 0.008873215675354004, 0.008929056167602539, 0.008932543754577636, 0.008818079948425293, 0.008779935836791992, 0.008828831672668456, 0.00876576042175293, 0.00881161594390869, 0.008962207794189454, 0.008836864471435547, 0.008806528091430664, 0.008835647583007812, 0.008814911842346191, 0.00887388801574707, 0.009119423866271972, 0.008935968399047851, 0.008994976043701171, 0.008853280067443847, 
0.008887807846069335, 0.008871711730957032, 0.008861536026000977, 0.008941887855529785, 0.00882265567779541, 0.00888092803955078, 0.008855392456054687, 0.008881376266479493, 0.009087008476257324, 0.008851936340332031, 0.008677663803100586, 0.008845312118530273, 0.00882688045501709, 0.008765631675720215, 0.008779583930969238, 0.008820735931396484, 0.008701888084411621, 0.008805600166320801, 0.0086429443359375, 0.009085247993469239, 0.008942912101745606, 0.008839008331298828, 0.008887295722961425, 0.008878080368041993, 0.00897433567047119, 0.008927231788635253, 0.008976672172546387, 0.008922271728515625, 0.00903657627105713, 0.00888748836517334, 0.008917695999145507, 0.008869471549987793, 0.00883456039428711, 0.008868672370910645, 0.008853504180908203, 0.008814240455627441, 0.008808799743652344, 0.008803584098815919, 0.008796319961547852, 0.009092960357666015, 0.008826687812805175, 0.00879097557067871, 0.008845120429992677, 0.008851167678833007, 0.008796640396118164, 0.008857631683349609, 0.008797504425048829, 0.00885750389099121, 0.008862719535827637, 0.00884006404876709, 0.008847264289855957, 0.008904800415039063, 0.008769696235656738, 0.00872316837310791, 0.00874112033843994, 0.008740799903869629, 0.008746815681457519, 0.00880844783782959, 0.008773823738098145, 0.008798015594482423, 0.0088307523727417, 0.009076959609985351, 0.008936448097229004, 0.008911104202270507, 0.008775648117065429, 0.008778047561645509, 0.008884256362915038, 0.008748671531677247, 0.008784704208374024, 0.008749055862426757, 0.008783424377441406, 0.00888649559020996, 0.008798720359802246, 0.008785408020019531, 0.00899289608001709, 0.008872032165527344, 0.008894559860229492, 0.00884931182861328, 0.008880288124084472, 0.008885215759277344, 0.008857407569885255, 0.008843263626098634, 0.008853407859802246, 0.008953951835632324, 0.008878080368041993, 0.008951680183410644, 0.008919136047363281, 0.008888256072998047, 0.008890560150146484, 0.008905632019042969, 0.008897791862487793, 0.008887519836425781, 0.008897055625915527, 0.008881888389587402, 0.008922783851623536, 0.008944255828857422, 0.008841440200805665, 0.008829024314880371, 0.008830656051635742, 0.008818752288818359, 0.008861632347106933, 0.008840767860412598, 0.008804800033569336, 0.008832320213317871, 0.008784576416015625, 0.008891488075256348, 0.008954784393310548, 0.008812543869018554, 0.008897919654846192, 0.008856191635131835, 0.008812543869018554, 0.00871769618988037, 0.008664735794067383, 0.008758560180664062, 0.008734399795532227, 0.008798272132873534, 0.008742848396301269, 0.00884761619567871, 0.008866720199584961, 0.008807264328002929, 0.009191424369812011, 0.008908127784729004, 0.008839839935302735, 0.00867302417755127, 0.0087740478515625, 0.008781503677368165, 0.008816800117492675, 0.008916576385498047, 0.00878223991394043, 0.008978272438049316, 0.008872096061706542, 0.008835391998291016, 0.008863391876220704, 0.008885919570922851, 0.008889920234680175, 0.008854016304016114, 0.008890399932861328, 0.008928576469421386, 0.00887497615814209, 0.008853504180908203, 0.008912896156311035, 0.008874239921569825, 0.00887388801574707, 0.008922207832336425, 0.008913984298706054, 0.008959839820861817, 0.0089552001953125, 0.008922016143798828, 0.008912896156311035, 0.008883999824523927, 0.009147680282592774, 0.008904671669006348, 0.008911520004272462, 0.008894559860229492, 0.008937472343444825, 0.008937536239624023, 0.008902848243713379, 0.008881279945373535, 0.008815232276916505, 0.008761343955993652, 0.008816639900207519, 0.008951199531555177, 
0.008784159660339356, 0.00882310390472412, 0.008828927993774414, 0.008763392448425293, 0.008720383644104004, 0.008760736465454102, 0.008804832458496094, 0.00876147174835205, 0.008851263999938965, 0.008941791534423827, 0.008755167961120606, 0.008859328269958496, 0.008759615898132324, 0.008779264450073243, 0.008715904235839844, 0.008708703994750976, 0.008765343666076661, 0.008648736000061035, 0.008740511894226075, 0.008795999526977539, 0.008815168380737304, 0.008880288124084472, 0.008869664192199707, 0.00879036808013916, 0.008775263786315919, 0.008773632049560547, 0.00882703971862793, 0.008923392295837402, 0.008920639991760253, 0.00894438362121582, 0.009024864196777343, 0.008912351608276367, 0.008941632270812988, 0.00913923168182373, 0.009004128456115723, 0.0090033597946167, 0.009023776054382324, 0.008986687660217286, 0.008923135757446288, 0.008966143608093263, 0.00893238353729248, 0.00897532844543457, 0.009011103630065917]",tokens/s,112.45921519891753,,, -4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): +4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -157,16 +157,16 @@ ChildProcessError: Traceback (most recent call last): self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( - File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): +4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -192,11 +192,11 @@ ChildProcessError: Traceback (most recent call last): self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa + File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` @@ -273,7 +273,7 @@ ImportError: This modeling file requires the following packages that were not fo ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1049.137152,5046.730752,0.0,4651.483136,4638.22848,s,1,14.198349609375,14.198349609375,0.0,14.198349609375,14.198349609375,14.198349609375,14.198349609375,[14.198349609375],,kWh,0.0002059586062625006,2.271175767481626e-05,7.954506363599956e-05,0.0003082154275733164,,MB,1222.680576,6172.901376,0.0,5765.070848,5418.530816,s,10,10.067480834960937,1.0067480834960938,0.0060373690484956935,1.0091661987304688,1.0126028259277344,1.0127268890380858,1.0128261395263671,"[0.9936867065429688, 0.9992445068359375, 1.0019698486328126, 1.007849609375, 1.0080123901367188, 1.0125752563476562, 1.0103200073242187, 1.0128509521484375, 1.0103255615234374, 1.01064599609375]",tokens/s,254.2840698648256,kWh,2.9188706434471595e-05,3.2190273342346356e-06,1.9385773084364752e-05,5.179350685307098e-05,tokens/kWh,4942704.511710836,MB,1240.367104,6172.901376,0.0,5765.070848,5418.533376,s,10,49.00916796875,4.900916796875,0.01320260211013012,4.905751953125,4.9146486328125,4.9152515625,4.91573390625,"[4.87516796875, 4.88421875, 4.890076171875, 4.89661669921875, 4.9038095703125, 4.9076943359375, 4.907861328125, 4.91335400390625, 4.9145146484375, 4.9158544921875]",tokens/s,12.854737717679894,kWh,0.00014398664825469116,1.5882754806762217e-05,9.576901348383259e-05,0.000255638416545286,tokens/kWh,246441.83316179962,,s,630,49.004861740112325,0.07778549482557509,0.0019485939166861061,0.07745126342773437,0.07909130020141601,0.07961224327087402,0.0891949423980713,"[0.08933990478515624, 0.07592518615722656, 0.07579583740234375, 0.07649581146240235, 0.07545414733886718, 0.07582675170898437, 0.07566413116455079, 0.075683837890625, 0.07550975799560547, 0.07577983856201172, 0.07732249450683594, 0.07804927825927735, 0.08050396728515626, 0.07935266876220703, 0.07704370880126953, 0.07665782165527343, 0.07588540649414062, 0.07588998413085937, 0.07575212860107422, 0.07571865844726562, 0.07590707397460937, 0.07600947570800781, 0.07672752380371094, 0.07752349090576172, 0.07847929382324219, 0.08017878723144531, 0.07869718170166015, 
0.07727500915527344, 0.07706253051757812, 0.07684889221191406, 0.07630233764648438, 0.07626751708984375, 0.07732185363769531, 0.07647456359863282, 0.07617145538330078, 0.07733452606201172, 0.07787459564208984, 0.07901654052734375, 0.0784993896484375, 0.07908988952636718, 0.07763929748535156, 0.07766409301757812, 0.07689087677001953, 0.07647232055664062, 0.07668531036376953, 0.0768586883544922, 0.07644195556640625, 0.07701673889160156, 0.07734854125976562, 0.07863772583007812, 0.07846537780761718, 0.07910399627685546, 0.07791001892089844, 0.07895021057128906, 0.07781132507324219, 0.07657324981689453, 0.07714790344238281, 0.07704755401611328, 0.07659529876708984, 0.077295166015625, 0.07726780700683594, 0.0777359390258789, 0.0784783706665039, 0.0919291229248047, 0.07576172637939453, 0.0755568618774414, 0.07590707397460937, 0.07573661041259766, 0.07641340637207031, 0.0758104019165039, 0.07576812744140625, 0.07689132690429687, 0.07628278350830078, 0.07592960357666016, 0.07761920166015625, 0.08085708618164063, 0.07805731201171875, 0.0778733139038086, 0.07724237060546875, 0.07604774475097656, 0.0764975357055664, 0.07604383850097657, 0.07632940673828124, 0.0774771499633789, 0.07690108489990234, 0.07623052978515625, 0.07779097747802734, 0.07969558715820313, 0.07903456115722657, 0.07781977844238282, 0.07758448028564453, 0.07880089569091797, 0.07630694580078125, 0.07666687774658203, 0.07605833435058594, 0.0763661117553711, 0.07667711639404297, 0.07663398742675781, 0.07721087646484374, 0.0781443862915039, 0.07885004425048828, 0.078671875, 0.0779422378540039, 0.07854134368896484, 0.07750656127929688, 0.0770142059326172, 0.07685142517089844, 0.076404541015625, 0.07686428833007812, 0.0770355224609375, 0.07708057403564453, 0.07759852600097657, 0.07851641845703125, 0.07817327880859375, 0.07840860748291016, 0.0778629150390625, 0.07767040252685548, 0.07786495971679687, 0.07760076904296875, 0.0772894744873047, 0.07688396453857421, 0.07732204437255859, 0.07719475555419922, 0.07785337829589843, 0.07802880096435547, 0.07879065704345703, 0.08826876831054688, 0.07669334411621094, 0.07579094696044922, 0.07627366638183594, 0.07631667327880859, 0.07575961303710937, 0.07579647827148438, 0.07631053161621094, 0.07642726135253906, 0.07683891296386719, 0.07633920288085938, 0.07827561950683594, 0.08079049682617187, 0.07991705322265626, 0.07755356597900391, 0.07686112213134766, 0.07660995483398438, 0.07662156677246093, 0.07638861083984375, 0.07607046508789063, 0.076271484375, 0.07684767913818359, 0.07670524597167969, 0.07757794952392578, 0.07920102691650391, 0.07901315307617188, 0.07767734527587891, 0.0770703353881836, 0.07801139068603516, 0.07681126403808594, 0.07685270690917968, 0.07693571472167969, 0.07647232055664062, 0.07682240295410156, 0.07676322937011719, 0.0774512939453125, 0.0785080337524414, 0.07871231842041015, 0.07946617889404296, 0.07816684722900391, 0.07755980682373047, 0.07802780914306641, 0.07727788543701172, 0.07711177825927734, 0.07703327941894532, 0.07709081268310547, 0.07758233642578125, 0.07713996887207031, 0.0777871322631836, 0.0786513900756836, 0.07839449310302735, 0.07836966705322265, 0.0781475830078125, 0.07869235229492187, 0.0779606704711914, 0.07787773132324219, 0.07692908477783203, 0.07748607635498046, 0.07730707550048828, 0.07751254272460938, 0.07814630126953125, 0.07785692596435546, 0.07851628875732422, 0.08884003448486329, 0.07638502502441406, 0.07640064239501954, 0.0759582748413086, 0.07597440338134766, 0.07646028900146484, 0.07625727844238281, 0.07593126678466797, 0.0761200942993164, 
0.07692323303222656, 0.07742873382568359, 0.0781946563720703, 0.08213302612304688, 0.07952588653564453, 0.077412353515625, 0.07644322967529296, 0.07687564849853516, 0.07666044616699219, 0.07652025604248047, 0.07665254211425782, 0.07653689575195312, 0.07612662506103515, 0.07665865325927734, 0.07773446655273437, 0.07901593780517578, 0.07959347534179688, 0.07904825592041016, 0.07747360229492188, 0.07706483459472656, 0.07739295959472656, 0.07693574523925781, 0.07688575744628906, 0.07686822509765626, 0.07686758422851563, 0.07674674987792969, 0.07716156768798828, 0.07800717163085938, 0.07906716918945313, 0.07922057342529297, 0.07867203521728515, 0.07757004547119141, 0.07769197082519531, 0.0779676513671875, 0.07718160247802734, 0.07712973022460938, 0.07709677124023437, 0.07709308624267579, 0.07721366119384766, 0.07903587341308593, 0.07867766571044922, 0.07856114959716796, 0.0788298568725586, 0.07819123077392579, 0.07798588562011718, 0.07786473846435547, 0.07791433715820313, 0.07738982391357421, 0.07722998046875, 0.07718675231933594, 0.07734636688232421, 0.0779559326171875, 0.07870054626464844, 0.07818240356445312, 0.0911994857788086, 0.0762798080444336, 0.07584358215332031, 0.07649603271484375, 0.0764582748413086, 0.07651779174804688, 0.07614479827880859, 0.07630643463134766, 0.076653564453125, 0.07694233703613282, 0.07694643402099609, 0.07857855987548829, 0.08163890838623047, 0.07821987152099609, 0.07759398651123046, 0.07686825561523437, 0.0766929931640625, 0.0770136947631836, 0.07673420715332031, 0.07662528228759766, 0.07662179565429687, 0.07683052825927734, 0.07660623931884766, 0.07949324798583984, 0.07966915130615235, 0.07928227233886719, 0.07841177368164062, 0.07741645050048829, 0.07729357147216796, 0.07710310363769532, 0.07671148681640624, 0.07656492614746094, 0.07674674987792969, 0.07674674987792969, 0.07674265289306641, 0.07807907104492187, 0.07892470550537109, 0.07920783996582031, 0.07908121490478516, 0.07890211486816406, 0.07831552124023437, 0.07692243194580078, 0.07714387512207031, 0.07698291015625, 0.07729718780517578, 0.07728790283203125, 0.07745126342773437, 0.07790169525146484, 0.07801436614990234, 0.0787042236328125, 0.07878031921386719, 0.07864329528808593, 0.0792828140258789, 0.07832575988769531, 0.07770435333251953, 0.07753814697265625, 0.07774553680419923, 0.07699887847900391, 0.07718701171875, 0.07758022308349609, 0.07819318389892578, 0.07874121856689453, 0.07854723358154297, 0.09250406646728515, 0.07675084686279297, 0.07657881927490234, 0.07647846221923828, 0.0760335693359375, 0.07605705261230469, 0.0766051483154297, 0.07653545379638672, 0.07656716918945312, 0.07651942443847656, 0.0773017578125, 0.07809843444824219, 0.08111007690429688, 0.08007350158691406, 0.0781416015625, 0.07743599700927735, 0.07678044891357422, 0.07651737976074219, 0.07716044616699219, 0.07665586853027344, 0.07667945861816407, 0.07683865356445313, 0.0768416976928711, 0.07736649322509766, 0.07966390228271485, 0.07912652587890626, 0.07844425964355468, 0.07831785583496094, 0.07772160339355469, 0.07743833923339843, 0.0773105926513672, 0.07698738861083984, 0.07756476593017578, 0.07682182312011719, 0.07649571228027344, 0.07733452606201172, 0.07904048156738282, 0.07885545349121094, 0.07853679656982422, 0.07855481719970703, 0.07960675048828125, 0.07807180786132813, 0.07713587188720702, 0.07713938903808594, 0.07747805023193359, 0.07735273742675781, 0.07709951782226562, 0.07797567749023437, 0.07788953399658204, 0.07851417541503906, 0.07857151794433594, 0.07862844848632812, 0.07839376068115235, 0.07795097351074219, 
0.07811891174316406, 0.07767449951171874, 0.07791600036621094, 0.07697779083251953, 0.07707292938232421, 0.07745126342773437, 0.07808204650878907, 0.07814262390136718, 0.07821517181396484, 0.0922833251953125, 0.07605657958984376, 0.0760274887084961, 0.07657923126220703, 0.07725465393066407, 0.07603807830810547, 0.07654611206054687, 0.07631439971923829, 0.07631484985351562, 0.07741645050048829, 0.07675846099853516, 0.07832736206054687, 0.08169369506835937, 0.07808921813964843, 0.07723110198974609, 0.0768202896118164, 0.077050048828125, 0.07691651153564454, 0.07653807830810547, 0.07619174194335937, 0.07660749053955078, 0.07664230346679687, 0.07795420837402343, 0.07770403289794922, 0.07947408294677734, 0.0792080307006836, 0.07818956756591797, 0.07752448272705079, 0.07701478576660156, 0.07753190612792969, 0.07766015625, 0.07626956939697266, 0.0767262725830078, 0.07723417663574218, 0.07736524963378906, 0.07787725067138672, 0.07937840270996094, 0.0800123519897461, 0.07899190521240235, 0.078283203125, 0.07784243011474609, 0.07792230224609376, 0.07785049438476563, 0.07748531341552735, 0.07709123229980469, 0.07682198333740234, 0.07735807800292968, 0.07750656127929688, 0.07796736145019531, 0.07891763305664062, 0.07925350189208985, 0.07851827239990235, 0.078487548828125, 0.07799398040771484, 0.07774169921875, 0.07901427459716796, 0.07771340942382812, 0.07677529907226563, 0.07728131103515624, 0.07822908782958984, 0.07852671813964844, 0.07851606750488281, 0.07855760192871093, 0.09115846252441406, 0.07670630645751954, 0.07646975708007812, 0.07633283233642578, 0.0765337905883789, 0.0765283203125, 0.07622246551513671, 0.07657881927490234, 0.07638835144042969, 0.0765132827758789, 0.07781539154052734, 0.07809065246582031, 0.08151561737060548, 0.07953705596923828, 0.07803449249267579, 0.07733865356445313, 0.076499267578125, 0.07646627044677734, 0.07694950103759765, 0.07665849304199218, 0.07661126708984375, 0.07655174255371094, 0.07656707000732423, 0.07822582244873047, 0.07952515411376954, 0.08030281829833984, 0.07929631805419922, 0.07803718566894531, 0.07706419372558594, 0.07729737854003907, 0.077340576171875, 0.07714649963378906, 0.07733417510986328, 0.07831078338623047, 0.07705084991455079, 0.07731404876708985, 0.07851583862304687, 0.0790429458618164, 0.07931001281738281, 0.07871161651611328, 0.07838713836669922, 0.0775823974609375, 0.07878656005859375, 0.07711872100830078, 0.0767925796508789, 0.07751398468017578, 0.07763136291503907, 0.07734934234619141, 0.07833411407470703, 0.08007023620605469, 0.07871965026855468, 0.07824806213378906, 0.07788735961914063, 0.07836233520507813, 0.07840796661376953, 0.07781581115722656, 0.07737718200683594, 0.0773359375, 0.07852130889892578, 0.077264892578125, 0.07808348846435546, 0.07863970947265625, 0.07872512054443359, 0.0879636459350586, 0.07623270416259766, 0.07625113677978515, 0.07621568298339844, 0.07642556762695313, 0.0770041275024414, 0.07656543731689452, 0.07660749053955078, 0.07653135681152344, 0.077053955078125, 0.07745161437988281, 0.07872306823730468, 0.0814202880859375, 0.07960105895996093, 0.07732412719726563, 0.07663488006591797, 0.07662537384033204, 0.07727494049072266, 0.07751497650146484, 0.07678956604003906, 0.0767857894897461, 0.07705977630615235, 0.07730470275878906, 0.07856947326660156, 0.07934361267089844, 0.07948271942138672, 0.07875727844238281, 0.07802751922607422, 0.07794687652587891, 0.07712345886230469, 0.07699468994140625, 0.07711949157714844, 0.0773017578125, 0.07774534606933593, 0.07737782287597657, 0.07929459381103515, 
0.07879647827148438, 0.07851900482177734, 0.0791592025756836, 0.07901936340332032, 0.07828966522216797, 0.0777359390258789, 0.07760486602783204, 0.07720550537109375, 0.07771955108642578, 0.07756755065917968, 0.07756832122802734, 0.07838527679443359, 0.07839315032958985, 0.07843583679199219, 0.07845958709716797, 0.07860991668701171, 0.07894057464599609, 0.07810467529296874, 0.07853670501708984, 0.07722803497314454, 0.07732192230224609, 0.0779717788696289, 0.07756185913085938, 0.07801446533203125, 0.07843424224853515, 0.0784814682006836, 0.079476318359375, 0.0905379867553711, 0.07649423980712891, 0.07646883392333985, 0.07641088104248046, 0.07612416076660156, 0.0765191650390625, 0.0762671356201172, 0.07626521301269532, 0.07647526550292968, 0.0767011489868164, 0.07777894592285156, 0.0785311050415039, 0.08133379364013672, 0.07809276580810547, 0.07787513732910156, 0.07721155548095703, 0.07694761657714844, 0.0767610855102539, 0.0765251235961914, 0.07650342559814453, 0.07674665832519531, 0.07671209716796876, 0.07822713470458985, 0.07817862701416016, 0.07986697387695313, 0.07917984008789063, 0.07864201354980468, 0.0779386215209961, 0.0781968002319336, 0.07752499389648437, 0.07719439697265625, 0.07749718475341796, 0.0767262725830078, 0.07666687774658203, 0.07760195159912109, 0.07822831726074218, 0.07923638153076172, 0.07961673736572265, 0.07945625305175781, 0.07831756591796875, 0.07862271881103515, 0.0783790054321289, 0.07693721771240235, 0.07699411010742188, 0.07758073425292969, 0.07721766662597657, 0.07713190460205079, 0.07794175720214844, 0.08017404937744141, 0.07924457550048829, 0.07884671783447265, 0.07851827239990235, 0.07830323028564454, 0.07865753936767578, 0.07808975982666015, 0.07773590087890625, 0.07788800048828125, 0.07826150512695312, 0.07806438446044922, 0.07742371368408203, 0.07833414459228516, 0.07877203369140626, 0.0786707534790039]",tokens/s,12.855867308453632,,, -4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): +4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -299,15 
+299,15 @@ ChildProcessError: Traceback (most recent call last): self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4000, in from_pretrained + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 615, in attach_align_device_hook_on_blocks + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 160, in add_hook_to_module + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 282, in init_hook + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) @@ -315,10 +315,10 @@ ChildProcessError: Traceback (most recent call last): return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 296, in _quantize w = self.data.contiguous().to(device) -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU 0 has a total capacity of 14.75 GiB of which 239.06 MiB is free. Process 22842 has 14.51 GiB memory in use. Of the allocated memory 14.07 GiB is allocated by PyTorch, and 332.11 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): +4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -344,11 +344,11 @@ ChildProcessError: Traceback (most recent call last): self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` @@ -361,7 +361,7 @@ ValueError: GPTNeoForCausalLM does not support an attention implementation throu 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,881.729536,657.391616,0.0,262.144,258.163712,s,1,8.0430068359375,8.0430068359375,0.0,8.0430068359375,8.0430068359375,8.0430068359375,8.0430068359375,[8.0430068359375],,kWh,2.6612145262496746e-05,2.9283357066335143e-06,9.433063102012862e-06,3.897354407114312e-05,,MB,1172.221952,755.95776,0.0,348.127232,317.820928,s,10,0.20838342666625978,0.02083834266662598,0.0002567364207006522,0.020853376388549803,0.02121261806488037,0.021219717121124266,0.021225396366119385,"[0.021226816177368164, 0.020714208602905272, 0.02121104049682617, 0.020811904907226564, 0.02096156883239746, 0.02099456024169922, 0.020591007232666016, 0.020537759780883787, 0.020894847869873046, 0.020439712524414063]",tokens/s,12285.046085262884,kWh,6.035742818009889e-07,6.65637668998693e-08,4.0238668554542483e-07,1.072524734246283e-06,tokens/kWh,238689133.9899066,MB,1183.608832,781.123584,0.0,373.293056,317.823488,s,10,10.020481994628907,1.0020481994628907,0.010652903509280085,1.0036236572265627,1.015684619140625,1.0165244689941406,1.017196348876953,"[1.004661865234375, 0.9886724853515625, 1.0154979858398439, 0.9943750610351563, 1.00258544921875, 1.0109461669921875, 0.9888856201171875, 1.0173643188476562, 1.008752197265625, 0.9887408447265625]",tokens/s,62.871227186245854,kWh,2.9208369141114508e-05,3.221175133107925e-06,1.2214234518856399e-05,4.464377879307884e-05,tokens/kWh,1411170.8664269017,,s,630,10.01510766124725,0.015896996287694053,0.0004690713008814915,0.01582924795150757,0.016316352462768552,0.01651300172805786,0.017206590061187743,"[0.016197792053222658, 0.016232288360595704, 0.016127071380615234, 0.01612601661682129, 0.016044895172119142, 0.01597216033935547, 0.01581308841705322, 0.015744256019592284, 0.01567296028137207, 0.015479647636413574, 0.01549721622467041, 0.015454208374023438, 0.015538080215454102, 0.015534175872802734, 0.015527935981750488, 0.01571401596069336, 0.015755776405334473, 0.015605536460876465, 0.015478591918945312, 0.015444160461425781, 0.015376640319824218, 0.015672896385192873, 0.015435551643371582, 0.01535427188873291, 0.01536240005493164, 0.015480223655700684, 0.015542528152465821, 0.01576959991455078, 0.01568073558807373, 0.015721216201782226, 0.016295967102050782, 0.017291231155395506, 0.01617513656616211, 0.016330751419067382, 0.01627510452270508, 0.016594944000244142, 0.022169952392578126, 0.018266111373901366, 0.01623798370361328, 0.01572719955444336, 0.015545984268188477, 0.01549459171295166, 0.01565321636199951, 0.015553119659423829, 0.015564703941345214, 0.01600726318359375, 0.015694080352783205, 0.01569375991821289, 0.016228160858154296, 0.015629983901977538, 0.01576316833496094, 0.01586678409576416, 
0.015830592155456544, 0.01559158420562744, 0.015557727813720702, 0.016085792541503906, 0.015579263687133789, 0.016134016036987303, 0.015759231567382813, 0.01577014446258545, 0.015695743560791016, 0.0157609920501709, 0.01595971202850342, 0.015769439697265623, 0.015710176467895506, 0.015570816040039062, 0.015528191566467285, 0.01551103973388672, 0.015547072410583496, 0.015563808441162109, 0.015512255668640137, 0.01562435245513916, 0.01576857566833496, 0.015600000381469727, 0.015484576225280761, 0.015526880264282226, 0.015510592460632324, 0.015522751808166504, 0.015433728218078613, 0.015482560157775879, 0.015483200073242187, 0.015344799995422364, 0.015393631935119629, 0.015468607902526856, 0.015439807891845703, 0.015330400466918945, 0.015469632148742675, 0.015626079559326173, 0.015638527870178224, 0.015577088356018067, 0.01562758445739746, 0.015673855781555175, 0.015691967964172365, 0.015775103569030762, 0.01676483154296875, 0.016123647689819335, 0.01583612823486328, 0.01562764835357666, 0.01555519962310791, 0.015474399566650391, 0.015567328453063965, 0.015574848175048828, 0.01558732795715332, 0.015505056381225586, 0.015405407905578613, 0.015460224151611328, 0.015353919982910157, 0.015641920089721678, 0.015636672019958497, 0.015450688362121582, 0.015433728218078613, 0.015586943626403808, 0.015716799736022947, 0.015835071563720705, 0.015835295677185057, 0.015684639930725098, 0.015696479797363282, 0.01601353645324707, 0.016946624755859375, 0.015904576301574706, 0.016505151748657226, 0.01609769630432129, 0.016074560165405274, 0.016111839294433595, 0.015984064102172853, 0.015935744285583496, 0.015714240074157716, 0.015853311538696287, 0.015574848175048828, 0.015575488090515137, 0.0155217924118042, 0.015561727523803711, 0.015551360130310058, 0.015628416061401366, 0.0157327356338501, 0.015976384162902833, 0.01609347152709961, 0.016144256591796875, 0.016123807907104493, 0.016115007400512697, 0.016074623107910156, 0.01613417625427246, 0.016089887619018556, 0.016134143829345703, 0.016033504486083986, 0.016050464630126954, 0.016070432662963867, 0.016109216690063478, 0.01610767936706543, 0.016101919174194335, 0.01614569664001465, 0.016182912826538085, 0.01623958396911621, 0.016300031661987305, 0.016519424438476562, 0.016210720062255858, 0.016206815719604493, 0.016205087661743164, 0.016182016372680665, 0.016127840042114257, 0.01615679931640625, 0.01603296089172363, 0.016057151794433594, 0.016107072830200197, 0.01621881675720215, 0.01619049644470215, 0.01608518409729004, 0.016116031646728514, 0.016091360092163085, 0.016457183837890625, 0.01609782409667969, 0.01675811195373535, 0.016206272125244142, 0.016279199600219726, 0.01624140739440918, 0.016445247650146485, 0.01621401596069336, 0.016193248748779296, 0.016328704833984374, 0.01625753593444824, 0.01621388816833496, 0.01656150436401367, 0.016468544006347657, 0.01620377540588379, 0.016130048751831053, 0.016087039947509766, 0.016111328125, 0.016127904891967772, 0.016111583709716798, 0.01550284767150879, 0.015867775917053222, 0.0157128963470459, 0.015638527870178224, 0.015560447692871094, 0.015511103630065918, 0.015439616203308106, 0.015505951881408691, 0.015372703552246094, 0.015350943565368652, 0.0153504638671875, 0.015372447967529297, 0.01553983974456787, 0.01574934387207031, 0.016052383422851563, 0.015711551666259767, 0.015710720062255858, 0.015636063575744628, 0.01569993591308594, 0.015796863555908203, 0.015882240295410157, 0.015943391799926758, 0.015868191719055177, 0.015879199981689452, 0.015754207611083985, 0.0172126407623291, 0.015982815742492676, 
0.0162412166595459, 0.015811936378479004, 0.015744895935058595, 0.01585760021209717, 0.015811327934265137, 0.015599712371826172, 0.015437824249267578, 0.015368351936340332, 0.015379584312438965, 0.015366656303405762, 0.015548480033874513, 0.015524255752563476, 0.015432671546936035, 0.015462592124938964, 0.015673664093017577, 0.015900959968566896, 0.01597856044769287, 0.015868895530700682, 0.015535231590270995, 0.015603551864624024, 0.015540224075317383, 0.01562009620666504, 0.015560704231262207, 0.015793312072753907, 0.015712672233581543, 0.015601183891296387, 0.015676320075988768, 0.015931679725646974, 0.016498176574707032, 0.016636255264282227, 0.01637593650817871, 0.01633459281921387, 0.01627462387084961, 0.016259904861450195, 0.016158559799194335, 0.016071136474609376, 0.016126399993896486, 0.016113664627075194, 0.015970303535461427, 0.01599219226837158, 0.015743616104125977, 0.015550463676452637, 0.01551961612701416, 0.015409279823303222, 0.01545792007446289, 0.01566553592681885, 0.0163450870513916, 0.01633839988708496, 0.015810463905334474, 0.015671327590942384, 0.01563055992126465, 0.015986111640930174, 0.015471551895141602, 0.015453215599060058, 0.015413887977600097, 0.01546070384979248, 0.015495295524597169, 0.015781760215759277, 0.015892479896545412, 0.015769887924194335, 0.01573359966278076, 0.015844511985778808, 0.015875359535217286, 0.015847871780395508, 0.015773695945739748, 0.01571664047241211, 0.01557494354248047, 0.015646528244018555, 0.015523200035095214, 0.015546272277832032, 0.015528672218322754, 0.015564800262451172, 0.01557094383239746, 0.01560534381866455, 0.015481247901916503, 0.015482879638671876, 0.015648096084594727, 0.015672096252441405, 0.01572646427154541, 0.01572441577911377, 0.01576972770690918, 0.015779840469360353, 0.015834272384643553, 0.017191776275634764, 0.016029472351074218, 0.016107744216918945, 0.0165295352935791, 0.01626128005981445, 0.01657756805419922, 0.016689855575561522, 0.01661929512023926, 0.016582080841064453, 0.016177951812744142, 0.01617919921875, 0.016156095504760742, 0.017500415802001953, 0.01629420852661133, 0.016332319259643555, 0.016230016708374023, 0.015903039932250975, 0.01613884735107422, 0.016056352615356446, 0.016139392852783204, 0.0161102409362793, 0.015992863655090332, 0.016054464340209962, 0.016067840576171874, 0.016086816787719727, 0.016316640853881837, 0.0161779842376709, 0.016178495407104494, 0.01605081558227539, 0.01608687973022461, 0.01612406349182129, 0.016052223205566405, 0.016203647613525392, 0.016156543731689454, 0.016121280670166015, 0.01624982452392578, 0.016215904235839844, 0.016312320709228514, 0.016210208892822264, 0.016256736755371093, 0.016463872909545898, 0.01680335998535156, 0.016239072799682615, 0.01611782455444336, 0.016043968200683593, 0.016054271697998047, 0.016076608657836913, 0.015976832389831542, 0.016024927139282226, 0.016058847427368163, 0.016052127838134766, 0.016048288345336913, 0.01607468795776367, 0.016046079635620117, 0.01613209533691406, 0.0160501766204834, 0.01616089630126953, 0.016268960952758788, 0.016022783279418945, 0.015864864349365234, 0.015789183616638183, 0.01561849594116211, 0.015644607543945314, 0.015495488166809082, 0.015509056091308595, 0.015398528099060059, 0.015693056106567384, 0.01567695999145508, 0.015669407844543456, 0.01648134422302246, 0.015991456031799318, 0.015999263763427734, 0.016107519149780272, 0.01613209533691406, 0.015945119857788084, 0.01611612892150879, 0.015853759765625, 0.015699968338012696, 0.015751168251037596, 0.015431584358215332, 0.01575945568084717, 
0.016082944869995116, 0.015696127891540526, 0.01554201602935791, 0.016002559661865236, 0.015493632316589356, 0.015361632347106934, 0.015494720458984375, 0.015571807861328125, 0.015742783546447755, 0.01564896011352539, 0.015567071914672851, 0.015460127830505372, 0.01553206443786621, 0.015456319808959961, 0.015487968444824219, 0.015547327995300293, 0.01575539207458496, 0.015883423805236815, 0.015913120269775392, 0.01594425582885742, 0.01637785530090332, 0.015923456192016603, 0.01592416000366211, 0.015911168098449707, 0.01587990379333496, 0.01594480037689209, 0.015686495780944824, 0.015915936470031738, 0.015550463676452637, 0.015480992317199706, 0.015387807846069336, 0.01547539234161377, 0.015519840240478516, 0.01570191955566406, 0.015736096382141112, 0.015788384437561035, 0.015598176002502441, 0.015525535583496093, 0.01562764835357666, 0.015433600425720215, 0.015443039894104003, 0.015398880004882813, 0.015331135749816895, 0.015500703811645507, 0.01568230438232422, 0.0156212797164917, 0.015572863578796387, 0.015626239776611327, 0.015584223747253418, 0.01561193561553955, 0.01572364807128906, 0.016351423263549804, 0.015996895790100097, 0.01596649646759033, 0.01607526397705078, 0.0159366397857666, 0.015766143798828125, 0.01567948818206787, 0.01565286445617676, 0.015601471900939941, 0.015511167526245118, 0.015323328018188477, 0.015689855575561525, 0.01575376033782959, 0.015654080390930174, 0.015565279960632325, 0.015520064353942872, 0.015470784187316894, 0.015484416007995605, 0.01557759952545166, 0.016189023971557616, 0.01604240036010742, 0.01591267204284668, 0.015669568061828613, 0.015784000396728514, 0.01577494430541992, 0.01573344039916992, 0.01583513641357422, 0.015930463790893554, 0.016055423736572264, 0.016330528259277343, 0.01635686492919922, 0.016421375274658204, 0.016830656051635744, 0.016834367752075197, 0.01626316833496094, 0.01617286491394043, 0.016108863830566405, 0.016137088775634766, 0.01701888084411621, 0.01604732894897461, 0.016103519439697265, 0.016746816635131837, 0.016250463485717775, 0.019272480010986328, 0.016350784301757813, 0.016225791931152343, 0.01614329528808594, 0.015933600425720215, 0.015970239639282225, 0.015927167892456056, 0.015829024314880372, 0.015800448417663576, 0.015928319931030274, 0.015947839736938477, 0.01612451171875, 0.01616908836364746, 0.016269407272338866, 0.016115999221801756, 0.016153791427612304, 0.016200223922729493, 0.016216064453125, 0.016325855255126955, 0.01696175956726074, 0.016360000610351564, 0.016342975616455077, 0.016228607177734375, 0.016219968795776366, 0.016119808197021485, 0.016151552200317384, 0.016288768768310546, 0.016244607925415037, 0.01619161605834961, 0.01616092872619629, 0.015939680099487305, 0.01618307113647461, 0.01627462387084961, 0.01623628807067871, 0.016142175674438475, 0.01609129524230957, 0.016668672561645507, 0.016300031661987305, 0.016144351959228517, 0.016108608245849608, 0.016573408126831054, 0.01649260711669922, 0.017542272567749023, 0.01627622413635254, 0.016404287338256836, 0.01631158447265625, 0.016356319427490235, 0.016436672210693358, 0.016765504837036132, 0.016373151779174804, 0.016316320419311522, 0.016441152572631835, 0.016270431518554687, 0.016558015823364258, 0.016205663681030272, 0.016707584381103514, 0.016186431884765626, 0.016008224487304688, 0.015841183662414552, 0.015755295753479004, 0.01589859199523926, 0.016106943130493163, 0.015665568351745606, 0.015847519874572755, 0.015515647888183593, 0.015511615753173828, 0.015900671958923338, 0.015736831665039062, 0.015726943969726563, 0.016909984588623046, 
0.015659008026123047, 0.01586355209350586, 0.015569151878356934, 0.015558688163757325, 0.015701984405517577, 0.015644864082336427, 0.015527359962463379, 0.015829471588134764, 0.01553603172302246, 0.015590847969055176, 0.01605232048034668, 0.01570864009857178, 0.01570201587677002, 0.015609439849853515, 0.01555292797088623, 0.015617216110229492, 0.015584063529968262, 0.015604928016662597, 0.015508543968200684, 0.015490400314331054, 0.015578911781311036, 0.015522144317626952, 0.015537983894348144, 0.015133983612060547, 0.015426464080810547, 0.01535587215423584, 0.015413056373596192, 0.01544211196899414, 0.015388575553894043, 0.015451583862304687, 0.015343839645385743, 0.015425984382629394, 0.015396863937377929, 0.015405055999755859, 0.015581184387207032, 0.0155316801071167, 0.015560352325439453, 0.01546275234222412, 0.015447999954223633, 0.01552950382232666, 0.015532928466796875, 0.015828991889953615, 0.01552998447418213, 0.01603379249572754, 0.015588640213012695, 0.01561257553100586, 0.015893888473510743, 0.016078527450561524, 0.016076959609985352, 0.015809215545654298, 0.015685791969299315, 0.015544192314147949, 0.015488639831542968, 0.01561190414428711, 0.015532544136047363, 0.01570406436920166, 0.015871583938598634, 0.01566262435913086, 0.015754015922546386, 0.015904447555541993, 0.015888416290283203, 0.01576707172393799, 0.01565987205505371, 0.015638208389282225, 0.01574124813079834, 0.015830240249633788, 0.015864831924438477, 0.015916831970214845, 0.01591872024536133, 0.015804287910461425, 0.015849984169006348, 0.01592319965362549, 0.015917056083679198, 0.016377119064331053, 0.01608163261413574, 0.01599622440338135, 0.015835807800292968, 0.015830495834350585, 0.015768128395080565, 0.015977984428405763, 0.01577830410003662, 0.015624192237854004, 0.015602687835693359, 0.015530176162719726, 0.015534976005554198, 0.015529919624328613]",tokens/s,62.90496530933364,,, 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,2132.529152,11826.823168,0.0,11431.575552,10953.091072,s,1,22.00828515625,22.00828515625,0.0,22.00828515625,22.00828515625,22.00828515625,22.00828515625,[22.00828515625],,kWh,0.0004223264299625043,4.657848849772365e-05,0.0001591851273479994,0.0006280900458082273,,MB,1606.619136,12722.307072,0.0,12314.476544,11624.128512,s,10,18.991810424804687,1.899181042480469,0.006285078080570184,1.9012791137695313,1.9044025756835938,1.9050386291503907,1.9055474719238281,"[1.883064453125, 1.8956903076171876, 1.8957227783203126, 1.8987581787109375, 1.89982373046875, 1.9027657470703125, 1.9033148193359375, 1.90426123046875, 1.9027344970703124, 
1.9056746826171875]",tokens/s,134.79494280631894,kWh,5.527992636249715e-05,6.09700754509649e-06,3.678297387080254e-05,9.815990777839619e-05,tokens/kWh,2607989.410278791,MB,1610.563584,12724.404224,0.0,12316.573696,11624.131072,s,10,93.7228251953125,9.37228251953125,0.025483996830139362,9.3828662109375,9.3948287109375,9.39677373046875,9.39832974609375,"[9.3171259765625, 9.34112109375, 9.3562275390625, 9.366119140625, 9.38015625, 9.385576171875, 9.390072265625, 9.394396484375, 9.3933115234375, 9.39871875]",tokens/s,6.721948454788035,kWh,0.0002742070814204203,3.024713545027438e-05,0.00018208203455439553,0.00048653625142509014,tokens/kWh,129486.75420478886,,s,630,93.71858575439465,0.14875965992761037,0.0018670747989621259,0.14857625579833983,0.15037186431884766,0.1509479766845703,0.15800266555786133,"[0.1578668212890625, 0.14541209411621095, 0.1457986602783203, 0.14605926513671874, 0.1453059539794922, 0.1458253173828125, 0.15310092163085937, 0.14805197143554688, 0.14697065734863282, 0.14716067504882813, 0.1450806427001953, 0.14627027893066405, 0.14753695678710937, 0.1493697967529297, 0.14764236450195312, 0.1489152069091797, 0.14611293029785155, 0.14620063781738282, 0.14700703430175782, 0.14798915100097657, 0.14824479675292968, 0.14863360595703126, 0.14670236206054688, 0.1462451171875, 0.14616213989257812, 0.1480745849609375, 0.1482296600341797, 0.1492954864501953, 0.1476485137939453, 0.14700953674316405, 0.14799667358398438, 0.1458524169921875, 0.14820358276367188, 0.14860841369628905, 0.1489126739501953, 0.14828134155273437, 0.14825471496582032, 0.14572134399414063, 0.14747238159179688, 0.1499832305908203, 0.14793516540527343, 0.14889718627929688, 0.1484554901123047, 0.1467065887451172, 0.14656761169433594, 0.14847109985351561, 0.1483701171875, 0.1493987274169922, 0.14844192504882814, 0.14801837158203124, 0.14762471008300782, 0.14702188110351563, 0.14848345947265626, 0.14958409118652344, 0.14734915161132814, 0.14943206787109375, 0.14713548278808594, 0.14738394165039062, 0.1484449920654297, 0.14857887268066405, 0.14798460388183593, 0.1500403594970703, 0.1471443786621094, 0.157739013671875, 0.14625526428222657, 0.14730096435546874, 0.14523155212402344, 0.14586297607421875, 0.14646885681152344, 0.15389430236816407, 0.14810812377929689, 0.14865177917480468, 0.14545510864257813, 0.1459568634033203, 0.14678118896484374, 0.14951321411132812, 0.14969378662109376, 0.14967196655273438, 0.14652275085449218, 0.146429443359375, 0.14754412841796874, 0.14606150817871094, 0.14944248962402343, 0.15077203369140624, 0.14768975830078124, 0.14713536071777344, 0.14652297973632813, 0.1462025604248047, 0.1475850830078125, 0.14964041137695314, 0.14925868225097655, 0.14944496154785156, 0.14662185668945313, 0.1462395477294922, 0.14750338745117186, 0.1484638671875, 0.1490513916015625, 0.15031826782226562, 0.14733544921875, 0.14775144958496095, 0.14702386474609375, 0.14782669067382812, 0.148791015625, 0.14926876831054686, 0.14798643493652344, 0.1500037078857422, 0.14678016662597657, 0.14770314025878906, 0.14914802551269532, 0.1483410186767578, 0.14852272033691405, 0.14906600952148438, 0.14852854919433595, 0.14907656860351562, 0.14708122253417968, 0.14872735595703124, 0.1490979766845703, 0.1485731201171875, 0.14914968872070314, 0.14837554931640626, 0.14685600280761718, 0.14935443115234376, 0.14855081176757812, 0.1491198425292969, 0.14914764404296876, 0.1484083251953125, 0.15698739624023436, 0.1463190155029297, 0.14816015625, 0.1452280578613281, 0.1454698486328125, 0.14778080749511718, 0.15338783264160155, 
0.148872314453125, 0.14789926147460938, 0.14662246704101561, 0.14626815795898437, 0.14814002990722655, 0.14752153015136718, 0.15078604125976564, 0.1497046661376953, 0.14700108337402343, 0.14695452880859375, 0.14835098266601562, 0.1465528259277344, 0.14892031860351562, 0.15001365661621094, 0.148453125, 0.14939535522460937, 0.1458305206298828, 0.147615234375, 0.1497912292480469, 0.14783247375488281, 0.1486422119140625, 0.15010809326171876, 0.14765802001953124, 0.14784707641601563, 0.14703085327148438, 0.14873190307617187, 0.15023922729492187, 0.1482073211669922, 0.14931936645507812, 0.14695074462890625, 0.1481719055175781, 0.14858438110351563, 0.14817170715332031, 0.14839132690429688, 0.15061024475097656, 0.14844342041015626, 0.14807472229003907, 0.14878448486328125, 0.147706298828125, 0.14908546447753906, 0.1482872314453125, 0.1488506622314453, 0.15016563415527343, 0.14749990844726563, 0.14795529174804686, 0.14851461791992188, 0.1486037139892578, 0.14991500854492187, 0.14820147705078124, 0.1487445068359375, 0.14731590270996095, 0.147917724609375, 0.14872108459472655, 0.14986502075195313, 0.1490370635986328, 0.1496309814453125, 0.15805815124511718, 0.14560710144042968, 0.1467632598876953, 0.14735002136230468, 0.14718975830078124, 0.146513916015625, 0.15251251220703124, 0.14778976440429686, 0.1474512023925781, 0.14939622497558594, 0.14654197692871093, 0.14752616882324218, 0.1492071075439453, 0.14929075622558594, 0.1484126739501953, 0.1469706268310547, 0.14818861389160157, 0.14991593933105468, 0.147013916015625, 0.1480990753173828, 0.15048854064941405, 0.14776963806152343, 0.14847821044921874, 0.1482629089355469, 0.14788995361328125, 0.14904920959472656, 0.14798883056640624, 0.14853453063964844, 0.15028915405273438, 0.14759730529785156, 0.14849842834472657, 0.14829977416992188, 0.14836531066894532, 0.1494530487060547, 0.14814183044433593, 0.1485455322265625, 0.14985011291503905, 0.1477181396484375, 0.14847795104980469, 0.14866998291015626, 0.14788861083984375, 0.14911692810058594, 0.14912307739257813, 0.1486840057373047, 0.14943650817871093, 0.14737274169921874, 0.14937692260742189, 0.14788009643554687, 0.14844511413574218, 0.15067123413085937, 0.148555908203125, 0.14955711364746094, 0.1474397430419922, 0.14872157287597657, 0.15056291198730468, 0.14832025146484376, 0.1486929931640625, 0.14895513916015626, 0.1483014678955078, 0.15027235412597656, 0.14828924560546874, 0.14915525817871095, 0.14872198486328125, 0.15993215942382813, 0.14751708984375, 0.14737619018554687, 0.14643898010253906, 0.14692965698242189, 0.14870527648925783, 0.15146803283691407, 0.1508106231689453, 0.14653599548339843, 0.14792678833007813, 0.14833261108398438, 0.14645660400390625, 0.1486231689453125, 0.15122511291503907, 0.14828544616699219, 0.14971856689453125, 0.14693621826171874, 0.14740486145019532, 0.14841036987304687, 0.1483357391357422, 0.15048361206054686, 0.14838829040527343, 0.1484613494873047, 0.14987858581542968, 0.14722682189941405, 0.1478544616699219, 0.1494486083984375, 0.1489008331298828, 0.15049955749511718, 0.14792291259765625, 0.14841976928710937, 0.1482860870361328, 0.1481871337890625, 0.15021670532226564, 0.14863526916503905, 0.14864012145996094, 0.14857420349121095, 0.14819532775878907, 0.14851890563964842, 0.1487337646484375, 0.1484781494140625, 0.1506299591064453, 0.14853575134277344, 0.14922752380371093, 0.14804893493652344, 0.14816458129882812, 0.15020541381835936, 0.1488690185546875, 0.14906381225585938, 0.14823628234863281, 0.14857830810546874, 0.1500584259033203, 0.1488143310546875, 
0.14919686889648437, 0.14810931396484375, 0.1489304656982422, 0.14990911865234374, 0.1488056640625, 0.14905699157714844, 0.14861817932128907, 0.14947535705566406, 0.14953855895996093, 0.14837910461425782, 0.16095333862304687, 0.14748570251464843, 0.14649932861328124, 0.14717747497558595, 0.14834857177734376, 0.1456494445800781, 0.15342060852050782, 0.14872166442871093, 0.1476091766357422, 0.14962693786621092, 0.14657160949707032, 0.1472184295654297, 0.14970675659179689, 0.14991352844238282, 0.1497290496826172, 0.14698687744140626, 0.14824412536621093, 0.1495392608642578, 0.14683782958984376, 0.14890412902832031, 0.14968608093261718, 0.14815635681152345, 0.15051712036132814, 0.147901123046875, 0.14726499938964843, 0.14922921752929688, 0.14900070190429687, 0.14986483764648437, 0.14855760192871093, 0.14835952758789062, 0.14913690185546874, 0.14779592895507812, 0.1493282928466797, 0.14874188232421875, 0.1486168975830078, 0.14991007995605468, 0.14843904113769532, 0.1491183624267578, 0.14833721923828125, 0.1489775390625, 0.14949392700195313, 0.14821171569824218, 0.14970841979980468, 0.1482710418701172, 0.1488501739501953, 0.14969952392578126, 0.14888345336914063, 0.14876057434082032, 0.14870527648925783, 0.1490145263671875, 0.1499129638671875, 0.14834136962890626, 0.14980709838867187, 0.1486069793701172, 0.14851887512207032, 0.14970474243164061, 0.14883634948730468, 0.15015936279296874, 0.14893670654296876, 0.1485701141357422, 0.14867625427246095, 0.14870358276367188, 0.15068365478515625, 0.15983421325683594, 0.1475067901611328, 0.14824703979492188, 0.14649754333496093, 0.14741299438476563, 0.14839590454101562, 0.15120375061035157, 0.15081289672851564, 0.14701158142089843, 0.14820565795898438, 0.14740879821777345, 0.14737788391113282, 0.14829977416992188, 0.1518011474609375, 0.1477840576171875, 0.14971731567382812, 0.14720643615722656, 0.14742323303222657, 0.14916812133789062, 0.14943846130371094, 0.1512489013671875, 0.14784259033203126, 0.14839447021484375, 0.14763357543945313, 0.14719418334960938, 0.14937635803222657, 0.15040777587890625, 0.14934255981445313, 0.1489320068359375, 0.14788652038574218, 0.1487787170410156, 0.14705094909667968, 0.14965475463867187, 0.15120005798339844, 0.14869961547851562, 0.15063040161132812, 0.1474949188232422, 0.14834072875976562, 0.14789427185058593, 0.150466552734375, 0.15071846008300782, 0.1488343048095703, 0.14881129455566405, 0.1471453399658203, 0.14837539672851563, 0.1502578582763672, 0.14976937866210938, 0.14887388610839844, 0.14923139953613282, 0.14886729431152343, 0.14908758544921874, 0.14841923522949219, 0.1494950714111328, 0.14941462707519532, 0.1492295684814453, 0.14889503479003907, 0.14904595947265625, 0.14950726318359375, 0.14831494140625, 0.14960435485839843, 0.14936679077148438, 0.14997299194335936, 0.14920399475097657, 0.15953305053710937, 0.14723043823242188, 0.14750968933105468, 0.14654812622070312, 0.14725692749023436, 0.148378662109375, 0.15187120056152345, 0.1507197723388672, 0.14664691162109375, 0.14829779052734374, 0.14833689880371093, 0.1466343994140625, 0.14940269470214843, 0.1504930877685547, 0.14905958557128907, 0.14892236328125, 0.14779705810546875, 0.14837446594238282, 0.1477242889404297, 0.14979644775390624, 0.15104365539550782, 0.14844195556640624, 0.1482395477294922, 0.14779061889648437, 0.14775299072265624, 0.14894688415527343, 0.1495347900390625, 0.1491763916015625, 0.14946870422363281, 0.147685791015625, 0.14830738830566406, 0.1484125061035156, 0.14931709289550782, 0.15089881896972657, 0.14942912292480467, 0.14970675659179689, 
0.146998779296875, 0.14823065185546874, 0.15011839294433593, 0.1492991943359375, 0.14949334716796875, 0.14953712463378907, 0.14853330993652344, 0.14871128845214843, 0.1482254333496094, 0.14880216979980468, 0.14944674682617187, 0.1495451202392578, 0.150596923828125, 0.14859494018554686, 0.150186279296875, 0.14766831970214844, 0.14924656677246093, 0.14919480895996093, 0.1499402313232422, 0.15055258178710937, 0.14867251586914063, 0.14881190490722657, 0.14751731872558593, 0.14920256042480468, 0.15068588256835938, 0.15008607482910155, 0.15136093139648438, 0.15862098693847657, 0.14686630249023438, 0.1481856689453125, 0.14656716918945312, 0.1464617919921875, 0.14816569519042969, 0.1531820526123047, 0.14896307373046874, 0.1480133056640625, 0.14709735107421876, 0.1475536346435547, 0.14741334533691405, 0.14935478210449218, 0.1525148468017578, 0.14903910827636718, 0.14906777954101563, 0.14587289428710937, 0.14798841857910155, 0.14929046630859374, 0.1497379913330078, 0.14989935302734375, 0.149032958984375, 0.14734693908691407, 0.14878115844726564, 0.14779023742675781, 0.14837350463867188, 0.1515166473388672, 0.14926626586914063, 0.1508338623046875, 0.1469246368408203, 0.14834918212890624, 0.14894966125488282, 0.14850662231445313, 0.1506078643798828, 0.1495548553466797, 0.14880546569824218, 0.14855833435058594, 0.14803135681152343, 0.1485489959716797, 0.14964198303222656, 0.14907725524902343, 0.15048069763183594, 0.14822898864746092, 0.14866233825683595, 0.1474140167236328, 0.14947021484375, 0.15194090270996094, 0.14911509704589843, 0.14987059020996094, 0.14783238220214845, 0.1486299591064453, 0.1496678466796875, 0.14896258544921875, 0.15022265625, 0.14943904113769532, 0.14874453735351562, 0.14895513916015626, 0.1489469451904297, 0.14966989135742187, 0.14976141357421874, 0.1493059844970703, 0.15016978454589844, 0.14903482055664063, 0.15973487854003907, 0.14637324523925782, 0.14639094543457032, 0.14741127014160157, 0.14830918884277344, 0.14672773742675782, 0.15436618041992187, 0.14774864196777343, 0.14762745666503907, 0.14828729248046876, 0.1473115234375, 0.14725436401367187, 0.15095639038085937, 0.1503678741455078, 0.15055474853515624, 0.14770399475097656, 0.1484169921875, 0.14728707885742187, 0.14742947387695313, 0.15051046752929687, 0.15043075561523436, 0.1493553924560547, 0.14874227905273438, 0.14767417907714844, 0.14769244384765626, 0.14814413452148437, 0.14986367797851563, 0.1510996551513672, 0.14797013854980468, 0.14934672546386718, 0.14725030517578125, 0.14796890258789064, 0.1509396514892578, 0.1497681884765625, 0.15033958435058595, 0.14846937561035156, 0.14851036071777343, 0.14906851196289062, 0.14833868408203124, 0.14983576965332032, 0.14883021545410155, 0.15006271362304688, 0.14914802551269532, 0.14860858154296874, 0.14947573852539062, 0.14853289794921876, 0.1492545623779297, 0.14992778015136718, 0.14949594116210937, 0.15028633117675783, 0.14834063720703125, 0.14954505920410155, 0.148279296875, 0.1499334716796875, 0.1506185302734375, 0.14935250854492188, 0.150136962890625, 0.14760557556152343, 0.14960633850097657, 0.14959222412109374, 0.1496629180908203, 0.1509547882080078, 0.14948953247070312]",tokens/s,6.7222525279139616,,, 
4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1832.112128,2899.247104,0.0,2503.999488,2349.010944,s,1,10.43385546875,10.43385546875,0.0,10.43385546875,10.43385546875,10.43385546875,10.43385546875,[10.43385546875],,kWh,8.925259466669407e-05,9.8379611483825e-06,3.31683598679966e-05,0.00013225891568307316,,MB,1861.627904,3310.288896,0.0,2902.458368,2642.29888,s,10,2.0485160675048824,0.20485160675048827,0.0014639113538269938,0.2049086685180664,0.20690233612060546,0.2069673957824707,0.2070194435119629,"[0.20393798828125, 0.20473356628417969, 0.20267205810546876, 0.20703245544433593, 0.20374549865722658, 0.20508377075195314, 0.20688787841796874, 0.20291651916503906, 0.205378173828125, 0.20612815856933595]",tokens/s,1249.6850967432786,kWh,6.244455116046322e-06,6.886439501866311e-07,4.1591522634890845e-06,1.1092251329722038e-05,tokens/kWh,23079174.13609624,MB,1879.646208,3310.288896,0.0,2902.458368,2642.30144,s,10,26.158725097656248,2.615872509765625,0.02669456699365264,2.600468994140625,2.6611926513671875,2.6641700561523436,2.666551979980469,"[2.59824267578125, 2.632306640625, 2.6671474609375, 2.660531005859375, 2.597810546875, 2.6013447265625, 2.598972412109375, 2.59959326171875, 2.61577734375, 2.5869990234375]",tokens/s,24.08374252369227,kWh,7.543729570770369e-05,8.320425516245945e-06,4.1119217292509205e-05,0.00012487693851645883,tokens/kWh,504496.67287204176,,s,630,26.156115360260003,0.04151764342898414,0.0009361234186335529,0.04123801612854004,0.04229381065368652,0.04348384552001952,0.045189528236389165,"[0.04186492919921875, 0.04122652816772461, 0.04103760147094727, 0.04131657409667969, 0.04110784149169922, 0.041498241424560545, 0.041033313751220706, 0.04120412826538086, 0.04109910583496094, 0.0409623031616211, 0.040944671630859374, 0.041351966857910157, 0.04093552017211914, 0.04110927963256836, 0.04083919906616211, 0.04199852752685547, 0.04134265518188476, 0.04110982513427734, 0.04149571228027344, 0.04102844619750977, 0.04103372955322265, 0.04091865539550781, 0.0406798095703125, 0.041180286407470706, 0.041122688293457034, 0.041474048614501956, 0.04111705780029297, 0.04172876739501953, 0.04131110382080078, 0.04123955154418945, 0.041213024139404295, 0.041243553161621094, 0.040982112884521485, 0.04104438400268555, 0.04104502487182617, 0.04149327850341797, 0.04101987075805664, 0.04109897613525391, 0.04203424072265625, 0.04169814300537109, 0.041987552642822265, 0.041237342834472654, 0.04123583984375, 0.0411426887512207, 0.041148414611816404, 0.04139606475830078, 0.04099430465698242, 0.04137027359008789, 0.04108697509765625, 0.04107030487060547, 0.04103952026367187, 0.04097846221923828, 0.04143929672241211, 0.04146435165405273, 0.04114022445678711, 0.04130985641479492, 0.041175071716308596, 0.04144678497314453, 0.04125913619995117, 0.041834976196289064, 0.041140575408935544, 0.04089984130859375, 0.041014015197753904, 0.041705406188964844, 0.04106006240844726, 
0.04095475387573242, 0.04138940811157227, 0.04096387100219727, 0.04108969497680664, 0.04085318374633789, 0.04096441650390625, 0.04099440002441406, 0.04099507141113281, 0.04089023971557617, 0.041302303314208984, 0.041866912841796874, 0.043305023193359375, 0.04122652816772461, 0.041867263793945314, 0.04118492889404297, 0.04124502563476563, 0.04098867034912109, 0.04105420684814453, 0.04170751953125, 0.041266590118408206, 0.04099046325683594, 0.0410939826965332, 0.040855552673339846, 0.04120371246337891, 0.04120489501953125, 0.04112665557861328, 0.041168991088867186, 0.04114009475708008, 0.041119232177734374, 0.04122841644287109, 0.04109363174438477, 0.04149248123168945, 0.0411357421875, 0.04152336120605469, 0.041414878845214845, 0.04168646240234375, 0.04149919891357422, 0.04133679962158203, 0.04169116973876953, 0.04150076675415039, 0.041253887176513675, 0.04128969573974609, 0.04130297470092773, 0.0421580810546875, 0.04152012634277344, 0.04154995346069336, 0.04115446472167969, 0.04155491256713867, 0.04135251235961914, 0.04124697494506836, 0.04136131286621094, 0.043536895751953124, 0.049504287719726564, 0.04478524780273437, 0.04494742584228516, 0.044902847290039065, 0.044802047729492187, 0.044951553344726565, 0.041488254547119144, 0.041600704193115234, 0.041403839111328125, 0.046214622497558595, 0.04475344085693359, 0.044951553344726565, 0.044686622619628906, 0.044634624481201174, 0.04434524917602539, 0.044681537628173826, 0.04374844741821289, 0.04186531066894531, 0.042082656860351564, 0.04227324676513672, 0.04208025741577148, 0.042394687652587894, 0.04214470291137695, 0.04253900909423828, 0.042420223236083986, 0.042298881530761716, 0.042402305603027345, 0.04173209762573242, 0.04200243377685547, 0.04137321472167969, 0.041523681640625, 0.04141056060791016, 0.0413322868347168, 0.041912769317626955, 0.04142691040039063, 0.041406112670898436, 0.04125734329223633, 0.041430561065673825, 0.04178992080688477, 0.041506046295166014, 0.0418056640625, 0.04169206237792969, 0.04170751953125, 0.041430526733398435, 0.04147251129150391, 0.041656158447265626, 0.04128908920288086, 0.041396350860595704, 0.041212158203125, 0.04144579315185547, 0.04229324722290039, 0.04127849578857422, 0.04174947357177734, 0.041875457763671874, 0.041885696411132815, 0.0420711669921875, 0.04162035369873047, 0.04215193557739258, 0.04154150390625, 0.040986942291259765, 0.04100076675415039, 0.04088422393798828, 0.040978431701660156, 0.04090838241577149, 0.04109148788452149, 0.04213862228393555, 0.04388764953613281, 0.044763103485107425, 0.04426137542724609, 0.04429385757446289, 0.04473680114746094, 0.044730369567871096, 0.04576448059082031, 0.04506547164916992, 0.04521567916870117, 0.04585744094848633, 0.04274310302734375, 0.04158355331420899, 0.04161663818359375, 0.04143795013427734, 0.04160921478271484, 0.04300614547729492, 0.04137532806396484, 0.0413199348449707, 0.041759456634521484, 0.04210483169555664, 0.04229939270019531, 0.04254719924926758, 0.044943359375, 0.04259775924682617, 0.04247615814208985, 0.04211711883544922, 0.04208380889892578, 0.04246716690063477, 0.042393566131591794, 0.04161814498901367, 0.0420882568359375, 0.04157846450805664, 0.04251670455932617, 0.042229759216308595, 0.042646656036376955, 0.046682430267333985, 0.04197548675537109, 0.04311872100830078, 0.04288486480712891, 0.04238643264770508, 0.04196966552734375, 0.04213145446777344, 0.04168716812133789, 0.041770881652832034, 0.04177695846557617, 0.04187919998168945, 0.04182479858398438, 0.04192873764038086, 0.04154774475097656, 0.041041439056396484, 
0.04102396774291992, 0.041344863891601566, 0.041524383544921876, 0.04122281646728516, 0.041535518646240235, 0.04156447982788086, 0.0413260498046875, 0.04147776031494141, 0.042119937896728514, 0.04152131271362305, 0.04164217758178711, 0.04187936019897461, 0.041177055358886716, 0.04161539077758789, 0.041110591888427736, 0.041374622344970705, 0.04145475387573242, 0.04177091217041016, 0.04190307235717773, 0.04189091110229492, 0.04141120147705078, 0.041224193572998044, 0.041369888305664064, 0.0413034553527832, 0.041129600524902346, 0.041226688385009765, 0.0410588493347168, 0.04148857498168945, 0.04206572723388672, 0.04151910400390625, 0.04134905624389648, 0.041433151245117185, 0.0415579833984375, 0.04109286499023437, 0.04107497787475586, 0.04089606475830078, 0.041142719268798825, 0.0409804801940918, 0.04101020812988281, 0.04106444931030274, 0.04096713638305664, 0.040860992431640625, 0.04112044906616211, 0.041127296447753904, 0.041172897338867184, 0.04146623992919922, 0.04071664047241211, 0.04089641571044922, 0.04114761734008789, 0.04152204895019531, 0.04115990447998047, 0.04103247833251953, 0.041568321228027345, 0.041067550659179684, 0.04097468948364258, 0.04106092834472656, 0.04106444931030274, 0.04094976043701172, 0.04064230346679688, 0.040869152069091794, 0.04110230255126953, 0.040767105102539065, 0.04117747116088867, 0.04099686431884766, 0.04095296096801758, 0.040834049224853515, 0.04341900634765625, 0.041256832122802733, 0.041175647735595705, 0.04122000122070312, 0.04095139312744141, 0.04106089782714844, 0.04118732833862305, 0.04148223876953125, 0.04193689727783203, 0.041484001159667966, 0.041236766815185545, 0.04165836715698242, 0.04125395202636719, 0.0411385612487793, 0.041273952484130856, 0.041310081481933596, 0.041942337036132815, 0.04124947357177734, 0.041279167175292966, 0.04127097702026367, 0.041291553497314455, 0.0424332160949707, 0.0410805778503418, 0.04165603256225586, 0.04096684646606445, 0.041250656127929684, 0.04115785598754883, 0.04114527893066406, 0.0411822395324707, 0.041333919525146486, 0.04161075210571289, 0.04142931365966797, 0.04101939010620117, 0.041283550262451174, 0.04089859390258789, 0.041134078979492186, 0.04091222381591797, 0.04095657730102539, 0.04533606338500976, 0.041195102691650394, 0.04091791915893555, 0.041176353454589844, 0.04120383834838867, 0.04122889709472656, 0.04127878570556641, 0.041210784912109374, 0.04192233657836914, 0.04129792022705078, 0.04095795059204101, 0.04109113693237305, 0.04099020767211914, 0.04117513656616211, 0.041369632720947264, 0.041174335479736326, 0.04108803176879883, 0.041861087799072265, 0.04140380859375, 0.041087776184082034, 0.04155152130126953, 0.040951969146728516, 0.04074812698364258, 0.040979007720947265, 0.0408436164855957, 0.04127961730957031, 0.040777599334716794, 0.04091904067993164, 0.0412421760559082, 0.04166086578369141, 0.04082601547241211, 0.04098748779296875, 0.04112319946289063, 0.04143942260742187, 0.04106428909301758, 0.04129622268676758, 0.04102783966064453, 0.041095169067382815, 0.041491840362548826, 0.0409865608215332, 0.04133958435058594, 0.04172851181030274, 0.04114659118652344, 0.04092006301879883, 0.04126512145996094, 0.04126188659667969, 0.04111715316772461, 0.0429901123046875, 0.041339969635009764, 0.041102272033691406, 0.041339935302734374, 0.04111663818359375, 0.041651294708251956, 0.04107561492919922, 0.04118732833862305, 0.041289726257324216, 0.04111273574829102, 0.041632606506347654, 0.04166598510742187, 0.04194566345214844, 0.04102143859863281, 0.04175468826293945, 0.041840576171875, 
0.041618751525878905, 0.04106515121459961, 0.040940799713134766, 0.041055103302001954, 0.04094553756713867, 0.04111769485473633, 0.04095180892944336, 0.04101849746704102, 0.04078067016601562, 0.04105833435058594, 0.04107465744018555, 0.0409989128112793, 0.0409700813293457, 0.04135692977905273, 0.04106908798217773, 0.040938785552978516, 0.040723041534423826, 0.04084339141845703, 0.04093952178955078, 0.04125289535522461, 0.04106972885131836, 0.04096195220947266, 0.04256182479858398, 0.04120844650268555, 0.04115769577026367, 0.04203615951538086, 0.04082483291625977, 0.04097228622436523, 0.040861217498779294, 0.04109142303466797, 0.0410830078125, 0.04129587173461914, 0.042958782196044924, 0.04114633560180664, 0.0409150390625, 0.04096409606933594, 0.04097571182250977, 0.041263393402099606, 0.04096243286132813, 0.04108697509765625, 0.0410928955078125, 0.04179148864746094, 0.0441712646484375, 0.04512550354003906, 0.041660320281982424, 0.0407977294921875, 0.04086608123779297, 0.04079782485961914, 0.041075328826904296, 0.04106406402587891, 0.04103631973266601, 0.04088934326171875, 0.04115865707397461, 0.040847423553466794, 0.041011199951171876, 0.04094454574584961, 0.04112380981445313, 0.04128947067260742, 0.041301502227783206, 0.04093948745727539, 0.04091747283935547, 0.04090288162231445, 0.041369758605957034, 0.04127884674072266, 0.04242086410522461, 0.041104896545410156, 0.04233587265014648, 0.041232769012451174, 0.041320255279541016, 0.04106515121459961, 0.04089158248901367, 0.04105503845214844, 0.041199520111083986, 0.04141884613037109, 0.041017345428466793, 0.040844833374023434, 0.04116105651855469, 0.0409288330078125, 0.040872512817382814, 0.04083302307128906, 0.04102950286865235, 0.04126019287109375, 0.040970558166503905, 0.04075945663452148, 0.04126464080810547, 0.04089548873901367, 0.04111328125, 0.04134051132202148, 0.04119625473022461, 0.0409169921875, 0.040879390716552735, 0.0416426887512207, 0.04129385757446289, 0.04140236663818359, 0.0410398063659668, 0.04091910552978516, 0.04109827041625977, 0.04098553466796875, 0.041084190368652344, 0.04104793548583984, 0.04139302444458008, 0.041215999603271485, 0.04132352066040039, 0.04120383834838867, 0.04252467346191406, 0.04122943878173828, 0.04180217742919922, 0.041443264007568356, 0.04148688125610352, 0.04136460876464844, 0.04185993576049805, 0.04140031814575195, 0.04169676971435547, 0.04127596664428711, 0.04132447814941406, 0.041191425323486325, 0.041398273468017575, 0.041280960083007814, 0.04153401565551758, 0.041455520629882815, 0.04149033737182617, 0.042753982543945315, 0.04289152145385742, 0.041240577697753904, 0.04202905654907227, 0.041442913055419923, 0.04129219055175781, 0.04107001495361328, 0.0415032958984375, 0.04118732833862305, 0.04215155029296875, 0.04116313552856445, 0.041635841369628904, 0.04145142364501953, 0.04132812881469727, 0.04152105712890625, 0.041366207122802735, 0.04138393783569336, 0.04106230545043945, 0.04131375885009766, 0.04118387222290039, 0.04123648071289063, 0.04260790252685547, 0.041158878326416015, 0.04130031967163086, 0.04123868942260742, 0.041192958831787106, 0.041361919403076174, 0.041137535095214846, 0.041378337860107424, 0.04118947219848633, 0.04129974365234375, 0.04113225555419922, 0.04157196807861328, 0.041650558471679686, 0.04118707275390625, 0.04461081695556641, 0.04124160003662109, 0.04099814224243164, 0.04104268646240234, 0.04108515167236328, 0.04110313415527344, 0.040936511993408205, 0.04110784149169922, 0.04118316650390625, 0.043580127716064454, 0.04125788879394531, 0.04158035278320313, 
0.04111529541015625, 0.0409106559753418, 0.041021984100341795, 0.04106428909301758, 0.041070751190185543, 0.041051200866699215, 0.04112879943847656, 0.04107049560546875, 0.04124179077148438, 0.04163481521606445, 0.04106444931030274, 0.04117913436889648, 0.04093244934082031, 0.04105104064941406, 0.04113564682006836, 0.04088265609741211, 0.04082198333740234, 0.04106729507446289, 0.041139518737792966, 0.04099862289428711, 0.041231327056884766, 0.04072447967529297, 0.040974273681640624, 0.041054271697998045, 0.041510814666748046, 0.04095347213745117, 0.041030303955078125, 0.040918464660644534, 0.04081919860839844, 0.04092502212524414, 0.04121807861328125, 0.04074905776977539, 0.04116889572143555, 0.04118022537231445, 0.04102169418334961, 0.04095657730102539, 0.040796192169189456, 0.040869056701660154, 0.04114924621582031, 0.0414453125, 0.04088966369628906, 0.04101599884033203, 0.04098668670654297, 0.040804351806640625, 0.04108489608764648, 0.04080028915405273, 0.04081459045410156, 0.04117647933959961, 0.04230335998535156, 0.0407861442565918, 0.04096803283691406, 0.04073257446289062, 0.04086819076538086, 0.04083248138427734, 0.04114118576049805, 0.041371646881103515, 0.040827999114990236, 0.04099728012084961, 0.04092979049682617, 0.04165740966796875, 0.04099760055541992, 0.04089263916015625]",tokens/s,24.086145489218296,,, -4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): +4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -387,7 +387,7 @@ ChildProcessError: Traceback (most recent call last): self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, 
**model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) @@ -399,7 +399,7 @@ ChildProcessError: Traceback (most recent call last): self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1709, in __getattr__ + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' @@ -441,7 +441,7 @@ ImportError: This modeling file requires the following packages that were not fo ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,807.604224,4683.923456,0.0,4288.67584,4213.842432,s,1,13.5787783203125,13.5787783203125,0.0,13.5787783203125,13.5787783203125,13.5787783203125,13.5787783203125,[13.5787783203125],,kWh,0.00018497083590416515,2.03963994005206e-05,6.22283831159991e-05,0.00026759561842068486,,MB,1192.230912,5107.54816,0.0,4699.717632,4535.11424,s,10,8.553767395019532,0.855376739501953,0.007683236508205055,0.8569793090820312,0.8610069885253906,0.8619868316650391,0.8627707061767578,"[0.8337443237304687, 0.852953857421875, 0.85672607421875, 0.8551661376953125, 0.8570130615234375, 0.8589288330078125, 0.8607892456054688, 0.8629666748046875, 0.856945556640625, 0.8585336303710938]",tokens/s,299.28333116593416,kWh,2.48531106444444e-05,2.7390895892942575e-06,1.6417744615666776e-05,4.400994484940543e-05,tokens/kWh,5816867.093925897,MB,1232.44544,5115.936768,0.0,4708.10624,4535.1168,s,10,40.23405615234375,4.023405615234375,0.0061507504575199655,4.025970825195312,4.029557641601563,4.030256481933594,4.030815554199219,"[4.015683837890625, 4.010197021484375, 4.019560546875, 4.02631640625, 4.025761962890625, 4.022799560546875, 4.0261796875, 4.027199462890625, 4.030955322265625, 4.02940234375]",tokens/s,15.658376516017778,kWh,0.0001180998546972224,1.3028841168202256e-05,7.851052577133302e-05,0.00020963922163675768,tokens/kWh,300516.2846347533,,s,630,40.23095680236817,0.06385866159106059,0.0015793124110772148,0.06369526290893554,0.06435266647338868,0.06462068290710449,0.07443461128234863,"[0.0795832290649414, 0.06496665954589843, 0.06382553482055664, 0.0633240966796875, 0.06302150344848632, 0.06267267227172851, 0.06260348892211914, 0.06262319946289062, 0.06255990219116211, 0.06251110458374023, 0.0625406723022461, 0.0626558723449707, 0.0636995849609375, 0.06333849716186524, 0.06316032028198242, 
0.0640646743774414, 0.06386947250366211, 0.06376176071166992, 0.06457443237304687, 0.06376364898681641, 0.06333523178100586, 0.06309888076782226, 0.0625459213256836, 0.0625172462463379, 0.06259507369995117, 0.06265753555297851, 0.062499679565429685, 0.06303321456909179, 0.06385036849975587, 0.06337561416625977, 0.0632762565612793, 0.06387731170654297, 0.06397161483764649, 0.06438889312744141, 0.06388556671142578, 0.06335715103149414, 0.06384867095947265, 0.06398745727539062, 0.06370655822753907, 0.0633639030456543, 0.06284284973144531, 0.06338313674926757, 0.06284649658203124, 0.06317763137817382, 0.06418812561035156, 0.06393593597412109, 0.06363631820678711, 0.0637050895690918, 0.06414070129394531, 0.06332067108154296, 0.0645630111694336, 0.06414915466308593, 0.06368515014648438, 0.063246337890625, 0.0632828483581543, 0.06490048217773438, 0.06420988464355469, 0.06366207885742188, 0.06340329742431641, 0.06301123046875, 0.06356784057617187, 0.06435033416748047, 0.06386483383178711, 0.07312252807617188, 0.06415724945068359, 0.0634764175415039, 0.06313958358764649, 0.06327705764770508, 0.06235340881347656, 0.06369859313964844, 0.06333190536499024, 0.06303577423095703, 0.06275542449951171, 0.062441280364990234, 0.06250700759887695, 0.062476287841796874, 0.0642231674194336, 0.0637768325805664, 0.06471459197998047, 0.06404045104980469, 0.06379996871948242, 0.06357401657104492, 0.06384569549560547, 0.06351119995117188, 0.06330972671508789, 0.0628040657043457, 0.06274051284790039, 0.06330368041992188, 0.06374560165405273, 0.06343718338012695, 0.06299619293212891, 0.06276287841796875, 0.06390422439575195, 0.06333440017700195, 0.0637248649597168, 0.0641173095703125, 0.06391158294677735, 0.06370352172851562, 0.06410854339599609, 0.0637393913269043, 0.06341856002807617, 0.0632111358642578, 0.06312825775146484, 0.06380915069580079, 0.0633081283569336, 0.06303696060180664, 0.0627143669128418, 0.06400780487060546, 0.06390412902832031, 0.0633733139038086, 0.06342009735107422, 0.06392863845825195, 0.0635431022644043, 0.06429920196533204, 0.06409830474853516, 0.06335279846191406, 0.06341020965576172, 0.06396627044677734, 0.06368966293334961, 0.06398102569580078, 0.06364543914794922, 0.06342086410522461, 0.06347398376464844, 0.06394473648071289, 0.06361280059814453, 0.06330364990234374, 0.07437910461425781, 0.06458464050292968, 0.06365695953369141, 0.06322102355957031, 0.06286921691894531, 0.0636409912109375, 0.06352137756347656, 0.06316646575927734, 0.06252665710449219, 0.06272697448730469, 0.06378086471557617, 0.0632845115661621, 0.06389014434814454, 0.06371680068969726, 0.06330182266235351, 0.06348393630981446, 0.06390819168090821, 0.06345523071289062, 0.06521616363525391, 0.06425122833251953, 0.06332115173339843, 0.06318073654174805, 0.06293289566040039, 0.06377251052856445, 0.0633182716369629, 0.06302505493164062, 0.06321734237670898, 0.06329385757446289, 0.06403616333007812, 0.0636110725402832, 0.06324684906005859, 0.06309478378295899, 0.06383206558227539, 0.0639360008239746, 0.06431385803222656, 0.06393446350097656, 0.0636467514038086, 0.06354838562011719, 0.06402047729492187, 0.06362931060791016, 0.06343270492553711, 0.0631596794128418, 0.06395967864990235, 0.063825439453125, 0.06346185684204102, 0.06345913696289063, 0.06400364685058593, 0.06367295837402344, 0.06465948486328126, 0.06430003356933593, 0.06346441650390625, 0.06420480346679687, 0.0636701774597168, 0.06337340927124023, 0.0633507843017578, 0.06420025634765625, 0.06356832122802734, 0.0633481903076172, 0.06321763229370117, 
0.06413497924804687, 0.06366396713256836, 0.0634090576171875, 0.06420233917236329, 0.07695600128173828, 0.06464921569824218, 0.06372963333129883, 0.06340790557861328, 0.06316249465942383, 0.06255628967285157, 0.06267497634887695, 0.06326800155639649, 0.06375507354736328, 0.06320876693725586, 0.06297875213623047, 0.06274383926391601, 0.06377510452270507, 0.06346550369262695, 0.06382009506225586, 0.06616886138916016, 0.06432262420654297, 0.06374697494506835, 0.06332412719726563, 0.06311939239501953, 0.0638914566040039, 0.06340403366088868, 0.06329958343505859, 0.0628936653137207, 0.06392464065551758, 0.06333030319213867, 0.0631621437072754, 0.06397974395751953, 0.0635590705871582, 0.06332851028442382, 0.06389139175415039, 0.06386633682250976, 0.0639119987487793, 0.06394355010986329, 0.06377459335327149, 0.06328537750244141, 0.06301283264160157, 0.06409219360351562, 0.06359775924682617, 0.06438790130615234, 0.06381568145751954, 0.0634511375427246, 0.06315766525268554, 0.06394326400756836, 0.06369894409179687, 0.06428444671630859, 0.06386825561523438, 0.06373260879516601, 0.06383580780029297, 0.06431308746337891, 0.06373430252075195, 0.06334265518188477, 0.06407782745361328, 0.0639815673828125, 0.06369865417480469, 0.06437328338623047, 0.0643663330078125, 0.06412249755859376, 0.06380992126464843, 0.06351033782958984, 0.06418450927734375, 0.06368438339233398, 0.06358780670166016, 0.07429385375976562, 0.06462054443359375, 0.06384022521972656, 0.06342838287353515, 0.06279193496704101, 0.06415468597412109, 0.06344595336914062, 0.06334156799316407, 0.06272905731201171, 0.06370470428466797, 0.06331856155395507, 0.06311731338500977, 0.0627341423034668, 0.06273763275146485, 0.06359139251708984, 0.06443993377685547, 0.06490528106689453, 0.06442208099365235, 0.06388953781127929, 0.06334627151489258, 0.0631624641418457, 0.06402285003662109, 0.06332956695556641, 0.0637993278503418, 0.06358291244506836, 0.0633133773803711, 0.06330217742919922, 0.0638579216003418, 0.06359116744995118, 0.06416588592529297, 0.06381510543823242, 0.06354585647583008, 0.06401958465576171, 0.06356268692016602, 0.06339142227172852, 0.06431161499023437, 0.06392012786865234, 0.0636701774597168, 0.063587646484375, 0.06413507080078125, 0.06363958358764649, 0.06346428680419922, 0.06417203521728515, 0.06385868835449218, 0.06366412734985352, 0.06375804901123047, 0.06428294372558593, 0.06409552001953125, 0.06460867309570313, 0.0641416015625, 0.06375116729736328, 0.06361804962158203, 0.06415071868896484, 0.06364652633666992, 0.06332387161254883, 0.06406172943115235, 0.06379520034790039, 0.06346108627319336, 0.06395318222045898, 0.06386198425292969, 0.06359939193725586, 0.06357932662963867, 0.06406358337402343, 0.07445728302001953, 0.06475315093994141, 0.06353561782836914, 0.06343024063110352, 0.06279529571533203, 0.06280928039550782, 0.06386246490478516, 0.06349619293212891, 0.06315145492553711, 0.06297868728637696, 0.06260678482055664, 0.06386134338378906, 0.06337068939208984, 0.06317091369628906, 0.06400994873046875, 0.06505027008056641, 0.06404787445068359, 0.06393836975097657, 0.06415302276611329, 0.06372438430786133, 0.06333440017700195, 0.06293724822998047, 0.06370902252197265, 0.0632176628112793, 0.06301907348632813, 0.0629554557800293, 0.06402566528320312, 0.06337631988525391, 0.06329344177246093, 0.06374115371704102, 0.06409072113037109, 0.06431254577636719, 0.06407881927490235, 0.06416793823242188, 0.06366207885742188, 0.06313478469848632, 0.06464752197265625, 0.06399814224243164, 0.06348144149780273, 0.06342851257324218, 
0.06331836700439453, 0.06376710510253907, 0.06335283279418945, 0.06342860794067383, 0.06381523132324218, 0.0637014389038086, 0.06395302581787109, 0.06361619186401367, 0.06457414245605468, 0.06394684982299804, 0.06339369583129882, 0.06410610961914062, 0.06351248168945313, 0.0634150733947754, 0.06509331512451172, 0.0639870719909668, 0.06342105484008789, 0.06325411224365235, 0.06406934356689453, 0.06366073608398437, 0.06332527923583985, 0.0642159652709961, 0.0636948471069336, 0.07531222534179688, 0.06462079620361329, 0.06379996871948242, 0.06337696075439453, 0.06303801727294922, 0.0636102409362793, 0.06324070358276367, 0.06300991821289062, 0.06269583892822266, 0.06371737670898438, 0.06325500869750976, 0.06313926315307618, 0.06258652877807618, 0.06288272094726563, 0.06478438568115234, 0.06436438751220704, 0.06378307342529296, 0.06399107360839844, 0.06361072158813477, 0.06359132766723632, 0.06384409713745118, 0.06353737640380859, 0.06320115280151367, 0.06280121612548828, 0.06375711822509765, 0.06329139328002929, 0.06425775909423828, 0.06379312133789063, 0.06327436828613281, 0.06305385589599609, 0.0638554573059082, 0.06383967971801757, 0.06440409851074219, 0.06389990234375, 0.06346867370605469, 0.06398425674438477, 0.06354102325439454, 0.0631764793395996, 0.06457929229736328, 0.06410518646240235, 0.0635945930480957, 0.06369612884521485, 0.06408448028564454, 0.06363561630249023, 0.06340563201904297, 0.06410886383056641, 0.06390182495117187, 0.06461974334716797, 0.06414415740966797, 0.06425395202636719, 0.06407373046875, 0.06354534530639648, 0.06346342468261719, 0.06413311767578125, 0.06381158447265625, 0.06408956909179687, 0.0638100814819336, 0.06348185729980468, 0.06382976150512695, 0.0641170883178711, 0.06343670272827148, 0.0639447021484375, 0.06460633850097657, 0.07571673583984374, 0.06435225677490235, 0.06355267333984375, 0.06304240036010743, 0.06290633773803711, 0.06353475189208985, 0.06309747314453125, 0.06297369766235351, 0.06274867248535156, 0.06370950317382812, 0.06312419128417969, 0.06291292953491211, 0.0626324462890625, 0.06366758346557617, 0.06419321441650391, 0.06503833770751953, 0.06456114959716797, 0.06379296112060547, 0.06356150436401367, 0.06330995178222656, 0.06420451354980469, 0.0638039665222168, 0.06333030319213867, 0.06318080139160157, 0.0638230094909668, 0.06338032150268555, 0.06447305297851562, 0.06386415863037109, 0.06330780792236328, 0.06332828903198243, 0.06437542724609376, 0.0640552978515625, 0.06458163452148437, 0.064052734375, 0.06363977432250977, 0.06350467300415039, 0.06399385452270508, 0.06348121643066407, 0.06449625396728516, 0.06398361587524413, 0.06338355255126953, 0.06317670440673828, 0.06400972747802734, 0.06347574234008789, 0.06323247909545898, 0.0639283218383789, 0.06360883331298828, 0.06386191940307617, 0.06383190536499024, 0.06400415802001953, 0.06360547256469727, 0.06441558074951172, 0.06400991821289062, 0.0636844482421875, 0.0638902702331543, 0.06449533081054687, 0.06392214584350586, 0.06343702316284179, 0.06347558212280273, 0.0640857925415039, 0.06349427032470703, 0.06471900939941407, 0.06391334533691406, 0.07597491455078124, 0.0642171859741211, 0.06341791915893555, 0.06361747360229492, 0.06329296112060546, 0.06308911895751954, 0.06257247924804688, 0.06369635009765626, 0.06357852935791015, 0.06301417541503906, 0.06384118270874023, 0.06354742431640625, 0.06318025588989258, 0.06273247909545898, 0.06388518524169921, 0.06417996978759766, 0.06533542633056641, 0.06374662399291992, 0.0634511375427246, 0.06325417709350586, 0.0636399040222168, 
0.06313129425048829, 0.06416121673583984, 0.06388214492797852, 0.06344294357299805, 0.06315795135498047, 0.06286515045166016, 0.06378515243530274, 0.06464374542236329, 0.06358537673950196, 0.06363590240478516, 0.06431145477294922, 0.06379868698120117, 0.06421977233886719, 0.064036865234375, 0.06374604797363281, 0.06342361450195312, 0.06404390716552734, 0.06351052856445312, 0.06409830474853516, 0.06376652908325195, 0.06351052856445312, 0.0638416976928711, 0.06405795288085937, 0.06359827041625976, 0.06482793426513672, 0.06409756469726563, 0.06391609573364258, 0.06394265747070313, 0.06387302398681641, 0.06441004943847656, 0.0641370849609375, 0.06354940795898438, 0.06423772430419922, 0.0638914566040039, 0.06347980880737304, 0.0645406723022461, 0.0639733772277832, 0.0636129264831543, 0.06380710220336915, 0.06404061126708985, 0.06463970947265625, 0.06415360260009766, 0.07515340423583984, 0.06424278259277344, 0.06361385726928712, 0.06319862365722656, 0.0627677116394043, 0.06386073684692382, 0.06337075042724609, 0.06318540954589844, 0.06317232131958007, 0.06365008163452149, 0.06380748748779297, 0.06350848007202148, 0.0631541748046875, 0.06285619354248047, 0.06392863845825195, 0.06398396682739257, 0.06446115112304687, 0.06371532821655274, 0.06376144027709961, 0.06311040115356445, 0.06364499282836913, 0.06317712020874024, 0.06383606338500977, 0.06353724670410156, 0.06316851043701172, 0.06351804733276367, 0.0635338897705078, 0.06326051330566407, 0.06312691116333008, 0.06395967864990235, 0.06397132873535157, 0.06395296096801757, 0.0638463363647461, 0.0638683853149414, 0.06458188629150391, 0.06406294250488281, 0.0636956787109375, 0.06351811218261719, 0.06378147125244141, 0.06360678482055664, 0.06482125091552735, 0.06421094512939453, 0.06389980697631836, 0.06352646255493163, 0.06325385665893554, 0.06406444549560547, 0.06366617584228515, 0.06444457244873047, 0.06416681671142578, 0.06398867034912109, 0.06407513427734375, 0.06455270385742187, 0.06422335815429688, 0.06396355056762695, 0.06360303878784179, 0.0640401611328125, 0.06465740966796875, 0.06404521942138672, 0.0636646728515625, 0.06338713455200196, 0.0640722885131836, 0.06435635375976563, 0.0642718734741211]",tokens/s,15.659582820633176,,, -4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): +4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -467,11 +467,11 @@ ChildProcessError: Traceback (most recent call last): self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` @@ -621,7 +621,7 @@ ImportError: This modeling file requires the following packages that were not fo ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,14032.388096,7835.942912,0.0,7440.695296,7427.899392,s,1,31.7943828125,31.7943828125,0.0,31.7943828125,31.7943828125,31.7943828125,31.7943828125,[31.7943828125],,kWh,0.0007155273096708242,7.892078725048271e-05,0.00027156299502797965,0.0010660110919492867,,MB,1232.048128,8416.854016,0.0,8000.63488,7884.32384,s,10,1.18458154296875,0.11845815429687498,0.0005181299128065095,0.11830814361572266,0.11893774261474609,0.11936403961181641,0.11970507720947265,"[0.11794691467285157, 0.11884300994873047, 0.11811257934570313, 0.11855452728271484, 0.1179632339477539, 0.11837686157226562, 0.11820524597167968, 0.11854940795898437, 0.11979033660888672, 0.11823942565917969]",tokens/s,2161.1006985506738,kWh,3.5326098436747686e-06,3.8957991699163003e-07,2.3319262296625744e-06,6.254115990328973e-06,tokens/kWh,40933043.19840959,MB,1254.621184,8458.797056,0.0,8042.57792,7975.158272,s,10,51.00871875,5.100871875,0.011844859828030999,5.098849853515625,5.114230322265625,5.118435229492188,5.121799155273437,"[5.08038671875, 5.0956025390625, 5.11215966796875, 5.12264013671875, 5.0955703125, 5.1132958984375, 5.09888427734375, 5.0988154296875, 5.103091796875, 5.08827197265625]",tokens/s,12.350829729476395,kWh,0.00014881954907215942,1.6415289741388052e-05,8.737122886613683e-05,0.0002526060676796843,tokens/kWh,249400.1849547288,,s,630,51.005125488281166,0.08096051664806547,0.0009011453758256804,0.08084107208251953,0.08152753982543945,0.08219734191894532,0.08396592407226564,"[0.08004889678955078, 0.07973391723632813, 0.07983353424072266, 0.0806155548095703, 0.08050505828857422, 0.08078720092773438, 0.07982838439941406, 0.07984630584716797, 0.07974092864990234, 0.08012009429931641, 0.07998220825195312, 0.08041891479492187, 0.07992320251464843, 0.08207769775390625, 0.08002559661865234, 0.0801334686279297, 0.08059318542480469, 0.08030451202392579, 0.0802117462158203, 0.08243987274169921, 0.08063030242919922, 0.08403353881835937, 0.08063385772705078, 0.08059490966796876, 0.08050211334228516, 0.0803081283569336, 0.08033139038085937, 0.08036777496337891, 0.08077926635742187, 0.08039833831787109, 0.07983103942871093, 0.08004402923583985, 0.08027942657470703, 0.08069132995605469, 0.08065599822998047, 0.08053388977050781, 0.0803082275390625, 0.08032665252685547, 0.07985151672363282, 0.07983881378173828, 0.08025334167480469, 0.08040447998046875, 0.08015257263183594, 0.0828006362915039, 0.08027954864501953, 0.08037785339355469, 0.0799109115600586, 0.0802806396484375, 0.08057337951660157, 0.080442626953125, 0.08004841613769531, 0.07978160095214844, 
0.08043292999267578, 0.08143711853027344, 0.08266336059570313, 0.08175062561035157, 0.08216140747070312, 0.08129881286621093, 0.08122252655029297, 0.0813465576171875, 0.08169881439208984, 0.08120223999023438, 0.08140032196044922, 0.08118975830078125, 0.08097689819335938, 0.08098303985595703, 0.08020582580566406, 0.08011366271972656, 0.07996211242675781, 0.07992249298095704, 0.08051757049560547, 0.08083884429931641, 0.080404541015625, 0.08034413146972656, 0.08112429046630859, 0.0808980484008789, 0.08125440216064453, 0.08090160369873046, 0.08059324645996094, 0.08050911712646484, 0.0808095703125, 0.08083293151855468, 0.08079277038574219, 0.08090707397460938, 0.0809144287109375, 0.08101888275146485, 0.08074649810791015, 0.08096514892578124, 0.08108489227294922, 0.08265510559082032, 0.08139170837402344, 0.08125440216064453, 0.0809552993774414, 0.08106610870361328, 0.08107212829589844, 0.08144281768798828, 0.08083660888671874, 0.08111270141601562, 0.08083084869384766, 0.08103321838378906, 0.08073011016845703, 0.08105165100097657, 0.08126054382324219, 0.08063085174560547, 0.08065449523925781, 0.08044035339355468, 0.08105753326416015, 0.0806903076171875, 0.08100953674316407, 0.08043692779541016, 0.08067833709716797, 0.079967041015625, 0.08032249450683594, 0.08075071716308593, 0.08089177703857423, 0.08078054046630859, 0.0804668197631836, 0.08044915008544921, 0.08434633636474609, 0.08055490875244141, 0.08090764617919922, 0.08079341125488282, 0.08096441650390625, 0.08107612609863281, 0.08100179290771484, 0.08087334442138672, 0.08291065979003906, 0.08087238311767578, 0.08107955169677734, 0.08168319702148437, 0.08113152313232422, 0.0807383041381836, 0.08071766662597656, 0.08222281646728516, 0.08216620635986328, 0.08061542510986328, 0.08112127685546874, 0.08092025756835937, 0.08056588745117188, 0.08085574340820313, 0.08094310760498047, 0.08212480163574219, 0.08206130981445313, 0.08191382598876953, 0.08083245086669921, 0.08097187042236328, 0.0809814682006836, 0.08033062744140625, 0.08018806457519531, 0.08141209411621093, 0.08112332916259765, 0.08071782684326172, 0.08326143646240235, 0.08258560180664062, 0.08129945373535157, 0.08103862762451172, 0.08113839721679687, 0.08151859283447266, 0.08114380645751954, 0.08117453002929688, 0.08117862701416016, 0.08144838714599609, 0.08123859405517578, 0.08148172760009766, 0.0809349136352539, 0.08097567749023438, 0.08053369903564453, 0.0805516128540039, 0.08087551879882812, 0.08033721923828124, 0.08062889862060547, 0.0804497299194336, 0.08049839782714843, 0.08056845092773438, 0.08132691192626954, 0.08110079956054687, 0.08049404907226562, 0.0802043228149414, 0.08057619476318359, 0.08092626953125, 0.08078550720214844, 0.08090636444091796, 0.08090064239501953, 0.08057036590576172, 0.08291123199462891, 0.08111011505126953, 0.08103932952880859, 0.08149292755126954, 0.08139366149902344, 0.08096649932861329, 0.08095958709716797, 0.08088889312744141, 0.08139228820800781, 0.08089826965332031, 0.0810558090209961, 0.08134041595458984, 0.08101478576660157, 0.08101251220703125, 0.08053533172607422, 0.08062137603759766, 0.0915882568359375, 0.08059161376953125, 0.08042015838623047, 0.08066294097900391, 0.08052377319335938, 0.08016281890869141, 0.08044748687744141, 0.08073126220703125, 0.08040035247802735, 0.08440064239501953, 0.09083535766601562, 0.08089766693115234, 0.08106009674072266, 0.0811357421875, 0.08083455657958985, 0.08096934509277344, 0.08074483489990235, 0.08132403564453125, 0.08083622741699219, 0.08070909118652343, 0.08089488220214844, 0.08169801330566406, 
0.0811743392944336, 0.08082736206054687, 0.08098925018310547, 0.08123388671875, 0.08099734497070313, 0.0809202880859375, 0.08095772552490234, 0.08094924926757813, 0.08061542510986328, 0.08086937713623046, 0.08088780975341797, 0.08078540802001953, 0.08082637023925782, 0.08070953369140625, 0.08069308471679687, 0.08096953582763672, 0.08057401275634765, 0.08450956726074219, 0.08097586822509766, 0.08085913848876954, 0.0804290542602539, 0.08049868774414062, 0.08143030548095703, 0.0804085464477539, 0.08128492736816406, 0.08056368255615234, 0.08176710510253907, 0.0807503662109375, 0.08054220581054687, 0.08071340942382813, 0.08091126251220702, 0.08127251434326171, 0.08380038452148438, 0.08160665893554687, 0.08105779266357421, 0.08145101165771484, 0.0815308837890625, 0.0810618896484375, 0.0811473617553711, 0.08192607879638672, 0.08238345336914063, 0.08071903991699218, 0.08081081390380859, 0.08052249908447266, 0.0805445098876953, 0.08058060455322266, 0.08064595031738281, 0.08101074981689453, 0.08066780853271484, 0.08064665222167969, 0.08113404846191406, 0.08073654174804687, 0.08034480285644531, 0.08060022735595704, 0.08074050903320312, 0.08024281311035156, 0.08073990631103516, 0.08038706970214844, 0.08062566375732422, 0.08103116607666015, 0.08034265899658204, 0.08129074859619141, 0.08023948669433593, 0.0802529296875, 0.08023859405517578, 0.08051039886474609, 0.08079212951660156, 0.08089190673828126, 0.08092784118652344, 0.08074742126464844, 0.08110284423828125, 0.08100454711914062, 0.08120320129394532, 0.08103282928466797, 0.08135664367675781, 0.08090678405761718, 0.0802911376953125, 0.08042697906494141, 0.08101961517333985, 0.08063999938964844, 0.08032978820800782, 0.0802948455810547, 0.08022179412841797, 0.0836710433959961, 0.0809005126953125, 0.0814571533203125, 0.08068096160888671, 0.08060313415527344, 0.08034674835205079, 0.07997382354736328, 0.08023545837402343, 0.08017715454101562, 0.08024172973632812, 0.08123939514160156, 0.08133904266357422, 0.08138098907470703, 0.08116591644287109, 0.08124425506591797, 0.08126329803466797, 0.08127078247070313, 0.08150947570800782, 0.08163581085205078, 0.08141458892822266, 0.08143257904052735, 0.0809512939453125, 0.08140799713134765, 0.08148915100097656, 0.08261068725585938, 0.08113123321533203, 0.08202909088134766, 0.08114304351806641, 0.08084146881103516, 0.08121139526367188, 0.08123299407958984, 0.0810912322998047, 0.08125443267822266, 0.08090032196044922, 0.08061542510986328, 0.0805580825805664, 0.08080793762207031, 0.0808427505493164, 0.0807995834350586, 0.08105299377441406, 0.08079216003417969, 0.08053289794921875, 0.08067120361328126, 0.08055846405029297, 0.08075263977050781, 0.08085094451904297, 0.08082841491699219, 0.0811878433227539, 0.08141497802734375, 0.08142457580566406, 0.08128717041015625, 0.08237789154052734, 0.08122454071044923, 0.08117862701416016, 0.08277094268798828, 0.08162723541259766, 0.08117670440673828, 0.08117081451416015, 0.08144528198242187, 0.08143462371826173, 0.08103846740722656, 0.0808600311279297, 0.08057357025146485, 0.08130239868164063, 0.0809144287109375, 0.08043094635009766, 0.08051875305175782, 0.08091907501220703, 0.08052329254150391, 0.08084067535400391, 0.08097180938720704, 0.08166925048828125, 0.08079814147949219, 0.08140204620361328, 0.08101888275146485, 0.08317478179931641, 0.08085363006591798, 0.08183586883544922, 0.08112115478515625, 0.08056861114501954, 0.08135065460205078, 0.08077721405029296, 0.08056832122802735, 0.08079564666748047, 0.08159228515625, 0.0813199691772461, 0.08103321838378906, 
0.08123168182373047, 0.08070982360839844, 0.08101683044433594, 0.08100563049316406, 0.08061023712158204, 0.0810322265625, 0.08115094757080078, 0.08021593475341797, 0.08009740447998047, 0.08063795471191407, 0.08045299530029297, 0.08105391693115234, 0.08084931182861328, 0.08064720153808594, 0.08066950225830079, 0.0803430404663086, 0.08022566223144531, 0.08052531433105468, 0.08069404602050781, 0.08019967651367188, 0.08020301055908204, 0.08060176086425781, 0.08104934692382812, 0.08076528167724609, 0.08087142181396484, 0.08062566375732422, 0.08054579162597657, 0.08055712127685546, 0.08075154876708984, 0.08087075042724609, 0.08075325012207031, 0.08074237060546875, 0.08084489440917969, 0.0826235809326172, 0.08175913238525391, 0.08191795349121093, 0.08123916625976563, 0.08141820526123048, 0.08096246337890625, 0.08109260559082031, 0.08105165100097657, 0.08069734191894531, 0.08076083374023438, 0.08137865447998047, 0.08043170928955078, 0.08011504364013672, 0.08052310180664063, 0.08127334594726562, 0.08130802917480469, 0.08099430084228515, 0.08097996520996094, 0.08072767639160157, 0.08068134307861329, 0.08069660949707032, 0.08046870422363281, 0.08032169342041015, 0.08022911834716796, 0.08085453033447265, 0.08131439971923828, 0.08150761413574219, 0.0812033920288086, 0.08137372589111327, 0.08083455657958985, 0.08109017944335938, 0.08152716827392578, 0.08161228942871093, 0.08114636993408203, 0.08093875122070313, 0.08092697906494141, 0.08156732940673828, 0.08150640106201172, 0.08116665649414062, 0.08132198333740234, 0.08115724945068359, 0.08122000122070312, 0.08005248260498046, 0.0801487045288086, 0.07994080352783203, 0.08049657440185547, 0.08038079833984375, 0.08004605102539063, 0.07992323303222656, 0.08062710571289063, 0.08065673828125, 0.08066483306884766, 0.08076668548583985, 0.08093110656738281, 0.08085081481933594, 0.08140940856933594, 0.08228457641601562, 0.080650146484375, 0.08051181030273437, 0.08005836486816406, 0.07957478332519531, 0.07974118041992187, 0.08030352020263672, 0.08082006072998046, 0.08195353698730469, 0.08097100830078124, 0.08158428955078124, 0.08113008117675781, 0.08093081665039062, 0.080932861328125, 0.0810414047241211, 0.08338822174072266, 0.08137075042724609, 0.08134217834472657, 0.08150828552246094, 0.08105792236328126, 0.08148397064208984, 0.08108707427978516, 0.08048226928710937, 0.08066416168212891, 0.08056310272216796, 0.08062322998046875, 0.08073458862304687, 0.08014643096923828, 0.08061542510986328, 0.08065023803710937, 0.08083232116699218, 0.08081161499023437, 0.08032316589355469, 0.08068035125732421, 0.08053616333007813, 0.0807852783203125, 0.08068313598632812, 0.08058265686035156, 0.0810250244140625, 0.08065638732910156, 0.0803737564086914, 0.08051020812988281, 0.08195954895019532, 0.08096112060546876, 0.08099894714355468, 0.08116143798828125, 0.08078825378417968, 0.08065760040283203, 0.08083683013916015, 0.08108092498779297, 0.08096153259277344, 0.08125186920166015, 0.08105788421630859, 0.08095782470703125, 0.08235826873779296, 0.08088780975341797, 0.08094499206542968, 0.08076509094238281, 0.08044454193115234, 0.08045148468017578, 0.08071676635742188, 0.08059699249267578, 0.08047821044921875, 0.0805946273803711, 0.08053548431396484, 0.08214073944091797, 0.08063632202148438, 0.08095763397216797, 0.08097200012207031, 0.08092467498779297, 0.08097792053222656, 0.08060313415527344, 0.08065229034423828, 0.08093695831298828, 0.08077311706542968, 0.08065424346923829, 0.08088127899169922, 0.08126306915283203, 0.08127078247070313, 0.08161603546142578, 0.08125116729736329, 
0.08143993377685547, 0.08660809326171875, 0.08251433563232421, 0.0809959716796875, 0.08138604736328126, 0.08077455902099609, 0.08055414581298828, 0.08166143798828125, 0.081355712890625, 0.08076070404052735, 0.08063807678222656, 0.08092655944824219, 0.08052751922607422, 0.08066028594970703, 0.0798082275390625, 0.08052374267578125, 0.08027545928955078, 0.08044892883300782, 0.08014851379394532, 0.08025145721435546, 0.08020582580566406, 0.080648193359375, 0.08056832122802735, 0.08072176361083984, 0.08075443267822266, 0.0803117446899414, 0.08000800323486328, 0.07980867004394532, 0.0807442855834961, 0.0808951644897461, 0.08038658905029297, 0.08074285125732422, 0.08074960327148438, 0.08101577758789062, 0.08071887969970704, 0.08105878448486328, 0.08091648101806641, 0.08107606506347656, 0.08022370910644532, 0.08024467468261719, 0.08276863861083984, 0.08038768005371094, 0.07992361450195312, 0.07995954895019532, 0.08014899444580079, 0.08000921630859376, 0.08063180541992188, 0.08131734466552734, 0.08291712188720703, 0.08053225708007812, 0.08039628601074218, 0.0802930908203125, 0.08041142272949219, 0.08047615814208985, 0.0805580825805664, 0.0825384979248047, 0.08075414276123047, 0.08031215667724609, 0.08017375946044922, 0.08142642974853516, 0.08091033935546875, 0.08060022735595704, 0.08081455993652344, 0.08329663848876953, 0.08177436828613281, 0.08138755035400391, 0.0812171859741211, 0.08086102294921875]",tokens/s,12.351699833475491,,,True -8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): +8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -642,7 +642,7 @@ RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 
8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1896.706048,1057.947648,0.0,662.700032,622.833664,s,1,9.2692841796875,9.2692841796875,0.0,9.2692841796875,9.2692841796875,9.2692841796875,9.2692841796875,[9.2692841796875],,kWh,5.9649310204152544e-05,6.572552757026549e-06,2.1403072677991797e-05,8.762493563917089e-05,,MB,1939.795968,1181.679616,0.0,765.46048,733.871104,s,10,0.6155335044860839,0.06155335044860839,0.00022180813588524277,0.0615022087097168,0.061793860244750975,0.0618812822341919,0.06195121982574463,"[0.06154595184326172, 0.06177443313598633, 0.06167647933959961, 0.061381248474121096, 0.061244766235351564, 0.0612828483581543, 0.06174854278564453, 0.06145206451416016, 0.06145846557617188, 0.06196870422363281]",tokens/s,4158.99375313026,kWh,1.8151124012682475e-06,2.0017424931970968e-07,9.000007200000811e-07,2.9152873705880382e-06,tokens/kWh,87812955.45089355,MB,1944.014848,1192.165376,0.0,775.94624,748.240384,s,10,37.76376586914063,3.7763765869140626,0.010053992689648775,3.7759786376953124,3.7856682861328124,3.7918682006835938,3.796828132324219,"[3.7733896484375, 3.780630859375, 3.7761025390625, 3.762035888671875, 3.761979736328125, 3.78429052734375, 3.771435791015625, 3.798068115234375, 3.775854736328125, 3.77997802734375]",tokens/s,16.682658243965452,kWh,0.00011029425315748479,1.2165164813399175e-05,4.338264581719974e-05,0.0001658420637880837,tokens/kWh,379879.4983672095,,s,630,37.75729852676391,0.059932219883752254,0.0006360604396337902,0.05986535835266113,0.06039576988220215,0.060804128265380855,0.06275842662811279,"[0.05902950286865234, 0.05941420745849609, 0.05929391860961914, 0.0595497932434082, 0.06069657516479492, 0.06140550231933594, 0.05985782241821289, 0.05959964752197266, 0.05993401718139649, 0.05960358428955078, 0.05967468643188477, 0.05986713409423828, 0.05979340744018555, 0.06025328063964844, 0.06011209487915039, 0.06067910385131836, 0.060885761260986326, 0.05981798553466797, 0.05941443252563477, 0.059832191467285155, 0.0596453742980957, 0.059781150817871095, 0.0598823356628418, 0.05989980697631836, 0.05949625778198242, 0.06007212829589844, 0.060018848419189454, 0.059637599945068356, 0.059393726348876956, 0.059488574981689454, 0.06198681640625, 0.0605010871887207, 0.05932287979125977, 0.05945180892944336, 0.059891712188720705, 0.05943078231811524, 0.05938188934326172, 0.06029248046875, 0.059579071044921876, 0.059604961395263674, 0.05943088150024414, 0.05950006484985351, 0.060953056335449216, 0.05972524642944336, 0.059736640930175784, 0.05976019287109375, 0.059685310363769534, 0.06000230407714844, 0.0598076171875, 0.060353919982910155, 0.059605758666992185, 0.05964137649536133, 0.059609569549560544, 0.05962710571289063, 0.059781375885009765, 0.05970249557495117, 0.059651359558105466, 0.060028736114501956, 0.06010358428955078, 0.06028908920288086, 0.06004038238525391, 0.06013100814819336, 0.06007625579833984, 0.05989718246459961, 
0.059832286834716794, 0.05980448150634766, 0.060006271362304686, 0.05983148956298828, 0.059916511535644534, 0.05975875091552734, 0.05994089508056641, 0.06012911987304687, 0.06172083282470703, 0.05999852752685547, 0.06017638397216797, 0.05978726577758789, 0.05953289413452149, 0.05940620803833008, 0.05956047821044922, 0.059913887023925784, 0.05991049575805664, 0.05984988784790039, 0.05936624145507813, 0.05921791839599609, 0.05910444641113281, 0.059636417388916015, 0.05961068725585938, 0.059593441009521485, 0.05960073471069336, 0.05974649429321289, 0.05950611114501953, 0.06014604949951172, 0.059469825744628904, 0.05955088043212891, 0.06031241607666016, 0.059570049285888674, 0.06382777786254883, 0.059993473052978516, 0.05960540771484375, 0.0599967041015625, 0.05989116668701172, 0.05999055862426758, 0.06005724716186524, 0.059963520050048826, 0.059905952453613284, 0.05998579025268555, 0.05991596984863281, 0.06014009475708008, 0.060179710388183594, 0.05990086364746094, 0.060031326293945315, 0.06001193618774414, 0.059881599426269534, 0.06461436462402344, 0.06070697784423828, 0.059822078704833984, 0.06014976119995117, 0.05958019256591797, 0.059611358642578126, 0.05988457489013672, 0.05974310302734375, 0.059920063018798826, 0.05980815887451172, 0.05979878234863281, 0.060035839080810546, 0.0596492805480957, 0.05893734359741211, 0.05932003021240234, 0.05986556625366211, 0.05959158325195312, 0.06453270721435547, 0.059460319519042966, 0.05935449600219726, 0.0594601936340332, 0.059672031402587894, 0.059423263549804685, 0.05957638549804688, 0.05989574432373047, 0.05937356948852539, 0.05966438293457031, 0.05989580917358398, 0.05941398239135742, 0.05999465560913086, 0.0599285774230957, 0.05957222366333008, 0.06003612899780274, 0.06003152084350586, 0.0595865592956543, 0.06011945724487305, 0.06079286575317383, 0.06024192047119141, 0.059811809539794925, 0.05982620620727539, 0.060037120819091794, 0.05986713409423828, 0.05994697570800781, 0.060053535461425785, 0.060561214447021484, 0.06123779296875, 0.060006366729736325, 0.06000028610229492, 0.0599384651184082, 0.060071617126464846, 0.061071582794189457, 0.05976822280883789, 0.059340862274169924, 0.059142112731933594, 0.0588950080871582, 0.059445247650146485, 0.060746910095214844, 0.0603185920715332, 0.05994838333129883, 0.059764511108398435, 0.06009328079223633, 0.05907257461547852, 0.059316158294677734, 0.06000559997558594, 0.05966713714599609, 0.0610263671875, 0.060190399169921874, 0.06332860946655273, 0.060080127716064455, 0.05918339157104492, 0.05908160018920899, 0.05897097778320313, 0.0591129264831543, 0.05926380920410156, 0.05962688064575195, 0.05989852905273438, 0.059498336791992186, 0.05980313491821289, 0.060453697204589846, 0.05951027297973633, 0.060193279266357425, 0.060085342407226565, 0.06000323104858398, 0.060391681671142575, 0.05984844970703125, 0.05957244873046875, 0.05973728179931641, 0.060091201782226565, 0.059797279357910155, 0.0600384635925293, 0.060157886505126955, 0.05985279846191406, 0.06008668899536133, 0.06008457565307617, 0.059256542205810545, 0.05921206283569336, 0.05919295883178711, 0.05922035217285156, 0.05944054412841797, 0.05947865676879883, 0.05900694274902344, 0.058589183807373046, 0.058448223114013674, 0.058861217498779296, 0.05922796630859375, 0.05954313659667969, 0.0594392318725586, 0.05979388809204102, 0.05943033599853516, 0.05977350234985351, 0.05963481521606445, 0.05946252822875977, 0.059445152282714846, 0.0594741439819336, 0.05924844741821289, 0.0597402229309082, 0.059612895965576174, 0.05934067153930664, 
0.059566497802734375, 0.059344894409179685, 0.059436927795410155, 0.059705535888671876, 0.05978339385986328, 0.060058494567871094, 0.061217086791992184, 0.06011958312988281, 0.060386592864990235, 0.05981872177124024, 0.05971068954467774, 0.05962847900390625, 0.05976700973510742, 0.059889278411865234, 0.05996140670776367, 0.059951038360595704, 0.05994905471801758, 0.06019001770019531, 0.06018463897705078, 0.059908447265625, 0.05973430252075195, 0.059392158508300784, 0.059708446502685544, 0.05990572738647461, 0.06007583999633789, 0.05936966323852539, 0.05891206359863281, 0.05847635269165039, 0.05914316940307617, 0.05967184066772461, 0.05973462295532227, 0.059639041900634765, 0.05957708740234375, 0.059535358428955076, 0.05900492858886719, 0.058721694946289066, 0.05905452728271485, 0.059676929473876955, 0.0599463996887207, 0.05945529556274414, 0.05971142578125, 0.060141727447509764, 0.05961724853515625, 0.05935577774047852, 0.05941459274291992, 0.05936848068237305, 0.06106569671630859, 0.05961103820800781, 0.05971308898925781, 0.0596715202331543, 0.05954767990112304, 0.0593919677734375, 0.059991424560546874, 0.05990393447875977, 0.05995187377929687, 0.06003078460693359, 0.060031105041503906, 0.06046121597290039, 0.06048972702026367, 0.05986899185180664, 0.05962895965576172, 0.059703296661376956, 0.05993471908569336, 0.060177024841308595, 0.059996158599853515, 0.05997875213623047, 0.059705886840820316, 0.059478271484375, 0.059619518280029295, 0.05959683227539062, 0.05977088165283203, 0.05957846450805664, 0.05954470443725586, 0.05964905548095703, 0.060953887939453125, 0.05964617538452149, 0.05994112014770508, 0.05961318588256836, 0.059648159027099606, 0.05974204635620117, 0.059868766784667966, 0.05984035110473633, 0.05966700744628906, 0.05974009704589844, 0.059166465759277344, 0.05948441696166992, 0.059743518829345706, 0.059626209259033204, 0.05977088165283203, 0.059466785430908206, 0.060016830444335936, 0.05984700775146484, 0.05983814239501953, 0.05947264099121094, 0.05958860778808594, 0.05971795272827148, 0.06004732894897461, 0.0598504638671875, 0.059963520050048826, 0.059574207305908206, 0.0602391357421875, 0.059908576965332035, 0.060768192291259765, 0.06136975860595703, 0.0603138542175293, 0.060068126678466796, 0.060289344787597655, 0.060337310791015626, 0.06001948928833008, 0.060216991424560544, 0.060170463562011715, 0.06270921707153321, 0.06036108779907227, 0.06159600067138672, 0.06106316757202149, 0.060030975341796876, 0.059569919586181644, 0.059807998657226566, 0.05985452651977539, 0.06025872039794922, 0.059840415954589846, 0.05995481491088867, 0.05985500717163086, 0.060575008392333984, 0.05983942413330078, 0.05963740921020508, 0.05924105453491211, 0.05935488128662109, 0.05969510269165039, 0.059881599426269534, 0.06000966262817383, 0.05968147277832031, 0.05996953582763672, 0.06009980773925781, 0.06042499160766602, 0.06013132858276367, 0.05970095825195312, 0.05981622314453125, 0.05976268768310547, 0.0598355827331543, 0.059919105529785154, 0.05993024063110351, 0.060429790496826175, 0.06083273696899414, 0.06041151809692383, 0.06022409439086914, 0.060437278747558595, 0.0598919677734375, 0.06031167984008789, 0.060192798614501955, 0.0600159683227539, 0.060015392303466794, 0.059947135925292966, 0.05982527923583984, 0.05988230514526367, 0.059694976806640626, 0.05990412902832031, 0.059529216766357425, 0.06002483367919922, 0.05930521774291992, 0.059411201477050785, 0.05961872100830078, 0.05981961441040039, 0.05950156784057617, 0.05984441757202148, 0.06003760147094726, 0.05964566421508789, 
0.05923193740844727, 0.059256481170654296, 0.05979344177246094, 0.06241984176635742, 0.05969689559936524, 0.05947600173950195, 0.05908009719848633, 0.059118144989013674, 0.059276512145996094, 0.05927123260498047, 0.05968764877319336, 0.05987945556640625, 0.05979043197631836, 0.05960793685913086, 0.05959804916381836, 0.05955641555786133, 0.05976505661010742, 0.059786975860595705, 0.05980527877807617, 0.060037185668945316, 0.06002067184448242, 0.059894527435302734, 0.060558494567871095, 0.06014841461181641, 0.0600964469909668, 0.05987916946411133, 0.06045727920532227, 0.06029836654663086, 0.06006889724731445, 0.060042335510253904, 0.06002259063720703, 0.06015871810913086, 0.06035612869262695, 0.060186878204345706, 0.06016041564941406, 0.05997772979736328, 0.05983027267456055, 0.05969900894165039, 0.059649856567382815, 0.05974784088134766, 0.059853694915771485, 0.05954719924926758, 0.05958252716064453, 0.060255615234375, 0.06484662628173828, 0.06008921432495117, 0.06019580841064453, 0.060167743682861326, 0.059859390258789065, 0.060052833557128905, 0.06041667175292969, 0.059988094329833985, 0.05996297454833984, 0.05997187042236328, 0.0613397102355957, 0.06001657485961914, 0.0600186882019043, 0.06005385589599609, 0.05993231964111328, 0.060254207611083986, 0.060037120819091794, 0.06009347152709961, 0.06018899154663086, 0.06035932922363281, 0.060108993530273436, 0.05999801635742188, 0.060259742736816405, 0.06040636825561523, 0.06031561660766602, 0.0604549446105957, 0.060313568115234376, 0.06058111953735352, 0.06035257720947266, 0.06013513565063477, 0.06020540618896485, 0.060650142669677734, 0.06029939270019531, 0.06039459228515625, 0.0603015022277832, 0.06026633453369141, 0.060193313598632815, 0.06002070236206054, 0.060090625762939456, 0.0597212142944336, 0.06006604766845703, 0.060209312438964845, 0.05997151947021485, 0.060497600555419924, 0.06003142547607422, 0.0608133430480957, 0.0596338882446289, 0.05938166427612305, 0.060129150390625, 0.06074399948120117, 0.05991187286376953, 0.059582462310791014, 0.05973516845703125, 0.0598287353515625, 0.059910526275634764, 0.060114944458007816, 0.06220764923095703, 0.06090172958374023, 0.06014963150024414, 0.06012688064575195, 0.06020316696166992, 0.060112319946289065, 0.059360832214355466, 0.05961772918701172, 0.06007859039306641, 0.060385280609130856, 0.06143385696411133, 0.06011084747314453, 0.060055553436279295, 0.06010713577270508, 0.06032313537597656, 0.059980224609375, 0.06004851150512695, 0.06005740737915039, 0.0599090576171875, 0.05995315170288086, 0.06082262420654297, 0.06003977584838867, 0.059983840942382814, 0.060229984283447266, 0.06012313461303711, 0.06006079864501953, 0.06026847839355469, 0.05963462448120117, 0.059711360931396486, 0.05983379364013672, 0.059488960266113285, 0.06020473480224609, 0.0596932487487793, 0.05983251190185547, 0.05987526321411133, 0.05975244903564453, 0.059776992797851564, 0.05978319931030274, 0.059719680786132816, 0.05945910263061523, 0.059443233489990234, 0.059393505096435546, 0.05973417663574219, 0.05972598266601563, 0.059392673492431644, 0.059268447875976564, 0.05935801696777344, 0.05955712127685547, 0.059539520263671875, 0.059454143524169924, 0.059686752319335935, 0.059668479919433595, 0.05964822387695313, 0.060093441009521485, 0.05984703826904297, 0.05985456085205078, 0.05988217544555664, 0.05971558380126953, 0.05984467315673828, 0.05987033462524414, 0.06001155090332031, 0.06013708877563476, 0.05984476852416992, 0.05984675216674805, 0.0599054069519043, 0.060087936401367184, 0.059974559783935545, 
0.06038457489013672, 0.06227555084228516, 0.06062992095947266, 0.06008339309692383, 0.060057952880859376, 0.05984115219116211, 0.059963039398193356, 0.05997308731079101, 0.05985772705078125, 0.05969510269165039, 0.059799327850341796, 0.059815841674804686, 0.06042371368408203, 0.05955686569213867, 0.05938560104370117, 0.05933599853515625, 0.05978367996215821, 0.05993929672241211, 0.06013443374633789, 0.05986515045166016, 0.059652576446533205, 0.05945337677001953, 0.05935878372192383, 0.06280876922607422, 0.060006401062011716, 0.06009267044067383, 0.06003424072265625, 0.060006977081298825, 0.06028265762329101, 0.05951715087890625, 0.059305950164794924, 0.05924252700805664, 0.05968384170532227, 0.059931198120117185, 0.0599571533203125, 0.06061248016357422, 0.05974822235107422, 0.059797889709472654, 0.060174175262451175, 0.06008505630493164, 0.060419841766357424, 0.06277852630615234, 0.061300575256347654, 0.06042086410522461, 0.060246112823486325, 0.060123584747314454, 0.06023667144775391, 0.06026739120483399, 0.06020083236694336, 0.06077040100097656, 0.060211200714111325, 0.05970246505737305, 0.060146495819091796, 0.059123233795166014, 0.05942335891723633, 0.05982428741455078, 0.05946540832519531, 0.05973811340332031, 0.06003628921508789, 0.05971673583984375, 0.05910086441040039, 0.05920767974853516, 0.05960819244384766, 0.05978015899658203, 0.059606849670410154]",tokens/s,16.68551576997571,,,True 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,14728.306688,10142.810112,0.0,9747.562496,9611.730944,s,1,34.2201796875,34.2201796875,0.0,34.2201796875,34.2201796875,34.2201796875,34.2201796875,[34.2201796875],,kWh,0.0007770563281250058,8.570774036808844e-05,0.00029328690129595825,0.0011560509697890525,,MB,4534.358016,10528.68608,0.0,10112.466944,9989.953536,s,10,1.3355377807617186,0.1335537780761719,0.0007079238234458477,0.1334502716064453,0.13443924865722656,0.13473276214599608,0.13496757293701173,"[0.13502627563476563, 0.1336026611328125, 0.1330015106201172, 0.1329153594970703, 0.13297628784179688, 0.1338626251220703, 0.13385903930664061, 0.1343740234375, 0.13329788208007812, 0.13262211608886718]",tokens/s,1916.8308353956963,kWh,3.955573593243509e-06,4.3622652555131006e-07,2.6355989553242327e-06,7.027399074119053e-06,tokens/kWh,36428840.499867566,MB,4534.358016,10530.783232,0.0,10114.564096,9989.956096,s,10,80.05053466796876,8.005053466796877,0.02531685858535615,8.001396240234374,8.034038037109374,8.038192846679687,8.041516694335938,"[7.952193359375, 7.99734033203125, 8.03311474609375, 7.99462353515625, 8.0054521484375, 8.03254443359375, 7.98501123046875, 8.01132421875, 8.04234765625, 
7.9965830078125]",tokens/s,7.870028633950982,kWh,0.00023490137661509009,2.591078800471422e-05,0.00012195066738027532,0.00038276283200007954,tokens/kWh,164592.78365875114,,s,630,80.04725710296626,0.12705913825867668,0.0013620803168982011,0.12683967971801757,0.12818678588867188,0.12899595718383788,0.1319671975708008,"[0.12649116516113282, 0.12545555114746093, 0.12538333129882812, 0.12505817413330078, 0.12551462554931642, 0.12662364959716796, 0.12541961669921875, 0.12550348663330077, 0.12609331512451172, 0.12630445098876952, 0.1256118392944336, 0.12536217498779298, 0.12581068420410157, 0.12666675567626953, 0.12659302520751953, 0.12585747528076172, 0.12592508697509766, 0.1260813446044922, 0.12538655853271485, 0.12597090911865233, 0.12633497619628906, 0.1263250274658203, 0.12574281311035157, 0.12630016326904298, 0.12559359741210938, 0.12646195220947265, 0.13100604248046874, 0.1264193572998047, 0.12596189117431641, 0.126712158203125, 0.12619481658935547, 0.1268048629760742, 0.1268652801513672, 0.126609375, 0.12605878448486327, 0.12571772766113282, 0.12595820617675782, 0.12764841461181642, 0.12527814483642577, 0.12552191925048828, 0.1249053726196289, 0.1253561248779297, 0.126959228515625, 0.1260752639770508, 0.12618697357177736, 0.1267790069580078, 0.12664720153808592, 0.12654796600341797, 0.12597862243652344, 0.12626739501953124, 0.1260481948852539, 0.12601094055175782, 0.12568831634521485, 0.12668041229248048, 0.12605715179443358, 0.12601446533203126, 0.1261987533569336, 0.12671385955810546, 0.1270456314086914, 0.12625263977050782, 0.12705219268798829, 0.12717884826660156, 0.12660294342041015, 0.12774195098876953, 0.1273446044921875, 0.12821098327636718, 0.12652134704589843, 0.12745113372802735, 0.1281640625, 0.12818409729003907, 0.1298101806640625, 0.13154920959472657, 0.12657481384277344, 0.1287598114013672, 0.1274464340209961, 0.1335650177001953, 0.12803890991210937, 0.12666265869140625, 0.13019287109375, 0.12668163299560548, 0.1264739532470703, 0.1282596435546875, 0.125899169921875, 0.12760665893554687, 0.1260835189819336, 0.12578819274902345, 0.12661551666259765, 0.12666284942626954, 0.12578797149658202, 0.1274449920654297, 0.12633087921142577, 0.12660921478271484, 0.12563670349121095, 0.12578966522216797, 0.1257890853881836, 0.12621526336669922, 0.1261164779663086, 0.12610963439941406, 0.1271539535522461, 0.1268800354003906, 0.12502425384521484, 0.12535142517089845, 0.12551980590820314, 0.12851402282714844, 0.1266951675415039, 0.12675312042236328, 0.1258604507446289, 0.12747299194335937, 0.12592918395996094, 0.12617132568359374, 0.12609337615966798, 0.1262816619873047, 0.1256678695678711, 0.1273079071044922, 0.12688531494140626, 0.1264911346435547, 0.12661331176757812, 0.12654003143310547, 0.12668873596191407, 0.1259727325439453, 0.12722000122070312, 0.12524063873291016, 0.12676390075683594, 0.12656118774414063, 0.12633590698242186, 0.12491161346435548, 0.12725308990478515, 0.12686150360107423, 0.12669747161865236, 0.12809429931640626, 0.1320928955078125, 0.1259980163574219, 0.12838706970214844, 0.12708573150634767, 0.1264293746948242, 0.12899754333496094, 0.1289272918701172, 0.12822569274902343, 0.1265669403076172, 0.12716441345214843, 0.12686675262451172, 0.12775494384765626, 0.12632592010498048, 0.1285804443359375, 0.12642892456054688, 0.127219970703125, 0.12674588775634765, 0.1269378204345703, 0.12834121704101562, 0.1308168029785156, 0.1271329574584961, 0.1267453155517578, 0.1269188766479492, 0.12718057250976564, 0.12930400085449217, 0.12838557434082032, 0.13739004516601563, 
0.1263863983154297, 0.12533539581298828, 0.12798902130126952, 0.1272814712524414, 0.12999722290039062, 0.13082829284667968, 0.12764524841308594, 0.1275457305908203, 0.12752041625976562, 0.12770883178710937, 0.1266604766845703, 0.12734681701660155, 0.1268087387084961, 0.12625289916992188, 0.126281982421875, 0.1268875198364258, 0.12698870086669922, 0.12653135681152344, 0.12620646667480467, 0.12657023620605468, 0.12632870483398437, 0.12506492614746093, 0.12639065551757814, 0.126382080078125, 0.12764125061035156, 0.12761328125, 0.12553116607666015, 0.1279333724975586, 0.1272393569946289, 0.1266146240234375, 0.12666646575927734, 0.12677731323242186, 0.1271214065551758, 0.12734668731689452, 0.13081382751464843, 0.1256736297607422, 0.12571180725097655, 0.12634700775146485, 0.12658735656738282, 0.12598652648925782, 0.12669120025634767, 0.1273002243041992, 0.12740338897705078, 0.1284917755126953, 0.12654332733154297, 0.12675564575195314, 0.12715436553955078, 0.1259306869506836, 0.12638905334472655, 0.12634127807617188, 0.12785852813720702, 0.1263677444458008, 0.1265864028930664, 0.1260157470703125, 0.12689020538330079, 0.12804710388183593, 0.12587213134765626, 0.12829029846191406, 0.12704560089111328, 0.1268045120239258, 0.12610559844970703, 0.126023681640625, 0.12735641479492188, 0.12574934387207032, 0.127048095703125, 0.12612403106689454, 0.12627731323242186, 0.1257616958618164, 0.12623200225830078, 0.1289940185546875, 0.12815359497070314, 0.12667001342773437, 0.1251233901977539, 0.12625305938720705, 0.1297257537841797, 0.1290185546875, 0.12845465087890626, 0.12857139587402344, 0.12759468841552735, 0.1264392318725586, 0.1261480941772461, 0.12576588439941405, 0.12654208374023437, 0.12618256378173828, 0.12730387115478517, 0.12676493072509765, 0.12622108459472656, 0.12669276428222656, 0.1265320281982422, 0.12790595245361328, 0.12589430236816407, 0.12678179168701173, 0.12693910217285156, 0.12657052612304687, 0.12602108764648437, 0.1269273910522461, 0.12696275329589843, 0.12670047760009764, 0.12592908477783202, 0.12537657928466797, 0.1253288345336914, 0.12832850646972657, 0.12700998687744142, 0.12632685089111328, 0.12740665435791015, 0.1276246109008789, 0.12834060668945313, 0.1271968994140625, 0.12635596466064453, 0.12646342468261718, 0.1258317413330078, 0.12493824005126954, 0.1279273910522461, 0.12657350158691405, 0.12676297760009766, 0.12626700592041015, 0.12670365142822265, 0.12760918426513673, 0.12791216278076173, 0.12755126190185548, 0.12724649810791017, 0.1270208969116211, 0.12647628784179688, 0.12739552307128907, 0.1270572509765625, 0.12701590728759765, 0.12668723297119142, 0.12617113494873047, 0.1276570587158203, 0.12700569915771484, 0.12662364959716796, 0.12676464080810546, 0.12714556884765624, 0.12687648010253907, 0.12641484832763672, 0.12619366455078124, 0.1271398391723633, 0.1267116470336914, 0.12721974182128906, 0.12651238250732422, 0.12714278411865235, 0.126823486328125, 0.12666483306884765, 0.12629856109619142, 0.1274493408203125, 0.12670169830322264, 0.12700582122802734, 0.12966761779785158, 0.12836457824707032, 0.1275149459838867, 0.12661555480957032, 0.12851405334472657, 0.1298041229248047, 0.127401123046875, 0.12813209533691405, 0.1283524169921875, 0.1269390411376953, 0.12801164245605468, 0.12712989044189454, 0.1270214080810547, 0.12682879638671876, 0.1270023651123047, 0.1269844512939453, 0.12798566436767578, 0.1280307159423828, 0.1323351287841797, 0.12742610931396484, 0.12845257568359375, 0.12756473541259766, 0.1275014114379883, 0.128787353515625, 0.1275533142089844, 
0.128272705078125, 0.1274059829711914, 0.12705792236328126, 0.12691267395019531, 0.12571222686767577, 0.12474285125732422, 0.13084072875976563, 0.12620252990722655, 0.1264353256225586, 0.12670172882080077, 0.12781346893310547, 0.12749199676513673, 0.12737750244140625, 0.127421630859375, 0.12795785522460937, 0.12852940368652344, 0.12772988891601564, 0.12761110687255858, 0.1274043197631836, 0.12747545623779297, 0.12663017272949217, 0.1277496337890625, 0.1275144958496094, 0.12865350341796875, 0.1280047607421875, 0.12893798828125, 0.12760889434814454, 0.1281383056640625, 0.1283162841796875, 0.12760387420654296, 0.12738601684570314, 0.12795539093017577, 0.1274286117553711, 0.12753715515136718, 0.12708175659179688, 0.12740476989746094, 0.12761027526855467, 0.12720829010009765, 0.12727616119384766, 0.12790557098388672, 0.12633773040771484, 0.12668534088134767, 0.12684083557128906, 0.12672525024414064, 0.12667378997802733, 0.12666883087158204, 0.1268080291748047, 0.12755165100097657, 0.12623046112060546, 0.1278726043701172, 0.12691305541992187, 0.13165945434570311, 0.1277740478515625, 0.12698095703125, 0.12597862243652344, 0.12592127990722657, 0.1281249237060547, 0.12802653503417968, 0.12632073974609376, 0.12982791137695313, 0.12613053131103516, 0.12624246215820312, 0.12543670654296876, 0.1250057907104492, 0.12553644561767577, 0.12720681762695313, 0.12676080322265626, 0.1259362564086914, 0.12617945861816407, 0.1264754867553711, 0.12457561492919922, 0.12570057678222657, 0.12595244598388672, 0.12682444763183592, 0.1258711395263672, 0.12613075256347656, 0.12583773040771484, 0.12711116790771484, 0.12596224212646484, 0.12602095794677734, 0.12560185241699218, 0.1262200927734375, 0.1257520980834961, 0.12574489593505858, 0.12815589904785157, 0.1264005126953125, 0.1257850875854492, 0.12623145294189453, 0.12673414611816405, 0.1271995162963867, 0.1265519027709961, 0.12600768280029298, 0.12684060668945313, 0.12770918273925783, 0.1269078369140625, 0.13011820983886718, 0.12739584350585936, 0.12715817260742188, 0.12646409606933592, 0.1264353256225586, 0.12657244873046875, 0.12594390106201173, 0.12697420501708984, 0.12629376220703126, 0.12624636840820314, 0.12814306640625, 0.12722486114501952, 0.12750771331787109, 0.12684092712402345, 0.12763590240478515, 0.12678758239746094, 0.12680806732177735, 0.1280128936767578, 0.1276006393432617, 0.12689817810058593, 0.12746342468261718, 0.1271357421875, 0.1271357421875, 0.12662783813476564, 0.12694937896728514, 0.12766617584228515, 0.12811468505859375, 0.12797456359863282, 0.12768956756591796, 0.1278908462524414, 0.12688236999511718, 0.12651113891601562, 0.12710486602783203, 0.12768067169189454, 0.13018316650390624, 0.12640460968017578, 0.12690870666503906, 0.12695318603515626, 0.12619065856933595, 0.13767366027832031, 0.12683058929443358, 0.1276804504394531, 0.13040771484375, 0.12647459411621093, 0.12775801849365234, 0.12641542053222657, 0.12630786895751953, 0.12656707000732423, 0.12717874908447266, 0.12827381896972656, 0.12689043426513671, 0.126210205078125, 0.12740560150146485, 0.12616486358642579, 0.12706400299072265, 0.12647081756591796, 0.126501953125, 0.1275381088256836, 0.12665184020996093, 0.12670550537109376, 0.12683747100830078, 0.12617298889160156, 0.1270540771484375, 0.12573693084716797, 0.12631993865966798, 0.12690089416503905, 0.12744703674316407, 0.12767964935302734, 0.1268376007080078, 0.12544796752929688, 0.1254832305908203, 0.1262449264526367, 0.12742649841308593, 0.1262998046875, 0.1261182403564453, 0.12658287811279298, 0.12662774658203124, 
0.12582281494140626, 0.12650717163085937, 0.1262808303833008, 0.1264625930786133, 0.12685699462890626, 0.12705391693115234, 0.12739798736572266, 0.1268019485473633, 0.1265739517211914, 0.12800099182128907, 0.12821670532226562, 0.12783673858642577, 0.12719699096679687, 0.1272845458984375, 0.1279086380004883, 0.1270885467529297, 0.12740755462646483, 0.12698271942138672, 0.1275555877685547, 0.12719087982177735, 0.12695977783203125, 0.12734003448486328, 0.12680652618408203, 0.1269614715576172, 0.12716464233398436, 0.12683875274658202, 0.12712754821777345, 0.12732621002197267, 0.12679759979248048, 0.12772310638427734, 0.1277221145629883, 0.12638800048828125, 0.12651881408691407, 0.1279813461303711, 0.12848016357421874, 0.131235107421875, 0.1263458251953125, 0.1275141143798828, 0.12699404907226564, 0.13524044799804688, 0.13757008361816406, 0.1280619201660156, 0.12792217254638671, 0.12945408630371094, 0.12721766662597656, 0.12676505279541014, 0.12680397033691407, 0.12685517120361328, 0.12711526489257813, 0.12762079620361327, 0.12784877014160156, 0.1269516830444336, 0.12766531372070314, 0.12742716979980467, 0.12698419189453125, 0.12757401275634767, 0.12773094177246094, 0.12768118286132812, 0.1276655044555664, 0.12768720245361329, 0.1276539535522461, 0.12869573974609375, 0.12751305389404297, 0.1261647644042969, 0.12559529876708983, 0.12651602935791015, 0.12801632690429687, 0.12506527709960938, 0.12434786987304687, 0.12576131439208985, 0.12668211364746093, 0.12708223724365234, 0.12663926696777345, 0.1268580780029297, 0.12575926208496094, 0.12725062561035155, 0.12702342224121094, 0.12741990661621094, 0.12759664154052736, 0.12732166290283203, 0.1270052490234375, 0.12753510284423827, 0.12722994995117187, 0.12735874938964845, 0.12684925079345702, 0.12648652648925782, 0.1266841583251953, 0.1280369873046875, 0.12664630126953125, 0.12703215789794922, 0.12669068908691405, 0.12759465789794922, 0.1266990737915039, 0.12612086486816407, 0.1264680938720703, 0.12721695709228514, 0.12626998138427734, 0.12640681457519531, 0.126536865234375, 0.1272369613647461, 0.12636160278320313, 0.12615042877197266, 0.1269697952270508, 0.12692483520507813, 0.12669773101806642, 0.1266728973388672, 0.12652543640136718, 0.12617436981201172, 0.12767654418945312, 0.1273947525024414, 0.1271703338623047, 0.1276374740600586, 0.12987091064453124, 0.12684796905517579, 0.12650847625732423, 0.12627410888671875, 0.12585158538818358, 0.12682041931152344, 0.1270203552246094, 0.1270074234008789, 0.12680985260009767, 0.13043122863769532, 0.12649267578125, 0.12600633239746092, 0.12930758666992187, 0.1258138885498047, 0.12746431732177735, 0.1279815673828125, 0.12647792053222656]",tokens/s,7.870350875228854,,,True -8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): 
+8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -695,7 +695,7 @@ torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.45 GiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,5245.898752,3461.28384,0.0,3066.036224,2865.160192,s,1,13.449072265625,13.449072265625,0.0,13.449072265625,13.449072265625,13.449072265625,13.449072265625,[13.449072265625],,kWh,0.0001751543946833332,1.931352334526294e-05,6.485394077201945e-05,0.0002593218588006156,,MB,5298.737152,3798.925312,0.0,3382.706176,3158.448128,s,10,0.9427298507690429,0.09427298507690429,0.0004287602052404247,0.09424558258056641,0.0946105682373047,0.09491861114501954,0.09516504547119141,"[0.09522665405273438, 0.09449411010742187, 0.09451715087890625, 0.093910400390625, 0.0945421142578125, 0.09384623718261718, 0.09432064056396484, 0.09417052459716797, 0.09367011260986328, 0.09403190612792969]",tokens/s,2715.518128456047,kWh,2.76874553930794e-06,3.0534151131535305e-07,1.556185731320564e-06,4.630272781943857e-06,tokens/kWh,55288319.2969308,MB,5302.882304,3798.925312,0.0,3382.706176,3158.450688,s,10,56.98800439453125,5.698800439453126,0.015593749483295881,5.695649658203125,5.715228124999999,5.721299609375,5.726156796875,"[5.666302734375, 5.69577392578125, 5.70157568359375, 5.68772216796875, 5.695525390625, 5.72737109375, 5.71081103515625, 5.6939208984375, 5.69512255859375, 5.71387890625]",tokens/s,11.05495808623993,kWh,0.00016488129306569104,1.8186982917514692e-05,7.059048309708114e-05,0.0002536587590802869,tokens/kWh,248365.1667635082,,s,630,56.98545406341552,0.09045310168796115,0.0011146891581628154,0.09024007797241211,0.09134676818847656,0.09213751525878906,0.09454165992736817,"[0.08968605041503906, 0.08971260833740234, 0.08952556610107422, 0.08906617736816407, 0.08922316741943359, 0.08949964904785156, 0.08911430358886718, 0.09006463623046874, 0.09064096069335938, 0.08963890838623047, 0.08953651428222656, 
0.09333964538574219, 0.09175859069824219, 0.08936038208007813, 0.08958566284179688, 0.0897804183959961, 0.08917817687988282, 0.08923545837402344, 0.08925567626953125, 0.0894527359008789, 0.08956813049316406, 0.08997369384765624, 0.0896626205444336, 0.0895005111694336, 0.09261017608642579, 0.08917375946044923, 0.09015974426269531, 0.09012822723388672, 0.0900240936279297, 0.08954265594482422, 0.08973513793945312, 0.08937065887451172, 0.08998422241210938, 0.09084188842773437, 0.08923689270019532, 0.08978316497802734, 0.08978112030029296, 0.08964752197265625, 0.09041260528564453, 0.09004940795898438, 0.08969833374023438, 0.09047036743164062, 0.08974736022949219, 0.08973731231689454, 0.08982342529296874, 0.09008927917480469, 0.09005875396728516, 0.08941964721679688, 0.08937305450439453, 0.090212158203125, 0.08958560180664063, 0.08950784301757812, 0.0895692138671875, 0.0903043212890625, 0.08969859313964844, 0.08989286041259766, 0.08940294647216797, 0.09046086120605469, 0.09075494384765626, 0.09033859252929688, 0.09165267181396484, 0.09034732818603515, 0.09006297302246094, 0.0917606430053711, 0.09150259399414062, 0.08984780883789062, 0.08934528350830079, 0.09013116455078125, 0.0895589141845703, 0.08977327728271485, 0.0900618896484375, 0.09017945861816407, 0.0914776611328125, 0.08999152374267579, 0.08980025482177735, 0.09080601501464844, 0.09085385894775391, 0.09026764678955078, 0.09011427307128907, 0.08976793670654297, 0.09000291442871093, 0.09255171203613281, 0.0908448028564453, 0.09116505432128906, 0.0898653106689453, 0.08953334045410156, 0.08950784301757812, 0.0895283203125, 0.09028975677490235, 0.09052191925048828, 0.08989641571044922, 0.08996927642822265, 0.09094758605957032, 0.0903024673461914, 0.08976902770996094, 0.0904151382446289, 0.08941603088378906, 0.09003065490722656, 0.08934809875488281, 0.08947718048095703, 0.09004825592041016, 0.08998521423339843, 0.08981913757324218, 0.09037551879882813, 0.09034732818603515, 0.08961945343017579, 0.08959715270996094, 0.08981910705566407, 0.09006147003173828, 0.09084681701660156, 0.09063670349121093, 0.09121382141113281, 0.09021990203857422, 0.09131072235107422, 0.09187091064453125, 0.09068576049804687, 0.0896552963256836, 0.09023385620117187, 0.09018057250976562, 0.09010179138183594, 0.09034870147705078, 0.09456111907958985, 0.09081241607666016, 0.0912916488647461, 0.09234636688232421, 0.09089024353027343, 0.0900060806274414, 0.08999571228027343, 0.08985382080078125, 0.09007308959960937, 0.08956928253173828, 0.09059097290039063, 0.09013273620605469, 0.09430016326904297, 0.0902715835571289, 0.09114844512939453, 0.0913583984375, 0.09006368255615234, 0.0898325424194336, 0.09005967712402344, 0.09075917053222657, 0.09047411346435547, 0.09083484649658204, 0.09061328125, 0.08987741088867188, 0.08961027526855468, 0.09022179412841796, 0.09050777435302734, 0.09155596923828126, 0.0913174057006836, 0.09060399627685548, 0.09087026977539063, 0.09222886657714843, 0.09193551635742188, 0.08987033843994141, 0.0908779525756836, 0.08987455749511719, 0.0898435821533203, 0.09076652526855469, 0.09061990356445312, 0.09025318145751954, 0.09061881256103516, 0.09070796966552734, 0.09055846405029297, 0.09007695770263671, 0.09031078338623047, 0.08999267578125, 0.09006511688232421, 0.09028031921386719, 0.09021392059326172, 0.0903741455078125, 0.08972544097900391, 0.08988057708740234, 0.09074073791503906, 0.0896674575805664, 0.09005305480957031, 0.0893655014038086, 0.08983622741699218, 0.08909190368652344, 0.08969209289550781, 0.0903662109375, 0.09327935791015625, 
0.08970738983154297, 0.08980480194091797, 0.0894744644165039, 0.09334998321533203, 0.09082502746582032, 0.0904705581665039, 0.09199961853027344, 0.09024310302734374, 0.09082061004638672, 0.10126249694824219, 0.09058493041992187, 0.09043436431884766, 0.08972911834716797, 0.09009696197509766, 0.08971327972412109, 0.0893186264038086, 0.08988153839111328, 0.08981404876708984, 0.08971568298339844, 0.09490620422363281, 0.09056620788574218, 0.08984432220458985, 0.09010995483398437, 0.08984371185302735, 0.09000141143798829, 0.08936653137207032, 0.08981641387939453, 0.09067088317871094, 0.09157107543945313, 0.09022029113769531, 0.08970060729980468, 0.08928665924072265, 0.08964915466308594, 0.08943545532226563, 0.08986201477050781, 0.08951289367675781, 0.09022041320800782, 0.08995945739746093, 0.08963990020751954, 0.08993920135498047, 0.09027597045898438, 0.09034815979003906, 0.08998092651367187, 0.08941270446777344, 0.08972380828857422, 0.08946435546875, 0.08916835021972656, 0.09043968200683594, 0.09152921295166015, 0.08972697448730468, 0.0897798080444336, 0.08991942596435547, 0.09008175659179687, 0.08910438537597656, 0.08950099182128907, 0.08917043304443359, 0.09000160217285157, 0.09057443237304688, 0.08898397064208985, 0.08888044738769531, 0.08918428802490234, 0.09224691009521484, 0.08962847900390625, 0.0935874252319336, 0.09057075500488282, 0.09003008270263672, 0.08971068572998046, 0.08980636596679688, 0.08956352233886719, 0.091340576171875, 0.08974867248535157, 0.08952301025390624, 0.08948735809326172, 0.09013855743408203, 0.09082428741455079, 0.09040870666503906, 0.09026387023925782, 0.09010832214355469, 0.08950374603271484, 0.09034957122802735, 0.08947711944580078, 0.08999664306640626, 0.09027641296386718, 0.0911258544921875, 0.09056690979003906, 0.089746337890625, 0.09001455688476563, 0.08958975982666016, 0.08982112121582031, 0.09112950134277344, 0.09219261169433594, 0.09070012664794921, 0.09026563262939453, 0.08980883026123047, 0.09010173034667969, 0.0915235824584961, 0.09104156494140625, 0.09029341125488281, 0.08990396881103516, 0.09011808013916016, 0.08980646514892578, 0.09058963012695312, 0.09127043151855468, 0.09009225463867188, 0.0898677749633789, 0.08968179321289063, 0.09069222259521484, 0.0906424331665039, 0.09250816345214843, 0.09106022644042969, 0.09028928375244141, 0.09139699554443359, 0.09588057708740234, 0.09126783752441406, 0.09034265899658203, 0.08998489379882812, 0.09031478118896484, 0.09181651306152344, 0.0910716781616211, 0.09185993957519531, 0.09025865936279297, 0.0897380142211914, 0.09030592346191406, 0.08947161865234375, 0.08986150360107421, 0.09024143981933594, 0.09010368347167969, 0.09028854370117187, 0.09022991943359375, 0.08924034881591797, 0.08884630584716798, 0.0892541732788086, 0.08895622253417969, 0.09024716949462891, 0.09016115570068359, 0.09003948974609376, 0.09034835052490234, 0.09103075408935547, 0.0917154541015625, 0.09049180603027343, 0.08923308563232422, 0.09007135772705079, 0.09092915344238281, 0.0899399642944336, 0.09044786834716798, 0.09039670562744141, 0.09196137237548828, 0.09060150146484375, 0.09027574157714843, 0.09055232238769531, 0.09018982696533204, 0.09007465362548828, 0.09115814208984375, 0.09168982696533202, 0.0920025634765625, 0.09076505279541015, 0.0927674560546875, 0.09076348876953125, 0.09042348480224609, 0.09124697875976563, 0.09162957000732422, 0.09101110076904297, 0.09062601470947265, 0.09083699035644531, 0.0912384033203125, 0.09142179107666015, 0.09244560241699219, 0.09168057250976562, 0.09125619506835937, 0.09110201263427735, 
0.0944940185546875, 0.09075782775878906, 0.09167667388916016, 0.09044377899169923, 0.0907874526977539, 0.09046041870117187, 0.09059728240966797, 0.09049520111083985, 0.09064393615722656, 0.0909927978515625, 0.09082099151611328, 0.0905129623413086, 0.0908272933959961, 0.09064873504638672, 0.09068310546875, 0.09120361328125, 0.09045782470703125, 0.09060995483398437, 0.09124409484863281, 0.09037049865722656, 0.09112322998046875, 0.09089469146728515, 0.09073177337646485, 0.09106317138671875, 0.09104793548583984, 0.09075711822509766, 0.09037404632568359, 0.08997628784179687, 0.09021459197998047, 0.09015078735351563, 0.09073222351074219, 0.09042134094238281, 0.09001811218261718, 0.09018319702148438, 0.09040108489990234, 0.09018025970458984, 0.09000281524658203, 0.09022147369384766, 0.09029385375976562, 0.09118259429931641, 0.09095024108886719, 0.09065062713623047, 0.09016457366943359, 0.09062179565429687, 0.091351806640625, 0.09075225830078125, 0.09233817291259766, 0.09012079620361328, 0.09081597137451172, 0.09030636596679688, 0.09145629119873047, 0.09058512115478516, 0.09080633544921875, 0.09123846435546876, 0.09120697784423829, 0.09497062683105469, 0.09103968048095704, 0.09424877166748047, 0.09023439788818359, 0.0907209243774414, 0.09041680145263672, 0.09036547088623047, 0.090823486328125, 0.08988617706298828, 0.09031938934326172, 0.08999321746826172, 0.09092649841308593, 0.09134674835205078, 0.09069033813476562, 0.08996665954589844, 0.09070585632324218, 0.09101900482177734, 0.0912938232421875, 0.09091852569580078, 0.09110572814941406, 0.09057491302490234, 0.09113753509521484, 0.09042380523681641, 0.09127760314941406, 0.09023040008544922, 0.08959394836425781, 0.08942189025878906, 0.09005449676513672, 0.08955654144287109, 0.0898620834350586, 0.0902039337158203, 0.09021727752685547, 0.08956313323974609, 0.08970982360839844, 0.08956422424316406, 0.08929539489746094, 0.08968572998046875, 0.0899959716796875, 0.09014067077636718, 0.09439437103271485, 0.09054962921142579, 0.08911526489257812, 0.08970457458496094, 0.08998067474365234, 0.08958812713623047, 0.08929251098632812, 0.08888102722167969, 0.08913318634033203, 0.08970649719238281, 0.09007718658447265, 0.08935964965820313, 0.08927510070800782, 0.08949350738525391, 0.09262659454345704, 0.08973283386230468, 0.09009011077880859, 0.08979046630859375, 0.08927964782714844, 0.08982745361328125, 0.09207017517089844, 0.09151737976074219, 0.09085104370117188, 0.09079420471191406, 0.0910623016357422, 0.09066684722900391, 0.09070406341552735, 0.0910561294555664, 0.09044898986816406, 0.09054246520996094, 0.09061634826660156, 0.09185266876220703, 0.09059033966064453, 0.09101824188232421, 0.09102950286865234, 0.0909024658203125, 0.09029049682617188, 0.08984703826904297, 0.09086160278320313, 0.09087606048583985, 0.09050511932373047, 0.09025766754150391, 0.09389004516601562, 0.09057286071777344, 0.09088265228271485, 0.09004236602783203, 0.08998297882080078, 0.08988262176513671, 0.09018163299560547, 0.09036905670166015, 0.09010594940185547, 0.08958380889892578, 0.08982189178466797, 0.09024915313720704, 0.09023903656005859, 0.09010755157470703, 0.09039715576171875, 0.09042655944824218, 0.0903597412109375, 0.0905291519165039, 0.0908642578125, 0.09082653045654297, 0.09008354949951172, 0.09030860900878906, 0.09258735656738282, 0.09081718444824219, 0.09034751892089844, 0.08987648010253907, 0.09021849822998047, 0.09050236511230468, 0.09045238494873047, 0.09238361358642579, 0.0899788818359375, 0.08943180847167968, 0.08983372497558594, 0.0905871353149414, 
0.09008953857421875, 0.08999040222167969, 0.09061017608642578, 0.09012271881103516, 0.08994950103759766, 0.09043516540527344, 0.09052857971191407, 0.09009152221679688, 0.09001983642578125, 0.08976505279541015, 0.09081529235839844, 0.09115010833740235, 0.09067648315429687, 0.09013142395019531, 0.09074393463134765, 0.09027059173583984, 0.09088419342041015, 0.09019792175292969, 0.09005606079101562, 0.09010768127441406, 0.0898056640625, 0.09019391632080079, 0.09046825408935547, 0.0900506591796875, 0.0913469467163086, 0.08984780883789062, 0.09010176086425781, 0.09030598449707031, 0.09022054290771485, 0.0898115234375, 0.09005900573730469, 0.08998886108398438, 0.09042329406738281, 0.09055955505371094, 0.09308274841308593, 0.09062703704833984, 0.09028284454345703, 0.09005820465087891, 0.09012073516845703, 0.090355712890625, 0.09018367767333985, 0.08972605133056641, 0.09025833892822266, 0.08997273254394532, 0.09039600372314453, 0.09112528228759766, 0.09114380645751953, 0.09529183959960938, 0.09382867431640625, 0.09088086700439453, 0.08991305541992188, 0.08990544128417968, 0.08972697448730468, 0.08993587493896485, 0.09005792236328125, 0.09099308776855469, 0.0908139877319336, 0.09036067199707032, 0.09033113861083984, 0.09039974212646484, 0.09159353637695312, 0.09015030670166016, 0.09039513397216797, 0.09005903625488282, 0.09043968200683594, 0.09042329406738281, 0.09027174377441406, 0.0902739486694336, 0.09050492858886719, 0.09015625762939453, 0.09024111938476563, 0.09013945770263672, 0.08995571136474609, 0.08975222778320313, 0.0896839370727539, 0.08993382263183594, 0.0900505599975586, 0.09006412506103516, 0.09018450927734376, 0.09002492523193359, 0.0898338851928711, 0.09006956481933594, 0.09012633514404297, 0.09080774688720702, 0.09001407623291016, 0.09011148834228516, 0.09007791900634765, 0.0902221450805664, 0.09061116790771484, 0.09024928283691407, 0.09015113830566407, 0.09010198211669922, 0.08986873626708984, 0.09019187164306641, 0.09128345489501953, 0.09073458862304687, 0.09016320037841796, 0.09132646179199219, 0.09377184295654296, 0.09038841247558593, 0.0904089584350586, 0.09077760314941406, 0.090818115234375, 0.09026195526123047, 0.09037596893310547, 0.10164399719238282, 0.09004815673828125, 0.09018873596191407]",tokens/s,11.055452840630394,,,True -8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): +8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -721,7 +721,7 @@ ChildProcessError: Traceback (most recent call last): self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) @@ -733,7 +733,7 @@ ChildProcessError: Traceback (most recent call last): self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1709, in __getattr__ + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' @@ -1233,7 +1233,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): +4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -1249,61 +1249,61 @@ ChildProcessError: Traceback (most recent call last): self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context + File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill + File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context + File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate + File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample + File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 
1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 864, in forward + File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( - File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( - File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply + File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( @@ -1312,7 +1312,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): +4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -1328,53 +1328,53 @@ ChildProcessError: Traceback (most recent call last): self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context + File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill + File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context + File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate + File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample + File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in 
_wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 1013, in forward + File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 1015, in forward transformer_outputs = self.transformer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 839, in forward + File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 840, in forward outputs = block( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 467, in forward attn_outputs = self.attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 397, in 
forward attn_weights = _flash_attention_forward( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 199, in _flash_attention_forward + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 252, in _flash_attention_forward attn_output_unpad = flash_attn_varlen_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 1124, in flash_attn_varlen_func return FlashAttnVarlenFunc.apply( - File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply + File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 620, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_varlen_forward( @@ -1595,7 +1595,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): +4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -1621,15 +1621,15 @@ ChildProcessError: Traceback (most recent call last): self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4000, in from_pretrained + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in 
dispatch_model attach_align_device_hook_on_blocks( - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 615, in attach_align_device_hook_on_blocks + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 160, in add_hook_to_module + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 282, in init_hook + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) @@ -1637,10 +1637,10 @@ ChildProcessError: Traceback (most recent call last): return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 296, in _quantize w = self.data.contiguous().to(device) -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU 0 has a total capacity of 14.75 GiB of which 239.06 MiB is free. Process 24522 has 14.51 GiB memory in use. Of the allocated memory 14.07 GiB is allocated by PyTorch, and 332.11 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): +4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -1656,61 +1656,61 @@ ChildProcessError: Traceback (most recent call last): self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context + File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill + File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context + File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate + File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample + File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 
1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 864, in forward + File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( - File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply + File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( @@ -1932,7 +1932,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): +4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -1948,53 +1948,53 @@ ChildProcessError: Traceback (most recent call last): self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context + File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill + File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context + File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate + File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample + File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 
1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward + File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( - File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply + File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( @@ -2287,7 +2287,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): +4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -2313,7 +2313,7 @@ ChildProcessError: Traceback (most recent call last): self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) @@ -2325,7 +2325,7 @@ ChildProcessError: Traceback (most recent call last): self.self_attn = 
DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1709, in __getattr__ + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' @@ -2436,7 +2436,7 @@ ChildProcessError: Traceback (most recent call last): ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): +4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -2452,53 +2452,53 @@ ChildProcessError: Traceback (most recent call last): self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context + File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill + File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context + File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate + File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample + File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1141, in forward + File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 944, in forward + File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1001, in forward layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 677, in forward + File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 734, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 500, in forward + File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 556, in forward attn_output = _flash_attention_forward( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( - File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply + File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( @@ -2507,7 +2507,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, -4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): +4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -2523,61 +2523,61 @@ ChildProcessError: Traceback (most recent call last): self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context + File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill + File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context + File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate + File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample + File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 
1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 864, in forward + File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( - File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( - File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply + File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( @@ -2805,7 +2805,7 @@ ImportError: This modeling file requires the following packages that were not fo ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1051.111424,5046.730752,0.0,4651.483136,4638.22848,s,1,14.167484375,14.167484375,0.0,14.167484375,14.167484375,14.167484375,14.167484375,[14.167484375],,kWh,0.00020650797157087102,2.2772040955236278e-05,7.706478387400906e-05,0.0003063447964001164,,MB,1251.110912,6172.901376,0.0,5765.070848,5418.661888,s,10,10.221631225585938,1.0221631225585939,0.007044564666882446,1.0222241821289062,1.0298222412109377,1.0307274047851562,1.0314515356445313,"[1.0081405029296875, 1.0177132568359375, 1.017791259765625, 1.0163858642578125, 1.019808837890625, 1.0292550048828124, 1.0246395263671875, 1.02962109375, 1.026643310546875, 
1.031632568359375]",tokens/s,250.44926230482866,kWh,2.973801881458409e-05,3.2795504039942414e-06,1.9803376953803075e-05,5.282094617238141e-05,tokens/kWh,4846562.179415393,MB,1274.10176,6172.901376,0.0,5765.070848,5418.664448,s,10,52.74841650390624,5.274841650390624,0.017688636116568155,5.276070068359376,5.29464609375,5.295215625,5.29567125,"[5.24542919921875, 5.252671875, 5.26422802734375, 5.2634521484375, 5.2657001953125, 5.28643994140625, 5.2933076171875, 5.2868828125, 5.29578515625, 5.29451953125]",tokens/s,11.943486492212442,kWh,0.0001548616572941675,1.708246161493997e-05,0.00010273677663379388,0.00027468089554290136,tokens/kWh,229357.05038925895,,s,630,52.74474177551269,0.08372181234208365,0.0016123843966272529,0.08350317001342773,0.08451623992919921,0.08499251632690429,0.09415560745239258,"[0.09373286437988282, 0.08188425445556641, 0.08277494049072266, 0.08255078125, 0.08146883392333984, 0.08222502136230468, 0.08254438018798828, 0.08226850891113281, 0.08222566223144531, 0.08239523315429688, 0.08211869049072265, 0.08367922973632813, 0.0836648941040039, 0.08332864379882812, 0.083159423828125, 0.08293990325927734, 0.08295219421386718, 0.08243199920654297, 0.08239513397216797, 0.08242572784423828, 0.0826103057861328, 0.08262973022460937, 0.08279535675048828, 0.08468281555175781, 0.08369971466064453, 0.08407360076904297, 0.08286707305908203, 0.08282931518554687, 0.08255030059814453, 0.08345600128173829, 0.08336022186279297, 0.08388960266113281, 0.08249606323242188, 0.08229865264892579, 0.0836171875, 0.08336380767822266, 0.08358521270751954, 0.08457417297363282, 0.0827992935180664, 0.08306278228759766, 0.0833062744140625, 0.08291964721679687, 0.08293888092041016, 0.08280323028564453, 0.08288508605957032, 0.083378173828125, 0.08305628967285156, 0.08392124938964844, 0.08400272369384766, 0.08334524536132812, 0.08345420837402344, 0.0834326400756836, 0.08384288024902344, 0.08277693176269531, 0.08317938995361328, 0.08288591766357421, 0.08290406036376953, 0.08331257629394531, 0.08328195190429688, 0.08376937866210937, 0.0842260513305664, 0.08376028442382813, 0.08324585723876952, 0.09474403381347657, 0.08245712280273437, 0.0822838363647461, 0.08258834838867188, 0.08222675323486328, 0.08281951904296875, 0.08237184143066406, 0.08274406433105469, 0.08244838714599609, 0.08250326538085938, 0.08283123016357422, 0.0852750701904297, 0.08408803558349609, 0.08409897613525391, 0.0830248031616211, 0.08250588989257812, 0.08279235076904297, 0.08234700775146485, 0.0827585906982422, 0.08258268737792969, 0.08262448120117187, 0.08277024078369141, 0.08378841400146485, 0.0840244140625, 0.08388819122314453, 0.08336653137207031, 0.08340838623046876, 0.08259248352050781, 0.08261734771728516, 0.08312934112548828, 0.08275113677978516, 0.08275593566894532, 0.0827146224975586, 0.0833628158569336, 0.0834420166015625, 0.0834464340209961, 0.08394927978515625, 0.08349314880371093, 0.08389990234375, 0.08259839630126953, 0.08308121490478515, 0.08254838562011718, 0.08253215789794922, 0.08416515350341797, 0.08279039764404297, 0.08401878356933594, 0.0833089599609375, 0.08357039642333984, 0.08373481750488282, 0.08353791809082031, 0.08342118072509766, 0.08315081787109375, 0.0827449951171875, 0.08274278259277344, 0.08333958435058594, 0.08254534149169922, 0.08296636962890624, 0.08361094665527344, 0.08418946838378906, 0.08419987487792968, 0.08418656158447266, 0.08418319702148437, 0.08361615753173827, 0.09531391906738282, 0.08266342163085938, 0.08249932861328126, 0.08223881530761719, 0.08286239624023438, 0.08204723358154296, 
0.08270883178710937, 0.0821860122680664, 0.08229199981689453, 0.082430908203125, 0.08282521820068359, 0.08570230102539063, 0.08478755187988281, 0.08360108947753907, 0.08312223815917968, 0.08263814544677735, 0.0827742691040039, 0.08268051147460938, 0.0826060791015625, 0.08261631774902344, 0.08297779083251954, 0.08302285003662109, 0.08396364593505859, 0.08368339538574218, 0.08384121704101563, 0.08333516693115234, 0.08339250946044922, 0.0830750732421875, 0.08344985961914063, 0.08257855987548827, 0.08258854675292969, 0.0828006362915039, 0.08269209289550782, 0.08280604553222656, 0.08355289459228515, 0.08343116760253906, 0.08400931549072266, 0.0841195526123047, 0.0833597412109375, 0.08349612426757813, 0.08328479766845703, 0.08298601531982422, 0.08394786834716797, 0.08350131225585937, 0.08236444854736329, 0.08387824249267578, 0.08419020843505859, 0.08362432098388672, 0.08404188537597657, 0.08434121704101563, 0.08422156524658203, 0.08327926635742187, 0.08301372528076172, 0.08393119812011719, 0.08326172637939454, 0.08341078186035156, 0.0833148193359375, 0.08365318298339844, 0.08426290893554687, 0.08564326477050781, 0.08353177642822265, 0.08437904357910156, 0.0850370864868164, 0.093502685546875, 0.08262319946289062, 0.08272697448730469, 0.08185855865478515, 0.08269596862792969, 0.08230524444580078, 0.08299314880371093, 0.08247062683105469, 0.0826370849609375, 0.08252620697021484, 0.08317282867431641, 0.08695657348632813, 0.08456396484375, 0.0847093734741211, 0.0830525131225586, 0.08315702056884766, 0.08279596710205078, 0.08251853179931641, 0.08263276672363282, 0.08300112152099609, 0.08184435272216797, 0.08306288146972657, 0.0831448974609375, 0.08414803314208984, 0.08451622772216796, 0.08310198211669922, 0.08334166717529297, 0.08253440093994141, 0.08271459197998046, 0.08262659454345703, 0.08343551635742187, 0.08301158142089844, 0.08328524780273437, 0.08290787506103516, 0.08414825439453125, 0.08377497863769531, 0.08368998718261719, 0.08449219512939453, 0.0836278076171875, 0.08283168029785157, 0.08412764739990235, 0.08323193359375, 0.0832437744140625, 0.08290525054931641, 0.08354608154296875, 0.08372140502929687, 0.08433545684814453, 0.08415805053710937, 0.08418345642089843, 0.0841928939819336, 0.08401548767089843, 0.08317132568359376, 0.08376866912841797, 0.08383554840087891, 0.08307917022705077, 0.08286822509765625, 0.08324294281005859, 0.08347241973876954, 0.0833656997680664, 0.08403782653808593, 0.08403097534179688, 0.08350323486328125, 0.08395174407958984, 0.09424813079833984, 0.08263107299804688, 0.08235049438476562, 0.08203469085693359, 0.08314662170410156, 0.08288182067871094, 0.08285065460205078, 0.0826429443359375, 0.08223232269287109, 0.0826497573852539, 0.08272489929199218, 0.08669580841064453, 0.08457430267333985, 0.08375299072265625, 0.0823967056274414, 0.08287484741210938, 0.0827886734008789, 0.08164940643310546, 0.0830047378540039, 0.0823447036743164, 0.0831219482421875, 0.08300790405273438, 0.08436736297607422, 0.08464383697509766, 0.08426496124267578, 0.08397209930419922, 0.08342733001708984, 0.08272064208984375, 0.08317324829101562, 0.08349065399169922, 0.08281298828125, 0.08227996826171875, 0.08366073608398437, 0.08352243041992187, 0.08410050964355469, 0.08394608306884765, 0.08352687835693359, 0.08399542236328125, 0.08322361755371094, 0.08305350494384765, 0.08349081420898438, 0.08285903930664062, 0.08314115142822266, 0.0830665283203125, 0.08406095886230469, 0.08399052429199219, 0.08406416320800782, 0.08346633911132813, 0.08402329254150391, 0.08387789154052734, 0.08379801940917969, 
0.08333055877685547, 0.08345241546630859, 0.08350310516357422, 0.083019775390625, 0.08354611206054688, 0.08403148651123046, 0.0842072296142578, 0.08413426971435548, 0.08406832122802735, 0.08382208251953124, 0.08403408050537109, 0.08356249237060547, 0.09464048004150391, 0.08275360107421875, 0.08243170928955078, 0.08230844879150391, 0.08279750061035156, 0.0830382080078125, 0.08288870239257813, 0.08281702423095703, 0.08288003540039063, 0.08283948516845703, 0.08289311981201172, 0.08619235229492188, 0.08539110565185547, 0.08424678039550781, 0.08404505920410156, 0.0832577896118164, 0.08309161376953125, 0.08295645141601563, 0.08341011047363281, 0.08302674865722656, 0.08274508666992188, 0.08358118438720703, 0.08449024200439453, 0.08418099212646485, 0.08396940612792969, 0.08395225524902344, 0.08351744079589844, 0.08359635162353515, 0.08263775634765624, 0.08344576263427735, 0.08328806304931641, 0.08296857452392578, 0.08333106994628907, 0.08411545562744141, 0.0843563232421875, 0.08447875213623048, 0.08434073638916016, 0.08426207733154296, 0.0841346206665039, 0.08322672271728515, 0.0828310089111328, 0.08379542541503907, 0.08343436431884765, 0.08353913879394531, 0.08412038421630859, 0.0849731216430664, 0.0843411865234375, 0.08442880249023438, 0.0841890869140625, 0.08400870513916016, 0.08481158447265626, 0.0839234848022461, 0.08333277130126954, 0.08311228942871093, 0.08377318572998046, 0.084500732421875, 0.08421689605712891, 0.08454351806640625, 0.08374291229248047, 0.084457763671875, 0.08406416320800782, 0.0841016616821289, 0.08527590179443359, 0.09516851043701172, 0.08265727996826172, 0.08262044525146485, 0.08278550720214843, 0.08243276977539063, 0.08337129974365234, 0.08266825866699219, 0.08340444946289062, 0.08307542419433593, 0.08304025268554688, 0.08301363372802735, 0.08621414184570313, 0.08516044616699218, 0.08294121551513672, 0.08338505554199219, 0.08289673614501954, 0.08354217529296876, 0.08328396606445312, 0.0830013427734375, 0.08354351806640625, 0.08281683349609376, 0.0836956787109375, 0.08421443176269532, 0.08514790344238281, 0.08464358520507813, 0.08396390533447265, 0.08329945373535157, 0.08472598266601562, 0.08347456359863281, 0.08437814331054687, 0.08338547515869141, 0.08357772827148438, 0.08367922973632813, 0.08362592315673828, 0.08453126525878907, 0.08423014068603515, 0.08380745697021484, 0.0846012191772461, 0.08380210876464844, 0.08429535675048828, 0.0836487045288086, 0.08342550659179687, 0.08331295776367187, 0.08421945953369141, 0.08322000122070312, 0.08412973022460937, 0.08420223999023438, 0.08401299285888672, 0.08435945892333985, 0.08454342651367187, 0.08477005004882812, 0.08379270172119141, 0.08424652862548829, 0.08394137573242187, 0.08443289947509766, 0.08412156677246094, 0.08451634979248047, 0.08407001495361328, 0.08514211273193359, 0.08414553833007812, 0.08437763214111328, 0.0839197769165039, 0.08428953552246093, 0.09392908477783203, 0.08289129638671874, 0.08263382720947265, 0.08307798767089844, 0.08284166717529297, 0.08329567718505859, 0.08275411224365234, 0.08307472229003907, 0.08289929962158203, 0.08225791931152343, 0.08451686096191406, 0.0861338882446289, 0.0846192626953125, 0.08428224182128906, 0.08369939422607423, 0.08298902130126953, 0.08269859313964843, 0.08315904235839844, 0.08288665771484376, 0.08291942596435548, 0.08283084869384766, 0.08316316986083984, 0.08372886657714844, 0.08420146942138672, 0.08523538970947266, 0.08371561431884765, 0.08368787384033204, 0.08320832061767579, 0.08395388793945313, 0.08356012725830078, 0.0835771484375, 0.08306278228759766, 
0.08309283447265625, 0.08400086212158203, 0.08376582336425781, 0.08492237091064453, 0.08413129425048828, 0.08426140594482422, 0.08372153472900391, 0.08392774200439453, 0.08409907531738281, 0.08411750030517579, 0.08356249237060547, 0.08320745849609375, 0.08424505615234375, 0.08415001678466796, 0.08441680145263672, 0.08429376220703125, 0.084168701171875, 0.08416595458984374, 0.08390262603759766, 0.08414784240722656, 0.08365872192382813, 0.08387577819824218, 0.08397926330566406, 0.08385740661621094, 0.08415846252441406, 0.08427836608886718, 0.08441744232177735, 0.0841295394897461, 0.08408882904052735, 0.084216064453125, 0.0840847396850586, 0.09591603088378907, 0.08282316589355469, 0.08293170928955078, 0.08317747497558593, 0.08296857452392578, 0.0830135040283203, 0.08340902709960937, 0.08342323303222657, 0.08300482940673828, 0.08303472137451172, 0.08328601837158203, 0.08601805114746094, 0.08506813049316406, 0.08354166412353516, 0.08297792053222657, 0.08307392120361329, 0.08327372741699218, 0.08322866821289063, 0.08354611206054688, 0.0837201919555664, 0.08268185424804687, 0.08351129913330078, 0.08463884735107421, 0.08422284698486328, 0.08500838470458984, 0.08376525115966797, 0.08428749084472656, 0.08339584350585938, 0.08365033721923829, 0.08385635375976562, 0.0840327377319336, 0.08320899200439454, 0.08331791687011719, 0.08413654327392578, 0.08425286102294922, 0.08424419403076172, 0.08418748474121093, 0.0837918701171875, 0.0840002212524414, 0.08358326721191406, 0.08315110778808593, 0.08414733123779297, 0.08385330963134766, 0.08356047821044922, 0.08436208343505859, 0.084242431640625, 0.08447404479980469, 0.08486441802978516, 0.0841480941772461, 0.0841938247680664, 0.08421311950683594, 0.08370035552978515, 0.08382669067382813, 0.08425167846679688, 0.0842208023071289, 0.08402748870849609, 0.08433869171142579, 0.0840847396850586, 0.08571084594726562, 0.08439926147460937, 0.08427811431884766, 0.0842977294921875, 0.08384307098388671, 0.096570556640625, 0.08266819000244141, 0.08274524688720702, 0.08321663665771484, 0.08266130828857422, 0.0829573745727539, 0.08254566192626953, 0.08302735900878906, 0.0825898208618164, 0.08306121826171875, 0.08286402893066407, 0.08687567901611327, 0.08484921264648437, 0.08342095947265625, 0.08356204986572266, 0.08309622192382812, 0.08263597106933594, 0.08283993530273437, 0.08373462677001953, 0.08279821014404297, 0.08317001342773438, 0.08344739532470703, 0.08518492889404297, 0.08490937805175781, 0.08434758758544922, 0.08422182464599609, 0.08392867279052735, 0.08348694610595703, 0.08338361358642578, 0.08401769256591797, 0.08336227416992187, 0.0832034912109375, 0.08292601776123047, 0.08379804992675781, 0.0849420166015625, 0.0846447982788086, 0.08409420776367188, 0.08451148986816406, 0.08368128204345703, 0.08322649383544922, 0.08468252563476562, 0.08388028717041016, 0.08450048065185548, 0.08355760192871094, 0.0834543685913086, 0.08367926025390625, 0.08439228820800782, 0.08402861022949219, 0.08584889221191407, 0.08421785736083984, 0.08421171569824219, 0.08411254119873048, 0.08425965118408203, 0.08483229064941407, 0.08315833282470703, 0.08396819305419922, 0.08337664031982422, 0.08394054412841796, 0.08453987121582031, 0.08447216033935546, 0.08451891326904297, 0.0843345947265625, 0.08496742248535157]",tokens/s,11.94431859542223,,, 
-4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): +4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -2831,15 +2831,15 @@ ChildProcessError: Traceback (most recent call last): self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4000, in from_pretrained + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 615, in attach_align_device_hook_on_blocks + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 160, in add_hook_to_module + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 282, in init_hook + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) @@ -2847,7 +2847,7 @@ ChildProcessError: Traceback (most recent call last): return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 
296, in _quantize w = self.data.contiguous().to(device) -torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU +torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU 0 has a total capacity of 14.75 GiB of which 239.06 MiB is free. Process 21222 has 14.51 GiB memory in use. Of the allocated memory 14.07 GiB is allocated by PyTorch, and 332.11 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1047.486464,1393.491968,0.0,998.244352,995.459072,s,1,9.1995439453125,9.1995439453125,0.0,9.1995439453125,9.1995439453125,9.1995439453125,9.1995439453125,[9.1995439453125],,kWh,6.138048507500286e-05,6.759885228847262e-06,2.232835119599791e-05,9.046872149984803e-05,,MB,1316.298752,1619.984384,0.0,1212.153856,1174.001664,s,10,2.065621322631836,0.2065621322631836,0.001057923379171417,0.20703142547607423,0.20757976837158204,0.20789478073120116,0.20814679061889646,"[0.20546205139160156, 0.2072406005859375, 0.207006591796875, 0.2082097930908203, 0.20497737121582033, 0.20498719787597655, 0.2070726776123047, 0.20705625915527343, 0.20609901428222657, 0.207509765625]",tokens/s,1239.3365482586466,kWh,6.249252624379373e-06,6.890893309256835e-07,4.169678276638249e-06,1.1108020231943305e-05,tokens/kWh,23046411.030457206,MB,1338.712064,1619.984384,0.0,1212.153856,1174.004224,s,10,22.765936279296877,2.2765936279296874,0.010787243324537987,2.280182861328125,2.28728349609375,2.2916496826171873,2.2951426318359376,"[2.286313232421875, 2.257575927734375, 2.296015869140625, 2.26831201171875, 2.279771240234375, 2.28084423828125, 2.280594482421875, 2.27190283203125, 2.263395263671875, 2.281211181640625]",tokens/s,27.67292292621042,kWh,6.532558710478703e-05,7.2054362435162614e-06,3.535490889856203e-05,0.00010788593224686533,tokens/kWh,583950.0914340061,,s,630,22.761943065643333,0.03613006835816398,0.0007618873217216914,0.03608678245544433,0.0365433364868164,0.036807624435424804,0.038606347618103026,"[0.03631513595581055, 0.03638886260986328, 0.03630489730834961, 0.03641753768920898, 0.03643932723999024, 0.03671318435668945, 0.03632534408569336, 0.03666128158569336, 0.03630819320678711, 0.03644054412841797, 0.03646905517578125, 0.03628851318359375, 0.03624284744262695, 0.03624406433105469, 0.036173824310302735, 0.03623321533203125, 0.036154464721679686, 0.0362259521484375, 0.03627798461914063, 0.03643830490112305, 0.036062782287597656, 0.035994049072265624, 0.036268096923828125, 0.03680006408691406, 0.0360816650390625, 0.03593801498413086, 0.03613056182861328, 
0.03600476837158203, 0.03606118392944336, 0.03620249557495117, 0.036171775817871094, 0.03632467269897461, 0.03680716705322266, 0.03615094375610352, 0.03638457489013672, 0.03647151947021485, 0.03633148956298828, 0.03638070297241211, 0.036269855499267575, 0.03617337417602539, 0.036276897430419924, 0.036345855712890625, 0.036216705322265626, 0.03619782257080078, 0.03608000183105469, 0.03618572616577148, 0.03661606216430664, 0.03612713623046875, 0.03635036849975586, 0.03609395217895508, 0.03621683120727539, 0.03639839935302734, 0.036049633026123046, 0.0360807991027832, 0.03611334228515625, 0.036359870910644534, 0.0361064338684082, 0.03632332611083984, 0.03639449691772461, 0.03618790435791015, 0.03641622543334961, 0.036286495208740235, 0.03637452697753906, 0.036089855194091795, 0.03622611236572266, 0.03609417724609375, 0.03588927841186523, 0.03541183853149414, 0.035952865600585936, 0.036131328582763675, 0.03614672088623047, 0.036176353454589846, 0.03618815994262695, 0.03607932662963867, 0.03604246520996094, 0.0361060791015625, 0.036063968658447264, 0.036073471069335936, 0.03620364761352539, 0.03607846450805664, 0.036116481781005856, 0.03645183944702148, 0.036114944458007815, 0.03611580657958984, 0.03637315368652344, 0.036345855712890625, 0.03615913772583008, 0.036267616271972655, 0.036692737579345706, 0.03616358566284179, 0.03586867141723633, 0.03549507141113281, 0.035915775299072264, 0.03579375839233399, 0.03560451126098633, 0.03560444641113281, 0.03557891082763672, 0.035503070831298826, 0.035604480743408204, 0.03596492767333984, 0.03577974319458008, 0.03558281707763672, 0.03577446365356445, 0.03547663879394531, 0.03570979309082031, 0.035563518524169925, 0.03548364639282227, 0.03538739013671875, 0.03550931167602539, 0.035275104522705075, 0.03582419204711914, 0.03569052886962891, 0.035741504669189454, 0.03582585525512695, 0.035827713012695314, 0.03561593627929688, 0.035447616577148434, 0.03531478500366211, 0.03510774230957031, 0.03537510299682617, 0.03563430404663086, 0.035447486877441405, 0.035483230590820314, 0.03567196655273437, 0.03564358520507813, 0.03540838241577148, 0.036004417419433596, 0.035833854675292966, 0.03548160171508789, 0.035438591003417966, 0.03548364639282227, 0.035315711975097655, 0.03712575912475586, 0.03573183822631836, 0.03578019332885742, 0.036105758666992185, 0.03603340911865234, 0.03621478271484375, 0.03589529418945313, 0.03587891387939453, 0.035776512145996094, 0.038744224548339846, 0.043791969299316405, 0.0459975357055664, 0.03665795135498047, 0.03726889419555664, 0.03670412826538086, 0.037017887115478515, 0.03653023910522461, 0.0365428466796875, 0.0364620475769043, 0.036416030883789065, 0.03625369644165039, 0.03636627197265625, 0.03639302444458008, 0.03640444946289063, 0.0364095344543457, 0.03645091247558594, 0.0363392333984375, 0.036831710815429686, 0.036397056579589845, 0.03664896011352539, 0.03649331283569336, 0.03610009765625, 0.035910816192626954, 0.035939167022705075, 0.035846080780029294, 0.03612377548217773, 0.03573356628417969, 0.035773311614990234, 0.03599788665771484, 0.03612854385375976, 0.03589532852172852, 0.0359444465637207, 0.03590348815917969, 0.03618406295776367, 0.0360263671875, 0.03596083068847656, 0.03633273696899414, 0.035928768157958986, 0.03603263854980469, 0.0359659538269043, 0.03593667221069336, 0.03576073455810547, 0.03557331085205078, 0.03573907089233398, 0.03591993713378906, 0.035851200103759764, 0.03592547225952149, 0.03603308868408203, 0.03596083068847656, 0.03570217514038086, 0.03559513473510742, 0.03530902481079102, 
0.03533849716186523, 0.035514240264892576, 0.036141151428222655, 0.036136768341064454, 0.03607686233520508, 0.036037151336669924, 0.03591551971435547, 0.03571161651611328, 0.03590758514404297, 0.03583590316772461, 0.03603046417236328, 0.036050945281982424, 0.03593625640869141, 0.03629040145874023, 0.03593977737426758, 0.036444896697998046, 0.036087806701660154, 0.03611795043945312, 0.03611091232299805, 0.0362630729675293, 0.0359637451171875, 0.03611603164672852, 0.03605686569213867, 0.03604665756225586, 0.03621897506713867, 0.03638502502441406, 0.03612416076660156, 0.0359444465637207, 0.03610726547241211, 0.0363675537109375, 0.03611936187744141, 0.036797534942626955, 0.036170654296875, 0.03596822357177734, 0.03600259017944336, 0.035985408782958986, 0.0361085433959961, 0.03600950241088867, 0.03635398483276367, 0.03862748718261719, 0.03642176055908203, 0.035934207916259765, 0.03549184036254883, 0.035489791870117186, 0.03578265762329102, 0.03605500793457031, 0.036081695556640626, 0.03584342575073242, 0.03549801635742188, 0.035299457550048825, 0.035197311401367185, 0.03527692794799805, 0.035186622619628904, 0.035397022247314454, 0.03545353698730469, 0.03559596633911133, 0.03563484954833984, 0.03836928176879883, 0.03598332977294922, 0.03587376022338867, 0.035811328887939455, 0.035837310791015625, 0.03582169723510742, 0.03607807922363281, 0.03552175903320313, 0.03606403350830078, 0.03562905502319336, 0.0360079345703125, 0.03608575820922852, 0.03604044723510742, 0.03588940811157226, 0.036046527862548826, 0.036182334899902344, 0.03597721481323242, 0.03600352096557617, 0.03594249725341797, 0.03581267166137695, 0.035754913330078124, 0.0355404167175293, 0.03668751907348633, 0.035715744018554686, 0.035975425720214844, 0.036018177032470705, 0.036208641052246096, 0.03609600067138672, 0.035905025482177735, 0.03604326248168945, 0.036239360809326174, 0.036308609008789065, 0.03619190216064453, 0.03636912155151367, 0.036675582885742186, 0.03707411193847656, 0.03697747039794922, 0.03709952163696289, 0.03701929473876953, 0.036439937591552736, 0.03645196914672852, 0.036380992889404294, 0.03628441619873047, 0.03621532821655273, 0.03634902572631836, 0.03626691055297852, 0.03624905776977539, 0.03640169525146485, 0.036239360809326174, 0.036224414825439456, 0.036799072265625, 0.03633135986328125, 0.036369728088378905, 0.036363105773925784, 0.036359169006347655, 0.036365310668945314, 0.036428958892822265, 0.03614761734008789, 0.03631507110595703, 0.03589580917358398, 0.03579904174804688, 0.03594649505615234, 0.03587481689453125, 0.03630617523193359, 0.03628630447387695, 0.03569084930419922, 0.03552854537963867, 0.03561033630371094, 0.04100960159301758, 0.035587425231933596, 0.03542287826538086, 0.03538236618041992, 0.03623004913330078, 0.03603235244750977, 0.03594460678100586, 0.03570073699951172, 0.03851878356933594, 0.03585971069335937, 0.035526496887207035, 0.03513817596435547, 0.03518288040161133, 0.03566947174072266, 0.035435039520263674, 0.035604480743408204, 0.03538905715942383, 0.03527923202514648, 0.03544678497314453, 0.035794944763183595, 0.03634918212890625, 0.03610265731811523, 0.036398944854736326, 0.03721420669555664, 0.03613737487792969, 0.035872608184814456, 0.036019935607910156, 0.03630329513549805, 0.03643734359741211, 0.03562972640991211, 0.03586492919921875, 0.03560524749755859, 0.03623004913330078, 0.0363765754699707, 0.03624755096435547, 0.03659161758422851, 0.03575603103637695, 0.03544416046142578, 0.03628268814086914, 0.03634991836547852, 0.036260128021240234, 0.03649331283569336, 
0.03633724975585938, 0.036647327423095705, 0.03644163131713867, 0.03692303848266602, 0.0366569938659668, 0.03649212646484375, 0.03632326507568359, 0.03637267303466797, 0.03637990570068359, 0.03635891342163086, 0.03686604690551758, 0.036517887115478515, 0.03700735855102539, 0.03647283172607422, 0.036547744750976566, 0.036413345336914066, 0.0363570556640625, 0.03855459213256836, 0.036674560546875, 0.036335166931152345, 0.036018623352050784, 0.035694591522216795, 0.03568025588989258, 0.0360079345703125, 0.03698908615112305, 0.03581353759765625, 0.03591446304321289, 0.036236255645751954, 0.036239360809326174, 0.03592192077636719, 0.036044769287109375, 0.03585436630249023, 0.03557580947875977, 0.03546908950805664, 0.035727039337158206, 0.04241667175292969, 0.03923932647705078, 0.03579702377319336, 0.03566828918457031, 0.03592752075195312, 0.03591222381591797, 0.03601747131347656, 0.035754688262939455, 0.03562656021118164, 0.03557011032104492, 0.03562496185302735, 0.035624286651611325, 0.035830432891845704, 0.03587071990966797, 0.035880863189697264, 0.03617728042602539, 0.03585004806518555, 0.036109214782714845, 0.0357498893737793, 0.035703968048095704, 0.0362402229309082, 0.03618201446533203, 0.03595017623901367, 0.035665599822998044, 0.03557449722290039, 0.03554508972167969, 0.03590566253662109, 0.03616748809814453, 0.035856063842773435, 0.03563763046264649, 0.03566947174072266, 0.035846687316894534, 0.0364738883972168, 0.036293598175048826, 0.036238624572753904, 0.03631135940551758, 0.036729248046875, 0.03618764877319336, 0.0363524169921875, 0.03630259323120117, 0.036331871032714846, 0.036274177551269535, 0.036173473358154296, 0.03651824188232422, 0.03658863830566406, 0.036259166717529295, 0.03645708847045898, 0.03639292907714844, 0.036344894409179686, 0.035871742248535156, 0.03554099273681641, 0.03529523086547852, 0.03530137634277344, 0.035619998931884764, 0.03618697738647461, 0.03595468902587891, 0.036224384307861325, 0.03605295944213867, 0.03575260925292969, 0.03583590316772461, 0.035676063537597655, 0.035468929290771486, 0.03545471954345703, 0.03521200180053711, 0.035398815155029295, 0.036025150299072266, 0.0357540168762207, 0.036066753387451175, 0.03582534408569336, 0.03592057418823242, 0.03614739227294922, 0.03609190368652344, 0.03606902313232422, 0.03583158493041992, 0.03618649673461914, 0.03573574447631836, 0.03577967834472656, 0.03621571350097656, 0.03596492767333984, 0.035764320373535156, 0.03631504058837891, 0.036435966491699216, 0.0372262077331543, 0.03676803207397461, 0.036029792785644534, 0.03587676620483399, 0.035832576751708985, 0.03578060913085938, 0.036122623443603515, 0.035899391174316404, 0.03649846267700195, 0.03775968170166016, 0.036117889404296874, 0.03606780624389649, 0.03690335845947266, 0.03605692672729492, 0.03600751876831055, 0.03616128158569336, 0.03617670440673828, 0.036083713531494144, 0.03609132766723633, 0.03614777755737305, 0.036261886596679685, 0.03626598358154297, 0.036222976684570314, 0.03626355361938476, 0.03615071868896484, 0.03620345687866211, 0.03631382369995117, 0.03618953704833985, 0.03646716690063476, 0.03631052780151367, 0.036324031829833986, 0.03648716735839844, 0.036190208435058595, 0.0365588493347168, 0.03680799865722656, 0.036307647705078126, 0.03648876953125, 0.036630977630615236, 0.03636633682250977, 0.03693772888183594, 0.036206592559814454, 0.03641753768920898, 0.03631039810180664, 0.03772684860229492, 0.03576617431640625, 0.035458656311035154, 0.03545753479003906, 0.03533824157714844, 0.03544863891601562, 0.03528278350830078, 
0.03553519821166992, 0.035683551788330076, 0.03586947250366211, 0.03572447967529297, 0.03551110458374023, 0.035422206878662106, 0.03552249526977539, 0.035272640228271486, 0.035461246490478514, 0.03572323226928711, 0.036388607025146485, 0.036030750274658206, 0.036111679077148434, 0.03584451293945313, 0.03639865493774414, 0.03620060729980469, 0.036041278839111325, 0.03581542587280274, 0.035672065734863284, 0.035432449340820314, 0.03598745727539063, 0.036111934661865235, 0.03669561767578125, 0.035289985656738285, 0.035278335571289066, 0.03565824127197265, 0.03575807952880859, 0.03544582366943359, 0.03556857681274414, 0.03561417770385742, 0.03551286315917969, 0.03536076736450195, 0.03554099273681641, 0.03572531127929687, 0.03574915313720703, 0.035482303619384765, 0.03573276901245117, 0.03549078369140625, 0.035643199920654296, 0.03655267333984375, 0.03637152099609375, 0.0363570556640625, 0.03666329574584961, 0.03645587158203125, 0.03635843276977539, 0.03630313491821289, 0.03613654327392578, 0.036192798614501955, 0.036124542236328126, 0.036534271240234374, 0.0364150390625, 0.03649491119384766, 0.03656179046630859, 0.036296703338623046, 0.036347904205322266, 0.03629171371459961, 0.03660070419311524, 0.036474815368652345, 0.036467937469482424, 0.035936351776123046, 0.036082176208496096, 0.036319488525390624, 0.03579084777832031, 0.0355676155090332, 0.03625574493408203, 0.036536319732666016, 0.03630080032348633, 0.036446208953857424, 0.03794851303100586, 0.035977886199951174, 0.03591932678222656, 0.03552259063720703, 0.035584766387939455, 0.03611033630371094, 0.0355810546875, 0.03575616073608399, 0.03627494430541992, 0.0366196174621582, 0.036238304138183595, 0.03626361465454102, 0.03598057556152344, 0.03578339385986328, 0.035741119384765624, 0.03582624053955078, 0.035706241607666014, 0.03554982376098633, 0.03544063949584961, 0.03544073486328125, 0.035727264404296875, 0.03570406341552734, 0.03633023834228516, 0.03594623947143555, 0.03628249740600586, 0.0366429443359375, 0.03623321533203125, 0.036459617614746094, 0.03705948638916016, 0.03685171127319336, 0.03671244812011719, 0.03612089538574219, 0.03596681594848633, 0.03631497573852539]",tokens/s,27.67777769161179,,, @@ -2859,7 +2859,7 @@ torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. 
G 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,884.334592,657.391616,0.0,262.144,258.163712,s,1,8.331421875,8.331421875,0.0,8.331421875,8.331421875,8.331421875,8.331421875,[8.331421875],,kWh,2.553798450001068e-05,2.8098189440193495e-06,8.393062269990148e-06,3.6740865714020175e-05,,MB,1195.66336,758.054912,0.0,350.224384,317.820928,s,10,0.23482944107055664,0.023482944107055665,0.0001976525830758702,0.023503056526184084,0.02369461154937744,0.023746105670928955,0.023787300968170166,"[0.02336067199707031, 0.02362454414367676, 0.023585119247436524, 0.023522176742553712, 0.023483936309814452, 0.023074464797973634, 0.023319007873535157, 0.02379759979248047, 0.023378751754760743, 0.023683168411254882]",tokens/s,10901.529162311572,kWh,6.968638696897281e-07,7.68513307585397e-08,4.622689988257993e-07,1.2359841992740671e-06,tokens/kWh,207122388.90299484,MB,1229.115392,779.026432,0.0,371.195904,317.823488,s,10,11.461855712890623,1.1461855712890623,0.008816181875136324,1.14757421875,1.1575904418945313,1.1585895935058594,1.159388914794922,"[1.1345552978515625, 1.1515673828125, 1.1480777587890625, 1.157368408203125, 1.1485537109375, 1.1332515869140625, 1.1595887451171876, 1.1347701416015625, 1.1470706787109375, 1.147052001953125]",tokens/s,54.96492154332984,kWh,3.322266337739506e-05,3.6639666195696823e-06,1.38264368765738e-05,5.0713066873538534e-05,tokens/kWh,1242283.3775188746,,s,630,11.456464830398557,0.018184864810156444,0.00044804628817504525,0.0181625919342041,0.018491910934448245,0.018595945358276366,0.019085622653961187,"[0.01769340705871582, 0.018038816452026367, 0.017971168518066405, 0.017991199493408203, 0.017947359085083006, 0.01788697624206543, 0.01800396728515625, 0.018288576126098632, 0.01803225517272949, 0.01802729606628418, 0.01840905570983887, 0.018012224197387697, 0.017982528686523436, 0.017949600219726563, 0.017825056076049804, 0.017947519302368164, 0.017827007293701173, 0.01778521537780762, 0.017997919082641603, 0.017994304656982422, 0.017968128204345703, 0.018207487106323243, 0.017945920944213867, 0.017820255279541015, 0.017889280319213868, 0.017879039764404296, 0.018149375915527344, 0.017958015441894532, 0.01788812828063965, 0.01796847915649414, 0.018225248336791993, 0.018433631896972655, 0.018410303115844726, 0.01839529609680176, 0.018440000534057616, 0.018106399536132814, 0.018067615509033203, 0.01818934440612793, 0.018217983245849608, 0.01787696075439453, 0.018159839630126955, 0.01795465660095215, 0.018069440841674805, 0.017939647674560546, 0.018033567428588866, 0.01786979293823242, 0.017748928070068358, 0.017718719482421874, 0.017820064544677734, 0.017721248626708985, 0.017753536224365235, 0.017774816513061523, 0.017752351760864257, 0.01766383934020996, 0.01775868797302246, 0.017811679840087892, 0.01778659248352051, 0.018026399612426757, 0.017965024948120117, 0.01808211135864258, 0.018124671936035158, 0.018335071563720703, 
0.01846134376525879, 0.017756351470947264, 0.018103103637695312, 0.018481151580810547, 0.017893375396728514, 0.017821695327758787, 0.017915903091430666, 0.01804697608947754, 0.018247840881347656, 0.01840320014953613, 0.018009727478027343, 0.017759904861450196, 0.017966880798339843, 0.017768768310546874, 0.01819926452636719, 0.018120576858520508, 0.018160991668701172, 0.018094751358032228, 0.01834966468811035, 0.01807401657104492, 0.018218399047851563, 0.01812950325012207, 0.018081855773925782, 0.018442432403564454, 0.018593536376953126, 0.018255872726440428, 0.01844633674621582, 0.017954944610595703, 0.017932159423828125, 0.017983104705810545, 0.017885568618774415, 0.01828873634338379, 0.017960832595825194, 0.01847260856628418, 0.01811289596557617, 0.018194463729858397, 0.01818623924255371, 0.018184127807617186, 0.01797228813171387, 0.01801254463195801, 0.018409631729125978, 0.018345855712890626, 0.018516544342041016, 0.018367712020874023, 0.018385887145996094, 0.018453664779663086, 0.018451103210449217, 0.01857161521911621, 0.018523807525634765, 0.01840127944946289, 0.01848121643066406, 0.01849955177307129, 0.018532320022583006, 0.018710527420043945, 0.018792448043823243, 0.01849888038635254, 0.018476959228515624, 0.018921375274658203, 0.01862499237060547, 0.018557600021362305, 0.018480640411376953, 0.018520320892333984, 0.01856060791015625, 0.01842838478088379, 0.018151424407958985, 0.01849113655090332, 0.018448640823364258, 0.018409023284912108, 0.018479551315307617, 0.01830297660827637, 0.018381856918334962, 0.01866032028198242, 0.018595680236816407, 0.018452640533447265, 0.018539648056030273, 0.018402175903320314, 0.01840947151184082, 0.018548959732055663, 0.018510751724243164, 0.018617311477661134, 0.018536352157592775, 0.018366592407226563, 0.01848512077331543, 0.018381919860839844, 0.01818281555175781, 0.017939744949340822, 0.018190399169921875, 0.018172832489013673, 0.01803264045715332, 0.018093631744384765, 0.018022335052490235, 0.01802627182006836, 0.01808697509765625, 0.018110111236572267, 0.017876575469970703, 0.017902048110961914, 0.017858495712280275, 0.017989023208618164, 0.018666048049926758, 0.01835580825805664, 0.018155904769897462, 0.018155584335327147, 0.01822329521179199, 0.018187904357910158, 0.01865292739868164, 0.01842835235595703, 0.018290016174316408, 0.01867024040222168, 0.018869760513305665, 0.01801849555969238, 0.018071008682250977, 0.018216096878051757, 0.017823423385620117, 0.01788217544555664, 0.017777599334716798, 0.017864704132080078, 0.017819648742675782, 0.017958656311035156, 0.017856000900268554, 0.01780950355529785, 0.017775487899780274, 0.017870624542236327, 0.018104320526123048, 0.018179424285888673, 0.018000736236572265, 0.018083776473999023, 0.018145151138305664, 0.018109888076782227, 0.018170080184936523, 0.018233823776245116, 0.018835552215576173, 0.01825564765930176, 0.018464799880981445, 0.01833977508544922, 0.018152576446533203, 0.01800284767150879, 0.017954975128173827, 0.018294271469116212, 0.01780678367614746, 0.017770816802978515, 0.01774444770812988, 0.018429983139038087, 0.017922048568725587, 0.017903039932250977, 0.017905599594116212, 0.01797385597229004, 0.017799200057983397, 0.017733600616455077, 0.018282751083374023, 0.017780511856079102, 0.018292736053466797, 0.018685951232910156, 0.018441503524780273, 0.01809619140625, 0.018069183349609375, 0.018351423263549806, 0.018107679367065428, 0.01826464080810547, 0.018115936279296876, 0.018080255508422852, 0.018157855987548828, 0.018670944213867186, 0.01872496032714844, 
0.01848067283630371, 0.018395967483520508, 0.01812879943847656, 0.01813811111450195, 0.018039392471313476, 0.01806502342224121, 0.01807436752319336, 0.01800998306274414, 0.017960159301757813, 0.017924543380737304, 0.018096607208251955, 0.018126880645751953, 0.01795187187194824, 0.017927040100097658, 0.0179182071685791, 0.018015167236328126, 0.020692800521850584, 0.01842870330810547, 0.018358015060424806, 0.018285343170166016, 0.018300607681274415, 0.018301984786987305, 0.018546911239624025, 0.020617984771728517, 0.02458624076843262, 0.018993152618408202, 0.018486623764038087, 0.017826784133911134, 0.018275520324707032, 0.01834441566467285, 0.01838425636291504, 0.0182893123626709, 0.01842835235595703, 0.018463903427124024, 0.018419519424438476, 0.018506528854370118, 0.01847859191894531, 0.01834566307067871, 0.018264480590820312, 0.018270336151123046, 0.018298879623413086, 0.01827155113220215, 0.018303871154785156, 0.01827235221862793, 0.018230815887451172, 0.018276832580566407, 0.018300224304199218, 0.018291391372680665, 0.018304927825927735, 0.018415712356567384, 0.018372608184814454, 0.01859584045410156, 0.018519872665405272, 0.01828022384643555, 0.018383487701416016, 0.018462303161621094, 0.01836684799194336, 0.018377920150756837, 0.018284223556518556, 0.018432416915893556, 0.018306623458862303, 0.01838960075378418, 0.01828656005859375, 0.01829724884033203, 0.018364416122436524, 0.018328863143920897, 0.018350751876831054, 0.01839849662780762, 0.018403711318969725, 0.018552928924560546, 0.018461055755615234, 0.018219104766845705, 0.018132831573486326, 0.0180849609375, 0.018006431579589845, 0.01835468864440918, 0.018354175567626953, 0.01807356834411621, 0.018062559127807618, 0.017865535736083984, 0.017738752365112305, 0.01780393600463867, 0.017754655838012695, 0.017767679214477538, 0.017657920837402342, 0.017705472946166992, 0.017663999557495116, 0.017770719528198243, 0.017856096267700194, 0.01772163200378418, 0.01734275245666504, 0.018203456878662108, 0.017975296020507812, 0.017812160491943358, 0.017941791534423827, 0.01769375991821289, 0.017700735092163085, 0.017712703704833986, 0.01780726432800293, 0.017840511322021486, 0.017826303482055664, 0.017915552139282226, 0.01802979278564453, 0.018282527923583983, 0.018131711959838866, 0.01870470428466797, 0.01816339111328125, 0.01802444839477539, 0.018506784439086914, 0.017996768951416015, 0.018355871200561525, 0.018198751449584962, 0.017879487991333008, 0.017880256652832032, 0.01791811180114746, 0.01764182472229004, 0.017649824142456055, 0.018350112915039064, 0.01896780776977539, 0.01784480094909668, 0.01785241508483887, 0.01789030456542969, 0.01796112060546875, 0.01816569519042969, 0.018039712905883788, 0.017957984924316408, 0.018069408416748048, 0.018043392181396483, 0.01811097526550293, 0.018145503997802733, 0.018328895568847658, 0.01808252716064453, 0.017961984634399415, 0.01785116767883301, 0.01771321678161621, 0.01761622428894043, 0.017723968505859375, 0.01760665512084961, 0.0176312313079834, 0.01757798385620117, 0.017563648223876953, 0.017737247467041015, 0.01774991989135742, 0.0178156795501709, 0.018004447937011718, 0.019564512252807618, 0.01809619140625, 0.01793222427368164, 0.017922048568725587, 0.017918239593505858, 0.017821407318115233, 0.017894399642944335, 0.018076671600341796, 0.0177096004486084, 0.018045055389404298, 0.01794892883300781, 0.018273344039916994, 0.0182260799407959, 0.018186431884765625, 0.018607168197631835, 0.01838345527648926, 0.01839689636230469, 0.018444095611572266, 0.018524768829345704, 0.018239391326904296, 
0.01838703918457031, 0.018227071762084962, 0.018337919235229493, 0.018307071685791015, 0.018126047134399415, 0.01802329635620117, 0.01862646484375, 0.01812895965576172, 0.018005823135375975, 0.018091295242309572, 0.018121536254882813, 0.018019424438476563, 0.01793734359741211, 0.0178787841796875, 0.01794256019592285, 0.018090208053588866, 0.018085599899291992, 0.018186527252197264, 0.01844540786743164, 0.018545568466186522, 0.01833091163635254, 0.018250463485717773, 0.01828668785095215, 0.018319263458251953, 0.018403135299682617, 0.01912339210510254, 0.01840015983581543, 0.018466911315917968, 0.018394912719726562, 0.018311391830444335, 0.018470943450927733, 0.018417631149291992, 0.018466304779052735, 0.018345567703247072, 0.018347999572753907, 0.018384159088134764, 0.018407072067260742, 0.018435232162475584, 0.018370752334594728, 0.018206943511962892, 0.018423967361450196, 0.018274911880493162, 0.01835100746154785, 0.018342784881591797, 0.018419488906860352, 0.02021107292175293, 0.022358943939208984, 0.018523967742919922, 0.018596031188964843, 0.01852604866027832, 0.018341888427734376, 0.018132543563842772, 0.018364864349365233, 0.01845248031616211, 0.018357887268066406, 0.018315168380737306, 0.018465248107910157, 0.01833123207092285, 0.01829478454589844, 0.018306528091430664, 0.01814860725402832, 0.01808729553222656, 0.018009727478027343, 0.017937088012695314, 0.017721343994140625, 0.017696767807006835, 0.017688575744628905, 0.01769267272949219, 0.017889280319213868, 0.017735103607177734, 0.017760032653808593, 0.017750751495361327, 0.017968223571777343, 0.017849279403686524, 0.017786720275878905, 0.01779840087890625, 0.017646528244018553, 0.017729631423950197, 0.017679391860961916, 0.01776278305053711, 0.017718879699707032, 0.017775136947631835, 0.017823808670043944, 0.017750240325927733, 0.017734912872314452, 0.01780790328979492, 0.018024799346923828, 0.018207679748535155, 0.01813395118713379, 0.018161535263061523, 0.018161792755126954, 0.01802239990234375, 0.017879039764404296, 0.017790016174316407, 0.017719968795776368, 0.017815839767456054, 0.01818828773498535, 0.018121919631958007, 0.018111488342285157, 0.01817350387573242, 0.018432256698608398, 0.017989023208618164, 0.017938880920410155, 0.018484512329101564, 0.017934656143188475, 0.01795907211303711, 0.01817622375488281, 0.018341951370239258, 0.01822265625, 0.018207456588745115, 0.018042560577392577, 0.018071456909179686, 0.018077951431274414, 0.017912832260131836, 0.017520832061767577, 0.017829120635986326, 0.017867103576660156, 0.018158367156982422, 0.018278432846069337, 0.01805308723449707, 0.01814089584350586, 0.018397279739379883, 0.018657184600830077, 0.018361440658569338, 0.018383583068847655, 0.018300960540771485, 0.01830694389343262, 0.01834124755859375, 0.01839606475830078, 0.01824176025390625, 0.018058687210083007, 0.01803094482421875, 0.017928192138671875, 0.01811187171936035, 0.017887872695922853, 0.017954336166381837, 0.017799135208129882, 0.01789148712158203, 0.01803081512451172, 0.018039039611816406, 0.018347391128540037, 0.018573535919189452, 0.017994239807128908, 0.0180467529296875, 0.018268159866333008, 0.01821900749206543, 0.018179264068603516, 0.018279327392578124, 0.018263967514038085, 0.018247903823852538, 0.018475999832153322, 0.018413503646850585, 0.018299776077270506, 0.01829395294189453, 0.01827465629577637, 0.018355743408203125, 0.018383808135986328, 0.018433759689331055, 0.018155807495117186, 0.01822719955444336, 0.01813929557800293, 0.018208608627319336, 0.01836851119995117, 0.01819171142578125, 
0.018233535766601562, 0.018231008529663088, 0.01807436752319336, 0.0182043514251709, 0.018065536499023437, 0.01804243278503418, 0.018252544403076172, 0.018210687637329102, 0.018126848220825196, 0.01824723243713379, 0.018319807052612304, 0.01835139274597168, 0.018503423690795898, 0.01827299118041992, 0.01864521598815918, 0.018702367782592773, 0.018557056427001953, 0.018509695053100586, 0.018479103088378905, 0.01848431968688965, 0.01841878318786621, 0.018443552017211914, 0.018407968521118163, 0.01831020736694336, 0.01837766456604004, 0.018507776260375978, 0.01840127944946289, 0.01838051223754883, 0.018469152450561525, 0.01837980842590332, 0.018364864349365233, 0.018401567459106444, 0.018364479064941406, 0.01824563217163086, 0.01837094306945801, 0.01830019187927246, 0.018290719985961913, 0.01835856056213379, 0.018415872573852538, 0.01844220733642578, 0.018757631301879883, 0.018315263748168945, 0.018427711486816406, 0.018386463165283203, 0.0184550724029541, 0.018435583114624024, 0.01851587104797363, 0.018406112670898436, 0.0183985595703125, 0.01884774398803711, 0.01844291114807129, 0.018275583267211914, 0.018119232177734375, 0.018002111434936522, 0.01790332794189453, 0.017885471343994142, 0.017893375396728514, 0.01783782386779785, 0.017832191467285156, 0.017674240112304687, 0.017749343872070313, 0.017754079818725586, 0.017625791549682617, 0.017657855987548828, 0.017573312759399416, 0.017813568115234376, 0.01793280029296875, 0.017942527770996093, 0.01780531120300293, 0.01769584083557129, 0.018309696197509766, 0.017867103576660156, 0.01776639938354492, 0.017615936279296876, 0.017666080474853515, 0.017697471618652344]",tokens/s,54.9907854933015,,, 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,2549.338112,11826.823168,0.0,11431.575552,10953.091072,s,1,22.083626953125,22.083626953125,0.0,22.083626953125,22.083626953125,22.083626953125,22.083626953125,[22.083626953125],,kWh,0.00042438384648334394,4.6805431210738926e-05,0.00015918096067799037,0.0006303702383720733,,MB,1918.636032,12722.307072,0.0,12314.476544,11624.259584,s,10,19.116585571289065,1.9116585571289062,0.006993612625285195,1.9133917846679687,1.9185032348632811,1.919634002685547,1.9205386169433594,"[1.897379150390625, 1.902699462890625, 1.90814013671875, 1.909576904296875, 1.911366943359375, 1.9154166259765626, 1.91584521484375, 1.9171444091796874, 1.918251953125, 1.9207647705078126]",tokens/s,133.91512780633948,kWh,5.5732573974582165e-05,6.145645191757081e-06,3.701130738680281e-05,9.888952655314205e-05,tokens/kWh,2588747.351949639,MB,1922.7648,12724.404224,0.0,12316.573696,11624.262144,s,10,94.21018066406249,9.42101806640625,0.025053647449437427,9.42879052734375,9.44482841796875,9.445260400390625,9.445605986328124,"[9.3704345703125, 9.3852255859375, 9.40620703125, 9.4164833984375, 9.4247998046875, 9.43278125, 9.4416455078125, 9.4421787109375, 9.444732421875, 
9.4456923828125]",tokens/s,6.687175372760116,kWh,0.00027596498239625583,3.0441381840470628e-05,0.00018339111893499736,0.0004897974831717238,tokens/kWh,128624.58906901344,,s,630,94.20603550720212,0.14953338969397165,0.0020462793337056094,0.14950914764404297,0.15111215820312499,0.15163519210815432,0.16009703063964842,"[0.16060806274414063, 0.14670208740234375, 0.14601875305175782, 0.14674493408203124, 0.14774313354492188, 0.1459621124267578, 0.15375244140625, 0.14825053405761718, 0.1471837158203125, 0.14752310180664063, 0.14631753540039064, 0.14619606018066406, 0.14904960632324218, 0.1505774383544922, 0.14821340942382813, 0.14878767395019532, 0.14591180419921876, 0.14713845825195312, 0.14888560485839844, 0.14901248168945314, 0.14934962463378906, 0.14868553161621093, 0.14756253051757812, 0.14836898803710938, 0.1469444122314453, 0.1486684112548828, 0.150724609375, 0.14895423889160156, 0.1495040588378906, 0.14795245361328124, 0.14724908447265625, 0.14920211791992188, 0.14875736999511718, 0.14877212524414063, 0.14951712036132814, 0.1475782470703125, 0.14903053283691406, 0.1475729217529297, 0.1484738311767578, 0.14982585144042967, 0.1487548828125, 0.1505536346435547, 0.14810585021972655, 0.1473161926269531, 0.14818342590332031, 0.14907373046875, 0.15020697021484375, 0.1489409942626953, 0.14891416931152343, 0.1492064666748047, 0.14695001220703124, 0.1493162841796875, 0.14908146667480468, 0.14946981811523438, 0.14889369201660158, 0.1488497314453125, 0.1505494384765625, 0.1476259765625, 0.1487626190185547, 0.1492900848388672, 0.14863043212890625, 0.15067074584960938, 0.14942678833007814, 0.15974668884277343, 0.1473144989013672, 0.145940673828125, 0.1466429443359375, 0.1480494384765625, 0.1463537902832031, 0.15428489685058594, 0.14886912536621094, 0.14752153015136718, 0.14823834228515625, 0.14589132690429688, 0.14756863403320314, 0.15134300231933595, 0.15032534790039062, 0.1501114501953125, 0.14701962280273437, 0.14706375122070312, 0.1485312042236328, 0.14752153015136718, 0.15014451599121093, 0.1499632568359375, 0.14844435119628907, 0.14915632629394532, 0.14640777587890624, 0.14795365905761718, 0.14988902282714844, 0.14902444458007813, 0.15089491271972658, 0.14842271423339845, 0.14776722717285157, 0.1468408966064453, 0.14810499572753907, 0.1500142059326172, 0.15020831298828125, 0.14902735900878905, 0.1482570495605469, 0.1474391326904297, 0.14824089050292968, 0.14802943420410156, 0.15064901733398436, 0.15023085021972657, 0.14874214172363281, 0.15001603698730467, 0.1470023651123047, 0.148179931640625, 0.15065203857421874, 0.1501766357421875, 0.150614013671875, 0.14891375732421874, 0.14783247375488281, 0.14728863525390626, 0.14922566223144532, 0.15096832275390626, 0.14965965270996093, 0.15020442199707032, 0.14783897399902343, 0.14780621337890626, 0.15004019165039062, 0.14899862670898437, 0.15063031005859376, 0.1492618865966797, 0.1489514923095703, 0.14838099670410157, 0.15995619201660155, 0.14741285705566406, 0.1466479949951172, 0.14689010620117188, 0.14781817626953125, 0.14709648132324218, 0.15391941833496095, 0.14886431884765625, 0.14756886291503907, 0.14874467468261718, 0.1461207733154297, 0.14679859924316407, 0.1520025634765625, 0.150476806640625, 0.15000973510742188, 0.1485323486328125, 0.147704833984375, 0.1464930877685547, 0.14855203247070312, 0.14992720031738282, 0.14944650268554688, 0.14984077453613281, 0.14753555297851562, 0.14751571655273438, 0.14724649047851562, 0.14990806579589844, 0.14971200561523437, 0.15064505004882814, 0.14916192626953126, 0.15003712463378907, 0.14657699584960937, 
0.1478697204589844, 0.15068966674804687, 0.1502799072265625, 0.15042448425292967, 0.14940147399902343, 0.14886495971679686, 0.1467342987060547, 0.14909674072265625, 0.15073139953613282, 0.15011581420898437, 0.15108316040039063, 0.1487293701171875, 0.14904998779296874, 0.1473204803466797, 0.14937753295898437, 0.1514352264404297, 0.15026524353027343, 0.1499900207519531, 0.14873330688476563, 0.1485727996826172, 0.14837350463867188, 0.15057817077636718, 0.15104109191894532, 0.14954275512695311, 0.15053219604492188, 0.14835232543945312, 0.14807533264160155, 0.14945794677734375, 0.15097030639648437, 0.15024630737304687, 0.15054847717285155, 0.15010610961914062, 0.16076780700683593, 0.14740089416503907, 0.14671401977539061, 0.14782861328125, 0.14761964416503906, 0.14693263244628907, 0.15456646728515624, 0.14924986267089843, 0.14748463439941406, 0.1476848907470703, 0.14725619506835938, 0.1476259765625, 0.150761474609375, 0.15083059692382814, 0.15019865417480469, 0.14743916320800782, 0.14850502014160155, 0.14685606384277344, 0.1481031036376953, 0.15020448303222655, 0.15069325256347657, 0.14898255920410156, 0.14895872497558593, 0.14736367797851563, 0.14943238830566405, 0.14852549743652343, 0.14992790222167968, 0.15109738159179686, 0.14875238037109376, 0.15001190185546875, 0.14777958679199218, 0.14798439025878907, 0.14986239624023437, 0.150108154296875, 0.15065087890625, 0.15007334899902344, 0.1506570281982422, 0.146808837890625, 0.14803558349609375, 0.15090074157714845, 0.14996255493164062, 0.15091116333007812, 0.14929408264160157, 0.15004978942871094, 0.1475747833251953, 0.14901043701171876, 0.15021868896484375, 0.15079200744628907, 0.15038284301757812, 0.15061750793457032, 0.1501292419433594, 0.14777548217773437, 0.1487298583984375, 0.15075155639648438, 0.15004383850097655, 0.15093618774414064, 0.1493943328857422, 0.15111062622070312, 0.14844927978515626, 0.14958706665039062, 0.1490252227783203, 0.15041990661621094, 0.15023049926757812, 0.16084786987304686, 0.14708531188964843, 0.14729420471191407, 0.1472041015625, 0.14789836120605468, 0.14821334838867187, 0.15471075439453125, 0.14891180419921876, 0.14951423645019532, 0.1473597412109375, 0.1472368621826172, 0.14933811950683593, 0.1500037078857422, 0.15201895141601562, 0.14942562866210937, 0.14781494140625, 0.14778770446777345, 0.1471426544189453, 0.14931974792480468, 0.15128684997558595, 0.15012550354003906, 0.1489644775390625, 0.14848924255371093, 0.15055241394042967, 0.1466798095703125, 0.14954920959472656, 0.1494034881591797, 0.14997914123535155, 0.14966700744628905, 0.14913548278808594, 0.1509383087158203, 0.1468538818359375, 0.14921929931640626, 0.15028755187988282, 0.14996156311035155, 0.1506570281982422, 0.149494873046875, 0.14963600158691406, 0.14705459594726564, 0.1498091583251953, 0.1506078643798828, 0.15050460815429687, 0.15080703735351564, 0.14874774169921876, 0.15000665283203124, 0.14741299438476563, 0.1508922576904297, 0.15007096862792968, 0.15040982055664062, 0.1504500732421875, 0.1491005401611328, 0.15146783447265624, 0.1486801300048828, 0.14930415344238282, 0.14956480407714845, 0.149826171875, 0.15015936279296874, 0.1498927001953125, 0.1508356170654297, 0.14828282165527343, 0.15065965270996093, 0.14974566650390625, 0.1500958709716797, 0.16015455627441405, 0.14722047424316406, 0.1471056365966797, 0.14801705932617187, 0.14729434204101563, 0.14731805419921876, 0.15568162536621094, 0.14911628723144532, 0.149712646484375, 0.14771865844726562, 0.14823785400390624, 0.14828834533691407, 0.14956544494628907, 0.15234197998046875, 
0.14890652465820312, 0.15017575073242187, 0.14777507019042968, 0.14834722900390626, 0.14940780639648438, 0.15077491760253905, 0.15159794616699218, 0.14888531494140625, 0.15043545532226563, 0.14744342041015626, 0.14846243286132813, 0.14993318176269532, 0.15106343078613282, 0.14993408203125, 0.1500010528564453, 0.15119017028808593, 0.1474722900390625, 0.14792819213867187, 0.15009065246582032, 0.15064012145996095, 0.15029231262207032, 0.14970742797851563, 0.15063449096679687, 0.14765875244140625, 0.14886912536621094, 0.1502019500732422, 0.14997123718261718, 0.15062754821777344, 0.14943125915527344, 0.15080441284179688, 0.14766079711914062, 0.14990121459960937, 0.1492706298828125, 0.1506385955810547, 0.1490403594970703, 0.15020492553710937, 0.15011561584472657, 0.1482425994873047, 0.15192256164550783, 0.14913020324707033, 0.15151046752929687, 0.1490946807861328, 0.1501510772705078, 0.14844912719726563, 0.15026435852050782, 0.14982144165039063, 0.150687744140625, 0.15027200317382813, 0.1495224304199219, 0.16215461730957031, 0.14909593200683594, 0.14641552734375, 0.14799728393554687, 0.148748291015625, 0.14641273498535157, 0.15466966247558595, 0.15010838317871095, 0.1508659210205078, 0.14722377014160157, 0.14810723876953125, 0.14874911499023438, 0.14871888732910157, 0.15131024169921875, 0.15025234985351563, 0.14909645080566405, 0.1474453430175781, 0.14836585998535157, 0.15063027954101563, 0.1488504638671875, 0.15085591125488282, 0.14958796691894533, 0.14915330505371094, 0.1477145233154297, 0.14880960083007813, 0.1509470977783203, 0.14991241455078125, 0.15116697692871095, 0.1496303405761719, 0.14852117919921876, 0.14806851196289061, 0.14984422302246095, 0.15040921020507814, 0.1501880340576172, 0.15113731384277343, 0.14910768127441407, 0.1507368927001953, 0.14773881530761718, 0.14952566528320313, 0.15034538269042969, 0.14996719360351562, 0.1510386199951172, 0.14994178771972655, 0.15044451904296874, 0.14890188598632811, 0.1501407012939453, 0.14945263671875, 0.15112594604492188, 0.1498853759765625, 0.14992166137695312, 0.14828556823730468, 0.15008111572265626, 0.15005941772460937, 0.15062956237792968, 0.15191737365722657, 0.14974156188964843, 0.15141888427734376, 0.1486929931640625, 0.14989859008789064, 0.14984463500976564, 0.15096421813964844, 0.1495653076171875, 0.15069970703125, 0.16067295837402343, 0.14796627807617188, 0.14791661071777343, 0.14806002807617188, 0.1470492858886719, 0.1481134033203125, 0.15501519775390624, 0.14963095092773437, 0.14951423645019532, 0.14811651611328125, 0.14753590393066407, 0.14831663513183593, 0.15114828491210938, 0.1519048614501953, 0.14974310302734375, 0.150442138671875, 0.14739039611816407, 0.14907830810546874, 0.14803286743164062, 0.15137033081054688, 0.150249755859375, 0.14963101196289064, 0.15034739685058593, 0.14729049682617187, 0.14910415649414063, 0.1489058837890625, 0.1513846435546875, 0.1504950714111328, 0.15006121826171875, 0.15045018005371094, 0.14760089111328126, 0.14965402221679688, 0.1491005401611328, 0.15047488403320314, 0.14957554626464845, 0.15042684936523437, 0.14875631713867188, 0.14877996826171874, 0.14998733520507812, 0.1498787841796875, 0.1510726776123047, 0.15014874267578124, 0.15019013977050782, 0.14839849853515624, 0.1491394500732422, 0.14930943298339844, 0.15014230346679688, 0.15135174560546874, 0.15020828247070311, 0.15068351745605468, 0.14807212829589844, 0.15022988891601563, 0.14904226684570313, 0.15133139038085938, 0.14960220336914062, 0.15011856079101563, 0.14935888671875, 0.15014912414550782, 0.15050880432128907, 
0.15042636108398438, 0.1516477508544922, 0.14942665100097657, 0.15204296875, 0.15941836547851562, 0.14711958312988282, 0.14898348999023436, 0.14644291687011718, 0.14857395935058593, 0.150712158203125, 0.15332415771484376, 0.1503272705078125, 0.1482845458984375, 0.14819622802734375, 0.14686968994140626, 0.1490458221435547, 0.15335789489746093, 0.1511367950439453, 0.14981111145019532, 0.1480067901611328, 0.148172607421875, 0.14755247497558593, 0.1504085693359375, 0.15230435180664062, 0.15046861267089845, 0.14987826538085938, 0.14792950439453126, 0.14946517944335938, 0.14776316833496095, 0.15155807495117188, 0.15067916870117187, 0.15004083251953124, 0.14994435119628907, 0.14808493041992188, 0.15060992431640624, 0.14790640258789062, 0.1508717498779297, 0.1494342041015625, 0.15003692626953125, 0.14953082275390625, 0.14907379150390626, 0.1506793212890625, 0.14891030883789064, 0.15103581237792968, 0.14979647827148437, 0.15000198364257813, 0.14881794738769533, 0.14942233276367187, 0.14994432067871094, 0.14954086303710937, 0.1516195831298828, 0.14893875122070313, 0.1517690887451172, 0.1495572509765625, 0.15134515380859376, 0.14895225524902345, 0.1500618896484375, 0.1498787841796875, 0.15085494995117188, 0.1491871337890625, 0.15019334411621094, 0.14959100341796874, 0.1497266845703125, 0.15141737365722657, 0.15013186645507812, 0.1515262451171875, 0.1500913848876953, 0.1605960998535156, 0.14707347106933594, 0.14708358764648438, 0.1492139892578125, 0.14697305297851562, 0.14864979553222657, 0.15545330810546876, 0.1491721954345703, 0.14867709350585936, 0.1474534454345703, 0.1493731231689453, 0.14692965698242189, 0.1516198425292969, 0.15215664672851562, 0.14939459228515625, 0.14927548217773437, 0.14761984252929689, 0.14804374694824218, 0.1492532501220703, 0.15129078674316407, 0.15103794860839845, 0.1492500457763672, 0.15040080261230468, 0.14732514953613282, 0.14898367309570312, 0.15053651428222656, 0.1512807312011719, 0.15014749145507814, 0.14991725158691407, 0.14970489501953124, 0.14796624755859375, 0.15015965270996093, 0.15045529174804687, 0.15064166259765624, 0.15012454223632812, 0.1496268768310547, 0.14853529357910156, 0.14912821960449218, 0.15132931518554688, 0.15042604064941406, 0.15148646545410155, 0.1499279327392578, 0.14963711547851563, 0.14816029357910157, 0.14991792297363282, 0.1504965057373047, 0.1503445129394531, 0.1514126739501953, 0.14908210754394532, 0.1507060546875, 0.1488605194091797, 0.1499202880859375, 0.15023513793945312, 0.15087359619140625, 0.14966534423828126, 0.14958688354492186, 0.15028355407714844, 0.14994505310058595, 0.15148252868652343, 0.14961442565917968, 0.15116192626953126, 0.14994309997558594, 0.15027827453613282]",tokens/s,6.68746961495727,,, 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1843.068928,2899.247104,0.0,2503.999488,2349.010944,s,1,10.456041015625,10.456041015625,0.0,10.456041015625,10.456041015625,10.456041015625,10.456041015625,[10.456041015625],,kWh,8.935959978747783e-05,9.849528328410333e-06,3.258947051595906e-05,0.00013179859863184722,,MB,1900.003328,3310.288896,0.0,2902.458368,2642.29888,s,10,2.0673072662353515,0.2067307266235352,0.0008982250054187398,0.20664173126220703,0.20775204620361326,0.20799231033325194,0.20818452163696288,"[0.20692445373535157, 0.20612460327148438, 0.2050182342529297, 0.20689356994628907, 0.2063273620605469, 0.206389892578125, 0.20607283020019532, 0.20762509155273437, 0.20769865417480468, 0.20823257446289062]",tokens/s,1238.3258366144385,kWh,6.304979010637554e-06,6.953266835215809e-07,4.206173577701955e-06,1.1206479271861089e-05,tokens/kWh,22843927.498514477,MB,1928.25344,3310.288896,0.0,2902.458368,2642.30144,s,10,27.798505859375002,2.7798505859375,0.005764667646533437,2.7790865478515627,2.78595625,2.7890772460937496,2.7915740429687497,"[2.7921982421875, 2.77327392578125, 2.784954345703125, 2.77960205078125, 2.774796875, 2.77301171875, 2.778571044921875, 2.779670654296875, 2.777164306640625, 2.7852626953125]",tokens/s,22.663088555442396,kWh,8.138066803019641e-05,8.976314009735155e-06,4.321750325009662e-05,0.0001335744852900282,tokens/kWh,471646.9605943761,,s,630,27.79591621780394,0.04412050193302215,0.000516710041676491,0.0439946231842041,0.04450067062377929,0.04506403846740722,0.04631580402374268,"[0.044828033447265624, 0.04420832061767578, 0.0445997428894043, 0.044377952575683596, 0.0446869125366211, 0.044015262603759764, 0.043929695129394535, 0.04409417724609375, 0.04395596694946289, 0.043976959228515626, 0.04378432083129883, 0.04444979095458984, 0.043996768951416014, 0.043932064056396485, 0.04451327896118164, 0.04449484634399414, 0.04423788833618164, 0.044372928619384765, 0.04410761642456055, 0.04410383987426758, 0.044486560821533204, 0.04422364807128906, 0.044208385467529296, 0.04423126220703125, 0.044324222564697265, 0.044283649444580075, 0.0444958381652832, 0.044012542724609374, 0.04399411010742187, 0.044224510192871096, 0.04400921630859375, 0.04558643341064453, 0.04415536117553711, 0.04415875244140625, 0.04443340682983398, 0.04434124755859375, 0.04403200149536133, 0.044146270751953126, 0.044157344818115236, 0.04425932693481445, 0.043919361114501954, 0.04392550277709961, 0.04409366226196289, 0.04408707046508789, 0.043905025482177736, 0.04395827102661133, 0.04412416076660156, 0.0481629753112793, 0.044257118225097654, 0.04415692901611328, 0.044281856536865234, 0.04526899337768555, 0.04436550521850586, 0.04407843017578125, 0.044430305480957034, 0.0442347526550293, 0.044339199066162106, 0.0441366081237793, 0.044029792785644534, 0.04398636627197266, 0.044228225708007815, 0.043971359252929686, 0.04554908752441406, 0.044955646514892575, 0.04426156616210938, 0.04414236831665039, 0.044270782470703124, 0.043827713012695314, 0.044034400939941404, 0.043954177856445314, 0.04416259384155274, 0.04402214431762695, 0.043888511657714846, 0.04381497573852539, 0.044312736511230466, 0.0439417610168457, 0.04402969741821289, 0.04429647827148438, 0.04401926422119141, 0.043812641143798826, 0.04373977661132813, 0.04399116897583008, 0.043894302368164065, 0.04430281448364258, 0.04376332855224609, 0.04365555191040039, 0.043898017883300784, 0.04376611328125, 0.04399980926513672, 0.04425459289550781, 0.0440038070678711, 0.04427916717529297, 0.04411164855957031, 0.04416198348999024, 
0.04397875213623047, 0.043821407318115235, 0.04395894241333008, 0.04409590530395508, 0.044067424774169923, 0.04398662567138672, 0.044017822265625, 0.04401168060302734, 0.043937793731689455, 0.0443675537109375, 0.04392086410522461, 0.043737953186035156, 0.04469952011108398, 0.043913345336914066, 0.04382428741455078, 0.04384444808959961, 0.04402928161621094, 0.04386207962036133, 0.04424499130249023, 0.04384771347045899, 0.04393548965454101, 0.04371696090698242, 0.04393417739868164, 0.04364492797851562, 0.043796607971191406, 0.04376972961425781, 0.04496822357177734, 0.04392726516723633, 0.04398479843139649, 0.04372079849243164, 0.04403519821166992, 0.043852161407470704, 0.045122974395751955, 0.044552799224853515, 0.04426342391967773, 0.04404838562011719, 0.044181503295898435, 0.043640830993652346, 0.04356262588500977, 0.04372876739501953, 0.04357980728149414, 0.043934814453125, 0.04440524673461914, 0.04399267196655274, 0.0444169921875, 0.04378675079345703, 0.043950111389160156, 0.043919231414794924, 0.04418956756591797, 0.044698272705078125, 0.04395622253417969, 0.04404800033569336, 0.04388288116455078, 0.04561103820800781, 0.043870174407958984, 0.043911167144775394, 0.04375305557250977, 0.04382147216796875, 0.043821056365966796, 0.04370841598510742, 0.043753471374511715, 0.04670198440551758, 0.043960929870605465, 0.043886592864990234, 0.0438579216003418, 0.04393164825439453, 0.044308353424072265, 0.044179073333740236, 0.04399564743041992, 0.04398899078369141, 0.04451737594604492, 0.0440684814453125, 0.04390131378173828, 0.044083198547363284, 0.04394803237915039, 0.046118911743164064, 0.047486942291259764, 0.04407455825805664, 0.044110305786132814, 0.04392745590209961, 0.0438172492980957, 0.044353343963623046, 0.04415283203125, 0.04407910537719727, 0.04450060653686523, 0.044026241302490235, 0.04400566482543945, 0.04385516738891602, 0.04410723114013672, 0.044429824829101565, 0.044052928924560544, 0.04429414367675781, 0.04393686294555664, 0.043982975006103514, 0.043938591003417966, 0.045195167541503906, 0.044284000396728515, 0.04431702423095703, 0.044159809112548826, 0.04410796737670898, 0.04448332977294922, 0.04419164657592774, 0.04393155288696289, 0.04369740676879883, 0.043950592041015625, 0.043657569885253905, 0.04382720184326172, 0.04409513473510742, 0.04378249740600586, 0.04434534454345703, 0.04413849639892578, 0.04377804946899414, 0.04417740631103516, 0.04411404800415039, 0.0444087028503418, 0.044418785095214845, 0.044292320251464845, 0.04381907272338867, 0.043974655151367184, 0.04367577743530274, 0.04383932876586914, 0.04365929412841797, 0.04408438491821289, 0.04374819183349609, 0.043859519958496095, 0.043951553344726564, 0.044153057098388675, 0.04420483016967774, 0.04428799819946289, 0.044227584838867184, 0.04481536102294922, 0.044111488342285156, 0.044181537628173825, 0.04387363052368164, 0.04414361572265625, 0.04386611175537109, 0.043993087768554685, 0.0438249282836914, 0.0439769287109375, 0.04371212768554687, 0.043964126586914065, 0.04396908950805664, 0.044042335510253904, 0.043845630645751955, 0.044069183349609374, 0.04405420684814453, 0.044294113159179686, 0.0438109130859375, 0.0441855354309082, 0.04587724685668945, 0.045590526580810545, 0.044041889190673825, 0.04480422210693359, 0.04386991882324219, 0.04420454406738281, 0.04387014389038086, 0.043775840759277346, 0.04374755096435547, 0.044886688232421874, 0.04413788986206055, 0.044485214233398435, 0.044265472412109375, 0.04807884979248047, 0.04404207992553711, 0.04431683349609375, 0.04420556640625, 0.04394966506958008, 
0.044167743682861325, 0.04405692672729492, 0.04401561737060547, 0.044191871643066406, 0.04393535995483398, 0.04385817718505859, 0.043845630645751955, 0.043786239624023435, 0.04391686248779297, 0.04375494384765625, 0.044163265228271485, 0.04398899078369141, 0.043780929565429685, 0.044015201568603515, 0.044043807983398436, 0.04392617416381836, 0.04426953506469727, 0.04380697631835938, 0.043902366638183594, 0.04367136001586914, 0.04392806243896484, 0.04362473678588867, 0.04381292724609375, 0.04376383972167969, 0.04440969467163086, 0.043817951202392576, 0.044490463256835935, 0.04398688125610352, 0.04420016098022461, 0.0440874252319336, 0.04388249588012695, 0.044043807983398436, 0.04392995071411133, 0.043548191070556644, 0.04397116851806641, 0.04378009414672852, 0.04391945648193359, 0.04362128067016602, 0.043784832000732424, 0.043778430938720705, 0.044025856018066405, 0.043870208740234375, 0.043601665496826175, 0.04389503860473633, 0.04379238510131836, 0.043796478271484376, 0.04399718475341797, 0.04372172927856445, 0.04452233505249024, 0.04402191925048828, 0.043783679962158206, 0.04385411071777344, 0.0440219841003418, 0.043802623748779294, 0.04484713745117187, 0.04450649642944336, 0.04398076629638672, 0.04394784164428711, 0.04374348831176758, 0.043911201477050785, 0.04366739273071289, 0.043875038146972654, 0.04457231903076172, 0.044447967529296875, 0.04414214324951172, 0.04390956878662109, 0.04407814407348633, 0.04392832183837891, 0.04367536163330078, 0.043905120849609375, 0.04362688064575195, 0.043541919708251955, 0.04342572784423828, 0.044137119293212894, 0.043375808715820315, 0.04391814422607422, 0.043730945587158204, 0.04399513626098633, 0.04386108779907227, 0.04388751983642578, 0.043753025054931644, 0.045131744384765624, 0.04390959930419922, 0.04385756683349609, 0.043811168670654294, 0.04385507202148437, 0.043780895233154295, 0.04383334350585937, 0.04377731323242187, 0.04401635360717773, 0.044467201232910154, 0.04394313430786133, 0.043797279357910154, 0.04382371139526367, 0.04382352066040039, 0.043659233093261716, 0.04363267135620117, 0.0437657585144043, 0.04366124725341797, 0.04541622543334961, 0.04379644775390625, 0.04390943908691406, 0.043862014770507815, 0.04404633712768555, 0.044023391723632815, 0.04440031814575195, 0.045103839874267575, 0.04544924926757812, 0.043764991760253905, 0.043936481475830076, 0.043757568359375, 0.04393772888183594, 0.04437139129638672, 0.04405632019042969, 0.04390591812133789, 0.04399871826171875, 0.04407551956176758, 0.04632620620727539, 0.04447641754150391, 0.04417536163330078, 0.04393497467041016, 0.04398771286010742, 0.043905025482177736, 0.04381846237182617, 0.0438625602722168, 0.044021343231201174, 0.04410793685913086, 0.04399539184570313, 0.04390102386474609, 0.04388035202026367, 0.04470579147338867, 0.043804672241210936, 0.044010784149169924, 0.043880542755126956, 0.04412416076660156, 0.04378416061401367, 0.04379084777832031, 0.04374748611450195, 0.04385123062133789, 0.04382742309570312, 0.04382751846313476, 0.043648193359375, 0.043805503845214845, 0.04383129501342774, 0.04380672073364258, 0.04379852676391602, 0.044010688781738284, 0.04390380859375, 0.044611583709716796, 0.04365107345581055, 0.04365311813354492, 0.04365673446655274, 0.04409801483154297, 0.04450124740600586, 0.044077056884765625, 0.04401273727416992, 0.04460355377197266, 0.04715708923339844, 0.04436640167236328, 0.04416307067871094, 0.04399513626098633, 0.04401776123046875, 0.044574623107910154, 0.04399718475341797, 0.04401372909545898, 0.04393967819213867, 0.0440709114074707, 
0.04388249588012695, 0.04396182250976562, 0.04382486343383789, 0.043915233612060546, 0.043878528594970705, 0.043909854888916015, 0.043862014770507815, 0.04404339218139648, 0.044311424255371094, 0.044519233703613284, 0.04400505447387695, 0.04421683120727539, 0.04426137542724609, 0.04469887924194336, 0.044218463897705076, 0.043981121063232424, 0.04406070327758789, 0.04400979232788086, 0.04415488052368164, 0.044070049285888674, 0.04378915023803711, 0.04397590255737305, 0.04425315093994141, 0.044663616180419925, 0.04657171249389649, 0.04411782455444336, 0.04419184112548828, 0.04500060653686523, 0.0439496955871582, 0.04398102569580078, 0.043745662689208986, 0.04382287979125977, 0.04386975860595703, 0.04403350448608399, 0.045386112213134766, 0.04393331146240234, 0.04387052917480469, 0.04376630401611328, 0.04386624145507813, 0.04381081771850586, 0.044298110961914064, 0.04571968078613281, 0.04579244613647461, 0.043977344512939456, 0.04386732864379883, 0.044080127716064454, 0.044439552307128906, 0.04396783828735352, 0.043698753356933594, 0.04418569564819336, 0.043931838989257815, 0.04391916656494141, 0.04377395248413086, 0.04368588638305664, 0.043802623748779294, 0.04370943832397461, 0.04363161468505859, 0.04429619216918945, 0.04385516738891602, 0.04411257553100586, 0.04382534408569336, 0.044000129699707034, 0.04402272033691406, 0.044072769165039063, 0.04426716613769531, 0.043772735595703126, 0.04395798492431641, 0.04357734298706055, 0.04368809509277344, 0.043796607971191406, 0.04376287841796875, 0.04390044784545898, 0.043977920532226565, 0.044154464721679686, 0.04414486312866211, 0.04395609664916992, 0.044929088592529295, 0.044113246917724606, 0.044243167877197266, 0.0439156494140625, 0.044133792877197264, 0.04394659042358398, 0.04427571105957031, 0.04396156692504883, 0.04397545623779297, 0.04391424179077148, 0.044055553436279295, 0.043902976989746094, 0.04397369766235352, 0.043928512573242186, 0.04440883255004883, 0.04392784118652344, 0.04410543823242188, 0.04393318557739258, 0.0438823356628418, 0.04384739303588867, 0.04437702560424805, 0.04402294540405274, 0.04371462249755859, 0.04361065673828125, 0.043587841033935544, 0.0437657585144043, 0.043763233184814454, 0.04374166488647461, 0.04381491088867188, 0.043872257232666016, 0.04376166534423828, 0.04384470367431641, 0.04461846542358398, 0.04397439956665039, 0.04394044876098633, 0.04411580657958984, 0.0460167350769043, 0.044684864044189455, 0.04405295944213867, 0.04448748779296875, 0.04389177703857422, 0.04390694427490234, 0.04375660705566406, 0.043920318603515626, 0.043914848327636716, 0.044120128631591794, 0.04518265533447265, 0.045015392303466795, 0.044046657562255856, 0.04400678253173828, 0.04408793640136719, 0.04443312072753906, 0.04408044815063476, 0.04367459106445312, 0.04395161437988281, 0.043926017761230465, 0.04394803237915039, 0.044071937561035154, 0.04363161468505859, 0.04433852767944336, 0.04401795196533203, 0.04397673416137695, 0.04386003112792969, 0.04535500717163086, 0.04629033660888672, 0.044274272918701174, 0.043963520050048825, 0.0440840950012207, 0.044074878692626954, 0.0440239372253418, 0.04402380752563476, 0.045147327423095705, 0.04591203308105469, 0.043893600463867186, 0.04386611175537109, 0.04390707015991211, 0.04385126495361328, 0.0443922233581543, 0.04425187301635742, 0.0437592658996582, 0.04407126235961914, 0.04397875213623047, 0.04385721588134766, 0.04371510314941406, 0.04394927978515625, 0.04373600006103515, 0.04583769607543945, 0.04435212707519531, 0.0439400634765625, 0.04369935989379883, 0.04390879821777344, 
0.043780448913574216, 0.04401776123046875, 0.04398745727539063, 0.04370636749267578, 0.043703392028808595, 0.04411427307128906, 0.043737567901611325, 0.04393360137939453, 0.04391110229492187, 0.04434560012817383, 0.04413433456420898, 0.04441708755493164, 0.04422860717773437, 0.04404537582397461, 0.04385475158691406, 0.04522601699829101, 0.04541644668579101, 0.04417740631103516, 0.04442521667480469, 0.04389823913574219, 0.043976478576660157, 0.04405539321899414, 0.044056129455566403, 0.04437369537353516, 0.044065536499023436, 0.044050430297851564, 0.044047679901123044, 0.043920063018798826, 0.04377743911743164, 0.04408947372436523, 0.044028385162353516, 0.04439769744873047, 0.04409433746337891, 0.044090625762939456, 0.04479244613647461]",tokens/s,22.665199990654376,,, -4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): +4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -2885,7 +2885,7 @@ ChildProcessError: Traceback (most recent call last): self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) @@ -2897,7 +2897,7 @@ ChildProcessError: Traceback (most recent call last): self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() - File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1709, in __getattr__ + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' @@ -3361,7 +3361,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True -8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): +8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -3377,61 +3377,61 @@ ChildProcessError: Traceback (most recent call last): self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context + File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill + File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context + File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate + File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 
2024, in generate result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample + File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 864, in forward + File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( - File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply + File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( @@ -3440,7 +3440,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True -8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): +8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -3456,53 +3456,53 @@ ChildProcessError: Traceback (most recent call last): self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context + File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill + File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context + File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate + File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample + File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl 
return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 1013, in forward + File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 1015, in forward transformer_outputs = self.transformer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 839, in forward + File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 840, in forward outputs = block( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 467, in forward attn_outputs = self.attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 397, in forward 
attn_weights = _flash_attention_forward( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 199, in _flash_attention_forward + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 252, in _flash_attention_forward attn_output_unpad = flash_attn_varlen_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 1124, in flash_attn_varlen_func return FlashAttnVarlenFunc.apply( - File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply + File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 620, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_varlen_forward( @@ -3723,7 +3723,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True -8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): +8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -3732,7 +3732,7 @@ RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True -8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): +8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -3748,61 +3748,61 @@ ChildProcessError: Traceback (most recent call last): self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context + File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill + File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context + File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate + File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample + File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) - File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 864, in forward + File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) 
File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( - File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply + File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( @@ -4216,7 +4216,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True -8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): +8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -4242,7 +4242,7 @@ ChildProcessError: Traceback (most recent call last): self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) @@ -4254,7 +4254,7 @@ ChildProcessError: Traceback (most recent call last): self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1709, in __getattr__ + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' @@ -4365,7 +4365,7 @@ ChildProcessError: Traceback (most recent call last): ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True -8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): +8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -4381,53 +4381,53 @@ ChildProcessError: Traceback (most recent call last): self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context + File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill + File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context + File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate + File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample + File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in 
_wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1141, in forward + File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 944, in forward + File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1001, in forward layer_outputs = decoder_layer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 677, in forward + File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 734, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 
170, in new_forward output = module._old_forward(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 500, in forward + File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 556, in forward attn_output = _flash_attention_forward( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( - File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply + File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( @@ -4436,7 +4436,7 @@ ChildProcessError: Traceback (most recent call last): RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True -8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): +8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -4452,61 +4452,61 @@ ChildProcessError: Traceback (most recent call last): self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) - File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context + File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) - File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill + File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context + File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate + File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( - File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample + File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 864, in forward + File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) - File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward + File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( - File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply + File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( @@ -4546,7 +4546,7 @@ RuntimeError: Isolated process exited with non-zero code -9 RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 
8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,3952.275456,2152.660992,0.0,1757.413376,1736.37632,s,1,12.4734677734375,12.4734677734375,0.0,12.4734677734375,12.4734677734375,12.4734677734375,12.4734677734375,[12.4734677734375],,kWh,0.00015622058873749058,1.722508014731104e-05,5.7967268595998656e-05,0.00023141293748080028,,MB,3963.71968,2387.542016,0.0,1971.32288,1913.084928,s,10,0.5615455055236817,0.056154550552368176,0.00035814502564154307,0.05609428787231445,0.05638534507751465,0.056742896842956546,0.05702893825531006,"[0.05630588912963867, 0.05590630340576172, 0.05578108978271484, 0.05614937591552734, 0.05582883071899414, 0.05597932815551758, 0.05603919982910156, 0.05710044860839844, 0.05618767929077149, 0.05626736068725586]",tokens/s,4558.846923033629,kWh,1.655127770386044e-06,1.8252889147709846e-07,9.916957086101636e-07,2.829352370473306e-06,tokens/kWh,90480069.81087874,MB,3967.901696,2408.513536,0.0,1992.2944,1972.635136,s,10,35.18063403320313,3.518063403320313,0.027803330613491103,3.5024200439453126,3.5556041748046874,3.558329553222656,3.560509855957031,"[3.502048828125, 3.4907255859375, 3.502791259765625, 3.5011953125, 3.495353759765625, 3.4844482421875, 3.561054931640625, 3.542568359375, 3.54544921875, 3.55499853515625]",tokens/s,17.907579476976235,kWh,0.00010228365814502975,1.1281983364420254e-05,4.5486508140589655e-05,0.00015905214965003961,tokens/kWh,396096.50129607227,,s,630,35.17682631301875,0.05583623224288699,0.0008420710525841882,0.05569855880737305,0.0566146297454834,0.056956986618041994,0.05953084297180176,"[0.055654399871826174, 0.05565011215209961, 0.05557267379760742, 0.056282398223876956, 0.055591392517089847, 0.055548160552978516, 0.055798912048339845, 0.05553241729736328, 0.05513216018676758, 0.05503171157836914, 0.05506057739257812, 0.055207454681396484, 0.05526166534423828, 0.05516287994384766, 0.054975841522216795, 0.05464115142822266, 0.054693599700927735, 0.054419776916503904, 0.05431046295166016, 0.054561088562011716, 0.055048126220703125, 0.055236927032470705, 0.05522393417358398, 0.055328353881835934, 0.05484576034545898, 0.05524435043334961, 0.05848566436767578, 0.055215679168701175, 0.05506067276000977, 0.05489075088500977, 0.0554284782409668, 0.05545843124389648, 0.057030654907226565, 0.055228416442871096, 0.055363582611083983, 0.05525094223022461, 0.05545779037475586, 0.05539945602416992, 0.05553571319580078, 0.05573900985717774, 0.055650367736816406, 0.05585308837890625, 0.05621161651611328, 0.05565030288696289, 0.05560092926025391, 0.05609699249267578, 0.05633433532714844, 0.05592473602294922, 0.05589334487915039, 0.058401344299316406, 0.05714332962036133, 0.05591865539550781, 0.056172542572021485, 0.05565999984741211, 0.05510915374755859, 0.05526416015625, 0.05515683364868164, 0.05579980850219726, 0.05551923370361328, 0.05542707061767578, 0.05578137588500977, 0.05706047821044922, 0.05552012634277344, 0.0550299186706543, 
0.055273246765136716, 0.055562976837158204, 0.05536972808837891, 0.05515468978881836, 0.05492531204223633, 0.05502361679077149, 0.055011329650878904, 0.05526505661010742, 0.05464495849609375, 0.054771713256835934, 0.05499699020385742, 0.055193599700927735, 0.0554598388671875, 0.054935550689697264, 0.05450137710571289, 0.05462812805175781, 0.05498886489868164, 0.05524051284790039, 0.054796638488769533, 0.055949310302734374, 0.05554972839355469, 0.055446849822998044, 0.055591678619384764, 0.05555372619628906, 0.055556575775146486, 0.05566463851928711, 0.05578137588500977, 0.056010753631591796, 0.055844863891601565, 0.05633612823486328, 0.05584716796875, 0.05570502471923828, 0.055648574829101564, 0.05572224044799805, 0.05550284957885742, 0.05582169723510742, 0.05562227249145508, 0.0558941764831543, 0.05570339202880859, 0.055443073272705076, 0.05531795120239258, 0.05522528076171875, 0.0551649284362793, 0.05580595016479492, 0.055294975280761716, 0.05569766235351563, 0.05559577560424805, 0.055537086486816406, 0.05562835311889648, 0.0554247055053711, 0.055146816253662106, 0.055112926483154294, 0.05497296142578125, 0.05524300765991211, 0.05510508728027344, 0.05525942230224609, 0.05536374282836914, 0.05568102264404297, 0.05581414413452149, 0.056649280548095704, 0.05538860702514648, 0.05503776168823242, 0.05483520126342773, 0.05523606491088867, 0.05542147064208985, 0.0556151351928711, 0.0554516487121582, 0.05524310302734375, 0.055175167083740234, 0.05527523040771484, 0.05538435363769531, 0.055283489227294924, 0.055586719512939455, 0.05589228820800781, 0.055992321014404295, 0.05584281539916992, 0.055728126525878906, 0.055756160736083984, 0.055960193634033206, 0.05571993637084961, 0.05558272171020508, 0.05624550247192383, 0.055749374389648436, 0.05978726577758789, 0.05578956985473633, 0.055723262786865235, 0.05573471832275391, 0.05554003143310547, 0.05570060729980469, 0.0555098876953125, 0.05566873550415039, 0.055670783996582034, 0.05583977508544922, 0.05530108642578125, 0.054919166564941405, 0.05566828918457031, 0.055363296508789066, 0.05521062469482422, 0.05530019378662109, 0.055488510131835936, 0.05562515258789062, 0.05562015914916992, 0.05565004730224609, 0.055562496185302734, 0.05591244888305664, 0.055282974243164064, 0.055050975799560545, 0.05513216018676758, 0.055347198486328124, 0.05578540802001953, 0.055511104583740235, 0.055488510131835936, 0.05522227096557617, 0.055259136199951174, 0.05548646545410156, 0.05533695983886719, 0.055537662506103515, 0.05535302352905273, 0.05565267181396484, 0.05560688018798828, 0.05537833786010742, 0.055433216094970705, 0.05559091186523438, 0.05550425720214844, 0.05582912063598633, 0.05564575958251953, 0.055817150115966795, 0.056124576568603514, 0.0557371826171875, 0.0558359375, 0.05565513610839844, 0.055482368469238284, 0.055671966552734375, 0.05554671859741211, 0.05561139297485351, 0.05575215911865234, 0.05559555053710938, 0.055209983825683595, 0.0546058235168457, 0.05660988616943359, 0.05564697647094727, 0.05546368026733398, 0.05511788940429688, 0.05544585418701172, 0.05551923370361328, 0.05672256088256836, 0.054823230743408204, 0.05454086303710937, 0.05419993591308594, 0.0546429443359375, 0.05451993560791016, 0.05444607925415039, 0.05466719818115234, 0.05517059326171875, 0.055547870635986325, 0.055382591247558594, 0.05514854431152344, 0.05500457763671875, 0.054633056640625, 0.05506256103515625, 0.05514031982421875, 0.055021568298339846, 0.05802540969848633, 0.05592121505737305, 0.05555814361572266, 0.055218177795410155, 0.0551580810546875, 
0.055351486206054686, 0.05535353469848633, 0.055269695281982424, 0.055605247497558595, 0.055672191619873045, 0.0558045425415039, 0.05566668701171875, 0.06031961441040039, 0.05594486236572266, 0.05576051330566406, 0.05626732635498047, 0.05572774505615234, 0.05567724609375, 0.05598553466796875, 0.05605830383300781, 0.05585974502563477, 0.055713024139404294, 0.055865280151367186, 0.055966209411621094, 0.05528998565673828, 0.05505862426757813, 0.05507727813720703, 0.05559305572509766, 0.05525955200195312, 0.05501948928833008, 0.05494172668457031, 0.0547696647644043, 0.05500310516357422, 0.054986785888671875, 0.05541388702392578, 0.05550374221801758, 0.05572198486328125, 0.055549663543701173, 0.05556028747558594, 0.05523247909545898, 0.0553856315612793, 0.054890815734863284, 0.0549728012084961, 0.055131935119628904, 0.05537964630126953, 0.05544588851928711, 0.05516099166870117, 0.05495993423461914, 0.05547417449951172, 0.05453023910522461, 0.05490435028076172, 0.055171390533447266, 0.055459999084472654, 0.05560115051269531, 0.05585903930664062, 0.05580160140991211, 0.055765407562255856, 0.05579743957519531, 0.05611142349243164, 0.055810047149658204, 0.056003841400146484, 0.05598857498168945, 0.05564985656738281, 0.055919456481933597, 0.05584076690673828, 0.055760894775390625, 0.055461952209472656, 0.0555478401184082, 0.05596281433105469, 0.05565932846069336, 0.05580287933349609, 0.05636198425292969, 0.056233985900878906, 0.0567086067199707, 0.056784767150878906, 0.05561315155029297, 0.05510211181640625, 0.05498700714111328, 0.05487411117553711, 0.055252735137939456, 0.0553515510559082, 0.05582438278198242, 0.05519974517822265, 0.05521775817871094, 0.055230880737304686, 0.05535251235961914, 0.05514652633666992, 0.05527222442626953, 0.055564289093017576, 0.05542092895507812, 0.055395809173583985, 0.05495616149902344, 0.05483152008056641, 0.05517926406860352, 0.05493100738525391, 0.05462879943847656, 0.05525094223022461, 0.055389633178710936, 0.05568272018432617, 0.05544847869873047, 0.055152641296386716, 0.05524684906005859, 0.055218177795410155, 0.055582687377929686, 0.05565193557739258, 0.055699905395507815, 0.055889598846435545, 0.05598988723754883, 0.05561619186401367, 0.05553526306152344, 0.0552916145324707, 0.055597217559814456, 0.05550307083129883, 0.05573247909545898, 0.055721023559570315, 0.05551712036132812, 0.055806976318359375, 0.05541875076293945, 0.055390335083007815, 0.054848670959472656, 0.054664031982421875, 0.055538753509521484, 0.05523756790161133, 0.05525299072265625, 0.05538816070556641, 0.055654399871826174, 0.05549260711669922, 0.0556193618774414, 0.055666656494140626, 0.05556572723388672, 0.0548934097290039, 0.055041118621826174, 0.05534735870361328, 0.05539507293701172, 0.055578624725341794, 0.054839073181152345, 0.05462579345703125, 0.054526142120361325, 0.05466502380371094, 0.05469257736206055, 0.05530214309692383, 0.054601470947265626, 0.05506073760986328, 0.055368961334228514, 0.05543756866455078, 0.055233024597167966, 0.05520793533325195, 0.05541222381591797, 0.05502518463134766, 0.05524563217163086, 0.05539372634887695, 0.05554585647583008, 0.055519775390625, 0.05577916717529297, 0.055676959991455076, 0.06326457595825195, 0.05648384094238281, 0.05652099227905273, 0.05657193756103516, 0.05656371307373047, 0.058060798645019535, 0.056311969757080076, 0.05638742446899414, 0.05612086486816406, 0.056740318298339844, 0.05644083023071289, 0.05571152114868164, 0.05599580764770508, 0.056046302795410154, 0.056465503692626956, 0.055678497314453124, 0.055935455322265626, 
0.05565577697753906, 0.05504886245727539, 0.05526323318481445, 0.055070720672607425, 0.05557020950317383, 0.05952534484863281, 0.05653504180908203, 0.05569331359863281, 0.05575600051879883, 0.05598230361938476, 0.05613216018676758, 0.05588351821899414, 0.05572224044799805, 0.0554700813293457, 0.0585992317199707, 0.056664257049560546, 0.05787231826782226, 0.05953308868408203, 0.056237857818603514, 0.05581177520751953, 0.056441471099853514, 0.05595686340332031, 0.05608736038208008, 0.05710438537597656, 0.056025089263916014, 0.05652070236206055, 0.05622988891601562, 0.05689263916015625, 0.05649692916870117, 0.05664972686767578, 0.05688729476928711, 0.05652889633178711, 0.0568072624206543, 0.05639913558959961, 0.05657632064819336, 0.056231582641601566, 0.05645609664916992, 0.056594432830810545, 0.05646281433105469, 0.056944385528564456, 0.05633718490600586, 0.05719039916992188, 0.05654463958740234, 0.05532057571411133, 0.05605558395385742, 0.05568739318847656, 0.05574348831176758, 0.055989246368408206, 0.05622579193115235, 0.0562606086730957, 0.05608963012695312, 0.05582697677612305, 0.055504638671875, 0.05551718521118164, 0.05585171127319336, 0.05588787078857422, 0.05592652893066406, 0.05617279815673828, 0.056196895599365235, 0.05654553604125977, 0.056624191284179684, 0.05623849487304688, 0.05662908935546875, 0.056088993072509766, 0.056393184661865235, 0.056129344940185545, 0.05646640014648437, 0.056281089782714844, 0.05633782577514648, 0.05597859191894531, 0.0563056640625, 0.056266624450683596, 0.05584703826904297, 0.05563347244262695, 0.05576544189453125, 0.056293216705322266, 0.056536479949951174, 0.0601075210571289, 0.05669862365722656, 0.05687526321411133, 0.05653839874267578, 0.056113887786865234, 0.056455169677734375, 0.0563240966796875, 0.05663129425048828, 0.056607967376708986, 0.056405982971191405, 0.05627686309814453, 0.05638371276855469, 0.05618345642089844, 0.056102977752685544, 0.056000511169433595, 0.0560120964050293, 0.056260639190673825, 0.05622371292114258, 0.05626950454711914, 0.05634857559204102, 0.05636713409423828, 0.05598419189453125, 0.055670783996582034, 0.05597798538208008, 0.056186206817626955, 0.05643945693969726, 0.05610291290283203, 0.05611276626586914, 0.05596601486206055, 0.05539916610717773, 0.05550694274902344, 0.055531169891357424, 0.05584835052490234, 0.05597894287109375, 0.05617657470703125, 0.05574662399291992, 0.0558837776184082, 0.05601052856445313, 0.05577084732055664, 0.055610942840576175, 0.05561644744873047, 0.05590835189819336, 0.056313377380371094, 0.055944961547851564, 0.0563350715637207, 0.05594521713256836, 0.056635391235351565, 0.05622556686401367, 0.05633865737915039, 0.05610905456542969, 0.056231937408447265, 0.05628860855102539, 0.05713782501220703, 0.056281089782714844, 0.05615820693969727, 0.0567275505065918, 0.05616025543212891, 0.05632819366455078, 0.05635887908935547, 0.05663052749633789, 0.05598432159423828, 0.05550723266601563, 0.055624000549316405, 0.05577920150756836, 0.055779457092285156, 0.05546803283691406, 0.055905536651611326, 0.0553724479675293, 0.05547222518920898, 0.05698559951782227, 0.056403297424316406, 0.056021663665771486, 0.060393470764160156, 0.05673984146118164, 0.056174591064453126, 0.0559554557800293, 0.058210304260253906, 0.055976097106933596, 0.05624111938476563, 0.05648239898681641, 0.05628137588500977, 0.05581414413452149, 0.06095177459716797, 0.055773983001708986, 0.05541414260864258, 0.05569395065307617, 0.05661286544799805, 0.05783875274658203, 0.05603359985351562, 0.056207199096679684, 0.05665206527709961, 
0.05618115234375, 0.055341056823730465, 0.05628492736816406, 0.056915969848632814, 0.05708211135864258, 0.05704207992553711, 0.0567152328491211, 0.05661356735229492, 0.05643487930297852, 0.05676163101196289, 0.05722390365600586, 0.057220958709716795, 0.058370208740234374, 0.05700751876831055, 0.05674454498291016, 0.05688115310668945, 0.057032703399658206, 0.056049663543701174, 0.05589126586914062, 0.05621830368041992, 0.05565030288696289, 0.05596160125732422, 0.05610886383056641, 0.05871756744384766, 0.05638156890869141, 0.05605446243286133, 0.05569945526123047, 0.05610662460327148, 0.05554201507568359, 0.05578329467773437, 0.05622761535644531, 0.05625904083251953, 0.05641625595092774, 0.056229183197021484, 0.0566995849609375, 0.05643468856811523, 0.05585504150390625, 0.05567855834960937, 0.056036865234375, 0.05683820724487305, 0.05672643280029297, 0.05629884719848633, 0.056368927001953124, 0.05569331359863281, 0.05580019378662109, 0.055844417572021486, 0.056323009490966795, 0.056370975494384766, 0.05627312088012695, 0.056013824462890625, 0.05616089630126953, 0.0569672966003418, 0.05652096176147461, 0.05667136001586914, 0.05639388656616211, 0.05651529693603516, 0.056346401214599606, 0.05636732864379883, 0.05663948822021484, 0.056378654479980465, 0.05622246551513672, 0.056051681518554684, 0.05654723358154297, 0.05649593734741211]",tokens/s,17.909517885268677,,,True -8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): +8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -4572,17 +4572,17 @@ ChildProcessError: Traceback (most recent call last): self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = 
cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: DeciLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,846.467072,565.116928,0.0,169.869312,150.669312,s,1,8.1319296875,8.1319296875,0.0,8.1319296875,8.1319296875,8.1319296875,8.1319296875,[8.1319296875],,kWh,2.2475241483327103e-05,2.4720796588808212e-06,8.348062234014852e-06,3.3295383376222775e-05,,MB,1151.246336,625.934336,0.0,209.7152,193.680384,s,11,0.1753775987625122,0.01594341806931929,0.0001830208789097381,0.01591209602355957,0.016218271255493164,0.016245919227600096,0.016268037605285644,"[0.016158304214477538, 0.01592643165588379, 0.015769344329833984, 0.015905728340148927, 0.015714240074157716, 0.015849472045898438, 0.01591209602355957, 0.015912799835205077, 0.01627356719970703, 0.016218271255493164, 0.015737343788146972]",tokens/s,16056.782735480887,kWh,4.7600317202966865e-07,5.2494784996865563e-08,1.9738234025121626e-07,7.258802972777505e-07,tokens/kWh,352675228.9049172,MB,1162.346496,628.031488,0.0,211.812352,193.682944,s,11,10.631011718749999,0.9664556107954545,0.004707955255336871,0.9647833862304688,0.9734203491210938,0.9755199279785156,0.9771995910644531,"[0.9612041015625, 0.963017578125, 0.9734203491210938, 0.9776195068359375, 0.9675733642578125, 0.9644152221679687, 0.9658329467773438, 0.9645062255859375, 0.962111328125, 0.9647833862304688, 0.9665277099609375]",tokens/s,65.18664623215027,kWh,2.7935242829482866e-05,3.08080831378856e-06,1.0652626339749065e-05,4.16686774830205e-05,tokens/kWh,1511927.0350174126,,s,693,10.624617526054392,0.015331338421434894,0.0002571975798281347,0.01528831958770752,0.015467743682861327,0.01559829750061035,0.016082920684814452,"[0.015103391647338867, 0.015395520210266113, 0.015293439865112305, 0.015240351676940917, 0.015201984405517578, 
0.015198271751403808, 0.015108096122741698, 0.015095168113708497, 0.015278719902038574, 0.015388031959533692, 0.015181856155395508, 0.015217151641845703, 0.015165504455566406, 0.015199295997619629, 0.015212544441223145, 0.015178879737854005, 0.015255392074584961, 0.015431039810180664, 0.015291040420532226, 0.015262911796569825, 0.015321887969970704, 0.015157407760620117, 0.015150912284851074, 0.015194144248962402, 0.015167776107788087, 0.015210335731506347, 0.015195232391357422, 0.015143487930297852, 0.015628512382507325, 0.015151167869567871, 0.015534015655517578, 0.015282336235046387, 0.01513372802734375, 0.015101759910583497, 0.01520844841003418, 0.015121567726135253, 0.015132512092590332, 0.015108096122741698, 0.015118335723876953, 0.015128576278686523, 0.0151364164352417, 0.015270591735839844, 0.015163040161132813, 0.015118335723876953, 0.015151103973388673, 0.01518182373046875, 0.015159104347229004, 0.015199647903442384, 0.015176095962524415, 0.015278464317321777, 0.015263680458068848, 0.015758975982666016, 0.015300160408020019, 0.01531388759613037, 0.015300512313842773, 0.01562828826904297, 0.015278079986572265, 0.015233247756958009, 0.015353568077087402, 0.015361791610717773, 0.015433279991149903, 0.015382911682128906, 0.015280799865722656, 0.015210495948791505, 0.015159296035766602, 0.015250720024108887, 0.015338208198547363, 0.015213600158691406, 0.015199423789978028, 0.015144800186157227, 0.015251423835754395, 0.015199392318725587, 0.015217472076416015, 0.015230463981628419, 0.015234848022460938, 0.01510268783569336, 0.015616000175476074, 0.015124480247497558, 0.015689536094665526, 0.015327327728271485, 0.015276127815246583, 0.015232768058776856, 0.015247615814208984, 0.015183775901794434, 0.015189248085021973, 0.01519702434539795, 0.015193696022033692, 0.015118751525878906, 0.015143936157226562, 0.015176704406738281, 0.015152128219604492, 0.015148032188415527, 0.015408672332763673, 0.01533795166015625, 0.015219840049743652, 0.01514367961883545, 0.015323264122009277, 0.015318143844604492, 0.01525011157989502, 0.015300671577453613, 0.015187328338623046, 0.01519699192047119, 0.015296319961547851, 0.015244447708129882, 0.015260640144348145, 0.015245247840881347, 0.015386688232421874, 0.015284223556518555, 0.015187552452087402, 0.015194527626037598, 0.015306495666503906, 0.015263104438781738, 0.015407999992370606, 0.015248703956604003, 0.015440768241882324, 0.015291423797607421, 0.015257439613342284, 0.01530470371246338, 0.01524227237701416, 0.015463520050048828, 0.01570915222167969, 0.015294303894042969, 0.015346943855285644, 0.015250304222106933, 0.01532096004486084, 0.015720191955566405, 0.015302687644958497, 0.015285696029663086, 0.015344287872314453, 0.015351807594299317, 0.01532271957397461, 0.015348383903503418, 0.015326784133911133, 0.015396832466125488, 0.015339743614196778, 0.015394816398620606, 0.015266880035400391, 0.01608185577392578, 0.01529856014251709, 0.015191424369812011, 0.015264384269714356, 0.015263744354248047, 0.015278207778930663, 0.015333215713500976, 0.015315103530883788, 0.015312095642089844, 0.015286944389343261, 0.01548902416229248, 0.015381888389587402, 0.015315584182739258, 0.01534943962097168, 0.015308352470397949, 0.015481151580810548, 0.015294912338256837, 0.01534329605102539, 0.015288736343383789, 0.015376288414001465, 0.015435520172119141, 0.01525715160369873, 0.015421664237976074, 0.015407487869262696, 0.01533683204650879, 0.015309727668762207, 0.015243071556091309, 0.015234975814819337, 0.015222880363464355, 0.01521664047241211, 
0.0167869758605957, 0.018315744400024415, 0.015548383712768555, 0.015445759773254394, 0.015501855850219726, 0.015332223892211914, 0.015397215843200683, 0.015251999855041503, 0.015357952117919921, 0.015422752380371094, 0.015357855796813966, 0.015769696235656737, 0.015634336471557618, 0.015490943908691407, 0.01540940761566162, 0.015311103820800781, 0.015823007583618164, 0.015400927543640136, 0.015328991889953613, 0.01531760025024414, 0.015333375930786132, 0.015277440071105958, 0.015350720405578614, 0.015233983993530273, 0.015158304214477539, 0.015282719612121583, 0.015655360221862792, 0.015238719940185547, 0.015257087707519532, 0.015783072471618653, 0.015260895729064942, 0.015220352172851562, 0.015248319625854493, 0.015294303894042969, 0.015829312324523927, 0.01815100860595703, 0.015614208221435546, 0.015381952285766602, 0.015341600418090821, 0.015431743621826172, 0.01586390399932861, 0.015642175674438475, 0.015995360374450682, 0.015628640174865722, 0.01547878360748291, 0.015581184387207032, 0.01526748752593994, 0.015352160453796386, 0.015394559860229492, 0.015476448059082031, 0.015362591743469238, 0.015264927864074707, 0.015239744186401368, 0.01563881587982178, 0.015378432273864746, 0.015230976104736327, 0.015322527885437011, 0.015204992294311524, 0.015257375717163086, 0.015228832244873047, 0.015299072265625, 0.015182847976684571, 0.015180704116821288, 0.015283743858337402, 0.015227231979370117, 0.015263903617858886, 0.015275744438171387, 0.015446271896362305, 0.0152674560546875, 0.015251711845397949, 0.015187968254089355, 0.015362048149108886, 0.015348832130432128, 0.015588319778442383, 0.015335359573364259, 0.015544320106506348, 0.015507007598876953, 0.015415712356567383, 0.015417216300964355, 0.015796607971191406, 0.01859971237182617, 0.01674569511413574, 0.015301440238952637, 0.015427552223205566, 0.015243264198303222, 0.015580127716064453, 0.015332159996032714, 0.015313247680664062, 0.015255328178405761, 0.015260831832885742, 0.01531766414642334, 0.015280320167541503, 0.015333375930786132, 0.015237088203430177, 0.015310879707336425, 0.015180800437927246, 0.015407999992370606, 0.015273311614990234, 0.015305536270141602, 0.01526576042175293, 0.015343615531921387, 0.015300288200378417, 0.015355648040771485, 0.015340096473693847, 0.01528217601776123, 0.015339584350585937, 0.015261119842529296, 0.015319711685180664, 0.015398752212524415, 0.015265791893005372, 0.015316927909851074, 0.01545248031616211, 0.015277695655822755, 0.015554847717285157, 0.01544332790374756, 0.015434240341186524, 0.015554176330566407, 0.015606111526489257, 0.015411231994628906, 0.015370207786560059, 0.015286304473876954, 0.015549759864807128, 0.015317279815673828, 0.01567372798919678, 0.015378432273864746, 0.015247360229492187, 0.015224255561828614, 0.015235039710998535, 0.015317279815673828, 0.015323455810546876, 0.015288288116455079, 0.015364031791687011, 0.015288415908813477, 0.015292415618896485, 0.015290047645568848, 0.015298879623413086, 0.015195391654968261, 0.01533414363861084, 0.015253503799438477, 0.015435775756835938, 0.015261695861816407, 0.015497311592102051, 0.015327136039733886, 0.01568563175201416, 0.015333279609680176, 0.015284128189086914, 0.015366239547729492, 0.015335488319396972, 0.015226431846618652, 0.015237407684326172, 0.015400896072387696, 0.01519983959197998, 0.015142911911010743, 0.015172351837158203, 0.01520400047302246, 0.015171584129333495, 0.015278079986572265, 0.015257087707519532, 0.01529203224182129, 0.015263872146606445, 0.015264512062072754, 0.015204352378845215, 
0.015197952270507813, 0.015397024154663087, 0.015283807754516602, 0.015241632461547852, 0.015394751548767089, 0.015304415702819825, 0.015274656295776368, 0.01538428783416748, 0.015363743782043457, 0.015274432182312012, 0.0152740478515625, 0.015232928276062012, 0.015234880447387696, 0.015214783668518066, 0.015331487655639648, 0.015239007949829101, 0.015257599830627442, 0.015312383651733399, 0.0152542724609375, 0.015280991554260255, 0.015383456230163574, 0.015386015892028808, 0.015243871688842774, 0.015363295555114747, 0.015309503555297851, 0.015196255683898926, 0.01520035171508789, 0.015279871940612793, 0.015298720359802245, 0.015263936042785645, 0.015259200096130372, 0.01528384017944336, 0.01547283172607422, 0.015372672080993653, 0.015384639739990234, 0.015370240211486816, 0.01529036808013916, 0.01535110378265381, 0.015372960090637208, 0.015358016014099122, 0.015288064002990723, 0.01537660789489746, 0.015369376182556153, 0.015344703674316406, 0.015338335990905762, 0.015348320007324218, 0.015348064422607421, 0.01558035182952881, 0.01533420753479004, 0.015210847854614257, 0.015219679832458496, 0.015196864128112792, 0.015253631591796876, 0.015239040374755859, 0.015245311737060547, 0.015398624420166016, 0.015249631881713866, 0.015317055702209472, 0.015284416198730468, 0.015363871574401855, 0.015245344161987305, 0.015302656173706054, 0.015245599746704102, 0.015228639602661133, 0.01539891242980957, 0.015339103698730469, 0.015293919563293457, 0.015275168418884277, 0.015256671905517579, 0.015303359985351563, 0.015242752075195312, 0.015241567611694335, 0.015327391624450683, 0.015255552291870117, 0.015230976104736327, 0.015238143920898438, 0.015287551879882812, 0.015302528381347656, 0.015255423545837402, 0.015284223556518555, 0.015224639892578124, 0.015240960121154785, 0.015183712005615234, 0.015262304306030273, 0.015212544441223145, 0.01530031967163086, 0.015255840301513671, 0.015366144180297851, 0.015277600288391114, 0.015345600128173828, 0.015254048347473145, 0.015187968254089355, 0.015209535598754882, 0.015186783790588378, 0.015177439689636231, 0.015260191917419433, 0.015172608375549316, 0.015323904037475586, 0.015345696449279786, 0.015351200103759765, 0.015315615653991699, 0.015380319595336913, 0.015831199645996094, 0.01673040008544922, 0.015359711647033692, 0.01537382411956787, 0.015303232192993165, 0.015253472328186034, 0.015409119606018066, 0.015229215621948242, 0.01593929576873779, 0.015460351943969726, 0.015253536224365235, 0.01525545597076416, 0.015236960411071778, 0.015244383811950684, 0.015219807624816895, 0.015212320327758789, 0.015150272369384765, 0.015291232109069824, 0.015265248298645019, 0.01527660846710205, 0.01526912021636963, 0.015225567817687989, 0.015237119674682617, 0.015195679664611817, 0.015468799591064453, 0.015266016006469726, 0.015211872100830078, 0.015092384338378907, 0.01521459197998047, 0.015332991600036622, 0.01522316837310791, 0.015312352180480958, 0.015294560432434082, 0.015253503799438477, 0.015223360061645508, 0.015834848403930665, 0.015322943687438965, 0.015374688148498536, 0.015232159614562988, 0.01528115177154541, 0.015291584014892579, 0.015348352432250977, 0.015335455894470214, 0.015388671875, 0.015341535568237304, 0.01536841583251953, 0.015373663902282715, 0.01532156753540039, 0.015309951782226562, 0.015288288116455079, 0.015291296005249023, 0.015382528305053711, 0.015347647666931152, 0.015312992095947265, 0.015317055702209472, 0.015347583770751953, 0.015275424003601074, 0.015288031578063965, 0.015299712181091309, 0.015322912216186523, 0.015327263832092285, 
0.01526576042175293, 0.015294464111328124, 0.015304736137390137, 0.01526576042175293, 0.015275615692138672, 0.015288736343383789, 0.015249407768249512, 0.015243264198303222, 0.015290271759033204, 0.015405152320861816, 0.015390239715576171, 0.01548755168914795, 0.01535654354095459, 0.01533743953704834, 0.015315072059631347, 0.015275584220886231, 0.015212896347045899, 0.01519820785522461, 0.015307007789611816, 0.015187711715698242, 0.015283488273620605, 0.015210240364074708, 0.015203295707702636, 0.015216768264770508, 0.015205280303955078, 0.015277055740356446, 0.015249664306640626, 0.015298208236694337, 0.015188032150268555, 0.015257599830627442, 0.01520639991760254, 0.015299967765808106, 0.015254143714904786, 0.015212800025939941, 0.01526144027709961, 0.015341567993164062, 0.015297696113586426, 0.015276896476745605, 0.015259743690490723, 0.015194016456604004, 0.015218688011169433, 0.01516147232055664, 0.015233920097351075, 0.015219136238098145, 0.015370816230773925, 0.01543887996673584, 0.0154552001953125, 0.015388671875, 0.015357952117919921, 0.015309151649475097, 0.01552451229095459, 0.015200703620910645, 0.01516220760345459, 0.015133472442626953, 0.015229887962341308, 0.015287360191345216, 0.015164383888244629, 0.015305888175964356, 0.015323871612548829, 0.015285408020019531, 0.015377568244934083, 0.015217632293701171, 0.015239999771118165, 0.015161343574523926, 0.015142687797546387, 0.015206111907958984, 0.015211008071899413, 0.01523311996459961, 0.015154208183288574, 0.015189984321594237, 0.015245408058166503, 0.015223615646362305, 0.015219903945922852, 0.01552188777923584, 0.015241855621337891, 0.015517696380615235, 0.015749183654785157, 0.01549715232849121, 0.0153121919631958, 0.015284000396728515, 0.015211135864257812, 0.015243328094482422, 0.0151976318359375, 0.015285087585449218, 0.015290111541748046, 0.015265983581542969, 0.015322463989257813, 0.01535638427734375, 0.015359647750854492, 0.015432224273681641, 0.015427359580993652, 0.015235296249389648, 0.015407103538513184, 0.015484928131103515, 0.01528831958770752, 0.015246560096740722, 0.015372096061706543, 0.015245951652526856, 0.015329664230346679, 0.015170623779296875, 0.015221664428710938, 0.015239232063293457, 0.015269824028015137, 0.015265791893005372, 0.015238719940185547, 0.015241663932800293, 0.015267935752868653, 0.015184991836547852, 0.01529535961151123, 0.015338848114013672, 0.015387231826782227, 0.015308032035827637, 0.015554911613464355, 0.015314720153808593, 0.01528876781463623, 0.015241408348083496, 0.015170559883117676, 0.015227904319763183, 0.015229984283447266, 0.015166432380676269, 0.015221887588500977, 0.015211487770080566, 0.015343520164489746, 0.01528217601776123, 0.015474687576293946, 0.01535763168334961, 0.015224479675292968, 0.015212800025939941, 0.015182239532470703, 0.015497440338134766, 0.015400383949279785, 0.015298944473266601, 0.015215840339660645, 0.015211168289184571, 0.01523862361907959, 0.015265983581542969, 0.0152478084564209, 0.015339103698730469, 0.015108096122741698, 0.015294719696044921, 0.015593088150024415, 0.015335552215576172, 0.015292415618896485, 0.015281344413757324, 0.015204480171203613, 0.015273951530456542, 0.01530339241027832, 0.015259231567382812, 0.015214143753051758, 0.015196288108825683, 0.015237536430358887, 0.015300928115844726, 0.015300831794738769, 0.015206175804138184, 0.015239168167114257, 0.015196160316467285, 0.015265439987182617, 0.015298303604125976, 0.015200863838195801, 0.015226335525512695, 0.01529695987701416, 0.015374719619750976, 0.015394304275512695, 
0.01536451244354248, 0.015304448127746582, 0.01609516716003418, 0.01540828800201416, 0.01537337589263916, 0.015363903999328613, 0.015361472129821778, 0.015288991928100586, 0.015210399627685547, 0.01530070400238037, 0.01517363166809082, 0.01527830410003662, 0.015282976150512695, 0.015293439865112305, 0.01520639991760254, 0.01520639991760254, 0.01528217601776123, 0.015295488357543945, 0.015311871528625488, 0.015335424423217774, 0.015429632186889648, 0.015471839904785156, 0.015339391708374023, 0.015389599800109864, 0.015531007766723632, 0.015393759727478027, 0.015267295837402344, 0.015368351936340332, 0.01537712001800537, 0.015372063636779785, 0.015378335952758788, 0.01529036808013916, 0.01548697566986084, 0.015454208374023438, 0.015383808135986329, 0.015480640411376954, 0.015514143943786621, 0.015411616325378418]",tokens/s,65.2258773834051,,,True -8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): +8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -4608,16 +4608,16 @@ ChildProcessError: Traceback (most recent call last): self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa + File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True -8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): +8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -4643,11 +4643,11 @@ ChildProcessError: Traceback (most recent call last): self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTJForCausalLM does not support 
an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` @@ -4724,7 +4724,7 @@ ImportError: This modeling file requires the following packages that were not fo ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,14028.345344,7835.942912,0.0,7440.695296,7427.899392,s,1,31.845142578125,31.845142578125,0.0,31.845142578125,31.845142578125,31.845142578125,31.845142578125,[31.845142578125],,kWh,0.0007130096238708574,7.86430868783795e-05,0.0002699088270380068,0.0010615615377872437,,MB,1197.400064,8416.854016,0.0,8000.63488,7875.673088,s,10,0.9571953277587891,0.09571953277587891,0.0002780228861242757,0.09565481567382812,0.09612270812988281,0.09613992919921875,0.0961537060546875,"[0.09561958312988281, 0.0955525131225586, 0.09569004821777344, 0.0959486083984375, 0.09580300903320313, 0.09615715026855469, 0.09520767974853515, 0.09550665283203125, 0.09611888122558594, 0.09559120178222656]",tokens/s,2674.4802505399543,kWh,2.9104864475250007e-06,3.2097065990848037e-07,1.9323502807522923e-06,5.163807388185773e-06,tokens/kWh,49575822.79031167,MB,1215.070208,8437.825536,0.0,8021.6064,7976.51712,s,10,46.77368115234375,4.6773681152343745,0.006220704046563695,4.6768447265625,4.6834939453125,4.68686806640625,4.68956736328125,"[4.6902421875, 4.68072509765625, 4.673880859375, 4.669634765625, 4.6815068359375, 4.67028515625, 4.682744140625, 4.6764287109375, 4.67097265625, 4.6772607421875]",tokens/s,13.469113066984505,kWh,0.00013614018957080752,1.5016638126850834e-05,8.281410640524727e-05,0.00023397093410290563,tokens/kWh,269264.2154101552,,s,630,46.77019203186032,0.07423840005057197,0.0007574064742200948,0.07412756729125977,0.07484287872314453,0.0753373062133789,0.07690783737182619,"[0.07384646606445312, 0.07495516967773437, 0.07387904357910156, 0.07421788787841797, 0.0753267822265625, 0.07419551849365234, 0.07395136260986328, 0.07432806396484375, 0.07442617797851563, 0.07405133056640625, 0.07421737670898437, 0.07420572662353515, 0.07417855834960937, 0.07492198181152344, 0.07469612884521484, 0.07372576141357422, 0.07386934661865234, 0.07431375885009765, 0.07379161834716796, 0.07350943756103516, 0.07369932556152343, 0.07396966552734376, 0.07435369873046875, 0.07426137542724609, 0.07407369232177734, 0.07368141174316406, 0.07334706878662109, 0.07310131072998047, 0.07318732452392578, 0.07305548858642578, 0.0737118377685547, 0.074144287109375, 0.0743180160522461, 0.07401862335205078, 
0.07528575897216797, 0.07415676879882813, 0.07420317077636719, 0.07468614196777344, 0.07795334625244141, 0.07461478424072265, 0.0746618881225586, 0.07605862426757813, 0.07501414489746094, 0.07449350738525391, 0.0748415985107422, 0.07443052673339844, 0.07450224304199218, 0.07426742553710937, 0.07977708435058593, 0.07535481262207032, 0.07832371520996094, 0.0746393585205078, 0.07452035522460937, 0.07430950164794922, 0.07418675231933594, 0.07441238403320312, 0.07403110504150391, 0.07403520202636718, 0.07472128295898438, 0.07396521759033203, 0.07382806396484375, 0.07371456146240235, 0.07344614410400391, 0.07390223693847656, 0.07424188995361328, 0.07408422088623047, 0.07394111633300782, 0.07417414093017578, 0.07400441741943359, 0.07421609497070313, 0.0743584976196289, 0.07400563049316407, 0.07415283203125, 0.07396886444091796, 0.07385167694091797, 0.07394416046142578, 0.07433424377441407, 0.0744283218383789, 0.07406230163574219, 0.07406246185302734, 0.0743196792602539, 0.07425759887695313, 0.07445916748046875, 0.07585878753662109, 0.0743724136352539, 0.07499369812011719, 0.07436908721923828, 0.0746615982055664, 0.07434278106689453, 0.07388211059570313, 0.07459225463867188, 0.07414988708496094, 0.07400653076171874, 0.07392870330810547, 0.07387059020996094, 0.07559273529052735, 0.07500895690917969, 0.07461315155029297, 0.07420105743408204, 0.07396803283691407, 0.07390643310546875, 0.0739530258178711, 0.07517183685302735, 0.07425027465820312, 0.07425987243652343, 0.07375929260253906, 0.0736885757446289, 0.07380633544921875, 0.07389794921875, 0.07411100769042969, 0.07467417907714843, 0.07414963531494141, 0.07412556457519531, 0.07416831970214843, 0.07440589141845703, 0.07447756958007813, 0.07455948638916016, 0.07436083221435547, 0.0744120330810547, 0.07423590087890625, 0.07411436462402343, 0.07422045135498047, 0.074297119140625, 0.07485440063476563, 0.07536831665039062, 0.073984130859375, 0.07405165100097656, 0.07450902557373047, 0.07384512329101563, 0.07444483184814453, 0.07397465515136718, 0.07417948913574218, 0.07388159942626953, 0.07373836517333984, 0.07352678680419922, 0.074316162109375, 0.07442022705078125, 0.07405875396728516, 0.07348287963867188, 0.07333039855957031, 0.07363967895507813, 0.07349750518798828, 0.07415558624267578, 0.07420342254638672, 0.07430569458007813, 0.07400380706787109, 0.07385897827148437, 0.07492793273925781, 0.07421228790283203, 0.07466393280029297, 0.0741346206665039, 0.07414390563964844, 0.07412995147705079, 0.07436003112792969, 0.07422806549072265, 0.07419971466064453, 0.07436310577392578, 0.07409347534179687, 0.07435148620605468, 0.07432806396484375, 0.07449190521240234, 0.07434464263916016, 0.07446304321289063, 0.07488864135742188, 0.07543251037597656, 0.07389756774902344, 0.07483433532714844, 0.07320543670654298, 0.07362332916259766, 0.07421129608154296, 0.07558771514892579, 0.07434003448486329, 0.07363452911376953, 0.07515103912353516, 0.07368739318847656, 0.07405487823486329, 0.07471389007568359, 0.07409574127197266, 0.07391337585449219, 0.07386640167236327, 0.07356690979003906, 0.07466950225830078, 0.07503622436523437, 0.07395382690429687, 0.07409712219238282, 0.07379558563232422, 0.07401273345947265, 0.07412745666503906, 0.07425791931152344, 0.0749494400024414, 0.07404051208496094, 0.0738885726928711, 0.07410688018798828, 0.07389798736572266, 0.07429555511474609, 0.07409168243408203, 0.07408290863037109, 0.07374835205078124, 0.07351513671875, 0.07363510131835938, 0.07330889892578125, 0.07366233825683594, 0.074268798828125, 0.07411302185058594, 
0.07342243194580078, 0.07373833465576173, 0.07357266998291015, 0.07340985870361329, 0.073614013671875, 0.07383577728271484, 0.07367935943603515, 0.07333283233642578, 0.07369868469238282, 0.07413820648193359, 0.07350905609130859, 0.0737525405883789, 0.07409257507324218, 0.0739202880859375, 0.07380604553222657, 0.07379132843017579, 0.07394486236572266, 0.07385740661621094, 0.07388694763183594, 0.07384758758544922, 0.0740474853515625, 0.07422557067871094, 0.0744090576171875, 0.07675801849365234, 0.07511357116699219, 0.07444351959228515, 0.07445065307617188, 0.07449967956542969, 0.07409954833984375, 0.07430729675292969, 0.07420336151123047, 0.07407939147949219, 0.07427369689941406, 0.07417826843261718, 0.0734169921875, 0.0734900131225586, 0.074336669921875, 0.07435279846191406, 0.07457164764404296, 0.07446080017089844, 0.07429145812988282, 0.07424348449707031, 0.07730643463134766, 0.07422156524658204, 0.07430976104736328, 0.07433216094970703, 0.07430944061279297, 0.07408150482177735, 0.07401055908203125, 0.07384457397460938, 0.0740167007446289, 0.07429676818847657, 0.07397618865966797, 0.07412374114990235, 0.07407202911376953, 0.07416015625, 0.07488841247558593, 0.07444764709472657, 0.07451551818847656, 0.07437612915039063, 0.07496908569335937, 0.0741949462890625, 0.0748636474609375, 0.074531005859375, 0.07481375885009765, 0.07451491546630859, 0.07425433349609376, 0.07408025360107422, 0.07356396484375, 0.07369337463378907, 0.07386726379394531, 0.07411673736572266, 0.07372838592529297, 0.0737423324584961, 0.07369910430908203, 0.07351660919189452, 0.0735647964477539, 0.07421900939941406, 0.07406003570556641, 0.07398838043212891, 0.07361459350585937, 0.07353148651123047, 0.07358432006835937, 0.07407100677490235, 0.07415596771240235, 0.07391596984863281, 0.07394972991943359, 0.07410070037841797, 0.07381775665283204, 0.07405299377441406, 0.0738987808227539, 0.07460275268554688, 0.07430342102050781, 0.07408640289306641, 0.07546470642089843, 0.07443456268310547, 0.07435465240478516, 0.07429244995117187, 0.07429203033447265, 0.07606476593017578, 0.07413123321533203, 0.07696201324462891, 0.07534591674804687, 0.0743034896850586, 0.07473942565917968, 0.07410307312011719, 0.07410406494140626, 0.0736447982788086, 0.07371981048583984, 0.07393075561523438, 0.07888243103027344, 0.07417804718017579, 0.07379404449462891, 0.07380931091308594, 0.07399689483642578, 0.07405391693115235, 0.07408406066894531, 0.07451443481445312, 0.07414112091064454, 0.07381459045410156, 0.07344918060302734, 0.07369929504394532, 0.073695068359375, 0.07385545349121093, 0.07426457977294922, 0.074176513671875, 0.07410665893554688, 0.07359715270996094, 0.0739368667602539, 0.07409693145751953, 0.07436815643310547, 0.07452528381347656, 0.07426850891113282, 0.07427907562255859, 0.07439961242675781, 0.07439279937744141, 0.07439862060546874, 0.07402301025390624, 0.07428521728515625, 0.07443430328369141, 0.07459996795654297, 0.07455382537841797, 0.07403110504150391, 0.07419513702392579, 0.07434591674804687, 0.07404102325439453, 0.07366726684570313, 0.0737996826171875, 0.07383782196044922, 0.073755615234375, 0.07411033630371094, 0.074, 0.0736848602294922, 0.07330477142333984, 0.0737314224243164, 0.07595507049560547, 0.07426457977294922, 0.07496498870849609, 0.07403110504150391, 0.07401634979248047, 0.07375276947021485, 0.07349612426757812, 0.07384646606445312, 0.07395996856689453, 0.07407830047607422, 0.07382675170898438, 0.07396342468261718, 0.07412041473388672, 0.07451718139648437, 0.07432316589355469, 0.07457369232177734, 
0.07511459350585938, 0.07445929718017578, 0.07433462524414063, 0.07575843048095703, 0.07457917022705078, 0.07433296203613281, 0.07442022705078125, 0.07351471710205078, 0.07323062133789063, 0.0732357406616211, 0.07336540985107422, 0.07448834991455078, 0.0761178207397461, 0.07524508666992187, 0.07415609741210938, 0.07446617889404297, 0.07411650848388672, 0.0741013412475586, 0.07453052520751953, 0.07383200073242188, 0.07435132598876953, 0.07457746887207031, 0.07401251220703126, 0.0736960678100586, 0.07347586822509766, 0.07463321685791016, 0.07463868713378906, 0.07433225250244141, 0.074271484375, 0.0746780776977539, 0.07488716888427735, 0.07391027069091796, 0.07427474975585938, 0.07426054382324218, 0.07403929901123046, 0.0742762222290039, 0.07445974731445312, 0.07532546997070312, 0.07500908660888672, 0.0747734375, 0.07458611297607422, 0.07420873260498047, 0.07446720123291016, 0.07431644439697266, 0.07389321899414063, 0.0740337905883789, 0.0739840316772461, 0.07379894256591797, 0.07381439971923828, 0.0738977279663086, 0.07387811279296876, 0.07387503814697266, 0.07442086029052734, 0.07394461059570312, 0.07379987335205078, 0.07445526123046875, 0.07421731567382812, 0.07400431823730469, 0.07378755187988281, 0.07414169311523437, 0.07447142028808594, 0.07544217681884766, 0.07570022583007813, 0.07466172790527344, 0.07435689544677734, 0.07486585235595702, 0.07456531524658203, 0.07419731140136719, 0.07665821075439454, 0.07592352294921875, 0.07556531524658203, 0.07458956909179687, 0.07424079895019531, 0.074176513671875, 0.07400857543945312, 0.07423766326904296, 0.07421389007568359, 0.07439542388916015, 0.07499571228027344, 0.07430758666992188, 0.07428822326660156, 0.07371663665771484, 0.07371161651611328, 0.07355596923828125, 0.07347200012207031, 0.07429238128662109, 0.07396438598632812, 0.0740126724243164, 0.07369522857666015, 0.07407001495361328, 0.0739546890258789, 0.07369987487792969, 0.07391165161132812, 0.07412767791748047, 0.07377555084228515, 0.07399209594726562, 0.07392041778564454, 0.07370873260498047, 0.07392092895507812, 0.0738842544555664, 0.07443267059326172, 0.07368482971191406, 0.07390729522705078, 0.07389654541015625, 0.07428860473632813, 0.07466070556640625, 0.07428688049316406, 0.07412582397460937, 0.07415369415283203, 0.07412931060791016, 0.074174560546875, 0.0742681884765625, 0.07448828887939453, 0.07443389129638672, 0.07413622283935548, 0.074176513671875, 0.07647859191894531, 0.07513279724121094, 0.0744981460571289, 0.07412726593017578, 0.07385088348388671, 0.07409037017822266, 0.07391203308105469, 0.07350678253173829, 0.0737959976196289, 0.0740884780883789, 0.07426585388183594, 0.07394509124755859, 0.07341862487792969, 0.07359305572509765, 0.07458233642578126, 0.07430115509033203, 0.07398783874511719, 0.0736982421875, 0.07437721252441407, 0.07381196594238282, 0.07391836547851563, 0.07401904296875, 0.0742053451538086, 0.0741250228881836, 0.0742113265991211, 0.07399971008300782, 0.07404611206054687, 0.07420905303955078, 0.07471250915527344, 0.07436908721923828, 0.07426841735839844, 0.07427283477783203, 0.0740832290649414, 0.07385292816162109, 0.07452982330322265, 0.07423792266845704, 0.07411974334716796, 0.07387506866455078, 0.07401529693603516, 0.07383475494384766, 0.07385702514648437, 0.07382179260253906, 0.07393321228027344, 0.0741396484375, 0.07350032043457032, 0.07400240325927734, 0.0736396484375, 0.07394480133056641, 0.07377804565429688, 0.07406390380859375, 0.07570845031738281, 0.07424409484863281, 0.074176513671875, 0.07378329467773438, 0.0741949462890625, 
0.07412918090820313, 0.0738736343383789, 0.07375772857666016, 0.0740703353881836, 0.0738617935180664, 0.07413276672363281, 0.07383446502685546, 0.07423667144775391, 0.0739835205078125, 0.07361151885986328, 0.07415580749511719, 0.07434223937988281, 0.07411158752441406, 0.074720703125, 0.07431206512451172, 0.07460269165039063, 0.07497843170166016, 0.07424224090576172, 0.07511248016357422, 0.07416054534912109, 0.07434381103515625, 0.07677519989013672, 0.0758121566772461, 0.07377581024169921, 0.07370925140380859, 0.07489977264404298, 0.07361516571044922, 0.0735888671875, 0.07380588531494141, 0.07448726654052734, 0.07392105865478515, 0.0838287353515625, 0.07399628448486328, 0.07375433349609375, 0.0735931167602539, 0.07356825256347656, 0.07335836791992187, 0.07353568267822265, 0.07370121765136718, 0.0738971176147461, 0.07378614044189453, 0.07381718444824219, 0.07355545806884765, 0.07642562866210938, 0.0739797134399414, 0.07396371459960938, 0.07396514892578125, 0.07389430236816406, 0.07426399993896485, 0.07393849945068359, 0.07424649810791016, 0.07404541015625, 0.07461942291259765, 0.07416438293457031, 0.07417855834960937, 0.0739205093383789, 0.07439949035644532, 0.07393888092041015, 0.07385043334960938, 0.07393551635742188, 0.07389603424072265, 0.07369075012207031, 0.07351641845703125, 0.07333740997314453, 0.07362928009033202, 0.0740626220703125, 0.0742011489868164, 0.07380786895751953, 0.07372211456298829, 0.07353078460693359, 0.0735215072631836, 0.07340013122558593, 0.07386918640136719, 0.07446150207519531, 0.07427382659912109, 0.07365897369384766, 0.07379154968261718, 0.07398563385009765, 0.07568793487548828, 0.07433296203613281, 0.07414777374267578, 0.07462092590332031, 0.07434361267089844, 0.0748572769165039]",tokens/s,13.470117881295792,,,True -8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): +8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -4733,7 +4733,7 @@ ImportError: This modeling file requires the following packages that were not fo raise RuntimeError(f""Isolated process exited with 
non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True -8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): +8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -4759,11 +4759,11 @@ ChildProcessError: Traceback (most recent call last): self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` @@ -4779,7 +4779,7 @@ RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1885.319168,1057.947648,0.0,662.700032,622.833664,s,1,9.176845703125,9.176845703125,0.0,9.176845703125,9.176845703125,9.176845703125,9.176845703125,[9.176845703125],,kWh,5.983104099579274e-05,6.592579648255514e-06,2.243474016999647e-05,8.885836081404472e-05,,MB,1932.9024,1181.679616,0.0,765.46048,735.57504,s,10,0.5673114814758301,0.05673114814758301,0.0006719552344458886,0.056575199127197266,0.05756223526000977,0.057724654388427735,0.05785458969116211,"[0.05568246459960938, 0.05743948745727539, 0.05628220748901367, 0.0578870735168457, 0.056626686096191405, 0.0559666862487793, 0.056914398193359375, 0.056462623596191405, 0.05652371215820313, 0.05752614212036133]",tokens/s,4512.512232857158,kWh,1.6440276262641052e-06,1.8130722293314727e-07,7.553095930112511e-07,2.5806444422085035e-06,tokens/kWh,99200027.64151283,MB,1937.088512,1194.262528,0.0,778.043392,751.3984,s,10,35.474713867187496,3.54747138671875,0.007734135746039402,3.5455054931640624,3.55517255859375,3.5604150146484375,3.5646089794921876,"[3.541433837890625, 3.544659912109375, 3.554007568359375, 3.53791064453125, 3.565657470703125, 3.54635107421875, 3.541282470703125, 3.5521962890625, 3.54877880859375, 3.542435791015625]",tokens/s,17.75912844170172,kWh,0.00010247511134373794,1.130311947542708e-05,4.0495640074390033e-05,0.00015427387089355504,tokens/kWh,408364.68051980337,,s,630,35.46810222244262,0.05629857495625813,0.0007064865954460904,0.0562174072265625,0.05679550514221192,0.05721389503479004,0.05880051448822022,"[0.05526563262939453, 0.05572528076171875, 0.05762736129760742, 0.056774879455566404, 0.056012641906738284, 0.05588940811157227, 0.05678540802001953, 0.056677440643310546, 0.05599878311157227, 0.056285152435302736, 0.05600102233886719, 0.05904579162597656, 0.05614412689208984, 0.057524223327636716, 0.057877727508544925, 0.0562861442565918, 0.05600649642944336, 0.055981632232666015, 0.05604191970825195, 0.05592268753051758, 0.05595340728759766, 0.05602479934692383, 0.055433662414550784, 0.055285633087158205, 0.05556835174560547, 0.056395774841308595, 0.05536547088623047, 0.05553308868408203, 0.055761505126953125, 0.05616249465942383, 0.05604131317138672, 0.05590156936645508, 0.05593363189697265, 0.05576512145996094, 0.055772319793701175, 0.055417022705078124, 0.05543993759155273, 0.05603263854980469, 0.05643686294555664, 0.05625692749023437, 0.055820289611816405, 0.055640064239501956, 0.05687686538696289, 0.05662464141845703, 0.055684799194335936, 0.0555445442199707, 0.055936897277832034, 0.056560222625732424, 0.05610886383056641, 0.056264129638671875, 0.055823104858398434, 
0.05585036849975586, 0.05596614456176758, 0.05609062576293945, 0.05607628631591797, 0.05652684783935547, 0.05629536056518555, 0.05670691299438477, 0.05638780975341797, 0.05668044662475586, 0.05739708709716797, 0.056750431060791015, 0.05677587127685547, 0.05590249633789063, 0.05670064163208008, 0.05655094528198242, 0.0565968017578125, 0.05629465484619141, 0.0572545280456543, 0.056054080963134766, 0.056063201904296874, 0.05610134506225586, 0.056037216186523436, 0.05608607864379883, 0.060066688537597654, 0.05804032135009766, 0.056575679779052736, 0.056258846282958984, 0.055932960510253905, 0.056559616088867185, 0.05605971145629883, 0.0559617919921875, 0.05638265609741211, 0.056288063049316404, 0.05572544097900391, 0.055569023132324216, 0.055569984436035155, 0.05528639984130859, 0.05545964813232422, 0.05568921661376953, 0.05630752182006836, 0.05616966247558594, 0.05626544189453125, 0.05573007965087891, 0.056004543304443356, 0.05686489486694336, 0.05598998260498047, 0.05576870346069336, 0.05581727981567383, 0.05618678283691406, 0.05610691070556641, 0.05641020965576172, 0.05595091247558594, 0.05601443099975586, 0.056099681854248046, 0.056094688415527345, 0.056428577423095705, 0.05631292724609375, 0.05634121704101563, 0.05629990386962891, 0.056749855041503906, 0.056369182586669925, 0.056411903381347654, 0.057159934997558594, 0.056155807495117185, 0.05706159973144531, 0.056419681549072266, 0.05627328109741211, 0.056244640350341796, 0.05615734481811523, 0.055769214630126955, 0.05592160034179688, 0.05559888076782227, 0.05556803131103515, 0.05586956787109375, 0.056053600311279296, 0.05600531387329102, 0.05574431991577149, 0.055769088745117185, 0.05882166290283203, 0.056545761108398436, 0.056116767883300785, 0.05613881683349609, 0.05610432052612305, 0.05621334457397461, 0.056148670196533204, 0.05633638381958008, 0.05575299072265625, 0.055780223846435543, 0.055712608337402346, 0.05591839981079102, 0.05614400100708008, 0.056256065368652346, 0.05762911987304688, 0.057290912628173825, 0.05648169708251953, 0.05640758514404297, 0.05631206512451172, 0.056793312072753906, 0.05623756790161133, 0.05622809600830078, 0.056219806671142576, 0.05657379150390625, 0.05657596969604492, 0.05746051025390625, 0.05660675048828125, 0.05679766464233398, 0.05663948822021484, 0.056446975708007815, 0.05671446228027344, 0.056539936065673826, 0.05677081680297852, 0.05677155303955078, 0.056580894470214846, 0.056825599670410155, 0.057229217529296876, 0.05709865570068359, 0.05769209671020508, 0.05600604629516601, 0.05650697708129883, 0.05630361557006836, 0.0571146240234375, 0.05543945693969726, 0.05572099304199219, 0.0559788818359375, 0.05596521759033203, 0.055900768280029295, 0.056446369171142576, 0.05781142425537109, 0.05669907379150391, 0.05601670455932617, 0.0556844482421875, 0.05567299270629883, 0.055502689361572266, 0.056119968414306644, 0.05571964645385742, 0.05602102279663086, 0.05617625427246094, 0.05611119842529297, 0.0563485107421875, 0.055932960510253905, 0.055539840698242186, 0.05535878372192383, 0.05522211074829102, 0.0558616943359375, 0.056254814147949216, 0.0560722541809082, 0.05567273712158203, 0.055814239501953126, 0.05584076690673828, 0.055357440948486325, 0.05652070236206055, 0.05643264007568359, 0.05650636672973633, 0.05874873733520508, 0.056953025817871095, 0.05687705612182617, 0.056209247589111326, 0.05631129455566406, 0.05652492904663086, 0.05611775970458984, 0.05657193756103516, 0.05633865737915039, 0.05629884719848633, 0.05625062561035156, 0.056323486328125, 0.05598287963867187, 0.056257919311523436, 
0.055758560180664066, 0.05594345474243164, 0.05583536148071289, 0.05569910430908203, 0.056172576904296875, 0.055862560272216796, 0.056470462799072266, 0.056027137756347656, 0.05603033447265625, 0.056021888732910155, 0.05616857528686523, 0.05575811386108399, 0.05590095901489258, 0.056156158447265625, 0.05593683242797851, 0.055777278900146485, 0.05621500778198242, 0.05631817626953125, 0.05606361770629883, 0.05656643295288086, 0.05600259017944336, 0.05608857727050781, 0.056051712036132816, 0.05591603088378906, 0.05632275390625, 0.05560710525512695, 0.05568022537231445, 0.05567567825317383, 0.05541616058349609, 0.055892478942871096, 0.05602278518676758, 0.05629510498046875, 0.05843628692626953, 0.056631488800048826, 0.05613116836547852, 0.05656326293945312, 0.05694550323486328, 0.05689984130859375, 0.0567825927734375, 0.05654732894897461, 0.05663875198364258, 0.0566952018737793, 0.056377281188964845, 0.056420703887939454, 0.05641836929321289, 0.056534591674804686, 0.056103199005126954, 0.05613087844848633, 0.05640047836303711, 0.05628863906860351, 0.05611536026000977, 0.056594688415527346, 0.05580563354492187, 0.055970558166503905, 0.05670297622680664, 0.05624745559692383, 0.06635810852050782, 0.05611110305786133, 0.05620956802368164, 0.05604502487182617, 0.0560316162109375, 0.056061183929443356, 0.057243457794189455, 0.05599942398071289, 0.056051712036132816, 0.05630265426635742, 0.055796417236328125, 0.055549312591552734, 0.05530713653564453, 0.05586329650878906, 0.05531238555908203, 0.05679017639160156, 0.05628195190429688, 0.056387008666992186, 0.05617523193359375, 0.05634799957275391, 0.05650908660888672, 0.056772415161132815, 0.05601705551147461, 0.05602659225463867, 0.056449535369873044, 0.056545280456542966, 0.05661491012573242, 0.056635265350341794, 0.05638361740112305, 0.056506591796875, 0.056403743743896485, 0.06072662353515625, 0.05719516754150391, 0.05693993759155273, 0.056662113189697265, 0.0569126091003418, 0.0564890251159668, 0.056703071594238284, 0.056652000427246094, 0.05623235321044922, 0.05613302230834961, 0.055217952728271485, 0.05583647918701172, 0.05610902404785156, 0.05637129592895508, 0.056043487548828125, 0.05579814529418945, 0.05647564697265625, 0.05532380676269531, 0.055479137420654294, 0.05581414413452149, 0.055979839324951174, 0.05617273712158203, 0.05623311996459961, 0.055892223358154296, 0.05568368148803711, 0.05520793533325195, 0.05528163146972656, 0.05536550521850586, 0.05828623962402344, 0.05612518310546875, 0.056282848358154294, 0.056351264953613284, 0.05620121765136719, 0.055737632751464844, 0.05607916641235351, 0.05599555206298828, 0.05741027069091797, 0.05667638397216797, 0.05648998260498047, 0.05659830474853516, 0.05627734375, 0.05633001708984375, 0.056069889068603516, 0.056420574188232424, 0.056242496490478515, 0.05664303970336914, 0.05635712051391602, 0.05870191955566406, 0.05695398330688477, 0.056539104461669924, 0.05663199996948242, 0.05641836929321289, 0.056659423828125, 0.05634076690673828, 0.056596446990966796, 0.057444801330566404, 0.056598464965820314, 0.056465377807617186, 0.05698569488525391, 0.0580824966430664, 0.05647411346435547, 0.056043617248535155, 0.05600185775756836, 0.05595568084716797, 0.05595808029174805, 0.05622822570800781, 0.05611289596557617, 0.055943168640136716, 0.055965023040771486, 0.05553014373779297, 0.05598751831054687, 0.05618153762817383, 0.05605292892456055, 0.05582166290283203, 0.05621033477783203, 0.056033184051513675, 0.056182880401611325, 0.055760894775390625, 0.05580361557006836, 0.05596092987060547, 
0.05585359954833984, 0.05610947036743164, 0.05590995025634766, 0.056062400817871096, 0.05590774536132812, 0.05589052963256836, 0.055919776916503905, 0.055962753295898435, 0.055854816436767575, 0.056104991912841795, 0.056885215759277345, 0.056569854736328126, 0.05637907028198242, 0.056677921295166016, 0.05610063934326172, 0.05605478286743164, 0.0560225601196289, 0.056062049865722656, 0.05617907333374023, 0.056233985900878906, 0.05634038543701172, 0.05624358367919922, 0.056226558685302734, 0.05628067016601562, 0.0567704963684082, 0.05606787109375, 0.05600937652587891, 0.05596979141235352, 0.05669887924194336, 0.056301376342773435, 0.05619036865234375, 0.056510814666748045, 0.05575929641723633, 0.05628124618530273, 0.05599948883056641, 0.055974494934082034, 0.05615846252441406, 0.056309471130371096, 0.05625263977050781, 0.05711635208129883, 0.05728704071044922, 0.05608441543579101, 0.05692627334594726, 0.0555068473815918, 0.055550048828125, 0.05599814224243164, 0.05681388854980469, 0.05623993682861328, 0.05635910415649414, 0.056240127563476565, 0.055860897064208985, 0.05718460845947266, 0.05605574417114258, 0.05609392166137695, 0.056162208557128904, 0.05623855972290039, 0.05595286560058594, 0.056056224822998046, 0.05596377563476562, 0.05658012771606445, 0.056065601348876955, 0.05616419219970703, 0.05628780746459961, 0.056869056701660155, 0.05651846313476563, 0.05607628631591797, 0.05643199920654297, 0.05639641571044922, 0.05657190322875977, 0.056448478698730466, 0.05640758514404297, 0.05651968002319336, 0.05646764755249024, 0.05654230499267578, 0.05617532730102539, 0.056354686737060546, 0.056616127014160154, 0.05606063842773437, 0.05664521789550781, 0.05620374298095703, 0.05618700790405273, 0.0569727668762207, 0.05640975952148437, 0.05633331298828125, 0.05646121597290039, 0.05624812698364258, 0.05620915222167969, 0.05618719863891602, 0.056223743438720705, 0.056403358459472655, 0.0563721923828125, 0.05627897644042969, 0.05648774337768555, 0.05679091262817383, 0.05679526519775391, 0.059418014526367184, 0.0568276481628418, 0.056462047576904296, 0.056289409637451174, 0.05650022506713867, 0.05613363265991211, 0.055811393737792966, 0.0556201286315918, 0.05585321426391601, 0.05601887893676758, 0.05610707092285156, 0.0561868782043457, 0.056281089782714844, 0.05633638381958008, 0.05613158416748047, 0.05608652877807617, 0.05633219146728516, 0.0563590087890625, 0.056240127563476565, 0.056306720733642575, 0.056228832244873045, 0.056338432312011716, 0.056586238861083986, 0.05632624053955078, 0.055942337036132814, 0.05624803161621094, 0.056395809173583986, 0.05559091186523438, 0.055695358276367186, 0.05589347076416016, 0.05659292984008789, 0.05610671997070313, 0.055427425384521486, 0.055535457611083985, 0.055967487335205075, 0.05588620758056641, 0.057097599029541014, 0.056156768798828124, 0.056452766418457034, 0.056392318725585935, 0.056245952606201174, 0.05647577667236328, 0.05605337524414063, 0.05619535827636719, 0.055944480895996095, 0.055998497009277344, 0.05608428955078125, 0.05846499252319336, 0.05639759826660156, 0.05644905471801758, 0.05599795150756836, 0.056750431060791015, 0.05620172882080078, 0.05630316925048828, 0.05596819305419922, 0.05629747009277344, 0.05829033660888672, 0.056446815490722654, 0.05698675155639649, 0.05739334487915039, 0.05594591903686524, 0.056207359313964846, 0.05629494476318359, 0.05615811157226563, 0.05632883071899414, 0.05637273788452148, 0.056301151275634766, 0.057432926177978516, 0.056346622467041016, 0.05689260864257813, 0.056420833587646484, 0.05638588714599609, 
0.05633433532714844, 0.0562852783203125, 0.056354721069335936, 0.05612972640991211, 0.05621126556396484, 0.055799137115478514, 0.055543838500976564, 0.05584751892089844, 0.0558263053894043, 0.056475807189941406, 0.05626675033569336, 0.056282176971435546, 0.056140735626220704, 0.056831390380859374, 0.05637139129638672, 0.05521587371826172, 0.05617484664916992, 0.056220897674560545, 0.05605257415771484, 0.056154048919677735, 0.05607424163818359, 0.05607769775390625, 0.055604991912841795, 0.055433406829833984, 0.05541328048706055, 0.05587907028198242, 0.05595827102661133, 0.056016895294189455, 0.056371070861816405, 0.05610079956054687, 0.0562355842590332, 0.05640569686889649, 0.05683091354370117, 0.05616131210327149, 0.05647062301635742, 0.056317054748535156, 0.056267295837402344, 0.056412384033203124, 0.05632220840454102, 0.0563218879699707, 0.05624627304077148, 0.05636310577392578, 0.056473182678222655, 0.0562465934753418, 0.056578144073486325, 0.05634652709960938, 0.056784896850585936, 0.05904703903198242, 0.056922016143798826, 0.0565689582824707, 0.05637923049926758, 0.05630361557006836, 0.05633446502685547, 0.05602249526977539, 0.05600710296630859, 0.05605782318115234, 0.05599132919311523, 0.05635171127319336, 0.05593241500854492, 0.05601897430419922, 0.05593132781982422, 0.056070369720458986, 0.056212894439697264, 0.056209823608398435, 0.05615756988525391, 0.05631203079223633, 0.05631340789794922, 0.05655420684814453, 0.056068225860595705, 0.05563699340820313, 0.05573324966430664, 0.056118305206298826, 0.05621427154541016, 0.05629359817504883, 0.0562213134765625, 0.05631606292724609, 0.05589436721801758, 0.05606387329101563]",tokens/s,17.762438938764653,,,True 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,14385.012736,10142.810112,0.0,9747.562496,9611.730944,s,1,34.31497265625,34.31497265625,0.0,34.31497265625,34.31497265625,34.31497265625,34.31497265625,[34.31497265625],,kWh,0.0007788282409832997,8.590274566197604e-05,0.00029462440236599496,0.0011593553890112706,,MB,4722.757632,10507.71456,0.0,10091.495424,9989.953536,s,10,1.2657408599853515,0.12657408599853515,0.0014093910157059629,0.12633460998535156,0.12765541381835938,0.12900365600585936,0.13008224975585939,"[0.12648461151123047, 0.1268326110839844, 0.12621836853027343, 0.12543926239013672, 0.12524419403076173, 0.12735580444335937, 0.1264508514404297, 0.1259213409423828, 0.13035189819335938, 0.12544191741943359]",tokens/s,2022.5308994367356,kWh,3.729289157172676e-06,4.1116035866514746e-07,2.4903958038480642e-06,6.630845319685888e-06,tokens/kWh,38607445.60576283,MB,4722.757632,10509.811712,0.0,10093.592576,9989.956096,s,10,76.04738525390624,7.6047385253906254,0.028115135723231145,7.601865478515625,7.63665849609375,7.638342919921875,7.639690458984375,"[7.6362841796875, 7.64002734375, 7.606771484375, 7.59695947265625, 7.629376953125, 7.632111328125, 7.59613720703125, 7.57292041015625, 7.584455078125, 
7.552341796875]",tokens/s,8.28430849918853,kWh,0.00022130831102241136,2.4411459785056264e-05,0.00011673431068695224,0.00036245408149441984,tokens/kWh,173815.12091199867,,s,630,76.04411956024167,0.12070495168292335,0.0012465179445850338,0.12048220825195313,0.12197726821899414,0.12290295524597168,0.1254197380065918,"[0.12093644714355468, 0.12052684783935547, 0.12114031982421875, 0.12114559936523438, 0.12024076843261719, 0.12042652893066406, 0.12247039794921875, 0.12105907440185547, 0.11997347259521485, 0.1235728988647461, 0.12073308563232422, 0.12119087982177734, 0.120115234375, 0.12042652893066406, 0.12082134246826172, 0.12290505981445313, 0.12082717132568359, 0.12087747192382813, 0.1215511703491211, 0.12336518096923828, 0.12130732727050782, 0.12086624145507813, 0.11998675537109375, 0.12129203033447265, 0.121053955078125, 0.12080947113037109, 0.12175052642822265, 0.12255538940429687, 0.12162627410888673, 0.1309876403808594, 0.11998486328125, 0.12093350219726562, 0.12089024353027343, 0.12006195068359375, 0.12007628631591796, 0.12049215698242187, 0.1212326431274414, 0.11952710723876953, 0.11978777313232422, 0.12054946899414062, 0.12071587371826172, 0.11957453155517578, 0.11993292999267578, 0.12310918426513671, 0.1218309097290039, 0.11992339324951172, 0.12047769927978516, 0.11982201385498047, 0.12151430511474609, 0.12066815948486329, 0.12031807708740234, 0.12014374542236328, 0.12162662506103515, 0.12161148834228516, 0.12151888275146484, 0.12299049377441407, 0.12170454406738282, 0.12071513366699219, 0.12131507110595703, 0.12122354888916016, 0.12232498931884765, 0.12214832305908203, 0.1206747817993164, 0.12229942321777344, 0.12165753936767579, 0.12072425842285156, 0.12099174499511718, 0.12197682952880859, 0.1211883544921875, 0.12135833740234375, 0.12080947113037109, 0.12072774505615234, 0.12190636444091797, 0.1197492446899414, 0.11997388458251954, 0.12029132843017579, 0.12101773071289063, 0.12034304046630859, 0.12006790161132813, 0.12008806610107423, 0.12207801818847656, 0.12089055633544922, 0.11990220642089844, 0.12062777709960938, 0.12097357177734375, 0.12062265777587891, 0.1205498275756836, 0.12089344024658204, 0.12103065490722656, 0.12200287628173828, 0.12112694549560547, 0.12624444580078126, 0.1206827163696289, 0.12104691314697266, 0.12128342437744141, 0.1218720932006836, 0.1232259521484375, 0.12245152282714844, 0.12116214752197266, 0.12153699493408203, 0.12116928100585937, 0.12054950714111329, 0.12096326446533204, 0.12128050994873046, 0.12099721527099609, 0.12186726379394532, 0.1216669464111328, 0.12133232116699219, 0.1215283203125, 0.12116377258300781, 0.12213862609863281, 0.12441347503662109, 0.12052297973632813, 0.12073395538330078, 0.1220315170288086, 0.1205759048461914, 0.12130374145507812, 0.12244739532470703, 0.12072128295898438, 0.11985497283935546, 0.1197903060913086, 0.1198919677734375, 0.12131737518310547, 0.1203828125, 0.12005996704101562, 0.12562902069091797, 0.12049651336669921, 0.120174560546875, 0.11925299072265624, 0.11983872222900391, 0.12050227355957031, 0.1197768325805664, 0.12025286102294921, 0.1210871353149414, 0.12058029174804688, 0.12023875427246093, 0.12007958221435547, 0.12050307464599609, 0.12152543640136719, 0.12071609497070312, 0.12092415618896485, 0.12148294067382813, 0.12031622314453125, 0.11979679870605468, 0.11986220550537109, 0.1201270751953125, 0.12120105743408204, 0.11987916564941406, 0.11959552001953125, 0.12033971405029296, 0.1204245147705078, 0.12007891082763672, 0.12032627105712891, 0.1209692153930664, 0.120774658203125, 0.12033843231201172, 
0.1203076171875, 0.12059043121337891, 0.12049635314941406, 0.11988047790527344, 0.12030668640136719, 0.12090179443359375, 0.12027008056640626, 0.12090633392333984, 0.11986739349365234, 0.1207537612915039, 0.12154716491699219, 0.12094054412841797, 0.12024323272705079, 0.12014281463623047, 0.11995257568359376, 0.12143494415283203, 0.12211952209472657, 0.12065821075439453, 0.12162652587890625, 0.12019145965576172, 0.12042854309082031, 0.12349849700927734, 0.13049568176269533, 0.12055225372314453, 0.12093849945068359, 0.12003533172607422, 0.12061414337158204, 0.12140825653076172, 0.12044422149658203, 0.12101087951660157, 0.12060610961914063, 0.12011580657958984, 0.12172227478027343, 0.12047154998779297, 0.12102877044677735, 0.12364179229736329, 0.12290038299560548, 0.11981414031982422, 0.12044083404541016, 0.12103065490722656, 0.11961737823486328, 0.12001910400390625, 0.1198878402709961, 0.12020089721679687, 0.11991410827636718, 0.11956707000732422, 0.12073369598388672, 0.11995462036132812, 0.12044576263427734, 0.12099174499511718, 0.12387942504882812, 0.11965030670166016, 0.12062483215332032, 0.12080944061279297, 0.11953997039794922, 0.12006819152832031, 0.12116172790527344, 0.11973632049560547, 0.120700927734375, 0.12094185638427735, 0.12071298980712891, 0.12090054321289062, 0.120166015625, 0.1196732177734375, 0.12081472015380859, 0.12027954864501954, 0.12056166076660156, 0.121162109375, 0.1203133773803711, 0.12025494384765625, 0.12021273803710937, 0.12043341064453125, 0.12041385650634766, 0.12217708587646485, 0.12042230224609375, 0.12081446075439453, 0.1203220443725586, 0.12011724853515625, 0.11988719940185547, 0.11987010955810547, 0.12074320220947266, 0.12102524566650391, 0.1207391357421875, 0.12014598083496093, 0.12039759826660157, 0.12019907379150391, 0.11980076599121094, 0.11972402954101563, 0.12099174499511718, 0.12059974670410156, 0.12116255950927735, 0.12072665405273438, 0.12122364807128906, 0.12068495941162109, 0.120340576171875, 0.12084825897216797, 0.12175769805908203, 0.12599501037597657, 0.12045696258544922, 0.12127462768554688, 0.12070057678222657, 0.12022767639160156, 0.12110694122314453, 0.12465766143798829, 0.12062252807617188, 0.11955462646484374, 0.11983599853515625, 0.11924342346191406, 0.12044083404541016, 0.12002304077148437, 0.12080329895019531, 0.12011280059814453, 0.11973875427246093, 0.12054118347167969, 0.12097740936279297, 0.12075635528564453, 0.12074518585205078, 0.1207384033203125, 0.12399343872070312, 0.12022978973388672, 0.12038739013671874, 0.1201042251586914, 0.1210667495727539, 0.12024179077148438, 0.12044537353515625, 0.12058870697021484, 0.11973744201660157, 0.11992787170410156, 0.12121398162841797, 0.12085327911376953, 0.12023955535888672, 0.12321238708496093, 0.12340025329589843, 0.1209151382446289, 0.12047824096679688, 0.12052851104736328, 0.11967139434814453, 0.12126822662353516, 0.12048588562011718, 0.12127027130126954, 0.12112019348144532, 0.1218414077758789, 0.12148191833496094, 0.12422713470458985, 0.11996598052978516, 0.12059677124023438, 0.12057379150390625, 0.12152028656005859, 0.12085993957519531, 0.12108258819580078, 0.12210176086425781, 0.12171453094482422, 0.12121875, 0.12198121643066406, 0.12071488189697266, 0.1223724136352539, 0.1217702407836914, 0.12202188873291016, 0.1212968978881836, 0.12316252899169922, 0.12113520050048829, 0.12116361236572265, 0.12188790130615235, 0.12252422332763672, 0.12188422393798828, 0.1215447006225586, 0.12178931427001953, 0.12183334350585938, 0.12167167663574219, 0.12115334320068359, 0.12154646301269531, 
0.1224803237915039, 0.12275804901123047, 0.12201967620849609, 0.12106768035888672, 0.12126822662353516, 0.12120188903808594, 0.12188671875, 0.12310546875, 0.12288060760498047, 0.1220322265625, 0.1215282211303711, 0.11992195129394531, 0.12068889617919921, 0.12622412872314454, 0.12080611419677735, 0.12105532836914062, 0.11988572692871094, 0.12245536041259765, 0.12089965057373046, 0.12023462677001953, 0.11957987213134766, 0.11952381134033203, 0.12026911926269532, 0.12054729461669922, 0.12026882934570313, 0.12095699310302735, 0.11983385467529296, 0.12065190124511718, 0.12455174255371093, 0.12037939453125, 0.12068867492675782, 0.11992668914794923, 0.12038150024414063, 0.12153849792480469, 0.12094246673583985, 0.12005315399169922, 0.11988166046142579, 0.11942739105224609, 0.11968895721435546, 0.11972073364257813, 0.11997337341308593, 0.11971379089355469, 0.12490735626220703, 0.12194064331054688, 0.12115570831298828, 0.12002623748779297, 0.11907968139648438, 0.11986032104492188, 0.12082579040527344, 0.12014886474609375, 0.11965245056152343, 0.12067635345458984, 0.12082479858398437, 0.1200755844116211, 0.1204804458618164, 0.12036243438720703, 0.12018335723876954, 0.12026265716552734, 0.12046246337890625, 0.12080422210693359, 0.11998003387451171, 0.11966464233398437, 0.12013481903076172, 0.11999318695068359, 0.12061238098144532, 0.12016278076171875, 0.12100137329101562, 0.12034518432617188, 0.12040953826904296, 0.11986492919921875, 0.11989500427246094, 0.12054937744140624, 0.12044601440429688, 0.120531005859375, 0.12054003143310547, 0.12138233947753906, 0.12076099395751953, 0.12067945861816406, 0.12031254577636719, 0.11957263946533203, 0.1212907485961914, 0.12094054412841797, 0.12033952331542969, 0.12221126556396485, 0.12026470184326171, 0.12041206359863281, 0.12102870178222656, 0.11988956451416016, 0.11977378845214844, 0.1234593276977539, 0.1204735336303711, 0.12113516998291016, 0.12024012756347656, 0.12034877014160156, 0.12618128204345702, 0.12249702453613281, 0.12015577697753907, 0.12044940948486328, 0.12072140502929687, 0.12012973022460938, 0.1200508804321289, 0.1199130859375, 0.12012278747558594, 0.11981884765625, 0.12018013000488281, 0.12022831726074219, 0.11910566711425781, 0.11931648254394531, 0.12052051544189453, 0.12082809448242188, 0.12214284515380859, 0.1204796142578125, 0.1196723861694336, 0.12048397064208985, 0.1200513916015625, 0.12075382232666015, 0.12053276824951172, 0.12000550079345704, 0.11943526458740235, 0.11951308441162109, 0.11965977478027344, 0.11943807983398437, 0.11923455810546875, 0.12327350616455078, 0.11873654174804688, 0.1203776626586914, 0.12051634979248046, 0.11923273468017578, 0.11963938903808594, 0.12034825897216797, 0.11953033447265625, 0.11957987213134766, 0.1201725082397461, 0.11972281646728515, 0.12014726257324218, 0.12026032257080078, 0.11960540771484375, 0.12021331024169922, 0.11956880187988281, 0.11966320037841797, 0.11966464233398437, 0.11982157135009766, 0.11993164825439454, 0.11918131256103516, 0.12023193359375, 0.11950284576416016, 0.11984281921386719, 0.12008585357666016, 0.12006671905517578, 0.12037503814697266, 0.11967513275146484, 0.1200926742553711, 0.11982601928710937, 0.1196253433227539, 0.12016925048828125, 0.11935948944091797, 0.11961516571044922, 0.121653564453125, 0.12012525177001954, 0.12089539337158203, 0.11990854644775391, 0.11968931579589843, 0.1203220443725586, 0.12011929321289062, 0.12022169494628906, 0.12099993896484375, 0.12016435241699219, 0.12067839813232421, 0.12176112365722656, 0.11925981140136718, 0.12039373016357421, 
0.12212838745117187, 0.12419276428222656, 0.12163276672363281, 0.12094464111328125, 0.12077568054199218, 0.12043135833740234, 0.12124556732177734, 0.11983712005615234, 0.1209288330078125, 0.120710205078125, 0.12094767761230468, 0.12081484985351562, 0.12153446197509765, 0.1206025619506836, 0.12058294677734376, 0.12111872100830078, 0.12083631896972656, 0.11980786895751953, 0.12003628540039063, 0.12058246612548829, 0.12110684967041016, 0.12276310729980469, 0.11899945831298828, 0.11971379089355469, 0.11993497467041016, 0.11981951904296875, 0.12069500732421876, 0.12100457763671875, 0.12158566284179688, 0.1207227554321289, 0.12036576080322266, 0.12053708648681641, 0.11975475311279297, 0.1196789779663086, 0.12126825714111328, 0.12005888366699219, 0.12034556579589843, 0.11950796508789062, 0.11958927917480469, 0.1199642562866211, 0.12013948822021485, 0.11955238342285156, 0.1195492172241211, 0.12046399688720703, 0.12069174194335938, 0.123478271484375, 0.11956502532958985, 0.11984889221191407, 0.12021526336669922, 0.1200327377319336, 0.11988057708740234, 0.12060671997070313, 0.12110578918457031, 0.11995750427246094, 0.12077120208740234, 0.12049612426757812, 0.12022169494628906, 0.12036656188964844, 0.11981059265136719, 0.1197875213623047, 0.12067142486572266, 0.12016518402099609, 0.12002098846435547, 0.11983439636230468, 0.11994882965087891, 0.12025926208496093, 0.11958067321777344, 0.11980140686035157, 0.12032044982910156, 0.12102098846435547, 0.12123529815673828, 0.11997325134277344, 0.12013958740234375, 0.11978781127929687, 0.11942169952392578, 0.12139622497558594, 0.12052703857421875, 0.12007266998291016, 0.12022966766357422, 0.11962947082519532, 0.12013654327392578, 0.1246740493774414, 0.120610595703125, 0.11959318542480468, 0.11972342681884765, 0.12091043090820312, 0.12011910247802735, 0.1235027847290039, 0.11964415740966797, 0.11975475311279297, 0.1206839370727539, 0.11958934020996094, 0.120150146484375, 0.11989984130859375, 0.11979590606689453, 0.11912754821777344, 0.11915532684326172, 0.11973632049560547, 0.12029132843017579, 0.11915001678466797, 0.11998806762695312, 0.11948515319824218, 0.12000665283203125, 0.12285951995849609, 0.11932825469970704, 0.11902355194091797, 0.11969522857666015, 0.1187212142944336, 0.1191357421875, 0.11905455780029296, 0.11926585388183594, 0.11857686614990234, 0.11894364929199219, 0.11845407867431641, 0.11928572845458985, 0.11911404418945312, 0.1216880645751953, 0.11921612548828125, 0.11875260925292969, 0.11916710662841797, 0.11868217468261719, 0.11807536315917969, 0.11918950653076171, 0.11968704223632813, 0.11968482971191406, 0.11958512115478516, 0.11995961761474609, 0.11961740875244141, 0.1197733154296875, 0.11948607635498047, 0.1198329620361328, 0.11899494171142579]",tokens/s,8.284664266523826,,,True -8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): 
+8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -4832,7 +4832,7 @@ torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.45 GiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,5097.627648,3461.28384,0.0,3066.036224,2865.160192,s,1,13.38390625,13.38390625,0.0,13.38390625,13.38390625,13.38390625,13.38390625,[13.38390625],,kWh,0.00017487062844995005,1.9281217380313427e-05,6.501616312401537e-05,0.00025916800895427883,,MB,5149.663232,3790.536704,0.0,3374.317568,3158.448128,s,10,0.9157994461059572,0.0915799446105957,0.0011892466327708747,0.09108694076538086,0.09342338409423828,0.09351868362426759,0.09359492324829102,"[0.09025955200195312, 0.09040876770019532, 0.09074793243408204, 0.0907008285522461, 0.09142594909667968, 0.09361398315429688, 0.09259849548339844, 0.09199523162841797, 0.09064649963378907, 0.09340220642089844]",tokens/s,2795.37185885545,kWh,2.7058780424000155e-06,2.9840813150666025e-07,1.496266629111287e-06,4.500552803017963e-06,tokens/kWh,56881901.22518561,MB,5153.906688,3790.536704,0.0,3374.317568,3158.450688,s,10,55.28744775390625,5.528744775390625,0.02828313822901441,5.533910888671874,5.561607861328125,5.561866918945313,5.562074165039062,"[5.48099658203125, 5.48805810546875, 5.5044287109375, 5.536615234375, 5.51940771484375, 5.56155029296875, 5.54486474609375, 5.55819384765625, 5.53120654296875, 5.5621259765625]",tokens/s,11.394991550419114,kWh,0.00016187142267759847,1.7854969432405406e-05,6.869442841228868e-05,0.00024842082052229254,tokens/kWh,253601.9318652342,,s,630,55.28498953247067,0.08775395163884239,0.0011452891762173502,0.08761142349243164,0.08863429946899415,0.08943564682006835,0.09132697471618652,"[0.08672249603271484, 0.08921660614013673, 0.08627180480957031, 0.08731305694580078, 0.0873371810913086, 0.08705142211914063, 0.08651136016845704, 0.08661033630371094, 0.08614755249023437, 0.08600479888916016, 0.08675353240966797, 0.08663724517822266, 
0.08671340942382813, 0.08705039978027344, 0.08636041259765626, 0.086299072265625, 0.08696361541748047, 0.08683487701416015, 0.08672348785400391, 0.08708096313476563, 0.08619414520263671, 0.08639411163330078, 0.08615299224853516, 0.08633042907714844, 0.08709673309326171, 0.08659552001953125, 0.0863279037475586, 0.08664678192138672, 0.08673075103759766, 0.08678137969970703, 0.0874686050415039, 0.08691232299804688, 0.08670272064208985, 0.08641276550292969, 0.08648764801025391, 0.08669776153564453, 0.08782447814941406, 0.08641081237792969, 0.08690950775146485, 0.08630271911621094, 0.08749228668212891, 0.08859024047851563, 0.08655913543701171, 0.08709939575195312, 0.09019391632080079, 0.08824755096435546, 0.08671040344238282, 0.086995361328125, 0.0865315170288086, 0.08727817535400391, 0.08965074920654297, 0.0903944320678711, 0.0877423324584961, 0.08647776031494141, 0.08700227355957031, 0.08631951904296875, 0.08639859008789062, 0.0870544662475586, 0.08724534606933594, 0.08680770874023437, 0.0864901123046875, 0.0872790756225586, 0.08621724700927734, 0.0864300765991211, 0.08674457550048828, 0.08694953918457031, 0.0866251220703125, 0.0865005111694336, 0.08636252593994141, 0.08686841583251953, 0.08805593872070312, 0.08734047698974609, 0.08655712127685547, 0.08668364715576173, 0.08688758087158203, 0.08662335968017579, 0.08706598663330078, 0.08693135833740234, 0.08662265777587891, 0.08678809356689453, 0.08698191833496094, 0.08695267486572265, 0.08637030029296874, 0.08682003021240234, 0.08649967956542969, 0.0866984634399414, 0.08723865509033203, 0.08686182403564453, 0.0871725082397461, 0.08734146881103516, 0.09051907348632812, 0.08798684692382812, 0.0868106231689453, 0.08767398071289062, 0.08646131134033203, 0.08711500549316406, 0.08680524444580077, 0.0869191665649414, 0.08658029174804688, 0.08715058898925782, 0.08689759826660157, 0.0867872314453125, 0.08687091064453124, 0.08707711791992187, 0.08726252746582032, 0.087072509765625, 0.08758477020263672, 0.08744003295898438, 0.08725078582763672, 0.08689667510986328, 0.08698483276367187, 0.08744290924072265, 0.08677430725097657, 0.08726707458496094, 0.08734336090087891, 0.0901711654663086, 0.08681494140625, 0.08739408111572265, 0.08768144226074219, 0.08700457763671875, 0.08753603363037109, 0.08715264129638672, 0.08681881713867187, 0.08741248321533203, 0.08664214324951172, 0.08724150085449218, 0.08748851013183594, 0.08652909088134765, 0.08695410919189453, 0.08725177764892578, 0.0868331527709961, 0.08665907287597656, 0.08636316680908203, 0.08964969635009766, 0.08684108734130859, 0.08767507171630859, 0.08820172882080078, 0.08718745422363282, 0.08751280212402343, 0.08670236968994141, 0.08757247924804687, 0.08760115051269532, 0.08708064270019532, 0.08687033843994141, 0.08836710357666015, 0.08680448150634766, 0.08678825378417969, 0.08713187408447266, 0.08694182586669921, 0.08683721923828125, 0.08659964752197266, 0.0871629409790039, 0.08775475311279297, 0.08809471893310547, 0.08714649963378907, 0.08667545318603516, 0.09059667205810547, 0.08697277069091797, 0.08667910766601562, 0.08709606170654297, 0.08641539001464844, 0.08729395294189453, 0.08661395263671876, 0.08662432098388671, 0.0909677734375, 0.08739868927001954, 0.0868384017944336, 0.08705522918701172, 0.08800800323486328, 0.08683904266357421, 0.0872674560546875, 0.0874334716796875, 0.08734982299804687, 0.08711513519287109, 0.09081037139892578, 0.08801340484619141, 0.08753907012939453, 0.08694646453857421, 0.0868325424194336, 0.08708911895751953, 0.08690560150146484, 0.08682460784912109, 0.08802649688720703, 
0.08714348602294922, 0.08666703796386718, 0.08761753845214844, 0.08724272155761718, 0.08727263641357422, 0.08740541076660156, 0.08711526489257812, 0.0874639663696289, 0.08712374114990235, 0.08803603363037109, 0.08776627349853515, 0.08742578887939453, 0.08734620666503906, 0.08781104278564453, 0.08759910583496094, 0.08808201599121093, 0.08745820617675781, 0.08749056243896484, 0.08765235137939453, 0.08943001556396485, 0.08964688110351562, 0.08870320129394531, 0.08752742767333985, 0.08788582611083984, 0.08790611267089844, 0.08831916809082031, 0.08848700714111328, 0.08786319732666016, 0.08739984130859375, 0.08759766387939454, 0.08777932739257813, 0.08754291534423828, 0.08910323333740235, 0.08783846282958985, 0.08857830047607422, 0.09074073791503906, 0.08742707061767578, 0.08813702392578125, 0.08794719696044923, 0.08765705871582032, 0.08713375854492188, 0.08719542694091797, 0.0881200942993164, 0.08866560363769531, 0.09160710144042969, 0.08810765075683594, 0.0873388442993164, 0.08696217346191407, 0.08822486114501953, 0.08830818939208984, 0.08768761444091797, 0.08727916717529297, 0.08744547271728516, 0.08770742034912109, 0.08768511962890625, 0.08852345275878906, 0.08725424194335937, 0.08725583648681641, 0.08711539459228515, 0.0873722915649414, 0.08787126159667968, 0.08744931030273438, 0.08794560241699219, 0.08732466888427734, 0.08680413055419922, 0.08766226959228515, 0.08744812774658203, 0.08761148834228516, 0.08736358642578125, 0.08777145385742187, 0.08790460968017579, 0.08782425689697265, 0.0876559066772461, 0.08772281646728515, 0.08731619262695313, 0.087531005859375, 0.08743417358398438, 0.08740585327148437, 0.08739708709716797, 0.08702976226806641, 0.08752947235107422, 0.08790016174316406, 0.08959964752197265, 0.08764019012451171, 0.08738569641113281, 0.08931375885009765, 0.08813993835449219, 0.08786316680908203, 0.08801702117919921, 0.08744898986816406, 0.08740310668945313, 0.08751500701904297, 0.08792896270751953, 0.09071820831298828, 0.08827699279785156, 0.08729190063476562, 0.08747539520263672, 0.08677401733398438, 0.08715116882324218, 0.08721202850341797, 0.08762163543701172, 0.08744550323486328, 0.08704144287109375, 0.08678256225585937, 0.08739997100830078, 0.08822950744628906, 0.08725385284423828, 0.08747007751464844, 0.087364990234375, 0.08755580902099609, 0.08771389007568359, 0.08752761840820313, 0.08716966247558594, 0.08723865509033203, 0.08765644836425782, 0.08714649963378907, 0.08775475311279297, 0.08766025543212891, 0.08725122833251953, 0.08726732635498047, 0.08761277008056641, 0.0876304931640625, 0.08797529602050781, 0.08717375946044922, 0.08711373138427735, 0.08699884796142578, 0.08758665466308593, 0.08728399658203125, 0.08755001831054687, 0.08751516723632813, 0.08740860748291016, 0.08717842864990234, 0.08839254760742188, 0.08801074981689454, 0.08871116638183593, 0.08743936157226563, 0.0880345916748047, 0.0877677764892578, 0.08830770874023437, 0.08798598480224609, 0.08887519836425781, 0.08826470184326173, 0.08820896148681641, 0.08896969604492187, 0.08988047790527344, 0.08874569702148437, 0.08880982208251953, 0.08880086517333985, 0.08873158264160157, 0.08877481842041016, 0.08920060729980468, 0.08793910217285156, 0.08779193878173829, 0.08765187072753906, 0.08757465362548827, 0.08846371459960937, 0.09420390319824219, 0.0915979232788086, 0.0886341781616211, 0.08780169677734374, 0.08774687957763672, 0.08810006713867187, 0.08765497589111328, 0.08778294372558594, 0.0878086395263672, 0.08815837097167968, 0.08808432006835938, 0.08825241851806641, 0.08764643096923828, 0.08791395568847657, 
0.08769725036621094, 0.08778905487060547, 0.08829574584960938, 0.08774518585205078, 0.08753561401367188, 0.08732057952880859, 0.08766368103027344, 0.08805452728271485, 0.08821778869628906, 0.08750406646728516, 0.08784317016601563, 0.08755862426757813, 0.08825856018066407, 0.08786739349365234, 0.08842422485351563, 0.08827942657470703, 0.08799420928955078, 0.08779571533203125, 0.08786688232421876, 0.08917453002929687, 0.08760476684570312, 0.08820076751708984, 0.08793529510498047, 0.08761164855957031, 0.08835485076904297, 0.0876148452758789, 0.08776972961425782, 0.09135222625732421, 0.0876839370727539, 0.08769081878662109, 0.0877531509399414, 0.08740249633789063, 0.08743936157226563, 0.09081609344482422, 0.08972329711914062, 0.08811110687255859, 0.08780595397949219, 0.08749056243896484, 0.08716659545898438, 0.08759539031982422, 0.08794461059570312, 0.08833695983886719, 0.08775273895263672, 0.08736972808837891, 0.08742060852050781, 0.087484130859375, 0.08820387268066407, 0.08757148742675781, 0.08769430541992188, 0.08754176330566406, 0.08720384216308594, 0.08737177276611328, 0.08774166107177735, 0.08764905548095703, 0.09094876861572265, 0.08775308990478516, 0.0875232925415039, 0.0875647964477539, 0.08784512329101563, 0.08724364471435547, 0.08772281646728515, 0.08759302520751953, 0.0875387191772461, 0.08767378997802734, 0.08850844573974609, 0.08829686737060546, 0.0885824966430664, 0.08774272155761718, 0.0876578598022461, 0.08825676727294922, 0.08779199981689453, 0.08738979339599609, 0.08748073577880859, 0.08830156707763671, 0.09102745819091797, 0.08817254638671874, 0.08764559936523438, 0.08774102020263672, 0.08796774291992188, 0.08778125, 0.08846963500976562, 0.08853609466552734, 0.08775369262695312, 0.08736675262451171, 0.08763689422607422, 0.08759442901611328, 0.08807635498046874, 0.08872716522216798, 0.08831168365478516, 0.08841081237792969, 0.08795337677001953, 0.08818019104003906, 0.0875607681274414, 0.0882339859008789, 0.08775478363037109, 0.0879943389892578, 0.08783052825927734, 0.08850249481201172, 0.09037596893310547, 0.08786265563964844, 0.08787324523925781, 0.08783721923828125, 0.08890751647949219, 0.08787417602539062, 0.08809081268310547, 0.08781804656982421, 0.08863539123535157, 0.08790016174316406, 0.08854243469238281, 0.08834742736816406, 0.08780966186523438, 0.08776105499267578, 0.08798131561279297, 0.08815305328369141, 0.088923583984375, 0.08761135864257813, 0.08738188934326171, 0.0874750747680664, 0.08728521728515624, 0.08759539031982422, 0.08837324523925781, 0.08790214538574219, 0.08708412933349609, 0.08769779205322266, 0.08736418914794922, 0.08785414123535157, 0.08748332977294922, 0.08834662628173828, 0.08718540954589844, 0.0878380126953125, 0.08805856323242188, 0.08794882965087891, 0.0877265625, 0.08813164520263672, 0.08754720306396484, 0.08984844970703125, 0.08886617279052735, 0.08782460784912109, 0.08749097442626953, 0.08988671875, 0.08825651550292969, 0.08795458984375, 0.08866492462158203, 0.08799436950683594, 0.08820243072509766, 0.08767139434814453, 0.08733103942871094, 0.0989881591796875, 0.08893660736083984, 0.08751203155517578, 0.08720665740966797, 0.08752947235107422, 0.0876883544921875, 0.08787439727783203, 0.08763362884521485, 0.08736112213134765, 0.08708985900878906, 0.0873512954711914, 0.08757852935791016, 0.08778342437744141, 0.0881460189819336, 0.08796329498291015, 0.08748281860351563, 0.08745148468017579, 0.08823177337646484, 0.08793660736083984, 0.08758911895751953, 0.08733734130859375, 0.08747417449951173, 0.0876786880493164, 0.08753753662109375, 
0.08719833374023438, 0.08713375854492188, 0.0869948501586914, 0.08731065368652344, 0.08781327819824218, 0.08812630462646484, 0.08795516967773437, 0.08710956573486328, 0.08712841796875, 0.08766585540771485, 0.08843462371826172, 0.08758975982666016, 0.08719155120849609, 0.08718268585205079, 0.09164822387695312, 0.0887996826171875, 0.08814966583251953, 0.08808403015136719, 0.08771398162841797, 0.0873778533935547, 0.08696809387207032, 0.08752406311035156, 0.0876525115966797, 0.08738374328613281, 0.08757484436035157, 0.0873587188720703, 0.08726399993896485, 0.0878182373046875, 0.08740367889404296, 0.08811427307128906, 0.09023257446289062, 0.08875417327880859, 0.08800227355957031, 0.0873864974975586, 0.08696221160888672, 0.08701689910888671, 0.08702207946777343, 0.08722220611572265, 0.0878551025390625, 0.08772777557373047, 0.09126515197753907, 0.08815779113769531, 0.08896109008789063, 0.0887603530883789, 0.08806735992431641, 0.08819305419921875, 0.08784966278076171, 0.08771078491210937, 0.08779792022705078, 0.08847171020507813, 0.08754649353027344, 0.08798332977294922, 0.0877281265258789, 0.08887375640869141, 0.08824422454833984, 0.08807218933105469, 0.08774451446533203, 0.08773017883300781, 0.08743321228027344, 0.08777295684814453, 0.08784713745117187, 0.08775625610351563, 0.08839609527587891, 0.08810514831542969, 0.08774454498291015, 0.08944025421142578, 0.08878291320800781, 0.08796915435791015, 0.08835062408447265, 0.08740131378173828, 0.08772819519042968, 0.08798307037353516, 0.08822227478027343, 0.08760956573486328, 0.08786943817138672, 0.08769670104980469, 0.0877791976928711, 0.0891394271850586, 0.08860118103027344, 0.08816166687011719, 0.09099244689941406, 0.08832809448242188, 0.0876038055419922, 0.08801721954345704, 0.08756018829345703, 0.08806931304931641, 0.0881526107788086, 0.10139228820800782, 0.08885244750976562, 0.08792630767822265, 0.08820211029052734, 0.08812748718261719, 0.08796729278564454, 0.08808902740478515, 0.08823935699462891, 0.08764006042480468, 0.08755043029785156, 0.08841785430908203, 0.08753376007080078, 0.08754176330566406, 0.0880268783569336, 0.08781289672851562, 0.08786534118652344, 0.08767894744873046, 0.08750508880615235, 0.08821887969970703]",tokens/s,11.395498223436944,,,True -8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): +8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch @@ -4858,7 +4858,7 @@ ChildProcessError: Traceback (most recent call last): self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( - File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained + File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) @@ -4870,7 +4870,7 @@ ChildProcessError: Traceback (most recent call last): self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() - File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1709, in __getattr__ + File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' @@ -4912,7 +4912,7 @@ ImportError: This modeling file requires the following packages that were not fo ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,12193.357824,7099.84256,0.0,6704.594944,6690.791936,s,1,28.44133203125,28.44133203125,0.0,28.44133203125,28.44133203125,28.44133203125,28.44133203125,[28.44133203125],,kWh,0.0006235108196625006,6.877042901585516e-05,0.00020887405598800195,0.0009011553046663577,,MB,1414.0416,7313.752064,0.0,6897.532928,6816.50432,s,10,1.1857021408081054,0.11857021408081055,0.0010024997860044512,0.11849817657470704,0.1201054313659668,0.12013938941955567,0.12016655586242676,"[0.12009788513183593, 0.11803987121582031, 0.11808700561523437, 0.11830169677734376, 0.11748175811767578, 0.11901964569091797, 0.11683843231201171, 0.11869465637207031, 0.11896784210205077, 
0.12017334747314454]",tokens/s,2159.058259146984,kWh,3.4711949982843576e-06,3.828096586059955e-07,2.307318839317635e-06,6.161323496207988e-06,tokens/kWh,41549514.5089454,MB,1429.864448,7320.04352,0.0,6903.824384,6816.50688,s,10,72.63889843749999,7.26388984375,0.015049420714715026,7.2661413574218745,7.280953271484375,7.285970288085937,7.289983901367187,"[7.2909873046875, 7.26429052734375, 7.27105419921875, 7.246630859375, 7.2679921875, 7.25025634765625, 7.2623486328125, 7.26813818359375, 7.27983837890625, 7.23736181640625]",tokens/s,8.67303901286533,kWh,0.00021200965658713236,2.3385078023517928e-05,9.511884243488237e-05,0.00033051357704553267,tokens/kWh,190612.44189469682,,s,630,72.6361952819824,0.11529554806663876,0.0010446708919572187,0.115090576171875,0.11633906249999999,0.11714389457702637,0.11896890357971192,"[0.11530003356933594, 0.11442550659179687, 0.11449600219726562, 0.11477782440185547, 0.11467190551757812, 0.11597782135009765, 0.11590493011474609, 0.1181503677368164, 0.1160010223388672, 0.11474934387207031, 0.11423603057861328, 0.11417788696289062, 0.1152425308227539, 0.11499724578857422, 0.11604441833496094, 0.12042675018310547, 0.11649612426757812, 0.11655500793457031, 0.11493247985839844, 0.11514998626708985, 0.11507798767089844, 0.11556893157958985, 0.11576290893554687, 0.11464508819580078, 0.11562179565429688, 0.11634483337402343, 0.11534073638916016, 0.11492617797851562, 0.11450511932373048, 0.11523772430419922, 0.11565200042724609, 0.11610710144042968, 0.11600873565673828, 0.11528262329101563, 0.11634473419189453, 0.11496662139892579, 0.11498291015625, 0.11481906890869141, 0.11617485046386719, 0.11482931518554687, 0.11630754852294922, 0.11550761413574219, 0.11574681854248046, 0.11531468963623047, 0.11623388671875, 0.12600972747802736, 0.11497894287109375, 0.11875052642822266, 0.11647567749023438, 0.11615718078613281, 0.11529625701904297, 0.11559321594238281, 0.11422720336914062, 0.11409548950195313, 0.11512464141845703, 0.11666044616699218, 0.11514262390136719, 0.1166060791015625, 0.11510633850097657, 0.11519219207763672, 0.11556454467773437, 0.11493689727783203, 0.11478102111816406, 0.11487741088867187, 0.11472191619873047, 0.11617779541015626, 0.11568742370605468, 0.11547647857666016, 0.11493785858154297, 0.11429248046875, 0.11434355163574218, 0.11523455810546875, 0.11492153930664062, 0.116283203125, 0.117570556640625, 0.11477922821044922, 0.11559152221679687, 0.11497119903564453, 0.11427635192871094, 0.11439513397216797, 0.11502793884277343, 0.11443756866455078, 0.11583958435058593, 0.11552931213378906, 0.114968994140625, 0.1166346206665039, 0.11602812957763672, 0.11464463806152343, 0.11552543640136718, 0.11564064025878906, 0.11542176055908203, 0.11569760131835938, 0.11571119689941406, 0.11521218872070313, 0.11535372924804688, 0.11471894073486329, 0.11477455902099609, 0.11447641754150391, 0.11445916748046875, 0.11548477172851562, 0.11646566772460938, 0.11871561431884765, 0.1153054428100586, 0.11541814422607422, 0.11486022186279297, 0.11465174102783203, 0.11518966674804687, 0.11540294647216796, 0.11669884490966796, 0.11518915557861328, 0.11437340545654297, 0.11584512329101562, 0.11483920288085937, 0.11444608306884765, 0.11628141021728515, 0.1153986587524414, 0.11510816192626953, 0.11628771209716797, 0.1155455322265625, 0.11538079833984374, 0.11531641387939454, 0.11446918487548828, 0.11434803009033204, 0.11413654327392578, 0.11474179077148437, 0.11548419189453125, 0.11519884490966797, 0.11557833862304688, 0.11564701080322265, 0.11837235260009765, 0.11480802917480469, 
0.11720188903808594, 0.11519366455078126, 0.11558297729492187, 0.11497062683105469, 0.11751833343505859, 0.11452825927734375, 0.11466957092285156, 0.11468342590332031, 0.11784758758544922, 0.11503507232666016, 0.11471593475341797, 0.11548336029052735, 0.11580413055419922, 0.11510137939453124, 0.11875977325439453, 0.11483689880371094, 0.11418685150146485, 0.11474329376220703, 0.11525529479980469, 0.11536294555664063, 0.1157161636352539, 0.11509228515625, 0.11497401428222656, 0.11522860717773438, 0.11466194915771484, 0.11441490936279297, 0.11542617797851562, 0.11470162963867188, 0.11561235046386718, 0.1167227554321289, 0.11475244903564454, 0.11542063903808594, 0.11472541046142579, 0.11464704132080078, 0.11466751861572265, 0.11626290893554687, 0.11459693145751954, 0.11620652770996094, 0.11579801940917969, 0.11555987548828126, 0.11446044921875, 0.11532323455810548, 0.11487664031982422, 0.11470281219482421, 0.11524697875976563, 0.1159842529296875, 0.11503119659423829, 0.11449839782714843, 0.11491276550292968, 0.1147479019165039, 0.11485164642333984, 0.1145304946899414, 0.1149333724975586, 0.11499510192871094, 0.11596233367919923, 0.11518726348876954, 0.1189883804321289, 0.11527254486083985, 0.1148098907470703, 0.11480572509765626, 0.11504630279541016, 0.11403385925292969, 0.11465206146240234, 0.11511138916015624, 0.11513910675048829, 0.11455423736572265, 0.11495692443847656, 0.11450367736816407, 0.1146692123413086, 0.1152290267944336, 0.11548262023925782, 0.11578982543945313, 0.11466537475585938, 0.11506674957275391, 0.11436259460449219, 0.11447529602050781, 0.11413270568847657, 0.11427606201171875, 0.11414147186279297, 0.11510578918457032, 0.11496060943603516, 0.11427804565429688, 0.11533222198486329, 0.11493888092041016, 0.11666448211669922, 0.1141451187133789, 0.11750109100341796, 0.11460079956054688, 0.11538745880126954, 0.11566563415527344, 0.11498438262939453, 0.11390975952148437, 0.11476873779296876, 0.11435206604003906, 0.11637267303466797, 0.11480579376220704, 0.11611103820800782, 0.114781982421875, 0.11624838256835937, 0.11534937286376953, 0.11459954833984375, 0.11450777435302735, 0.11481804656982422, 0.11452227020263672, 0.1145588150024414, 0.11580006408691407, 0.11524710083007812, 0.1148436508178711, 0.11507679748535156, 0.1149948501586914, 0.11492607879638672, 0.1148416976928711, 0.11518572998046875, 0.11476150512695313, 0.11867568206787109, 0.11573801422119141, 0.11484630584716797, 0.11462451171875, 0.11594342041015625, 0.11438089752197265, 0.11330960083007813, 0.11523149108886718, 0.11573366546630859, 0.11542790222167969, 0.1154276123046875, 0.11474944305419922, 0.11482854461669922, 0.1148485107421875, 0.1144438705444336, 0.11714396667480469, 0.11630182647705078, 0.11567072296142578, 0.11788726043701171, 0.1154416961669922, 0.11462620544433594, 0.1146712646484375, 0.11444294738769531, 0.11452178955078125, 0.11503033447265625, 0.11633843231201171, 0.11616860961914062, 0.11966422271728516, 0.11536978912353515, 0.11477903747558593, 0.11474127960205079, 0.11468185424804687, 0.1150013427734375, 0.115797119140625, 0.11456355285644532, 0.11532546997070313, 0.11565052795410156, 0.11422447967529296, 0.1141537628173828, 0.1142188491821289, 0.11428294372558594, 0.11504000091552734, 0.11550508880615235, 0.11590892791748048, 0.11477401733398437, 0.11556422424316407, 0.11508921813964844, 0.114498046875, 0.1143746566772461, 0.11510784149169923, 0.11459359741210938, 0.11510006713867188, 0.11539788818359376, 0.11538220977783203, 0.11490972900390625, 0.11511113739013672, 0.11630016326904297, 
0.11515542602539063, 0.11800704193115234, 0.1153617935180664, 0.11457357025146485, 0.11516681671142578, 0.11497465515136719, 0.11519599914550781, 0.11435887908935546, 0.11435846710205078, 0.11938211059570313, 0.11566851043701172, 0.11587948608398438, 0.11559414672851563, 0.11486412811279297, 0.11480604553222656, 0.1143057632446289, 0.11507939147949219, 0.11466294097900391, 0.11474969482421875, 0.11553507232666016, 0.11560755157470703, 0.11511273956298829, 0.11468800354003907, 0.1145789794921875, 0.11466185760498047, 0.11429682922363281, 0.11502496337890625, 0.11581468963623047, 0.11601372528076172, 0.11581849670410156, 0.11576694488525391, 0.11481126403808593, 0.1144258270263672, 0.11421491241455078, 0.11523686218261718, 0.11477401733398437, 0.11584921264648437, 0.11524915313720703, 0.11508943939208985, 0.1150893783569336, 0.11420374298095703, 0.11447388458251953, 0.11598847961425782, 0.11478813171386719, 0.11490531158447266, 0.11530025482177735, 0.11522665405273437, 0.11606204986572266, 0.11476195526123047, 0.1148326416015625, 0.11461504364013672, 0.11632809448242187, 0.115714111328125, 0.11517699432373046, 0.1149849624633789, 0.11546851348876953, 0.117772705078125, 0.11456086730957031, 0.11432726287841796, 0.11463491058349609, 0.11659715270996093, 0.115212158203125, 0.11681372833251953, 0.11538835144042969, 0.11449788665771485, 0.11504828643798828, 0.11428361511230468, 0.11424665832519532, 0.11443814086914063, 0.11428044891357422, 0.11459401702880859, 0.11599030303955078, 0.11507711791992188, 0.11469414520263672, 0.11491526031494141, 0.11366515350341796, 0.1146569595336914, 0.11406777954101563, 0.11620966339111329, 0.11529420471191407, 0.11569097900390625, 0.11585753631591797, 0.11516150665283204, 0.1166192626953125, 0.1149725112915039, 0.11506089782714844, 0.11773542022705077, 0.11578572845458984, 0.11456511688232422, 0.11670118713378906, 0.11466957092285156, 0.11408383941650391, 0.11431526184082032, 0.1150218276977539, 0.11792998504638671, 0.11526780700683593, 0.11665952301025391, 0.11611305236816406, 0.11479532623291015, 0.11514182281494141, 0.11478237152099609, 0.11476233673095704, 0.11493154907226563, 0.11522684478759766, 0.11521024322509765, 0.11600691223144531, 0.11580006408691407, 0.11502191925048828, 0.11544265747070312, 0.11473811340332031, 0.11489446258544922, 0.11488236999511718, 0.11540332794189453, 0.11580973052978516, 0.11656864166259766, 0.11531190490722656, 0.1154849624633789, 0.11474169921875, 0.11482726287841796, 0.1148231658935547, 0.1147894058227539, 0.11486502075195312, 0.11580384063720703, 0.11643331146240235, 0.11472035217285156, 0.11501200103759765, 0.114155517578125, 0.11429273223876953, 0.11445977783203125, 0.11524114990234376, 0.1150030746459961, 0.11604624176025391, 0.11539692687988282, 0.11523260498046875, 0.11465484619140626, 0.11455149078369141, 0.11431922912597656, 0.11480006408691407, 0.11523689270019531, 0.11590259552001952, 0.11552191925048828, 0.11892121887207031, 0.1152020492553711, 0.11444019317626954, 0.11510784149169923, 0.1150978240966797, 0.11479837036132813, 0.11547200012207032, 0.11530278778076172, 0.11479862213134766, 0.11516105651855468, 0.1146081314086914, 0.11409139251708984, 0.11437734222412109, 0.1147146224975586, 0.11444188690185547, 0.11503651428222657, 0.11565670776367187, 0.11582169342041015, 0.11448614501953125, 0.1142108154296875, 0.11457852935791016, 0.1143609619140625, 0.11462268829345704, 0.1162550048828125, 0.11574044799804688, 0.11515494537353516, 0.11520169830322266, 0.11512380981445312, 0.11839686584472656, 
0.11520012664794922, 0.11944172668457032, 0.116129150390625, 0.1164967041015625, 0.11557548522949218, 0.1151488037109375, 0.11549247741699219, 0.11479689788818359, 0.11493379211425782, 0.1168436508178711, 0.11614297485351563, 0.11552092742919921, 0.11570175933837891, 0.11628173065185547, 0.11518531036376953, 0.11494588470458984, 0.11457814025878907, 0.11489405059814453, 0.11391776275634766, 0.11499187469482422, 0.1157245101928711, 0.11525325012207031, 0.1145323486328125, 0.11486617279052734, 0.11455693054199219, 0.11442546844482422, 0.11449609375, 0.11505372619628906, 0.11517523193359375, 0.11619574737548828, 0.117266845703125, 0.11549209594726563, 0.11487596893310546, 0.1147713623046875, 0.11475424194335937, 0.11454774475097657, 0.11512105560302735, 0.11725193786621094, 0.11566102600097657, 0.11614002990722656, 0.11481702423095703, 0.11505868530273437, 0.11491327667236328, 0.11489199829101562, 0.11566716766357422, 0.11633309173583985, 0.11559849548339844, 0.11927417755126953, 0.1157480926513672, 0.1157778549194336, 0.11411929321289062, 0.11422720336914062, 0.11543142700195312, 0.11563593292236328, 0.1164393310546875, 0.1168476791381836, 0.11697401428222656, 0.1173939208984375, 0.11492697906494141, 0.11486790466308594, 0.1149736328125, 0.11570585632324219, 0.11637344360351562, 0.11555232238769532, 0.115504638671875, 0.11520674896240235, 0.11470982360839843, 0.11443587493896484, 0.11461014556884766, 0.11538518524169922, 0.11500543975830078, 0.11642060852050781, 0.11636531066894532, 0.11544684600830078, 0.11583106994628906, 0.11501615905761718, 0.11484384155273437, 0.11452623748779298, 0.11461830139160156, 0.11531874847412109, 0.11593529510498046, 0.11575657653808594, 0.11562544250488281, 0.11472179412841797, 0.11470848083496094, 0.11797503662109375, 0.11549491119384765, 0.11850982666015625, 0.11613884735107421, 0.115880126953125, 0.11568370819091797, 0.11546249389648437, 0.11477811431884766, 0.11424671936035156, 0.11472172546386719, 0.11443225860595703, 0.11664653015136718, 0.1154703369140625, 0.11515058898925781, 0.11509171295166015, 0.11402265930175781, 0.11402384185791016, 0.11424348449707031, 0.11435842895507813, 0.11463459014892578, 0.11623673248291015, 0.11534130859375, 0.1161396484375, 0.1152311019897461, 0.11419843292236329, 0.11403478240966797, 0.11410636901855468, 0.11421900939941407, 0.11532028961181641, 0.11669558715820312, 0.115115234375, 0.11482323455810547, 0.11423567962646484, 0.11454428863525391, 0.11415017700195312, 0.11412226867675782, 0.11443001556396484, 0.11598070526123047, 0.1154378204345703, 0.11452995300292969, 0.11487996673583985, 0.11422579193115234, 0.11403798675537109, 0.11398633575439453, 0.11445145416259765, 0.1137242202758789, 0.11554563140869141, 0.11470111846923828, 0.11480608367919921, 0.11516336059570312, 0.11460025787353516, 0.11434188842773438, 0.11476787567138672, 0.11440537261962891, 0.11445043182373046, 0.11552745819091798, 0.11581257629394531, 0.1156136932373047, 0.11831008148193359, 0.11445088195800782, 0.11424806213378906, 0.1141739501953125, 0.11436844635009766, 0.11565267181396484, 0.11437641906738281, 0.11532681274414062, 0.11503593444824219, 0.11431587219238282, 0.1146921615600586, 0.11411670684814453, 0.11424460601806641, 0.11462928009033203, 0.11714380645751953]",tokens/s,8.673361779953705,,,True 
-8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last):
+8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,"Traceback (most recent call last):
   File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch
     benchmark_report = Benchmark.launch(benchmark_config)
   File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch
@@ -4938,11 +4938,11 @@ ChildProcessError: Traceback (most recent call last):
     self.pretrained_model = self.automodel_loader.from_pretrained(
   File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained
     return model_class.from_pretrained(
-  File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained
+  File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained
     config = cls._autoset_attn_implementation(
-  File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation
+  File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation
     config = cls._check_and_enable_sdpa(
-  File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa
+  File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa
     raise ValueError(
 ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")`
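Note on the failing 8bit-bnb-sdpa rows above: the ValueError itself names the workaround (eager attention). Below is a minimal, hypothetical sketch of that fallback; it is not part of this CSV or the benchmark tooling, and it simply reuses the failing row's settings (gpt-neo-2.7B, float16, 8-bit bitsandbytes) while swapping sdpa for eager, as the traceback suggests.

    import torch
    from transformers import AutoModelForCausalLM, BitsAndBytesConfig

    # Assumed reproduction of the failing row's config, with the one change
    # the error message recommends: eager attention instead of sdpa, since
    # GPTNeoForCausalLM has no SDPA implementation in this transformers version.
    model = AutoModelForCausalLM.from_pretrained(
        "EleutherAI/gpt-neo-2.7B",
        torch_dtype=torch.float16,
        attn_implementation="eager",  # avoids the ValueError raised under sdpa
        quantization_config=BitsAndBytesConfig(load_in_8bit=True),
        device_map="auto",
    )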