IlyasMoutawwakil committed
Commit 9a44217 · verified · Parent: 2a656c0

Upload cuda_inference_transformers_text-generation_openai-community/gpt2/benchmark.json with huggingface_hub

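The commit message says the file was pushed with the huggingface_hub client. A minimal sketch of such an upload, assuming a logged-in token; the repo_id placeholder and repo_type are assumptions, since the target repository is not visible in this commit view:

from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` or the HF_TOKEN env var

api.upload_file(
    path_or_fileobj="benchmark.json",
    path_in_repo="cuda_inference_transformers_text-generation_openai-community/gpt2/benchmark.json",
    repo_id="<namespace>/<benchmark-repo>",  # placeholder, not shown in this commit view
    repo_type="dataset",  # assumption: benchmark dumps like this are typically stored in a dataset repo
    commit_message="Upload cuda_inference_transformers_text-generation_openai-community/gpt2/benchmark.json with huggingface_hub",
)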
cuda_inference_transformers_text-generation_openai-community/gpt2/benchmark.json CHANGED
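For context on the numbers changing below: each latency block stores both the raw per-iteration "values" and derived aggregates (count, total, mean, stdev, percentiles), and the reported throughput is consistent with tokens-per-iteration divided by the mean latency. A rough recomputation sketch; the percentile interpolation, the population-style stdev, and the tokens_per_iteration constants (2 for prefill, 1 for decode and per_token in this file) are inferred from the numbers, not documented here:

import numpy as np

def summarize_latency(values, tokens_per_iteration=1):
    """Recompute the aggregate fields stored next to a "values" list.

    Assumptions: NumPy's default linear percentile interpolation and
    population standard deviation (ddof=0).
    """
    v = np.asarray(values, dtype=np.float64)
    return {
        "count": int(v.size),
        "total": float(v.sum()),
        "mean": float(v.mean()),
        "stdev": float(v.std()),
        "p50": float(np.percentile(v, 50)),
        "p90": float(np.percentile(v, 90)),
        "p95": float(np.percentile(v, 95)),
        "p99": float(np.percentile(v, 99)),
        # e.g. ~147 tokens/s for the new decode section: 1 token / ~0.0068 s mean latency
        "throughput_tokens_per_s": tokens_per_iteration / float(v.mean()),
    }

Feeding the 76 decode values from the new file into summarize_latency reproduces the reported mean of roughly 0.00680 s and throughput of roughly 147 tokens/s; the percentiles may differ slightly if a different interpolation method was used.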
@@ -3,7 +3,7 @@
3
  "name": "cuda_inference_transformers_text-generation_openai-community/gpt2",
4
  "backend": {
5
  "name": "pytorch",
6
- "version": "2.4.0+cu124",
7
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
8
  "task": "text-generation",
9
  "library": "transformers",
@@ -104,7 +104,7 @@
104
  "load": {
105
  "memory": {
106
  "unit": "MB",
107
- "max_ram": 791.011328,
108
  "max_global_vram": 1355.28448,
109
  "max_process_vram": 0.0,
110
  "max_reserved": 725.614592,
@@ -113,31 +113,31 @@
113
  "latency": {
114
  "unit": "s",
115
  "count": 1,
116
- "total": 7.38058349609375,
117
- "mean": 7.38058349609375,
118
  "stdev": 0.0,
119
- "p50": 7.38058349609375,
120
- "p90": 7.38058349609375,
121
- "p95": 7.38058349609375,
122
- "p99": 7.38058349609375,
123
  "values": [
124
- 7.38058349609375
125
  ]
126
  },
127
  "throughput": null,
128
  "energy": {
129
  "unit": "kWh",
130
- "cpu": 9.79268531249981e-07,
131
- "ram": 5.211768322305634e-07,
132
- "gpu": 1.6541679900001875e-06,
133
- "total": 3.1546133534807317e-06
134
  },
135
  "efficiency": null
136
  },
137
  "prefill": {
138
  "memory": {
139
  "unit": "MB",
140
- "max_ram": 1195.49952,
141
  "max_global_vram": 1365.77024,
142
  "max_process_vram": 0.0,
143
  "max_reserved": 725.614592,
@@ -145,114 +145,113 @@
145
  },
146
  "latency": {
147
  "unit": "s",
148
- "count": 77,
149
- "total": 0.4903361606597902,
150
- "mean": 0.00636800208649078,
151
- "stdev": 0.00024804738054740966,
152
- "p50": 0.006288608074188232,
153
- "p90": 0.006525209426879883,
154
- "p95": 0.006893216133117676,
155
- "p99": 0.007409616737365721,
156
  "values": [
157
- 0.007550015926361084,
158
- 0.00717193603515625,
159
- 0.00688259220123291,
160
- 0.006398079872131347,
161
- 0.0063352642059326175,
162
- 0.00632966423034668,
163
- 0.006345920085906983,
164
- 0.006353888034820556,
165
- 0.006321087837219238,
166
- 0.006340991973876953,
167
- 0.006324448108673096,
168
- 0.006361152172088623,
169
- 0.006231167793273926,
170
- 0.006226784229278565,
171
- 0.006260159969329834,
172
- 0.006267839908599853,
173
- 0.00628659200668335,
174
- 0.006232960224151611,
175
- 0.006252895832061767,
176
- 0.00641923189163208,
177
- 0.006260064125061035,
178
  0.006302175998687744,
179
- 0.006290592193603515,
180
- 0.006307263851165771,
181
- 0.00629417610168457,
182
- 0.00622057580947876,
183
- 0.0062008957862854,
184
- 0.006292736053466797,
185
- 0.006479072093963623,
186
- 0.006260255813598633,
187
- 0.006291935920715332,
188
- 0.006243391990661621,
189
- 0.006279647827148438,
190
- 0.006244224071502685,
191
- 0.0062039680480957034,
192
- 0.006226592063903809,
193
- 0.006235648155212403,
194
- 0.007365280151367187,
195
- 0.006935711860656738,
196
- 0.006528031826019287,
197
- 0.006369376182556152,
198
- 0.006433599948883056,
199
- 0.006270080089569092,
200
- 0.006228928089141846,
201
- 0.006190815925598145,
202
- 0.00628927993774414,
203
- 0.006230016231536865,
204
- 0.006225120067596435,
205
- 0.0062195839881896975,
206
- 0.006283840179443359,
207
- 0.0062295360565185545,
208
- 0.006428575992584228,
209
- 0.006484928131103515,
210
- 0.006523327827453613,
211
- 0.006395103931427002,
212
- 0.006515935897827148,
213
- 0.006385727882385254,
214
- 0.006523263931274414,
215
- 0.006454815864562988,
216
- 0.006288608074188232,
217
- 0.006216639995574951,
218
- 0.00623203182220459,
219
- 0.006214176177978516,
220
- 0.006624512195587158,
221
- 0.006763775825500488,
222
- 0.006361184120178223,
223
- 0.006277088165283203,
224
- 0.006288127899169922,
225
  0.0062865281105041505,
226
- 0.00630079984664917,
227
- 0.006193471908569336,
228
- 0.006194752216339111,
229
- 0.0062135357856750485,
230
- 0.006221248149871826,
231
- 0.0062154560089111325,
232
- 0.006193984031677246,
233
- 0.006213471889495849
234
  ]
235
  },
236
  "throughput": {
237
  "unit": "tokens/s",
238
- "value": 314.07024885290855
239
  },
240
  "energy": {
241
  "unit": "kWh",
242
- "cpu": 7.813567176078304e-08,
243
- "ram": 4.2720651648529784e-08,
244
- "gpu": 1.5021941842105172e-07,
245
- "total": 2.710757418303646e-07
246
  },
247
  "efficiency": {
248
  "unit": "tokens/kWh",
249
- "value": 7378011.719143692
250
  }
251
  },
252
  "decode": {
253
  "memory": {
254
  "unit": "MB",
255
- "max_ram": 1214.17728,
256
  "max_global_vram": 1365.77024,
257
  "max_process_vram": 0.0,
258
  "max_reserved": 725.614592,
@@ -260,205 +259,203 @@
260
  },
261
  "latency": {
262
  "unit": "s",
263
- "count": 77,
264
- "total": 0.5145581755638122,
265
- "mean": 0.006682573708620937,
266
- "stdev": 0.0002910752437658753,
267
- "p50": 0.006599520206451416,
268
- "p90": 0.0068155519485473635,
269
- "p95": 0.007122745609283448,
270
- "p99": 0.008015219097137448,
271
  "values": [
272
- 0.0075838398933410645,
273
- 0.00793446397781372,
274
- 0.006700287818908692,
275
- 0.0067365760803222655,
276
- 0.006623167991638184,
277
- 0.0066278080940246585,
278
- 0.006605631828308105,
279
- 0.006598527908325195,
280
- 0.007346720218658447,
281
- 0.006633056163787842,
282
- 0.00659987211227417,
283
- 0.006593376159667969,
284
- 0.006554080009460449,
285
- 0.006548128128051758,
286
- 0.006543680191040039,
287
- 0.006518911838531494,
288
- 0.006551040172576904,
289
- 0.006585599899291993,
290
- 0.006725503921508789,
291
- 0.006536896228790283,
292
- 0.006547488212585449,
293
- 0.006710303783416748,
294
- 0.006624800205230713,
295
- 0.0065796799659729,
296
- 0.006568160057067871,
297
- 0.006529151916503906,
298
- 0.006575679779052735,
299
- 0.006710879802703858,
300
- 0.006521247863769532,
301
- 0.006685567855834961,
302
- 0.006605184078216553,
303
- 0.006503039836883545,
304
- 0.0065797119140625,
305
- 0.006516767978668213,
306
- 0.006696447849273681,
307
- 0.00654252815246582,
308
- 0.006511648178100586,
309
- 0.008270943641662597,
310
- 0.006784512042999268,
311
- 0.006777919769287109,
312
- 0.006858816146850586,
313
- 0.0066648640632629395,
314
- 0.006526944160461426,
315
- 0.006526303768157959,
316
- 0.006553215980529785,
317
- 0.006501152038574219,
318
- 0.006519807815551758,
319
- 0.006619071960449219,
320
- 0.0066258559226989745,
321
- 0.006625951766967774,
322
- 0.006505087852478027,
323
- 0.006900928020477295,
324
- 0.006803040027618408,
325
- 0.006695903778076172,
326
- 0.006766304016113282,
327
- 0.006806719779968262,
328
- 0.0066752638816833494,
329
- 0.006773568153381347,
330
- 0.006796351909637451,
331
- 0.006612800121307373,
332
- 0.006609824180603028,
333
- 0.006501535892486573,
334
- 0.006523903846740723,
335
- 0.007066751956939697,
336
- 0.006828800201416015,
337
- 0.006578112125396728,
338
- 0.006671840190887451,
339
- 0.006573760032653809,
340
- 0.006573887825012207,
341
- 0.006514848232269287,
342
- 0.006520864009857178,
343
- 0.006545631885528564,
344
- 0.006565087795257568,
345
- 0.006599520206451416,
346
- 0.006502528190612793,
347
- 0.006501920223236084,
348
- 0.006532576084136963
349
  ]
350
  },
351
  "throughput": {
352
  "unit": "tokens/s",
353
- "value": 149.64294351290695
354
  },
355
  "energy": {
356
  "unit": "kWh",
357
- "cpu": 7.451963764982637e-08,
358
- "ram": 4.0385879775532454e-08,
359
- "gpu": 1.4709207298920198e-07,
360
- "total": 2.6199759041456067e-07
361
  },
362
  "efficiency": {
363
  "unit": "tokens/kWh",
364
- "value": 3816828.996089975
365
  }
366
  },
367
  "per_token": {
368
  "memory": null,
369
  "latency": {
370
  "unit": "s",
371
- "count": 77,
372
- "total": 0.4841798720359801,
373
- "mean": 0.006288050286181561,
374
- "stdev": 0.00026699542202695013,
375
- "p50": 0.00621670389175415,
376
- "p90": 0.006411878585815429,
377
- "p95": 0.006714572906494141,
378
- "p99": 0.007436943225860593,
379
  "values": [
380
- 0.007047167778015137,
381
- 0.007311359882354736,
382
- 0.0063170561790466305,
383
- 0.006331391811370849,
384
- 0.006240255832672119,
385
- 0.006242303848266601,
386
- 0.006221856117248535,
387
- 0.006198272228240966,
388
- 0.006957056045532227,
389
- 0.0062494721412658695,
390
- 0.00621670389175415,
391
- 0.0062044157981872555,
392
- 0.006158336162567139,
393
- 0.006158271789550781,
394
- 0.006153215885162353,
395
- 0.0061337599754333495,
396
- 0.006169600009918213,
397
- 0.006191103935241699,
398
- 0.006325247764587402,
399
- 0.006155263900756836,
400
- 0.006165503978729248,
401
- 0.0063201279640197755,
402
- 0.00624128007888794,
403
- 0.0061972479820251464,
404
- 0.006166528224945069,
405
- 0.006150144100189209,
406
- 0.006195199966430664,
407
- 0.006293504238128662,
408
- 0.006140927791595459,
409
- 0.006288383960723877,
410
  0.0062269439697265625,
411
- 0.00612556791305542,
412
- 0.006198272228240966,
413
  0.006116352081298828,
414
- 0.0063170561790466305,
415
- 0.006164480209350586,
416
- 0.0061337599754333495,
417
- 0.00783462381362915,
418
- 0.0063569917678833006,
419
- 0.006376448154449463,
420
- 0.006472703933715821,
421
- 0.0062791681289672855,
422
- 0.006149119853973388,
423
- 0.006137856006622314,
424
- 0.0061736321449279786,
425
- 0.006112224102020264,
426
- 0.006130688190460205,
427
- 0.006238207817077636,
428
- 0.006232063770294189,
429
- 0.006243328094482422,
430
- 0.00611737585067749,
431
- 0.0065064959526062015,
432
- 0.0064143362045288085,
433
- 0.006312960147857666,
434
- 0.006329343795776367,
435
- 0.006409215927124024,
436
- 0.006288383960723877,
437
- 0.0063610877990722655,
438
- 0.006396927833557129,
439
- 0.00623308801651001,
440
- 0.006232063770294189,
441
- 0.0061224961280822755,
442
- 0.006144000053405762,
443
- 0.006653952121734619,
444
- 0.0064102401733398436,
445
- 0.0061972479820251464,
446
- 0.006288383960723877,
447
- 0.00618393611907959,
448
- 0.006181888103485108,
449
- 0.00613478422164917,
450
- 0.0061265921592712404,
451
- 0.006165503978729248,
452
  0.006188032150268555,
453
- 0.006220799922943115,
454
- 0.006123551845550537,
455
- 0.006124544143676758,
456
- 0.0061521921157836916
457
  ]
458
  },
459
  "throughput": {
460
  "unit": "tokens/s",
461
- "value": 159.03180707660232
462
  },
463
  "energy": null,
464
  "efficiency": null
 
3
  "name": "cuda_inference_transformers_text-generation_openai-community/gpt2",
4
  "backend": {
5
  "name": "pytorch",
6
+ "version": "2.4.1+cu124",
7
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
8
  "task": "text-generation",
9
  "library": "transformers",
 
104
  "load": {
105
  "memory": {
106
  "unit": "MB",
107
+ "max_ram": 791.072768,
108
  "max_global_vram": 1355.28448,
109
  "max_process_vram": 0.0,
110
  "max_reserved": 725.614592,
 
113
  "latency": {
114
  "unit": "s",
115
  "count": 1,
116
+ "total": 7.36196826171875,
117
+ "mean": 7.36196826171875,
118
  "stdev": 0.0,
119
+ "p50": 7.36196826171875,
120
+ "p90": 7.36196826171875,
121
+ "p95": 7.36196826171875,
122
+ "p99": 7.36196826171875,
123
  "values": [
124
+ 7.36196826171875
125
  ]
126
  },
127
  "throughput": null,
128
  "energy": {
129
  "unit": "kWh",
130
+ "cpu": 1.0165625527780345e-06,
131
+ "ram": 5.41433303116786e-07,
132
+ "gpu": 1.6605568840001843e-06,
133
+ "total": 3.2185527398950053e-06
134
  },
135
  "efficiency": null
136
  },
137
  "prefill": {
138
  "memory": {
139
  "unit": "MB",
140
+ "max_ram": 1191.952384,
141
  "max_global_vram": 1365.77024,
142
  "max_process_vram": 0.0,
143
  "max_reserved": 725.614592,
 
145
  },
146
  "latency": {
147
  "unit": "s",
148
+ "count": 76,
149
+ "total": 0.4922538595199585,
150
+ "mean": 0.006477024467367876,
151
+ "stdev": 0.00032774161098666224,
152
+ "p50": 0.006309391975402832,
153
+ "p90": 0.006774208068847656,
154
+ "p95": 0.0072180480957031255,
155
+ "p99": 0.007596280217170716,
156
  "values": [
157
+ 0.007860832214355469,
158
+ 0.007508096218109131,
159
+ 0.00748963212966919,
160
+ 0.007494080066680908,
161
+ 0.006888895988464355,
162
+ 0.006349952220916748,
163
+ 0.0063277120590209965,
164
+ 0.006295936107635498,
165
+ 0.0063656001091003415,
166
+ 0.006361504077911377,
167
  0.006302175998687744,
168
+ 0.0062501120567321775,
169
+ 0.006231488227844239,
170
+ 0.006340288162231446,
171
+ 0.006288832187652588,
172
+ 0.0062849922180175785,
173
+ 0.006297088146209716,
174
+ 0.006305247783660889,
175
+ 0.006290783882141113,
176
  0.0062865281105041505,
177
+ 0.006280128002166748,
178
+ 0.006295392036437988,
179
+ 0.006315616130828858,
180
+ 0.00624396800994873,
181
+ 0.006284543991088867,
182
+ 0.006298431873321533,
183
+ 0.006299647808074951,
184
+ 0.0062623038291931155,
185
+ 0.006584447860717773,
186
+ 0.006949440002441406,
187
+ 0.006778560161590576,
188
+ 0.006702688217163086,
189
+ 0.006616543769836425,
190
+ 0.0066797761917114255,
191
+ 0.00628380823135376,
192
+ 0.0062638077735900875,
193
+ 0.006292479991912842,
194
+ 0.006319104194641113,
195
+ 0.006284832000732422,
196
+ 0.0062709121704101565,
197
+ 0.0062800002098083495,
198
+ 0.006338399887084961,
199
+ 0.006295392036437988,
200
+ 0.0062813439369201664,
201
+ 0.006297887802124024,
202
+ 0.006310431957244873,
203
+ 0.00633190393447876,
204
+ 0.006312032222747802,
205
+ 0.0062921919822692875,
206
+ 0.006246623992919922,
207
+ 0.006285183906555176,
208
+ 0.006713247776031494,
209
+ 0.006596159934997558,
210
+ 0.006629151821136475,
211
+ 0.006615744113922119,
212
+ 0.006680768013000488,
213
+ 0.006643904209136963,
214
+ 0.006598432064056397,
215
+ 0.006645664215087891,
216
+ 0.006576608180999756,
217
+ 0.006601856231689453,
218
+ 0.007127520084381104,
219
+ 0.006769855976104736,
220
+ 0.006272607803344726,
221
+ 0.0064167361259460445,
222
+ 0.006308351993560791,
223
+ 0.006279232025146485,
224
+ 0.006289343833923339,
225
+ 0.006253280162811279,
226
+ 0.006273920059204101,
227
+ 0.006270080089569092,
228
+ 0.006288832187652588,
229
+ 0.006247392177581787,
230
+ 0.006513440132141114,
231
+ 0.0066462721824646,
232
+ 0.006597856044769287
233
  ]
234
  },
235
  "throughput": {
236
  "unit": "tokens/s",
237
+ "value": 308.7837648408262
238
  },
239
  "energy": {
240
  "unit": "kWh",
241
+ "cpu": 7.892425722957996e-08,
242
+ "ram": 4.300645887905692e-08,
243
+ "gpu": 1.5013624961589237e-07,
244
+ "total": 2.7206696572452925e-07
245
  },
246
  "efficiency": {
247
  "unit": "tokens/kWh",
248
+ "value": 7351131.346188577
249
  }
250
  },
251
  "decode": {
252
  "memory": {
253
  "unit": "MB",
254
+ "max_ram": 1210.298368,
255
  "max_global_vram": 1365.77024,
256
  "max_process_vram": 0.0,
257
  "max_reserved": 725.614592,
 
259
  },
260
  "latency": {
261
  "unit": "s",
262
+ "count": 76,
263
+ "total": 0.5169356474876403,
264
+ "mean": 0.006801784835363689,
265
+ "stdev": 0.0003572991233096181,
266
+ "p50": 0.006617839813232422,
267
+ "p90": 0.007117200136184693,
268
+ "p95": 0.0075078001022338865,
269
+ "p99": 0.008178991794586182,
270
  "values": [
271
+ 0.008266592025756836,
272
+ 0.007898240089416505,
273
+ 0.007736927986145019,
274
+ 0.008149791717529296,
275
+ 0.006643775939941406,
276
+ 0.006586463928222657,
277
+ 0.0066208958625793455,
278
+ 0.006611487865447998,
279
+ 0.007431424140930175,
280
+ 0.006620351791381836,
281
+ 0.0066518402099609375,
282
+ 0.006522560119628906,
283
+ 0.006491231918334961,
284
+ 0.006599552154541015,
285
+ 0.006596127986907959,
286
+ 0.006582240104675293,
287
+ 0.0065641279220581055,
288
+ 0.0065714879035949705,
289
+ 0.006591072082519531,
290
+ 0.006588064193725586,
291
+ 0.0066072001457214355,
292
+ 0.006690048217773438,
293
+ 0.006606880187988281,
294
+ 0.006604351997375488,
295
+ 0.006694272041320801,
296
+ 0.006563615798950195,
297
+ 0.006605055809020996,
298
+ 0.006615327835083008,
299
+ 0.007038527965545654,
300
+ 0.0071224641799926755,
301
+ 0.0070024957656860355,
302
+ 0.0070910401344299314,
303
+ 0.006992415904998779,
304
+ 0.0069209918975830075,
305
+ 0.006681536197662354,
306
+ 0.006594175815582275,
307
+ 0.006589407920837402,
308
+ 0.006559743881225586,
309
+ 0.006579296112060547,
310
+ 0.006602367877960205,
311
+ 0.00662169599533081,
312
+ 0.006547359943389892,
313
+ 0.00653868818283081,
314
+ 0.006565440177917481,
315
+ 0.006612703800201416,
316
+ 0.0065402560234069824,
317
+ 0.006565951824188233,
318
+ 0.0067066879272460935,
319
+ 0.006613088130950928,
320
+ 0.006591968059539795,
321
+ 0.00686521577835083,
322
+ 0.006963679790496826,
323
+ 0.0069552321434021,
324
+ 0.006938079833984375,
325
+ 0.00699235200881958,
326
+ 0.006949056148529053,
327
+ 0.006975200176239014,
328
+ 0.006952928066253662,
329
+ 0.006947008132934571,
330
+ 0.00694217586517334,
331
+ 0.007210495948791504,
332
+ 0.0072821760177612304,
333
+ 0.006953216075897216,
334
+ 0.006591008186340332,
335
+ 0.0065972480773925785,
336
+ 0.006580575942993164,
337
+ 0.006555424213409424,
338
+ 0.006632287979125976,
339
+ 0.006597407817840576,
340
+ 0.006565279960632324,
341
+ 0.006573184013366699,
342
+ 0.00659830379486084,
343
+ 0.006657343864440918,
344
+ 0.007111936092376709,
345
+ 0.006995423793792725,
346
+ 0.0069640960693359375
347
  ]
348
  },
349
  "throughput": {
350
  "unit": "tokens/s",
351
+ "value": 147.0202342774535
352
  },
353
  "energy": {
354
  "unit": "kWh",
355
+ "cpu": 7.503042186493293e-08,
356
+ "ram": 4.116874495469756e-08,
357
+ "gpu": 1.469166575789161e-07,
358
+ "total": 2.6311582439854656e-07
359
  },
360
  "efficiency": {
361
  "unit": "tokens/kWh",
362
+ "value": 3800607.592819202
363
  }
364
  },
365
  "per_token": {
366
  "memory": null,
367
  "latency": {
368
  "unit": "s",
369
+ "count": 76,
370
+ "total": 0.4863630719184875,
371
+ "mean": 0.006399514104190625,
372
+ "stdev": 0.0003208190283690651,
373
+ "p50": 0.006235104084014892,
374
+ "p90": 0.006692864179611206,
375
+ "p95": 0.007076608061790467,
376
+ "p99": 0.007602943897247315,
377
  "values": [
378
+ 0.007709695816040039,
379
+ 0.007349247932434082,
380
+ 0.007201791763305664,
381
+ 0.0075673599243164065,
382
+ 0.006255616188049316,
383
+ 0.006200319766998291,
384
+ 0.006239264011383056,
385
  0.0062269439697265625,
386
+ 0.0070348801612854,
387
+ 0.006239232063293457,
388
+ 0.006269951820373535,
389
+ 0.0061439361572265625,
390
  0.006116352081298828,
391
+ 0.006194176197052002,
392
+ 0.006213632106781006,
393
+ 0.006200319766998291,
394
+ 0.00617571210861206,
395
  0.006188032150268555,
396
+ 0.006212607860565185,
397
+ 0.006207488059997559,
398
+ 0.006228991985321045,
399
+ 0.006309887886047363,
400
+ 0.006213632106781006,
401
+ 0.0062259202003479,
402
+ 0.006312992095947266,
403
+ 0.006184959888458252,
404
+ 0.006196224212646485,
405
+ 0.006231040000915527,
406
+ 0.006631423950195312,
407
+ 0.006687744140625,
408
+ 0.006603775978088379,
409
+ 0.006675456047058105,
410
+ 0.00659660816192627,
411
+ 0.006525951862335205,
412
+ 0.006301695823669433,
413
+ 0.006213632106781006,
414
+ 0.006211584091186524,
415
+ 0.006180863857269287,
416
+ 0.006168575763702393,
417
+ 0.006221824169158936,
418
+ 0.006239168167114258,
419
+ 0.006168575763702393,
420
+ 0.006161375999450684,
421
+ 0.006189055919647217,
422
+ 0.006228991985321045,
423
+ 0.006162432193756104,
424
+ 0.006187007904052734,
425
+ 0.006308864116668702,
426
+ 0.006223872184753418,
427
+ 0.006212607860565185,
428
+ 0.006465536117553711,
429
+ 0.006554624080657959,
430
+ 0.0065474557876586915,
431
+ 0.006500351905822754,
432
+ 0.006593535900115967,
433
+ 0.006543360233306885,
434
+ 0.0065771517753601075,
435
+ 0.006543360233306885,
436
+ 0.00653926420211792,
437
+ 0.006529024124145508,
438
+ 0.0067338237762451176,
439
+ 0.006849535942077637,
440
+ 0.0065177597999572755,
441
+ 0.006200319766998291,
442
+ 0.006217728137969971,
443
+ 0.006189055919647217,
444
+ 0.006161407947540283,
445
+ 0.006250495910644531,
446
+ 0.006216767787933349,
447
+ 0.0061859841346740725,
448
+ 0.006182911872863769,
449
+ 0.00618393611907959,
450
+ 0.006271999835968017,
451
+ 0.0066979842185974124,
452
+ 0.00659660816192627,
453
+ 0.006561791896820069
454
  ]
455
  },
456
  "throughput": {
457
  "unit": "tokens/s",
458
+ "value": 156.2618635913569
459
  },
460
  "energy": null,
461
  "efficiency": null