IlyasMoutawwakil committed (verified)
Commit 745f185
Parent: 5000d62

Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub

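The commit message above says the file was pushed with huggingface_hub. For reference, a minimal sketch of the kind of call that produces such a commit; the repo_id and repo_type below are illustrative assumptions, since the target repo is not shown on this page:

    # Sketch of an upload via huggingface_hub; repo_id/repo_type are hypothetical.
    from huggingface_hub import HfApi

    api = HfApi()  # reads the auth token from HF_TOKEN or the local login cache
    api.upload_file(
        path_or_fileobj="benchmark.json",  # local file to push
        path_in_repo="cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json",
        repo_id="IlyasMoutawwakil/benchmarks",  # hypothetical repo name for illustration
        repo_type="dataset",  # assumption: benchmark dumps are commonly kept in dataset repos
        commit_message="Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
    )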
cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -3,7 +3,7 @@
     "name": "cuda_inference_transformers_text-classification_FacebookAI/roberta-base",
     "backend": {
         "name": "pytorch",
-        "version": "2.3.1+cu121",
+        "version": "2.4.0+cu121",
         "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
         "task": "text-classification",
         "library": "transformers",
@@ -104,7 +104,7 @@
     "load": {
         "memory": {
             "unit": "MB",
-            "max_ram": 721.293312,
+            "max_ram": 764.690432,
             "max_global_vram": 1185.415168,
             "max_process_vram": 0.0,
             "max_reserved": 555.74528,
@@ -113,31 +113,31 @@
         "latency": {
             "unit": "s",
             "count": 1,
-            "total": 7.13442919921875,
-            "mean": 7.13442919921875,
+            "total": 7.4639306640625,
+            "mean": 7.4639306640625,
             "stdev": 0.0,
-            "p50": 7.13442919921875,
-            "p90": 7.13442919921875,
-            "p95": 7.13442919921875,
-            "p99": 7.13442919921875,
+            "p50": 7.4639306640625,
+            "p90": 7.4639306640625,
+            "p95": 7.4639306640625,
+            "p99": 7.4639306640625,
             "values": [
-                7.13442919921875
+                7.4639306640625
             ]
         },
         "throughput": null,
         "energy": {
             "unit": "kWh",
-            "cpu": 1.1297022277776848e-06,
-            "ram": 5.60757316662321e-07,
-            "gpu": 1.8155570080001168e-06,
-            "total": 3.5060165524401224e-06
+            "cpu": 1.0541516187494438e-06,
+            "ram": 5.621382218018841e-07,
+            "gpu": 1.557223467999868e-06,
+            "total": 3.173513308551196e-06
         },
         "efficiency": null
     },
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 914.558976,
+            "max_ram": 957.792256,
             "max_global_vram": 1195.900928,
             "max_process_vram": 0.0,
             "max_reserved": 555.74528,
@@ -145,188 +145,185 @@
         },
         "latency": {
             "unit": "s",
-            "count": 157,
-            "total": 0.9974321289062504,
-            "mean": 0.006353070884753186,
-            "stdev": 0.00021092148129269071,
-            "p50": 0.006322175979614258,
-            "p90": 0.006540492820739746,
-            "p95": 0.006765772819519043,
-            "p99": 0.0071974503707885745,
+            "count": 154,
+            "total": 0.9972991976737977,
+            "mean": 0.00647596881606362,
+            "stdev": 0.00017864928256275443,
+            "p50": 0.006453248023986816,
+            "p90": 0.006775500631332398,
+            "p95": 0.006876979351043701,
+            "p99": 0.006927875094413757,
             "values": [
-                0.006987775802612305,
-                0.007005184173583984,
-                0.00667955207824707,
+                0.006936575889587402,
+                0.0069283838272094726,
+                0.006887423992156983,
+                0.00692742395401001,
+                0.0069110398292541505,
+                0.006905856132507324,
+                0.006859776020050049,
+                0.006818816184997558,
+                0.0068689918518066405,
+                0.006834176063537598,
+                0.006874112129211426,
+                0.006882304191589355,
+                0.006909952163696289,
+                0.006797311782836914,
+                0.006724607944488525,
+                0.0067983360290527345,
+                0.006816768169403077,
+                0.0066007041931152345,
+                0.006621183872222901,
+                0.006636544227600098,
                 0.006615039825439453,
-                0.00659660816192627,
-                0.006626304149627686,
-                0.006551551818847656,
-                0.007095295906066895,
-                0.006352896213531494,
-                0.006142943859100342,
-                0.006131711959838867,
-                0.006122528076171875,
-                0.006136832237243653,
-                0.0061255359649658205,
-                0.006108160018920898,
-                0.006091775894165039,
-                0.006090752124786377,
-                0.0060661759376525876,
-                0.006089727878570556,
-                0.006069248199462891,
-                0.006114304065704346,
-                0.006338560104370118,
-                0.006091775894165039,
-                0.006097919940948486,
-                0.006053887844085694,
-                0.006080512046813965,
-                0.006056960105895996,
-                0.006096896171569824,
-                0.00606822395324707,
-                0.006074368000030517,
-                0.00606822395324707,
-                0.0063836159706115725,
-                0.006330368041992188,
-                0.006307839870452881,
-                0.00632422399520874,
-                0.006316031932830811,
-                0.006347743988037109,
-                0.006300672054290772,
-                0.006347775936126709,
-                0.006351903915405273,
-                0.006393856048583985,
-                0.006251520156860352,
-                0.006217728137969971,
-                0.006235136032104492,
-                0.006266880035400391,
-                0.006341631889343262,
-                0.006322175979614258,
-                0.006340608119964599,
-                0.00633241605758667,
-                0.006346752166748047,
-                0.006396927833557129,
-                0.006331391811370849,
-                0.00633241605758667,
-                0.0063539199829101565,
-                0.006358016014099121,
-                0.006338560104370118,
-                0.00652288007736206,
-                0.006337535858154297,
-                0.006525951862335205,
-                0.006301695823669433,
-                0.006326272010803223,
-                0.006348800182342529,
-                0.006296576023101807,
-                0.006304768085479737,
-                0.006333439826965332,
-                0.0067645440101623535,
-                0.006770688056945801,
-                0.00733900785446167,
-                0.007041024208068848,
-                0.0072427520751953125,
-                0.0065771517753601075,
-                0.006386688232421875,
-                0.006347775936126709,
-                0.006492159843444824,
-                0.006335487842559814,
-                0.006404096126556396,
-                0.006352896213531494,
-                0.006367231845855713,
-                0.006335487842559814,
-                0.006342656135559082,
+                0.006559711933135986,
+                0.006560768127441406,
+                0.006536191940307618,
+                0.006655968189239502,
+                0.006649856090545654,
+                0.0065782079696655275,
+                0.006618112087249756,
+                0.00658739185333252,
+                0.006563839912414551,
+                0.006587456226348877,
+                0.006599679946899414,
+                0.0065474557876586915,
+                0.006603775978088379,
+                0.006590464115142822,
+                0.00653926420211792,
+                0.006486015796661377,
+                0.006504447937011719,
+                0.0064880638122558594,
+                0.006426623821258545,
+                0.006191103935241699,
+                0.006221824169158936,
+                0.006210559844970703,
+                0.006188032150268555,
+                0.00620851182937622,
+                0.0061931519508361815,
+                0.006212607860565185,
+                0.006184959888458252,
+                0.006210559844970703,
+                0.006200319766998291,
+                0.0062228479385375976,
+                0.006200319766998291,
+                0.006210559844970703,
+                0.006201344013214111,
+                0.006201344013214111,
+                0.006211616039276123,
+                0.006207488059997559,
+                0.006239232063293457,
+                0.006196224212646485,
+                0.006254591941833496,
+                0.006211584091186524,
+                0.00618393611907959,
+                0.006218751907348633,
+                0.00619001579284668,
+                0.006239200115203857,
+                0.006198272228240966,
+                0.006457344055175781,
+                0.006463488101959228,
+                0.006440959930419922,
+                0.006449151992797852,
+                0.006441984176635742,
+                0.0064839677810668945,
+                0.006490111827850342,
+                0.006453248023986816,
+                0.006469696044921875,
+                0.006460447788238525,
+                0.006474751949310303,
+                0.0064542717933654785,
+                0.006449151992797852,
+                0.0064624958038330075,
+                0.006466559886932373,
+                0.006437888145446777,
+                0.006462463855743408,
+                0.006462463855743408,
+                0.006434815883636475,
+                0.0064460477828979495,
+                0.00642252779006958,
+                0.006423488140106202,
+                0.006432767868041992,
+                0.006427648067474365,
+                0.006489088058471679,
+                0.00653107213973999,
+                0.0064767999649047855,
+                0.006437888145446777,
+                0.006434815883636475,
+                0.006467584133148193,
+                0.006428607940673828,
+                0.00643071985244751,
                 0.006428671836853027,
-                0.006308864116668702,
-                0.0063211522102355954,
-                0.006334464073181153,
-                0.006331391811370849,
-                0.0063170561790466305,
-                0.006315008163452148,
-                0.006342656135559082,
-                0.00632428789138794,
-                0.006301695823669433,
-                0.006303743839263916,
-                0.006292479991912842,
-                0.006320064067840576,
-                0.0063211522102355954,
-                0.006340608119964599,
-                0.0063211522102355954,
-                0.006299647808074951,
-                0.006338560104370118,
-                0.006298624038696289,
-                0.006325247764587402,
-                0.006326272010803223,
-                0.006334464073181153,
-                0.006533120155334473,
-                0.006302720069885254,
-                0.007161856174468994,
-                0.006699007987976074,
-                0.006322175979614258,
-                0.006295551776885986,
-                0.0063498239517211915,
-                0.006300672054290772,
-                0.006351871967315674,
-                0.006304736137390137,
-                0.006371327877044678,
-                0.006334464073181153,
-                0.006315008163452148,
-                0.0063610877990722655,
-                0.0063211522102355954,
-                0.006326272010803223,
-                0.0062975997924804685,
-                0.006314943790435791,
-                0.0063170561790466305,
-                0.006277120113372803,
-                0.006310912132263183,
-                0.00616755199432373,
-                0.006276063919067383,
-                0.006260735988616943,
-                0.006326303958892822,
-                0.006307839870452881,
-                0.006285312175750732,
-                0.006301695823669433,
-                0.006281216144561768,
-                0.006284287929534912,
-                0.006307839870452881,
-                0.006284287929534912,
-                0.006285312175750732,
-                0.006296576023101807,
-                0.006304768085479737,
+                0.006432767868041992,
+                0.0065136637687683106,
+                0.006458367824554443,
+                0.006437888145446777,
+                0.006481919765472412,
+                0.0065372161865234375,
+                0.0064102401733398436,
+                0.006536191940307618,
+                0.006415359973907471,
+                0.0064204797744750975,
+                0.006456319808959961,
+                0.006411263942718506,
+                0.006475776195526123,
+                0.006465536117553711,
+                0.00643071985244751,
+                0.006469632148742676,
+                0.0064204797744750975,
+                0.006453248023986816,
+                0.006455296039581298,
+                0.006429696083068847,
+                0.0064471039772033695,
+                0.006481919765472412,
+                0.00643993616104126,
+                0.006459328174591064,
+                0.006427648067474365,
+                0.006458367824554443,
+                0.006440959930419922,
+                0.006426623821258545,
+                0.006436863899230957,
+                0.006437888145446777,
+                0.0064951682090759275,
+                0.006471680164337158,
+                0.006441984176635742,
+                0.006472703933715821,
+                0.006441984176635742,
+                0.0064440321922302245,
+                0.0064778242111206055,
+                0.006429696083068847,
+                0.006471680164337158,
+                0.006435840129852295,
+                0.00642252779006958,
+                0.0064471039772033695,
+                0.006419456005096436,
+                0.006436895847320556,
+                0.0064440321922302245,
                 0.006434815883636475,
-                0.0062679038047790524,
-                0.006403071880340576,
-                0.0062863359451293946,
-                0.006329376220703125,
-                0.006303743839263916,
-                0.0062811517715454105,
-                0.006292448043823242,
-                0.006270976066589356,
-                0.006299647808074951,
-                0.00628223991394043,
-                0.006386688232421875,
-                0.006470655918121338,
-                0.006300672054290772,
-                0.006346752166748047,
-                0.006323200225830078,
-                0.0065075201988220215,
-                0.006307839870452881,
-                0.006328320026397705,
-                0.006328320026397705
+                0.006453248023986816,
+                0.006453248023986816,
+                0.0064512319564819336,
+                0.006436863899230957,
+                0.006456319808959961,
+                0.006453248023986816,
+                0.006421504020690918,
+                0.006496255874633789,
+                0.006680575847625733
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 157.40419367898326
+            "value": 154.41704992764994
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 7.510299608386229e-08,
-            "ram": 4.100279902241455e-08,
-            "gpu": 1.454449124177188e-07,
-            "total": 2.6155070752399563e-07
+            "cpu": 7.68224445295709e-08,
+            "ram": 4.200134707484013e-08,
+            "gpu": 1.3543917645161068e-07,
+            "total": 2.542629680560217e-07
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value": 3823350.391465702
+            "value": 3932936.0765570477
         }
     }
 }
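The headline numbers in the new report are internally consistent: throughput (samples/s) is the sample count divided by the total latency. A minimal sketch of that check, assuming a local copy of this benchmark.json with the "forward" section reachable at the top of the parsed object (in real optimum-benchmark dumps it may sit under a "report" key; adjust the key path accordingly):

    import json

    # Load the benchmark report shown in the diff above.
    with open("benchmark.json") as f:
        report = json.load(f)

    # Assumption: "forward" is addressable at this level; see the note above.
    latency = report["forward"]["latency"]
    throughput = latency["count"] / latency["total"]  # samples per second
    print(f"{throughput:.8f} samples/s")  # ~154.41704993 for the new run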