IlyasMoutawwakil committed
Commit 37b71d4 · verified · 1 Parent(s): b5c9da8

Upload cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub

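The commit message above names the mechanism: the file was pushed with the huggingface_hub client. As a rough illustration only (the target repo id and the token handling below are assumptions, not taken from this commit page), such an upload can be scripted like this:

# Illustrative sketch of an upload with huggingface_hub.
# Assumption: the repo_id is a placeholder; the real target repo is not named on this page.
from huggingface_hub import HfApi

api = HfApi()  # authenticates via a cached login or the HF_TOKEN environment variable
api.upload_file(
    path_or_fileobj="benchmark.json",
    path_in_repo="cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json",
    repo_id="<owner>/<benchmark-repo>",  # placeholder, not the actual repo
    repo_type="dataset",                 # assumption: benchmark dumps typically live in dataset repos
    commit_message="Upload cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
)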
cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -3,7 +3,7 @@
         "name": "cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base",
         "backend": {
             "name": "pytorch",
-            "version": "2.4.0+cu124",
+            "version": "2.4.1+cu124",
             "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
             "task": "multiple-choice",
             "library": "transformers",
@@ -104,7 +104,7 @@
         "load": {
             "memory": {
                 "unit": "MB",
-                "max_ram": 788.201472,
+                "max_ram": 788.066304,
                 "max_global_vram": 1185.415168,
                 "max_process_vram": 0.0,
                 "max_reserved": 555.74528,
@@ -113,31 +113,31 @@
             "latency": {
                 "unit": "s",
                 "count": 1,
-                "total": 7.3738916015625,
-                "mean": 7.3738916015625,
+                "total": 7.387978515625,
+                "mean": 7.387978515625,
                 "stdev": 0.0,
-                "p50": 7.3738916015625,
-                "p90": 7.3738916015625,
-                "p95": 7.3738916015625,
-                "p99": 7.3738916015625,
+                "p50": 7.387978515625,
+                "p90": 7.387978515625,
+                "p95": 7.387978515625,
+                "p99": 7.387978515625,
                 "values": [
-                    7.3738916015625
+                    7.387978515625
                 ]
             },
             "throughput": null,
             "energy": {
                 "unit": "kWh",
-                "cpu": 8.653178263885922e-07,
-                "ram": 4.584137916818688e-07,
-                "gpu": 0.0,
-                "total": 1.3237316180704611e-06
+                "cpu": 9.222970923611723e-07,
+                "ram": 4.6958802323310144e-07,
+                "gpu": 1.6758346740000825e-06,
+                "total": 3.0677197895943562e-06
             },
             "efficiency": null
         },
         "forward": {
             "memory": {
                 "unit": "MB",
-                "max_ram": 1084.112896,
+                "max_ram": 1084.809216,
                 "max_global_vram": 1195.900928,
                 "max_process_vram": 0.0,
                 "max_reserved": 555.74528,
@@ -145,164 +145,161 @@
             },
             "latency": {
                 "unit": "s",
-                "count": 133,
-                "total": 0.9968165755271909,
-                "mean": 0.007494861470129257,
-                "stdev": 0.000339218940209733,
-                "p50": 0.007535615921020508,
-                "p90": 0.007839129734039305,
-                "p95": 0.007880703926086426,
-                "p99": 0.008829561920166018,
+                "count": 130,
+                "total": 1.001474017620087,
+                "mean": 0.007703646289385282,
+                "stdev": 0.00022089592815666712,
+                "p50": 0.007660032033920288,
+                "p90": 0.007956684827804565,
+                "p95": 0.008030617237091064,
+                "p99": 0.008238305444717407,
                 "values": [
-                    0.00794316816329956,
-                    0.007880703926086426,
-                    0.007890944004058837,
-                    0.007867392063140868,
-                    0.00774348783493042,
-                    0.0075980801582336424,
-                    0.007864319801330566,
-                    0.007820288181304931,
-                    0.007579648017883301,
-                    0.007610367774963379,
-                    0.0075970239639282225,
-                    0.0077506561279296875,
-                    0.0075335679054260255,
-                    0.0077199358940124516,
-                    0.007766016006469726,
-                    0.007658495903015137,
-                    0.007599040031433106,
-                    0.007623680114746094,
-                    0.007589888095855713,
-                    0.007514111995697022,
-                    0.007543807983398438,
-                    0.007672832012176513,
-                    0.007591936111450195,
-                    0.007610367774963379,
-                    0.007609344005584716,
-                    0.007609344005584716,
-                    0.007584767818450928,
-                    0.007707647800445557,
-                    0.007878655910491944,
-                    0.007868415832519531,
-                    0.007781375885009765,
-                    0.007591936111450195,
-                    0.007498752117156982,
-                    0.007523327827453613,
-                    0.007535615921020508,
-                    0.007535615921020508,
-                    0.007574528217315674,
-                    0.00760319995880127,
-                    0.007639039993286132,
-                    0.007624703884124756,
-                    0.007616511821746826,
-                    0.007600128173828125,
-                    0.007579648017883301,
-                    0.009174015998840332,
-                    0.008943584442138672,
-                    0.00858726406097412,
-                    0.007854080200195313,
-                    0.0077844481468200685,
-                    0.00780083179473877,
-                    0.00774348783493042,
-                    0.007734272003173828,
-                    0.00754585599899292,
+                    0.007699456214904785,
+                    0.007661568164825439,
+                    0.0076349759101867675,
                     0.007615488052368164,
-                    0.007542816162109375,
-                    0.007526400089263916,
-                    0.0075304961204528805,
-                    0.007547904014587403,
-                    0.007544832229614258,
-                    0.007494656085968018,
-                    0.007523263931274414,
+                    0.007605247974395752,
+                    0.007550975799560547,
+                    0.007632895946502686,
+                    0.007633920192718506,
+                    0.007619584083557129,
+                    0.007565311908721924,
+                    0.007612415790557861,
+                    0.0075970559120178225,
+                    0.007673855781555176,
+                    0.007570432186126709,
+                    0.007591872215270996,
+                    0.007582719802856445,
+                    0.007621632099151611,
+                    0.00764415979385376,
+                    0.007651328086853027,
                     0.007576576232910156,
-                    0.007684095859527588,
-                    0.007666687965393066,
-                    0.0076277761459350585,
-                    0.007617536067962646,
-                    0.007573503971099854,
-                    0.007558144092559814,
-                    0.0075632638931274416,
-                    0.007488512039184571,
-                    0.007588831901550293,
-                    0.0074925761222839355,
-                    0.007512063980102539,
-                    0.007391232013702393,
-                    0.007359488010406494,
-                    0.007497727870941162,
-                    0.007372799873352051,
-                    0.007176191806793213,
-                    0.0071905279159545895,
-                    0.007226367950439453,
-                    0.0077506561279296875,
-                    0.007773183822631836,
-                    0.007792640209197998,
-                    0.0078438401222229,
-                    0.007911424160003662,
+                    0.007661568164825439,
+                    0.007591936111450195,
+                    0.007658495903015137,
+                    0.007822336196899414,
+                    0.008034303665161132,
+                    0.008136704444885253,
+                    0.00800870418548584,
+                    0.007895040035247802,
+                    0.007915520191192627,
+                    0.007930880069732665,
+                    0.007944255828857423,
+                    0.007824384212493896,
+                    0.007831552028656007,
                     0.007880703926086426,
-                    0.007517183780670166,
-                    0.007425024032592774,
-                    0.007362559795379638,
+                    0.007921664237976075,
+                    0.0078919677734375,
+                    0.007912447929382324,
+                    0.007875584125518798,
+                    0.007903200149536133,
+                    0.0079267840385437,
+                    0.00793497610092163,
+                    0.007987199783325195,
+                    0.007947264194488525,
+                    0.007840767860412597,
+                    0.00797388792037964,
+                    0.008076288223266602,
+                    0.007956480026245117,
+                    0.007871456146240234,
+                    0.007903232097625732,
+                    0.007879680156707763,
+                    0.007964672088623047,
+                    0.007874559879302979,
+                    0.007863296031951903,
+                    0.007958528041839599,
+                    0.007932928085327149,
+                    0.007827455997467042,
+                    0.007871488094329833,
+                    0.007881728172302246,
+                    0.007921664237976075,
+                    0.007903232097625732,
+                    0.00785203218460083,
+                    0.007948287963867188,
+                    0.008178688049316407,
+                    0.008026111602783203,
+                    0.008152064323425292,
+                    0.008262656211853027,
+                    0.008345600128173827,
+                    0.007950335979461669,
+                    0.007913472175598145,
+                    0.007805952072143555,
+                    0.007846911907196046,
+                    0.007837696075439453,
+                    0.007854112148284912,
+                    0.007755775928497314,
+                    0.007688191890716553,
+                    0.007856128215789794,
+                    0.007700479984283447,
+                    0.007506944179534912,
+                    0.0074700798988342285,
+                    0.007685120105743408,
+                    0.007640063762664795,
+                    0.007732223987579345,
+                    0.007779327869415284,
+                    0.007832575798034667,
+                    0.007902207851409913,
+                    0.00786636781692505,
+                    0.007601151943206787,
+                    0.007581696033477783,
+                    0.007732223987579345,
+                    0.007549952030181885,
+                    0.007549952030181885,
+                    0.007519231796264648,
+                    0.0074997758865356446,
+                    0.007458816051483155,
+                    0.007481279850006103,
+                    0.007457791805267334,
+                    0.007484416007995606,
+                    0.007471168041229248,
+                    0.007400479793548584,
+                    0.007449600219726562,
                     0.007411712169647216,
-                    0.007455743789672851,
-                    0.0073062400817871095,
-                    0.007286784172058106,
-                    0.007364607810974121,
-                    0.007729152202606201,
-                    0.007387135982513428,
+                    0.007436287879943848,
+                    0.007439295768737793,
+                    0.007432191848754883,
                     0.007416831970214844,
-                    0.007153664112091064,
-                    0.00709939193725586,
-                    0.0070860800743103025,
-                    0.007057407855987549,
-                    0.007104512214660645,
-                    0.007109632015228271,
-                    0.0070830078125,
-                    0.007129087924957276,
-                    0.007072703838348389,
-                    0.007074816226959229,
-                    0.0070891518592834475,
-                    0.007094272136688232,
-                    0.007160831928253173,
-                    0.007138304233551026,
-                    0.007174143791198731,
-                    0.007221248149871826,
-                    0.0071485438346862796,
-                    0.007161856174468994,
-                    0.0070850238800048825,
-                    0.0070860800743103025,
-                    0.007098368167877197,
-                    0.007090176105499267,
-                    0.007085055828094483,
-                    0.00713318395614624,
-                    0.007110655784606934,
-                    0.007062528133392334,
-                    0.007108607769012451,
-                    0.007072768211364746,
-                    0.007080959796905518,
-                    0.007078911781311035,
-                    0.007063551902770996,
-                    0.0071157760620117185,
-                    0.0071157760620117185,
-                    0.007131135940551757,
-                    0.007117824077606201,
-                    0.0071188478469848635,
-                    0.007097343921661377
+                    0.0073820161819458,
+                    0.007436287879943848,
+                    0.007436287879943848,
+                    0.007427072048187256,
+                    0.0074332160949707035,
+                    0.007433184146881103,
+                    0.007420928001403809,
+                    0.0074332160949707035,
+                    0.0074106879234313965,
+                    0.007469056129455566,
+                    0.007612415790557861,
+                    0.007483391761779785,
+                    0.007477248191833496,
+                    0.007484416007995606,
+                    0.007463935852050781,
+                    0.007460832118988037,
+                    0.007422976016998291,
+                    0.007489535808563232,
+                    0.007473152160644531,
+                    0.007505919933319092,
+                    0.007532544136047363,
+                    0.007434239864349365,
+                    0.007450687885284424,
+                    0.007431168079376221,
+                    0.007426047801971435
                 ]
             },
             "throughput": {
                 "unit": "samples/s",
-                "value": 133.42474760680986
+                "value": 129.80865974829118
             },
             "energy": {
                 "unit": "kWh",
-                "cpu": 8.852724996890976e-08,
-                "ram": 4.833838955793149e-08,
-                "gpu": 1.6385377950746162e-07,
-                "total": 3.007194190343029e-07
+                "cpu": 8.8426083582091e-08,
+                "ram": 4.834484753746809e-08,
+                "gpu": 1.6505195625373376e-07,
+                "total": 3.018228873732929e-07
             },
             "efficiency": {
                 "unit": "samples/kWh",
-                "value": 3325358.911676836
+                "value": 3313201.356937539
             }
         }
     }
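A note on the derived fields in this report: the forward throughput is the sample count divided by the total latency, and the efficiency is the reciprocal of the total energy (which therefore appears to be normalized per sample; that interpretation is an assumption). A quick sanity check against the new values, using only numbers visible in the diff:

# Sanity check of the derived metrics in the new benchmark.json (values copied from the diff).
count = 130                           # forward.latency.count
total_latency = 1.001474017620087     # forward.latency.total, in seconds
total_energy = 3.018228873732929e-07  # forward.energy.total, in kWh (per-sample normalization assumed)

print(count / total_latency)          # ~129.80865974829118, matches forward.throughput.value
print(1 / total_energy)               # ~3313201.356937539, matches forward.efficiency.value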