IlyasMoutawwakil HF staff committed on
Commit
21ad506
·
verified ·
1 Parent(s): c89db48

Upload cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json with huggingface_hub

Browse files
cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json CHANGED
@@ -3,7 +3,7 @@
3
  "name": "cuda_inference_transformers_image-classification_google/vit-base-patch16-224",
4
  "backend": {
5
  "name": "pytorch",
6
- "version": "2.4.0+rocm6.1",
7
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
8
  "task": "image-classification",
9
  "library": "transformers",
@@ -104,24 +104,24 @@
104
  "load": {
105
  "memory": {
106
  "unit": "MB",
107
- "max_ram": 1386.213376,
108
- "max_global_vram": 0.0,
109
- "max_process_vram": 0.0,
110
  "max_reserved": 400.556032,
111
  "max_allocated": 346.271744
112
  },
113
  "latency": {
114
  "unit": "s",
115
  "count": 1,
116
- "total": 7.6943818359375,
117
- "mean": 7.6943818359375,
118
  "stdev": 0.0,
119
- "p50": 7.6943818359375,
120
- "p90": 7.6943818359375,
121
- "p95": 7.6943818359375,
122
- "p99": 7.6943818359375,
123
  "values": [
124
- 7.6943818359375
125
  ]
126
  },
127
  "throughput": null,
@@ -131,202 +131,185 @@
131
  "forward": {
132
  "memory": {
133
  "unit": "MB",
134
- "max_ram": 1555.030016,
135
- "max_global_vram": 0.0,
136
- "max_process_vram": 0.0,
137
  "max_reserved": 406.847488,
138
- "max_allocated": 354.740224
139
  },
140
  "latency": {
141
  "unit": "s",
142
- "count": 174,
143
- "total": 0.9942441844940184,
144
- "mean": 0.005714047037321946,
145
- "stdev": 0.00022942141344556592,
146
- "p50": 0.0056419820785522466,
147
- "p90": 0.005965965080261231,
148
- "p95": 0.006349243164062501,
149
- "p99": 0.006390803732872009,
150
  "values": [
151
- 0.005816940784454346,
152
- 0.0056940622329711916,
153
- 0.005736142158508301,
154
- 0.005824780941009521,
155
- 0.0058123011589050295,
156
- 0.005898221015930176,
157
- 0.005925580978393554,
158
- 0.005889901161193848,
159
- 0.005731021881103516,
160
- 0.005785100936889649,
161
- 0.005814382076263428,
162
- 0.005760461807250977,
163
- 0.0057099018096923825,
164
- 0.005646222114562989,
165
- 0.005643502235412597,
166
- 0.005639502048492432,
167
- 0.005643822193145752,
168
- 0.00565054178237915,
169
- 0.005616302013397217,
170
- 0.005600781917572022,
171
- 0.005575181961059571,
172
- 0.005600142002105713,
173
- 0.005552621841430664,
174
- 0.005594701766967774,
175
- 0.005603662014007568,
176
- 0.0056065421104431155,
177
- 0.005578862190246582,
178
- 0.0055767822265625,
179
- 0.005554542064666748,
180
- 0.005545742988586426,
181
- 0.005578702926635743,
182
- 0.0055678229331970214,
183
- 0.005527182102203369,
184
- 0.005588942050933838,
185
- 0.005575501918792724,
186
- 0.005535821914672851,
187
- 0.0067167792320251465,
188
- 0.005675981998443604,
189
- 0.005540782928466797,
190
- 0.005530063152313232,
191
- 0.005570541858673095,
192
- 0.005537742137908935,
193
- 0.005565102100372314,
194
- 0.005578221797943116,
195
- 0.005561741828918457,
196
- 0.005563982009887695,
197
- 0.005563823223114014,
198
- 0.00559630298614502,
199
- 0.005554862976074219,
200
- 0.005530861854553223,
201
- 0.005566542148590088,
202
- 0.005558541774749756,
203
- 0.0055785422325134275,
204
- 0.005523182868957519,
205
- 0.0055628628730773925,
206
- 0.00555230188369751,
207
- 0.0055619020462036135,
208
- 0.005568942070007324,
209
- 0.005558382034301758,
210
- 0.005597742080688477,
211
- 0.005572301864624023,
212
- 0.005549582004547119,
213
- 0.005596941947937012,
214
- 0.0056115021705627445,
215
- 0.005589101791381836,
216
- 0.0056334218978881835,
217
- 0.0056279821395874025,
218
- 0.005570382118225098,
219
- 0.005568141937255859,
220
- 0.005583022117614746,
221
- 0.005569902896881104,
222
- 0.005589103221893311,
223
- 0.0055838232040405274,
224
- 0.005551181793212891,
225
- 0.005552142143249512,
226
- 0.005599502086639404,
227
- 0.0056420621871948245,
228
- 0.00584510087966919,
229
- 0.005635982036590576,
230
- 0.005660461902618408,
231
- 0.0056403021812438964,
232
- 0.005629102230072022,
233
- 0.005627821922302246,
234
- 0.0056407818794250485,
235
- 0.00565166187286377,
236
- 0.005661742210388183,
237
- 0.005665741920471192,
238
- 0.005620461940765381,
239
- 0.00564174222946167,
240
- 0.005657422065734863,
241
- 0.005629901885986328,
242
- 0.005661101818084717,
243
- 0.005650701999664306,
244
- 0.005647021770477295,
245
- 0.005650860786437988,
246
- 0.005617901802062988,
247
- 0.005651822090148926,
248
- 0.005641901969909668,
249
- 0.0056708621978759765,
250
- 0.0056334218978881835,
251
- 0.005637901782989502,
252
- 0.0056519808769226075,
253
- 0.00562542200088501,
254
- 0.00564830207824707,
255
- 0.005607341766357422,
256
- 0.005632142066955567,
257
- 0.005661742210388183,
258
- 0.005638062000274658,
259
- 0.005671981811523437,
260
- 0.0056286220550537105,
261
- 0.005670221805572509,
262
- 0.0056427021026611325,
263
- 0.005644941806793213,
264
- 0.005657742023468018,
265
- 0.005649901866912842,
266
- 0.005639822006225586,
267
- 0.005659341812133789,
268
- 0.005668141841888428,
269
- 0.00567182207107544,
270
- 0.005671182155609131,
271
- 0.0056316618919372554,
272
- 0.005615821838378906,
273
- 0.0056780619621276856,
274
- 0.0056763019561767575,
275
- 0.00563582181930542,
276
- 0.005649742126464844,
277
- 0.005657742023468018,
278
- 0.005662861824035644,
279
- 0.005657261848449707,
280
- 0.005646542072296142,
281
- 0.0056451020240783694,
282
- 0.005681900978088379,
283
- 0.005641582012176513,
284
- 0.005652462005615234,
285
- 0.005658542156219483,
286
- 0.005658061981201172,
287
- 0.0056236619949340825,
288
- 0.005644941806793213,
289
- 0.005646542072296142,
290
- 0.005636462211608887,
291
- 0.005612942218780517,
292
- 0.00565054178237915,
293
- 0.005636782169342041,
294
- 0.005678701877593994,
295
- 0.005647021770477295,
296
- 0.00563742208480835,
297
- 0.005645421981811524,
298
- 0.005629261970520019,
299
- 0.0056543822288513184,
300
- 0.006290060043334961,
301
- 0.006367178916931152,
302
- 0.0059737410545349125,
303
- 0.005941421031951904,
304
- 0.005859661102294922,
305
- 0.006034861087799073,
306
- 0.0058268609046936036,
307
- 0.005902060985565186,
308
- 0.005737261772155762,
309
- 0.005638862133026123,
310
- 0.005615501880645752,
311
- 0.005947821140289307,
312
- 0.006339179992675781,
313
- 0.006369739055633545,
314
- 0.006367499828338623,
315
- 0.006413578987121582,
316
- 0.006348458766937256,
317
- 0.0063457398414611815,
318
- 0.0063506999015808105,
319
- 0.006348139762878418,
320
- 0.006263820171356202,
321
- 0.0063788599967956544,
322
- 0.006373419761657715,
323
- 0.00638238000869751,
324
- 0.006312620162963867
325
  ]
326
  },
327
  "throughput": {
328
  "unit": "samples/s",
329
- "value": 175.00730978733404
330
  },
331
  "energy": null,
332
  "efficiency": null
 
3
  "name": "cuda_inference_transformers_image-classification_google/vit-base-patch16-224",
4
  "backend": {
5
  "name": "pytorch",
6
+ "version": "2.2.2+rocm5.7",
7
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
8
  "task": "image-classification",
9
  "library": "transformers",
 
104
  "load": {
105
  "memory": {
106
  "unit": "MB",
107
+ "max_ram": 900.612096,
108
+ "max_global_vram": 689.3568,
109
+ "max_process_vram": 47075.782656,
110
  "max_reserved": 400.556032,
111
  "max_allocated": 346.271744
112
  },
113
  "latency": {
114
  "unit": "s",
115
  "count": 1,
116
+ "total": 7.17821484375,
117
+ "mean": 7.17821484375,
118
  "stdev": 0.0,
119
+ "p50": 7.17821484375,
120
+ "p90": 7.17821484375,
121
+ "p95": 7.17821484375,
122
+ "p99": 7.17821484375,
123
  "values": [
124
+ 7.17821484375
125
  ]
126
  },
127
  "throughput": null,
 
131
  "forward": {
132
  "memory": {
133
  "unit": "MB",
134
+ "max_ram": 1070.546944,
135
+ "max_global_vram": 777.05216,
136
+ "max_process_vram": 172772.708352,
137
  "max_reserved": 406.847488,
138
+ "max_allocated": 355.303424
139
  },
140
  "latency": {
141
  "unit": "s",
142
+ "count": 157,
143
+ "total": 0.9957800683975225,
144
+ "mean": 0.006342548206353644,
145
+ "stdev": 0.00032880941361050793,
146
+ "p50": 0.006368940830230713,
147
+ "p90": 0.006658092021942139,
148
+ "p95": 0.006671019840240479,
149
+ "p99": 0.006904842987060547,
150
  "values": [
151
+ 0.006296781063079834,
152
+ 0.006040462017059326,
153
+ 0.0063695812225341795,
154
+ 0.006293101787567139,
155
+ 0.006444459915161133,
156
+ 0.006388620853424072,
157
+ 0.0063247809410095215,
158
+ 0.00607086181640625,
159
+ 0.006204462051391602,
160
+ 0.006069262027740479,
161
+ 0.006016942024230957,
162
+ 0.005948942184448242,
163
+ 0.005915022850036621,
164
+ 0.0059246220588684085,
165
+ 0.00588958215713501,
166
+ 0.005904303073883056,
167
+ 0.00586942195892334,
168
+ 0.005874862194061279,
169
+ 0.005842702865600586,
170
+ 0.005820141792297363,
171
+ 0.005832462787628174,
172
+ 0.005799343109130859,
173
+ 0.00582414197921753,
174
+ 0.005809103012084961,
175
+ 0.005816462039947509,
176
+ 0.0059844617843627925,
177
+ 0.005808942794799805,
178
+ 0.005790863037109375,
179
+ 0.005798061847686767,
180
+ 0.006834700107574463,
181
+ 0.006368940830230713,
182
+ 0.0063860611915588375,
183
+ 0.0060390219688415524,
184
+ 0.00654094123840332,
185
+ 0.006374061107635498,
186
+ 0.006192780971527099,
187
+ 0.006242702007293701,
188
+ 0.006061421871185303,
189
+ 0.006162382125854492,
190
+ 0.006178700923919677,
191
+ 0.006118062019348144,
192
+ 0.006377901077270508,
193
+ 0.007913256168365478,
194
+ 0.006919178962707519,
195
+ 0.006635499954223633,
196
+ 0.006647180080413819,
197
+ 0.006641261100769043,
198
+ 0.006639340877532959,
199
+ 0.006653900146484375,
200
+ 0.00661870002746582,
201
+ 0.006604939937591553,
202
+ 0.006603499889373779,
203
+ 0.006654220104217529,
204
+ 0.006618539810180664,
205
+ 0.006612940788269043,
206
+ 0.006624619960784912,
207
+ 0.006631179809570313,
208
+ 0.006610539913177491,
209
+ 0.006611979961395263,
210
+ 0.00661246109008789,
211
+ 0.006601261138916016,
212
+ 0.006603179931640625,
213
+ 0.0065922999382019044,
214
+ 0.006611820220947266,
215
+ 0.006644619941711426,
216
+ 0.006633901119232178,
217
+ 0.006602221012115479,
218
+ 0.0066319799423217776,
219
+ 0.006653739929199219,
220
+ 0.006620460033416748,
221
+ 0.006630539894104004,
222
+ 0.006614220142364502,
223
+ 0.0066163010597229,
224
+ 0.0066164608001708985,
225
+ 0.006628940105438233,
226
+ 0.006611020088195801,
227
+ 0.006893579006195068,
228
+ 0.006440781116485596,
229
+ 0.006069101810455322,
230
+ 0.006218060970306396,
231
+ 0.006138542175292969,
232
+ 0.006125261783599853,
233
+ 0.006412460803985596,
234
+ 0.006063022136688232,
235
+ 0.0063249411582946774,
236
+ 0.006060781955718994,
237
+ 0.006090541839599609,
238
+ 0.0062513408660888675,
239
+ 0.006051502227783203,
240
+ 0.006249580860137939,
241
+ 0.00615054178237915,
242
+ 0.006074862003326416,
243
+ 0.006213100910186767,
244
+ 0.006043181896209717,
245
+ 0.006389260768890381,
246
+ 0.006117102146148682,
247
+ 0.006092462062835693,
248
+ 0.006106542110443115,
249
+ 0.006072142124176025,
250
+ 0.006037261962890625,
251
+ 0.006058221817016602,
252
+ 0.006047821998596191,
253
+ 0.00601790189743042,
254
+ 0.006010062217712402,
255
+ 0.005995662212371826,
256
+ 0.006039341926574707,
257
+ 0.006007822036743164,
258
+ 0.006032941818237304,
259
+ 0.006115662097930908,
260
+ 0.006124300956726074,
261
+ 0.006255981922149659,
262
+ 0.006029582023620606,
263
+ 0.0060150218009948735,
264
+ 0.006014701843261719,
265
+ 0.0059931020736694336,
266
+ 0.006015501976013184,
267
+ 0.006043342113494873,
268
+ 0.0060219020843505855,
269
+ 0.006006862163543701,
270
+ 0.0061852622032165525,
271
+ 0.006488780021667481,
272
+ 0.006048622131347656,
273
+ 0.006146862030029297,
274
+ 0.006121420860290527,
275
+ 0.006221102237701416,
276
+ 0.006173420906066895,
277
+ 0.006639180183410645,
278
+ 0.0066705398559570315,
279
+ 0.006650219917297364,
280
+ 0.006633901119232178,
281
+ 0.0066374211311340334,
282
+ 0.006635499954223633,
283
+ 0.00664222002029419,
284
+ 0.006652939796447754,
285
+ 0.006666860103607178,
286
+ 0.006657579898834229,
287
+ 0.006658860206604004,
288
+ 0.006659180164337158,
289
+ 0.006641739845275879,
290
+ 0.0066750202178955075,
291
+ 0.0066729397773742675,
292
+ 0.006645420074462891,
293
+ 0.006681739807128906,
294
+ 0.0066460599899291995,
295
+ 0.006637740135192871,
296
+ 0.006644781112670898,
297
+ 0.006652141094207763,
298
+ 0.006592618942260742,
299
+ 0.006660940170288086,
300
+ 0.006692619800567627,
301
+ 0.006656620025634765,
302
+ 0.006669740200042725,
303
+ 0.006670380115509033,
304
+ 0.006657259941101074,
305
+ 0.006664140224456787,
306
+ 0.006639019966125488,
307
+ 0.006640940189361572
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
308
  ]
309
  },
310
  "throughput": {
311
  "unit": "samples/s",
312
+ "value": 157.6653369379598
313
  },
314
  "energy": null,
315
  "efficiency": null