IlyasMoutawwakil (HF staff) committed (verified)
Commit: 27121a2
Parent: 3f496b0

Upload cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json with huggingface_hub
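The commit message says the file was uploaded with huggingface_hub. Below is a minimal sketch of such an upload using HfApi.upload_file; the repo id, repo type, and local file path are illustrative assumptions, not values recorded in this commit.

    from huggingface_hub import HfApi

    api = HfApi()  # picks up the access token from the local Hugging Face credentials cache

    api.upload_file(
        # Local result file produced by the benchmark run (assumed name).
        path_or_fileobj="benchmark.json",
        # Destination path mirroring the folder layout shown in this diff.
        path_in_repo="cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json",
        # Placeholder: the target repository is not named in this commit view.
        repo_id="<namespace>/<benchmark-results-repo>",
        repo_type="dataset",  # assumption; adjust if the results live in a model or Space repo
        commit_message=(
            "Upload cuda_inference_transformers_image-classification_google/"
            "vit-base-patch16-224/benchmark.json with huggingface_hub"
        ),
    )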
cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json CHANGED
@@ -3,7 +3,7 @@
  "name": "cuda_inference_transformers_image-classification_google/vit-base-patch16-224",
  "backend": {
  "name": "pytorch",
- "version": "2.3.1+rocm5.7",
+ "version": "2.4.0+rocm6.1",
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
  "task": "image-classification",
  "library": "transformers",
@@ -11,7 +11,7 @@
  "model": "google/vit-base-patch16-224",
  "processor": "google/vit-base-patch16-224",
  "device": "cuda",
- "device_ids": "4",
+ "device_ids": "5",
  "seed": 42,
  "inter_op_num_threads": null,
  "intra_op_num_threads": null,
@@ -111,24 +111,24 @@
  "load": {
  "memory": {
  "unit": "MB",
- "max_ram": 1027.895296,
- "max_global_vram": 691.064832,
- "max_process_vram": 44012.957696,
+ "max_ram": 1386.999808,
+ "max_global_vram": 11.128832,
+ "max_process_vram": 0.0,
  "max_reserved": 400.556032,
  "max_allocated": 346.271744
  },
  "latency": {
  "unit": "s",
  "count": 1,
- "total": 7.71839892578125,
- "mean": 7.71839892578125,
+ "total": 7.83645263671875,
+ "mean": 7.83645263671875,
  "stdev": 0.0,
- "p50": 7.71839892578125,
- "p90": 7.71839892578125,
- "p95": 7.71839892578125,
- "p99": 7.71839892578125,
+ "p50": 7.83645263671875,
+ "p90": 7.83645263671875,
+ "p95": 7.83645263671875,
+ "p99": 7.83645263671875,
  "values": [
- 7.71839892578125
+ 7.83645263671875
  ]
  },
  "throughput": null,
@@ -138,176 +138,188 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 1200.08704,
- "max_global_vram": 779.259904,
- "max_process_vram": 172436.516864,
+ "max_ram": 1557.737472,
+ "max_global_vram": 11.38688,
+ "max_process_vram": 0.0,
  "max_reserved": 406.847488,
- "max_allocated": 355.303424
+ "max_allocated": 354.740224
  },
  "latency": {
  "unit": "s",
- "count": 148,
- "total": 0.9992548928260806,
- "mean": 0.006751722248824867,
- "stdev": 0.0003732253822001072,
- "p50": 0.0066807780265808105,
- "p90": 0.0072113040447235105,
- "p95": 0.00727659158706665,
- "p99": 0.0074257651901245125,
+ "count": 160,
+ "total": 0.9980249857902529,
+ "mean": 0.00623765616118908,
+ "stdev": 0.0008198818897457517,
+ "p50": 0.006320936918258666,
+ "p90": 0.00641356086730957,
+ "p95": 0.006483031988143921,
+ "p99": 0.006579219245910644,
  "values": [
- 0.007183176040649414,
- 0.007148295879364014,
- 0.007271494865417481,
- 0.007368295192718506,
- 0.007472614765167237,
- 0.007372934818267823,
- 0.007296616077423095,
- 0.007313096046447754,
- 0.00733373498916626,
- 0.0072793359756469726,
- 0.00725437593460083,
- 0.007244296073913574,
- 0.007203015804290771,
- 0.007174056053161621,
- 0.007136295795440674,
- 0.0071278162002563475,
- 0.007189896106719971,
- 0.007161575794219971,
- 0.007182536125183106,
- 0.007197896003723145,
- 0.007215335845947266,
- 0.0072428550720214846,
- 0.007187496185302735,
- 0.007219655990600586,
- 0.007148935794830323,
- 0.006986055850982666,
- 0.00667805814743042,
- 0.006984457015991211,
- 0.006894856929779053,
- 0.008375492095947266,
- 0.00720957612991333,
- 0.006975656986236573,
- 0.006952455997467041,
- 0.006978376865386963,
- 0.006958856105804444,
- 0.006921417236328125,
- 0.006714696884155273,
- 0.006722537994384765,
- 0.006628616809844971,
- 0.006540137767791748,
- 0.006298859119415284,
- 0.006334699153900146,
- 0.006377898216247559,
- 0.006589258193969726,
- 0.0063924589157104495,
- 0.006356458187103272,
- 0.006433419227600098,
- 0.006367497920989991,
- 0.006337099075317383,
- 0.006383498191833496,
- 0.006355659008026123,
- 0.0063873381614685054,
- 0.00641533899307251,
- 0.006386857986450195,
- 0.006442859172821045,
- 0.006376459121704102,
- 0.006473897933959961,
- 0.006354379177093506,
- 0.006219658851623535,
- 0.006244779109954834,
- 0.006275498867034912,
- 0.006215818881988525,
- 0.006450218200683594,
- 0.006262059211730957,
- 0.006478697776794434,
- 0.0062946991920471195,
- 0.006259499073028564,
- 0.00627069902420044,
- 0.006268778800964356,
- 0.006293738842010498,
- 0.006249258995056153,
- 0.0062572588920593265,
- 0.006317578792572022,
- 0.006579658031463623,
- 0.006587337970733642,
- 0.006671338081359863,
- 0.006637417793273926,
- 0.006482858180999756,
- 0.0062810988426208495,
- 0.006273579120635986,
- 0.006229100227355957,
- 0.0065086188316345215,
- 0.00625678014755249,
- 0.00625101900100708,
- 0.006390539169311524,
- 0.006598378181457519,
- 0.006478377819061279,
- 0.0068490967750549315,
- 0.006539497852325439,
- 0.006344778060913086,
- 0.006683497905731201,
- 0.006590057849884033,
- 0.006367338180541992,
- 0.006445097923278809,
- 0.006508938789367676,
- 0.006665098190307617,
- 0.006676777839660645,
- 0.006791818141937256,
- 0.00658909797668457,
- 0.0069274969100952145,
- 0.006617258071899414,
- 0.00662509822845459,
- 0.006724298000335693,
- 0.00677597713470459,
- 0.006578857898712158,
- 0.006761737823486328,
- 0.0066284570693969725,
- 0.006570377826690674,
- 0.0064322991371154785,
- 0.006357738971710205,
- 0.006412138938903808,
- 0.00639133882522583,
- 0.006447817802429199,
- 0.0063894190788269046,
- 0.006471337795257569,
- 0.006402698040008545,
- 0.006471339225769043,
- 0.007078056812286377,
- 0.007044936180114746,
- 0.007021416187286377,
- 0.006979337215423584,
- 0.007029575824737549,
- 0.007035016059875488,
- 0.00699981689453125,
- 0.006978055953979492,
- 0.006981576919555664,
- 0.007225255012512207,
- 0.007182856082916259,
- 0.007023016929626465,
- 0.007070696830749511,
- 0.0069943761825561526,
- 0.0070615758895874025,
- 0.007030056953430176,
- 0.007006855964660644,
- 0.006987977027893067,
- 0.006988136768341064,
- 0.00702077579498291,
- 0.006958376884460449,
- 0.0069857358932495115,
- 0.006999337196350098,
- 0.006991335868835449,
- 0.0069610967636108394,
- 0.006973735809326172,
- 0.0069625358581542965,
- 0.0069930958747863765,
- 0.006952776908874512,
- 0.006989736080169678,
- 0.006615818023681641
+ 0.005835817813873291,
+ 0.005921739101409912,
+ 0.005742538928985596,
+ 0.005925098896026612,
+ 0.005747018814086914,
+ 0.005883338928222656,
+ 0.005746539115905762,
+ 0.005550378799438476,
+ 0.005540299892425537,
+ 0.00552269983291626,
+ 0.005508619785308838,
+ 0.005476940155029297,
+ 0.005486380100250244,
+ 0.005468939781188965,
+ 0.005483500003814697,
+ 0.00556541919708252,
+ 0.0056111798286437985,
+ 0.005838698863983154,
+ 0.005641418933868408,
+ 0.005426380157470703,
+ 0.0054177408218383785,
+ 0.005445260047912598,
+ 0.015368103981018066,
+ 0.004619983196258545,
+ 0.00463774299621582,
+ 0.004622383117675781,
+ 0.004615822792053223,
+ 0.004839023113250732,
+ 0.0058513379096984865,
+ 0.005841739177703857,
+ 0.005747659206390381,
+ 0.005931018829345703,
+ 0.0059063777923583985,
+ 0.006299497127532959,
+ 0.006286376953125,
+ 0.006300776958465576,
+ 0.006301577091217041,
+ 0.006208296775817871,
+ 0.006006217956542968,
+ 0.006307336807250976,
+ 0.00632509708404541,
+ 0.00629789686203003,
+ 0.006315496921539307,
+ 0.006305417060852051,
+ 0.0062631769180297855,
+ 0.006323816776275634,
+ 0.0062802968025207516,
+ 0.006306217193603515,
+ 0.006310536861419678,
+ 0.006294217109680176,
+ 0.006300937175750732,
+ 0.006295816898345947,
+ 0.006290535926818847,
+ 0.006279497146606445,
+ 0.006315017223358154,
+ 0.006290856838226318,
+ 0.006287817001342773,
+ 0.006270856857299805,
+ 0.006312777042388916,
+ 0.006329576969146728,
+ 0.006462056159973145,
+ 0.006315977096557617,
+ 0.006300457000732422,
+ 0.006359817028045655,
+ 0.006301577091217041,
+ 0.0063225369453430175,
+ 0.006477096080780029,
+ 0.0062993369102478025,
+ 0.006300776958465576,
+ 0.0063225369453430175,
+ 0.006295977115631103,
+ 0.0062852568626403805,
+ 0.006282697200775147,
+ 0.006285896778106689,
+ 0.006289257049560547,
+ 0.0062830171585083,
+ 0.006313416004180908,
+ 0.006563015937805176,
+ 0.006375977039337158,
+ 0.006360616207122803,
+ 0.006370536804199219,
+ 0.0063607759475708,
+ 0.0063630170822143554,
+ 0.0063273367881774905,
+ 0.0063270158767700195,
+ 0.006323976993560791,
+ 0.006324936866760254,
+ 0.006344616889953613,
+ 0.006300776958465576,
+ 0.006347657203674317,
+ 0.006327657222747802,
+ 0.006341576099395752,
+ 0.006345736980438233,
+ 0.006327016830444336,
+ 0.00641869592666626,
+ 0.006401576042175293,
+ 0.006321096897125244,
+ 0.0063207769393920895,
+ 0.0063084568977355955,
+ 0.006310057163238525,
+ 0.006299497127532959,
+ 0.006326056957244873,
+ 0.006344457149505615,
+ 0.00631117582321167,
+ 0.006333736896514892,
+ 0.006319656848907471,
+ 0.0063329367637634274,
+ 0.0063225369453430175,
+ 0.006298057079315186,
+ 0.00633549690246582,
+ 0.006315176010131836,
+ 0.00633277702331543,
+ 0.006320296764373779,
+ 0.006347177028656006,
+ 0.006349896907806397,
+ 0.0063678169250488284,
+ 0.00634941577911377,
+ 0.006319656848907471,
+ 0.006344936847686768,
+ 0.0063343758583068845,
+ 0.006319976806640625,
+ 0.006369417190551758,
+ 0.006325255870819091,
+ 0.006356616973876953,
+ 0.006354856967926026,
+ 0.006324296951293946,
+ 0.0063775768280029295,
+ 0.00638477611541748,
+ 0.006547976016998291,
+ 0.006481895923614502,
+ 0.006407017230987549,
+ 0.006530695915222168,
+ 0.0064591760635375976,
+ 0.006364296913146972,
+ 0.006360136985778809,
+ 0.006413416862487793,
+ 0.006538216114044189,
+ 0.006504617214202881,
+ 0.006403656959533691,
+ 0.006413256168365479,
+ 0.006602536201477051,
+ 0.006390536785125732,
+ 0.00636333703994751,
+ 0.006394217014312744,
+ 0.006563015937805176,
+ 0.0063943772315979,
+ 0.006382215976715088,
+ 0.00636765718460083,
+ 0.006420135974884033,
+ 0.006342216968536377,
+ 0.006421576023101807,
+ 0.006384936809539795,
+ 0.006407815933227539,
+ 0.006383976936340332,
+ 0.006364457130432129,
+ 0.0063857359886169435,
+ 0.006362215995788574,
+ 0.0064148569107055665,
+ 0.006395336151123047,
+ 0.006357896804809571
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 148.11035809034493
+ "value": 160.31662761760353
  },
  "energy": null,
  "efficiency": null