Commit 8b5b9b3 (verified) by IlyasMoutawwakil (HF staff) · Parent: 1cf8d16

Upload cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json with huggingface_hub

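As the commit message notes, files like this are pushed programmatically with the huggingface_hub client rather than through the web UI. A minimal sketch of how such an upload could look; the repo_id, repo_type, and commit message below are placeholders for illustration, not values taken from this commit:

# Hypothetical upload sketch; repo_id and repo_type are assumptions, not from this commit.
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login`, if configured
api.upload_file(
    path_or_fileobj="benchmark.json",  # local result produced by the benchmark run
    path_in_repo="cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json",
    repo_id="<user>/<benchmark-results>",  # placeholder: the target repo is not named in this excerpt
    repo_type="dataset",                   # assumption: benchmark dumps are commonly stored in dataset repos
    commit_message="Upload benchmark.json with huggingface_hub",
)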
cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark.json CHANGED
@@ -3,7 +3,7 @@
  "name": "cuda_inference_transformers_image-classification_google/vit-base-patch16-224",
  "backend": {
  "name": "pytorch",
- "version": "2.2.0.dev20231010+rocm5.7",
+ "version": "2.3.1+rocm5.7",
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
  "task": "image-classification",
  "library": "transformers",
@@ -103,7 +103,7 @@
  "optimum_commit": null,
  "timm_version": "1.0.9",
  "timm_commit": null,
- "peft_version": "0.12.0",
+ "peft_version": null,
  "peft_commit": null
  }
  },
@@ -111,24 +111,24 @@
  "load": {
  "memory": {
  "unit": "MB",
- "max_ram": 946.696192,
+ "max_ram": 1025.785856,
  "max_global_vram": 68702.69952,
- "max_process_vram": 45385.076736,
+ "max_process_vram": 43496.968192,
  "max_reserved": 400.556032,
  "max_allocated": 346.271744
  },
  "latency": {
  "unit": "s",
  "count": 1,
- "total": 8.2245732421875,
- "mean": 8.2245732421875,
+ "total": 7.55050341796875,
+ "mean": 7.55050341796875,
  "stdev": 0.0,
- "p50": 8.2245732421875,
- "p90": 8.2245732421875,
- "p95": 8.2245732421875,
- "p99": 8.2245732421875,
+ "p50": 7.55050341796875,
+ "p90": 7.55050341796875,
+ "p95": 7.55050341796875,
+ "p99": 7.55050341796875,
  "values": [
- 8.2245732421875
+ 7.55050341796875
  ]
  },
  "throughput": null,
@@ -138,185 +138,188 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 1118.80192,
+ "max_ram": 1197.899776,
  "max_global_vram": 68702.69952,
- "max_process_vram": 177837.850624,
+ "max_process_vram": 172623.765504,
  "max_reserved": 406.847488,
  "max_allocated": 355.303424
  },
  "latency": {
  "unit": "s",
- "count": 157,
- "total": 0.9969836549758916,
- "mean": 0.006350214362903766,
- "stdev": 0.0005209348934555443,
- "p50": 0.006191052913665771,
- "p90": 0.006841551303863525,
- "p95": 0.006891311073303223,
- "p99": 0.0069669916343688965,
+ "count": 160,
+ "total": 0.9981148366928106,
+ "mean": 0.006238217729330063,
+ "stdev": 0.0011374314906560403,
+ "p50": 0.005996892929077149,
+ "p90": 0.006791711139678955,
+ "p95": 0.006817679023742676,
+ "p99": 0.008340491857528675,
  "values": [
- 0.006522093772888184,
- 0.006599374771118164,
- 0.006588174819946289,
- 0.006470573902130127,
- 0.0065539350509643554,
- 0.006557775020599365,
- 0.006525134086608887,
- 0.006601294994354248,
- 0.006511374950408936,
- 0.006483374118804931,
- 0.006531694889068604,
- 0.0064440140724182126,
- 0.006359054088592529,
- 0.006374574184417724,
- 0.00636561393737793,
- 0.00632465410232544,
- 0.0063633742332458495,
- 0.0062830538749694825,
- 0.0062894530296325685,
- 0.006273454189300537,
- 0.00626625394821167,
- 0.006293774127960205,
- 0.006274573802947998,
- 0.006301293849945069,
- 0.006308973789215088,
- 0.006296013832092285,
- 0.00629633378982544,
- 0.006285614013671875,
- 0.006290733814239502,
- 0.0063044939041137695,
- 0.0062616138458251955,
- 0.006268174171447754,
- 0.012105946540832519,
- 0.006419374942779541,
- 0.006155693054199219,
- 0.006352015018463135,
- 0.006183692932128906,
- 0.006165133953094482,
- 0.00614449405670166,
- 0.006150094032287598,
- 0.0061547341346740725,
- 0.006171533107757568,
- 0.0062612929344177245,
- 0.006123853206634521,
+ 0.006735855102539062,
+ 0.0068072152137756345,
+ 0.0066856141090393065,
+ 0.0066409740447998045,
+ 0.006299213886260986,
+ 0.006194253921508789,
+ 0.006203373908996582,
+ 0.006168492794036865,
+ 0.006169933795928955,
+ 0.006152493953704834,
+ 0.006048812866210938,
+ 0.006014412879943848,
+ 0.006023694038391114,
+ 0.006011212825775146,
+ 0.0059696130752563475,
+ 0.005919053077697754,
+ 0.005883533000946045,
+ 0.0058888130187988285,
+ 0.0058870530128479,
+ 0.0058572921752929686,
+ 0.005862893104553223,
+ 0.005860812187194824,
+ 0.005907533168792725,
+ 0.005870253086090088,
+ 0.005867053031921387,
+ 0.01025266170501709,
+ 0.006154253959655762,
+ 0.005861133098602295,
+ 0.0058820929527282715,
+ 0.005890573024749756,
+ 0.005875213146209717,
+ 0.005852653026580811,
+ 0.005873133182525634,
+ 0.0070116958618164065,
+ 0.006228813171386719,
+ 0.006025293827056885,
+ 0.006010252952575684,
+ 0.0059536128044128415,
+ 0.005982734203338623,
+ 0.006024493217468262,
+ 0.0059745731353759764,
+ 0.0061432127952575685,
+ 0.00627281379699707,
+ 0.00598641300201416,
  0.006129454135894776,
- 0.00610209321975708,
- 0.006148653030395508,
- 0.006165773868560791,
- 0.0061174540519714354,
- 0.00617585277557373,
- 0.006191052913665771,
- 0.0061625738143920895,
- 0.006150894165039063,
- 0.006136814117431641,
- 0.006153933048248291,
- 0.006147054195404053,
- 0.006159053802490234,
- 0.006173614025115967,
- 0.0061353731155395505,
- 0.006151373863220215,
- 0.0061688141822814945,
- 0.006109292984008789,
- 0.006151052951812744,
- 0.0061619338989257815,
- 0.006158894062042237,
- 0.006189613819122314,
- 0.00614305305480957,
- 0.006206893920898437,
- 0.006226093769073487,
- 0.006180493831634521,
- 0.006161774158477783,
- 0.0061732931137084964,
- 0.006163692951202392,
- 0.006220493793487549,
- 0.006175533771514892,
- 0.006207054138183594,
- 0.006425933837890625,
- 0.006156653881072998,
- 0.006179694175720215,
- 0.006198254108428955,
- 0.006162252902984619,
- 0.006193933963775634,
- 0.006156174182891846,
- 0.006180654048919678,
+ 0.005991693019866943,
+ 0.005973773002624512,
+ 0.006011373996734619,
+ 0.005997132778167724,
+ 0.0062188940048217775,
+ 0.006018413066864014,
+ 0.005988173007965088,
+ 0.0059822540283203125,
+ 0.00595537281036377,
+ 0.0061859340667724605,
+ 0.006014893054962158,
+ 0.005975372791290283,
+ 0.006000332832336426,
+ 0.0059841728210449216,
+ 0.006008333206176758,
+ 0.0060124931335449215,
+ 0.005976492881774902,
+ 0.005980493068695069,
+ 0.006007213115692139,
+ 0.005993773937225342,
+ 0.0059601731300354,
+ 0.005996653079986572,
+ 0.005995532989501953,
+ 0.006013614177703857,
+ 0.006147373199462891,
+ 0.005978253841400147,
+ 0.00597985315322876,
+ 0.006016333103179932,
+ 0.005993292808532715,
+ 0.0059603328704833985,
+ 0.005980173110961914,
  0.006175854206085205,
- 0.0061878528594970705,
- 0.006150094032287598,
- 0.006202094078063965,
- 0.006184974193572998,
- 0.006192493915557862,
- 0.006142733097076416,
- 0.006128334045410157,
- 0.006151214122772217,
- 0.0061337738037109376,
- 0.006155852794647217,
- 0.00614881420135498,
- 0.006138093948364258,
- 0.0061523327827453615,
- 0.006201292991638184,
- 0.006202253818511963,
- 0.006191854000091553,
- 0.006191854000091553,
- 0.0061387338638305665,
- 0.0061139330863952636,
- 0.0061256132125854495,
- 0.006179853916168213,
- 0.0061619338989257815,
- 0.006210894107818604,
- 0.006130093097686767,
- 0.006119534015655518,
- 0.00617233419418335,
- 0.006172493934631347,
- 0.006187373161315918,
- 0.006157614231109619,
- 0.006158254146575928,
- 0.006207374095916748,
- 0.006153133869171143,
- 0.00612673282623291,
- 0.006155054092407227,
- 0.006123373985290528,
- 0.006142252922058105,
- 0.006157934188842773,
- 0.006145453929901123,
- 0.006196494102478027,
- 0.006185133934020996,
- 0.006174892902374268,
- 0.006420175075531006,
- 0.0069312148094177245,
- 0.006975056171417237,
- 0.006914895057678222,
- 0.006647055149078369,
- 0.006501134872436524,
- 0.006455853939056396,
- 0.006913455963134765,
- 0.006932175159454346,
- 0.006906414985656738,
- 0.006960655212402344,
- 0.00680641508102417,
- 0.006858575820922852,
- 0.006840014934539795,
- 0.006816975116729736,
- 0.006857775211334229,
- 0.006843855857849121,
- 0.006831854820251465,
- 0.0068566551208496095,
- 0.0068595361709594726,
- 0.006887535095214844,
- 0.006885934829711914,
- 0.006859535217285156,
- 0.006487534046173095,
- 0.00625809383392334,
- 0.006236013889312744,
- 0.006111534118652344,
- 0.006127532958984375,
- 0.006132493019104004,
- 0.006160654067993164,
- 0.00616849422454834
+ 0.005986252784729004,
+ 0.005979533195495605,
+ 0.018511720657348633,
+ 0.00398448896408081,
+ 0.003971369028091431,
+ 0.003976968050003052,
+ 0.003978569030761719,
+ 0.0053483319282531736,
+ 0.0060001730918884276,
+ 0.0059870538711547855,
+ 0.005975213050842285,
+ 0.005964653015136719,
+ 0.005970413208007812,
+ 0.005973773956298828,
+ 0.00596401309967041,
+ 0.005960813045501709,
+ 0.005980813026428223,
+ 0.005972652912139892,
+ 0.005952173233032226,
+ 0.006010892868041992,
+ 0.00597825288772583,
+ 0.0059536128044128415,
+ 0.005972493171691895,
+ 0.005998892784118652,
+ 0.0059841728210449216,
+ 0.005949452877044677,
+ 0.005984333038330078,
+ 0.005912013053894043,
+ 0.006001613140106201,
+ 0.005977612972259521,
+ 0.005950413227081299,
+ 0.005958733081817627,
+ 0.005971373081207276,
+ 0.005946574211120606,
+ 0.005957293033599854,
+ 0.005966413021087647,
+ 0.005964972972869873,
+ 0.005983054161071777,
+ 0.005972012996673584,
+ 0.005953932762145996,
+ 0.005940012931823731,
+ 0.005958413124084473,
+ 0.00595729398727417,
+ 0.00597825288772583,
+ 0.005933133125305176,
+ 0.005951693058013916,
+ 0.006013613224029541,
+ 0.006012972831726074,
+ 0.0065198540687561035,
+ 0.006721775054931641,
+ 0.006754254817962647,
+ 0.00677153491973877,
+ 0.006817615032196045,
+ 0.00675953483581543,
+ 0.006748654842376709,
+ 0.006536495208740234,
+ 0.006659053802490235,
+ 0.006791375160217285,
+ 0.00672529411315918,
+ 0.0068388948440551755,
+ 0.006719854831695557,
+ 0.006782094955444336,
+ 0.006765775203704834,
+ 0.006796814918518066,
+ 0.006794734954833985,
+ 0.006795694828033447,
+ 0.006783374786376953,
+ 0.006759214878082276,
+ 0.006791055202484131,
+ 0.006838735103607178,
+ 0.006813614845275879,
+ 0.0068137750625610356,
+ 0.0068244948387146,
+ 0.006840335845947266,
+ 0.006818894863128662,
+ 0.006780495166778564,
+ 0.006735374927520752,
+ 0.006765934944152832,
+ 0.006773614883422851,
+ 0.006787215232849121,
+ 0.006779534816741943,
+ 0.00678785514831543,
+ 0.006800973892211914
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 157.47499892944236
+ "value": 160.30219581761733
  },
  "energy": null,
  "efficiency": null