Commit e6e28d8 (verified) · IlyasMoutawwakil committed · 1 parent: 53609b5

Upload cuda_inference_timm_image-classification_timm/resnet50.a1_in1k/benchmark.json with huggingface_hub
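
The commit message notes the report was uploaded with huggingface_hub. For context, here is a minimal sketch of how such an upload can be issued with the `HfApi` client; the local path and `repo_id` are placeholders for illustration, not values taken from this commit.

```python
# Minimal sketch of uploading a benchmark report with huggingface_hub.
# The local path and repo_id below are placeholders, not taken from this commit.
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` by default
api.upload_file(
    path_or_fileobj="benchmark.json",  # local result file produced by the benchmark run
    path_in_repo="cuda_inference_timm_image-classification_timm/resnet50.a1_in1k/benchmark.json",
    repo_id="username/benchmark-results",  # hypothetical target dataset repo
    repo_type="dataset",
    commit_message="Upload benchmark.json with huggingface_hub",
)
```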

cuda_inference_timm_image-classification_timm/resnet50.a1_in1k/benchmark.json CHANGED
@@ -3,7 +3,7 @@
"name": "cuda_inference_timm_image-classification_timm/resnet50.a1_in1k",
"backend": {
"name": "pytorch",
- "version": "2.3.1+rocm5.7",
+ "version": "2.4.0+rocm6.1",
"_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
"task": "image-classification",
"library": "timm",
@@ -111,24 +111,24 @@
"load": {
"memory": {
"unit": "MB",
- "max_ram": 1108.041728,
- "max_global_vram": 409.468928,
- "max_process_vram": 42245.513216,
+ "max_ram": 1468.727296,
+ "max_global_vram": 11.124736,
+ "max_process_vram": 0.0,
"max_reserved": 123.731968,
"max_allocated": 102.475264
},
"latency": {
"unit": "s",
"count": 1,
- "total": 8.04746728515625,
- "mean": 8.04746728515625,
+ "total": 8.10223974609375,
+ "mean": 8.10223974609375,
"stdev": 0.0,
- "p50": 8.04746728515625,
- "p90": 8.04746728515625,
- "p95": 8.04746728515625,
- "p99": 8.04746728515625,
+ "p50": 8.10223974609375,
+ "p90": 8.10223974609375,
+ "p95": 8.10223974609375,
+ "p99": 8.10223974609375,
"values": [
- 8.04746728515625
+ 8.10223974609375
]
},
"throughput": null,
@@ -138,167 +138,160 @@
"forward": {
"memory": {
"unit": "MB",
- "max_ram": 1192.239104,
- "max_global_vram": 521.838592,
- "max_process_vram": 112953.20064,
+ "max_ram": 1555.369984,
+ "max_global_vram": 11.247616,
+ "max_process_vram": 0.0,
"max_reserved": 148.897792,
"max_allocated": 113.516032
},
"latency": {
"unit": "s",
- "count": 139,
- "total": 0.9991877548694612,
- "mean": 0.007188401114168784,
- "stdev": 0.0025029485410295415,
- "p50": 0.007056778907775879,
- "p90": 0.007429514026641846,
- "p95": 0.007876504850387574,
- "p99": 0.008106778888702393,
+ "count": 132,
+ "total": 0.9976080794334413,
+ "mean": 0.007557636965404858,
+ "stdev": 0.00023275267480340494,
+ "p50": 0.007442369461059571,
+ "p90": 0.007921487808227538,
+ "p95": 0.008037527751922607,
+ "p99": 0.008446068305969237,
"values": [
- 0.007583498001098633,
- 0.0072850980758666995,
- 0.00737565803527832,
- 0.007266858100891113,
- 0.0073676581382751466,
- 0.007199179172515869,
- 0.007243018150329589,
- 0.007163339138031006,
- 0.0072662181854248045,
- 0.007258539199829102,
- 0.007208137989044189,
- 0.007217258930206299,
- 0.007192937850952148,
- 0.007220778942108154,
- 0.007177739143371582,
- 0.007183659076690674,
- 0.007179338932037354,
- 0.0072802982330322265,
- 0.007194058895111084,
- 0.007131338119506836,
- 0.00715597915649414,
- 0.007096939086914063,
- 0.007110379219055176,
- 0.007100618839263916,
- 0.008188614845275879,
- 0.007498857975006104,
- 0.007067338943481445,
- 0.007061738967895508,
- 0.007041578769683838,
- 0.007057098865509033,
- 0.007031339168548584,
- 0.0070372591018676755,
- 0.00703981876373291,
- 0.007010058879852295,
- 0.0070372591018676755,
- 0.007039659023284912,
- 0.007017259120941162,
- 0.007039339065551758,
- 0.007043498992919922,
- 0.007044619083404541,
- 0.007000139236450195,
- 0.007009579181671143,
- 0.007003018856048584,
- 0.007026379108428955,
- 0.0070542187690734864,
- 0.00702557897567749,
- 0.007028298854827881,
- 0.006984139919281006,
- 0.007054539203643799,
- 0.0070158190727233885,
- 0.007022378921508789,
- 0.007021739006042481,
- 0.00703981876373291,
- 0.0070151791572570805,
- 0.007005898952484131,
- 0.007482858180999756,
- 0.007030698776245117,
- 0.007003818988800049,
- 0.007022378921508789,
- 0.007017259120941162,
- 0.007000938892364502,
- 0.007016940116882324,
- 0.0070486202239990236,
- 0.007314058780670166,
- 0.0070439791679382325,
- 0.007069738864898681,
- 0.007085739135742187,
- 0.007056778907775879,
- 0.0072838177680969235,
- 0.007296298027038574,
- 0.007080618858337402,
- 0.007072459220886231,
- 0.00702669906616211,
- 0.007016458988189697,
- 0.007055179119110107,
- 0.0070370988845825195,
- 0.007032938957214355,
- 0.007035819053649903,
- 0.00704413890838623,
- 0.03450757598876953,
- 0.0023395130634307863,
- 0.0023297529220581053,
- 0.0023308730125427245,
- 0.0023297529220581053,
- 0.0024046330451965334,
- 0.007072779178619384,
- 0.007040139198303223,
- 0.0070865387916564946,
- 0.007068459033966064,
- 0.007069899082183838,
- 0.007066379070281983,
- 0.007067338943481445,
- 0.007040139198303223,
- 0.007036458969116211,
- 0.00705997896194458,
- 0.007102859020233154,
- 0.007080618858337402,
- 0.007047179222106933,
- 0.0070228590965271,
- 0.007025739192962646,
- 0.00704141902923584,
- 0.0070225391387939455,
- 0.007016458988189697,
- 0.0070127792358398435,
- 0.0069356589317321774,
- 0.007367338180541992,
- 0.00787485694885254,
- 0.007899655818939208,
- 0.007973257064819337,
- 0.007463818073272705,
- 0.007617417812347412,
- 0.00789133596420288,
- 0.007913577079772948,
- 0.007896137237548829,
- 0.007289738178253174,
- 0.007359498023986817,
- 0.007404138088226318,
- 0.007387978076934814,
- 0.0074209380149841304,
- 0.007292139053344727,
- 0.007527018070220947,
- 0.0072996578216552735,
- 0.0072641391754150395,
- 0.0071124591827392575,
- 0.007073740005493164,
- 0.007229578971862793,
- 0.007114698886871338,
- 0.006981898784637451,
- 0.007035500049591065,
- 0.007005898952484131,
- 0.00702669906616211,
- 0.007009418964385986,
- 0.007012938976287842,
- 0.007008139133453369,
- 0.0070084590911865235,
- 0.006995820045471192,
- 0.007046539783477784,
- 0.007015339851379395,
- 0.00701373815536499
+ 0.008104126930236816,
+ 0.0076146888732910156,
+ 0.007617888927459716,
+ 0.007707328796386719,
+ 0.007670527935028077,
+ 0.007601569175720215,
+ 0.007588129043579101,
+ 0.007561408996582032,
+ 0.007537088871002197,
+ 0.00751676893234253,
+ 0.007514049053192138,
+ 0.007483489990234375,
+ 0.007482530117034912,
+ 0.0074471688270568845,
+ 0.007467649936676026,
+ 0.007439488887786865,
+ 0.007434689998626709,
+ 0.007483008861541748,
+ 0.007437088966369629,
+ 0.007428289890289307,
+ 0.007427168846130371,
+ 0.00741772985458374,
+ 0.007436288833618164,
+ 0.007894527912139893,
+ 0.007798848152160645,
+ 0.007923327922821044,
+ 0.008033568382263184,
+ 0.007573410034179688,
+ 0.007786688804626465,
+ 0.00751324987411499,
+ 0.007483008861541748,
+ 0.007455968856811523,
+ 0.007447649955749511,
+ 0.0074190092086792,
+ 0.008542844772338867,
+ 0.00812796688079834,
+ 0.00793692684173584,
+ 0.007709249019622803,
+ 0.0082220458984375,
+ 0.007975487232208252,
+ 0.00785756778717041,
+ 0.007981246948242187,
+ 0.007826047897338867,
+ 0.007880127906799317,
+ 0.007904926776885986,
+ 0.0079268479347229,
+ 0.007946527004241944,
+ 0.007630848884582519,
+ 0.0076642889976501465,
+ 0.00758220911026001,
+ 0.00763132905960083,
+ 0.00767388916015625,
+ 0.007559968948364258,
+ 0.007392448902130127,
+ 0.007432129859924317,
+ 0.007422369003295898,
+ 0.007428609848022461,
+ 0.00743596887588501,
+ 0.0076297287940979,
+ 0.00738716983795166,
+ 0.007594688892364502,
+ 0.007647489070892334,
+ 0.007400929927825928,
+ 0.00748668909072876,
+ 0.007417409896850586,
+ 0.007432289123535156,
+ 0.007460130214691162,
+ 0.007467169761657715,
+ 0.007449728965759277,
+ 0.00742588996887207,
+ 0.007430368900299072,
+ 0.00741068983078003,
+ 0.007439009189605713,
+ 0.0074348502159118654,
+ 0.007417408943176269,
+ 0.007397729873657227,
+ 0.007600449085235596,
+ 0.007444929122924804,
+ 0.007456610202789307,
+ 0.00743212890625,
+ 0.007443490028381347,
+ 0.007441888809204102,
+ 0.00745644998550415,
+ 0.0074298901557922365,
+ 0.007434209823608398,
+ 0.007424930095672608,
+ 0.007430210113525391,
+ 0.007430369853973389,
+ 0.007436930179595947,
+ 0.007413730144500732,
+ 0.007442369937896729,
+ 0.007414370059967041,
+ 0.007414690017700195,
+ 0.0074110088348388675,
+ 0.007418049812316895,
+ 0.007439329147338867,
+ 0.007434050083160401,
+ 0.007416928768157959,
+ 0.007382369995117187,
+ 0.007417570114135743,
+ 0.00742204999923706,
+ 0.007438529968261718,
+ 0.00741117000579834,
+ 0.007411330223083496,
+ 0.0074343690872192385,
+ 0.007399010181427002,
+ 0.007421568870544433,
+ 0.007418369770050049,
+ 0.007442368984222412,
+ 0.007413569927215576,
+ 0.007402689933776856,
+ 0.007419489860534668,
+ 0.007430369853973389,
+ 0.007431808948516846,
+ 0.007431169986724853,
+ 0.007415009021759033,
+ 0.007432129859924317,
+ 0.007408449172973633,
+ 0.007471010208129882,
+ 0.00745884895324707,
+ 0.0074631690979003905,
+ 0.007421889781951904,
+ 0.007425249099731445,
+ 0.007434209823608398,
+ 0.0074138898849487305,
+ 0.007436288833618164,
+ 0.0074631690979003905,
+ 0.007416450023651123,
+ 0.007450688838958741,
+ 0.008446366310119628,
+ 0.00844540500640869,
+ 0.008042366981506347
]
},
"throughput": {
"unit": "samples/s",
- "value": 139.11299385185086
+ "value": 132.31649053500556
},
"energy": null,
"efficiency": null