IlyasMoutawwakil HF staff committed on
Commit
fe6e712
1 Parent(s): 704f222

Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub

Browse files
cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -3,7 +3,7 @@
3
  "name": "cuda_inference_transformers_text-classification_FacebookAI/roberta-base",
4
  "backend": {
5
  "name": "pytorch",
6
- "version": "2.4.1+rocm6.1",
7
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
8
  "task": "text-classification",
9
  "library": "transformers",
@@ -65,7 +65,7 @@
65
  "name": "process",
66
  "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
67
  "device_isolation": true,
68
- "device_isolation_action": "error",
69
  "numactl": false,
70
  "numactl_kwargs": {},
71
  "start_method": "spawn"
@@ -103,7 +103,7 @@
103
  "optimum_commit": null,
104
  "timm_version": "1.0.9",
105
  "timm_commit": null,
106
- "peft_version": null,
107
  "peft_commit": null
108
  }
109
  },
@@ -111,24 +111,24 @@
111
  "load": {
112
  "memory": {
113
  "unit": "MB",
114
- "max_ram": 1387.757568,
115
  "max_global_vram": 68702.69952,
116
- "max_process_vram": 0.0,
117
  "max_reserved": 555.74528,
118
  "max_allocated": 499.374592
119
  },
120
  "latency": {
121
  "unit": "s",
122
  "count": 1,
123
- "total": 7.7257412109375,
124
- "mean": 7.7257412109375,
125
  "stdev": 0.0,
126
- "p50": 7.7257412109375,
127
- "p90": 7.7257412109375,
128
- "p95": 7.7257412109375,
129
- "p99": 7.7257412109375,
130
  "values": [
131
- 7.7257412109375
132
  ]
133
  },
134
  "throughput": null,
@@ -138,160 +138,164 @@
138
  "forward": {
139
  "memory": {
140
  "unit": "MB",
141
- "max_ram": 1517.11744,
142
  "max_global_vram": 68702.69952,
143
- "max_process_vram": 0.0,
144
  "max_reserved": 555.74528,
145
  "max_allocated": 499.4432
146
  },
147
  "latency": {
148
  "unit": "s",
149
- "count": 132,
150
- "total": 0.9988101744651795,
151
- "mean": 0.007566743745948329,
152
- "stdev": 0.00023264861790925508,
153
- "p50": 0.007474857807159424,
154
- "p90": 0.00785370502471924,
155
- "p95": 0.008107896423339842,
156
- "p99": 0.008442011423110962,
157
  "values": [
158
- 0.00831021499633789,
159
- 0.008040936470031738,
160
- 0.007874697208404542,
161
- 0.008244935989379882,
162
- 0.007931817054748535,
163
- 0.007848456859588623,
164
- 0.007854856967926026,
165
- 0.007842216968536377,
166
- 0.007784296989440918,
167
- 0.007680458068847656,
168
- 0.007626858234405518,
169
- 0.007598537921905518,
170
- 0.007569737911224365,
171
- 0.007492618083953857,
172
- 0.0075058979988098145,
173
- 0.00750509786605835,
174
- 0.007504137992858886,
175
- 0.007486058235168457,
176
- 0.007539497852325439,
177
- 0.00749085807800293,
178
- 0.0076124567985534665,
179
- 0.007541098117828369,
180
- 0.007478538036346435,
181
- 0.0074794979095458985,
182
- 0.007456617832183838,
183
- 0.008598054885864258,
184
- 0.0076079778671264645,
185
- 0.007457737922668457,
186
- 0.007455657958984375,
187
- 0.007484938144683838,
188
- 0.007444297790527344,
189
- 0.007412137985229492,
190
- 0.007474217891693115,
191
- 0.007427817821502686,
192
- 0.007482697963714599,
193
- 0.007435657978057861,
194
- 0.00744877815246582,
195
- 0.00746461820602417,
196
- 0.007474857807159424,
197
- 0.007425578117370606,
198
- 0.007482378005981445,
199
- 0.007449738025665283,
200
- 0.0074513378143310546,
201
- 0.007491016864776611,
202
- 0.0074649381637573245,
203
- 0.0074770979881286625,
204
- 0.007519177913665772,
205
- 0.007439817905426025,
206
- 0.007447497844696045,
207
- 0.007469738006591797,
208
- 0.007462378025054932,
209
- 0.00750525712966919,
210
- 0.007487177848815918,
211
- 0.007470218181610108,
212
- 0.007467658042907715,
213
- 0.007471978187561035,
214
- 0.00746685791015625,
215
- 0.007467817783355713,
216
- 0.0074703779220581055,
217
- 0.007479177951812744,
218
- 0.00747069787979126,
219
- 0.007474857807159424,
220
- 0.00746413803100586,
221
- 0.007517578125,
222
- 0.007504137992858886,
223
- 0.007487497806549072,
224
- 0.007760296821594239,
225
- 0.0074935779571533205,
226
- 0.007459338188171387,
227
- 0.007469898223876953,
228
- 0.007967495918273925,
229
- 0.007744777202606201,
230
- 0.007433738231658936,
231
- 0.007407658100128174,
232
- 0.0074727778434753415,
233
- 0.007451498031616211,
234
- 0.007480937957763672,
235
- 0.0074703779220581055,
236
- 0.0074673380851745606,
237
- 0.007466217994689941,
238
- 0.007473897933959961,
239
- 0.007453258037567139,
240
- 0.00747549819946289,
241
- 0.007462217807769776,
242
- 0.0074513378143310546,
243
- 0.0074135780334472655,
244
- 0.007462058067321777,
245
- 0.007442537784576416,
246
- 0.007486217975616455,
247
- 0.007458538055419922,
248
- 0.0074582180976867675,
249
- 0.0074844579696655275,
250
- 0.007459497928619384,
251
- 0.007453897953033447,
252
- 0.00748701810836792,
253
- 0.007438377857208252,
254
- 0.007471337795257569,
255
- 0.007428458213806152,
256
- 0.007446857929229736,
257
- 0.007472457885742187,
258
- 0.0074703779220581055,
259
- 0.007477578163146973,
260
- 0.007427178859710694,
261
- 0.007482378005981445,
262
- 0.007486537933349609,
263
- 0.00752797794342041,
264
- 0.007487338066101074,
265
- 0.007465258121490479,
266
- 0.007509897232055664,
267
- 0.007483017921447754,
268
- 0.007484618186950683,
269
- 0.007425898075103759,
270
- 0.007445097923278809,
271
- 0.007423337936401367,
272
- 0.0074343781471252444,
273
- 0.007435177803039551,
274
- 0.007429257869720459,
275
- 0.007429257869720459,
276
- 0.007445898056030274,
277
- 0.007423818111419678,
278
- 0.007469898223876953,
279
- 0.007467658042907715,
280
- 0.007508137226104736,
281
- 0.008189736366271972,
282
- 0.008442854881286621,
283
- 0.008440134048461913,
284
- 0.0083839750289917,
285
- 0.007854217052459717,
286
- 0.007849096775054932,
287
- 0.00783277702331543,
288
- 0.007807656764984131,
289
- 0.007888136863708496
 
 
 
 
290
  ]
291
  },
292
  "throughput": {
293
  "unit": "samples/s",
294
- "value": 132.15724406359837
295
  },
296
  "energy": null,
297
  "efficiency": null
 
3
  "name": "cuda_inference_transformers_text-classification_FacebookAI/roberta-base",
4
  "backend": {
5
  "name": "pytorch",
6
+ "version": "2.3.1+rocm5.7",
7
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
8
  "task": "text-classification",
9
  "library": "transformers",
 
65
  "name": "process",
66
  "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
67
  "device_isolation": true,
68
+ "device_isolation_action": "warn",
69
  "numactl": false,
70
  "numactl_kwargs": {},
71
  "start_method": "spawn"
 
103
  "optimum_commit": null,
104
  "timm_version": "1.0.9",
105
  "timm_commit": null,
106
+ "peft_version": "0.12.0",
107
  "peft_commit": null
108
  }
109
  },
 
111
  "load": {
112
  "memory": {
113
  "unit": "MB",
114
+ "max_ram": 1042.169856,
115
  "max_global_vram": 68702.69952,
116
+ "max_process_vram": 47205.773312,
117
  "max_reserved": 555.74528,
118
  "max_allocated": 499.374592
119
  },
120
  "latency": {
121
  "unit": "s",
122
  "count": 1,
123
+ "total": 8.413111328125,
124
+ "mean": 8.413111328125,
125
  "stdev": 0.0,
126
+ "p50": 8.413111328125,
127
+ "p90": 8.413111328125,
128
+ "p95": 8.413111328125,
129
+ "p99": 8.413111328125,
130
  "values": [
131
+ 8.413111328125
132
  ]
133
  },
134
  "throughput": null,
 
138
  "forward": {
139
  "memory": {
140
  "unit": "MB",
141
+ "max_ram": 1159.278592,
142
  "max_global_vram": 68702.69952,
143
+ "max_process_vram": 233726.119936,
144
  "max_reserved": 555.74528,
145
  "max_allocated": 499.4432
146
  },
147
  "latency": {
148
  "unit": "s",
149
+ "count": 136,
150
+ "total": 1.0017187685966489,
151
+ "mean": 0.007365579180857716,
152
+ "stdev": 0.0011758168392413482,
153
+ "p50": 0.007278536081314087,
154
+ "p90": 0.007543255090713501,
155
+ "p95": 0.007699134588241577,
156
+ "p99": 0.009731863546371462,
157
  "values": [
158
+ 0.007694855213165284,
159
+ 0.007215496063232422,
160
+ 0.00730893611907959,
161
+ 0.0073746957778930665,
162
+ 0.007458535194396973,
163
+ 0.007463816165924072,
164
+ 0.007483816146850586,
165
+ 0.007565895080566406,
166
+ 0.007379816055297851,
167
+ 0.007335815906524658,
168
+ 0.007365574836730957,
169
+ 0.007291336059570313,
170
+ 0.007273416042327881,
171
+ 0.0072127761840820315,
172
+ 0.007301095962524414,
173
+ 0.007154376029968262,
174
+ 0.0071631760597229005,
175
+ 0.007173736095428467,
176
+ 0.007110217094421387,
177
+ 0.0071068558692932125,
178
+ 0.007166215896606445,
179
+ 0.007127976894378662,
180
+ 0.007087337017059326,
181
+ 0.007069897174835205,
182
+ 0.007074697017669678,
183
+ 0.007013896942138672,
184
+ 0.007083816051483154,
185
+ 0.007040456771850586,
186
+ 0.008149892807006835,
187
+ 0.0071612558364868165,
188
+ 0.006900297164916992,
189
+ 0.0068841371536254884,
190
+ 0.0071071767807006836,
191
+ 0.007341415882110596,
192
+ 0.0072321357727050785,
193
+ 0.007228936195373535,
194
+ 0.007314536094665527,
195
+ 0.00732797622680664,
196
+ 0.007102375984191895,
197
+ 0.018953855514526368,
198
+ 0.0023889520168304443,
199
+ 0.0038340680599212645,
200
+ 0.007267496109008789,
201
+ 0.007398536205291748,
202
+ 0.007099976062774658,
203
+ 0.007313735961914062,
204
+ 0.007198855876922607,
205
+ 0.007219016075134277,
206
+ 0.008625091552734375,
207
+ 0.007376935958862305,
208
+ 0.007142536163330078,
209
+ 0.007153576850891114,
210
+ 0.0071228561401367185,
211
+ 0.0071566162109375,
212
+ 0.007539655208587646,
213
+ 0.0075182151794433594,
214
+ 0.007542375087738037,
215
+ 0.009465728759765625,
216
+ 0.007636613845825195,
217
+ 0.0075394949913024905,
218
+ 0.007606695175170899,
219
+ 0.007350855827331543,
220
+ 0.007132936000823975,
221
+ 0.007697575092315674,
222
+ 0.007544135093688965,
223
+ 0.007263815879821777,
224
+ 0.007318056106567382,
225
+ 0.0073222160339355465,
226
+ 0.007371174812316895,
227
+ 0.00735149621963501,
228
+ 0.007332456111907959,
229
+ 0.007316135883331299,
230
+ 0.0072860560417175296,
231
+ 0.007254055976867676,
232
+ 0.007287496089935302,
233
+ 0.007295815944671631,
234
+ 0.007474215984344482,
235
+ 0.0073194952011108394,
236
+ 0.007255815982818604,
237
+ 0.007275335788726807,
238
+ 0.0072702159881591795,
239
+ 0.0072890958786010745,
240
+ 0.007266695976257324,
241
+ 0.007363655090332031,
242
+ 0.007331655979156494,
243
+ 0.007299816131591797,
244
+ 0.007313095092773438,
245
+ 0.00730733585357666,
246
+ 0.007303016185760498,
247
+ 0.007278696060180664,
248
+ 0.007280295848846436,
249
+ 0.007299335956573486,
250
+ 0.007306215763092041,
251
+ 0.007272615909576416,
252
+ 0.007301896095275879,
253
+ 0.00727677583694458,
254
+ 0.0072303757667541504,
255
+ 0.00725869607925415,
256
+ 0.00725869607925415,
257
+ 0.00729261589050293,
258
+ 0.007304615974426269,
259
+ 0.007250535964965821,
260
+ 0.007242856025695801,
261
+ 0.007291335105895996,
262
+ 0.007201257228851318,
263
+ 0.007280456066131592,
264
+ 0.007272776126861572,
265
+ 0.007250855922698975,
266
+ 0.007284615993499756,
267
+ 0.007242856025695801,
268
+ 0.007347015857696533,
269
+ 0.007413094997406006,
270
+ 0.007294055938720703,
271
+ 0.0072439761161804195,
272
+ 0.007264935970306396,
273
+ 0.007239175796508789,
274
+ 0.007230056762695313,
275
+ 0.0072604560852050784,
276
+ 0.007270535945892334,
277
+ 0.00726205587387085,
278
+ 0.007242216110229492,
279
+ 0.007251815795898437,
280
+ 0.007271175861358643,
281
+ 0.007262375831604004,
282
+ 0.0072897357940673825,
283
+ 0.007249735832214356,
284
+ 0.007281095981597901,
285
+ 0.007238696098327637,
286
+ 0.007207176208496094,
287
+ 0.007193895816802978,
288
+ 0.007267015933990479,
289
+ 0.00727837610244751,
290
+ 0.00987516689300537,
291
+ 0.007838374137878417,
292
+ 0.007621095180511474,
293
+ 0.007703813076019287
294
  ]
295
  },
296
  "throughput": {
297
  "unit": "samples/s",
298
+ "value": 135.76664854800336
299
  },
300
  "energy": null,
301
  "efficiency": null