IlyasMoutawwakil committed on
Commit c9102ac · verified · 1 Parent(s): 3f349fe

Upload cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub

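The commit message above says the file was pushed "with huggingface_hub". Below is a minimal sketch of what such an upload can look like; the target `repo_id`, repo type, and token handling are assumptions, since the actual repository and client code are not part of this commit.

```python
# Minimal sketch of a huggingface_hub upload like the one described in the
# commit message. The repo_id below is hypothetical; the real target repo is
# not shown in this commit.
from huggingface_hub import HfApi

api = HfApi()  # uses the token from `huggingface-cli login` or the HF_TOKEN env var

api.upload_file(
    path_or_fileobj="benchmark.json",  # local result produced by optimum-benchmark
    path_in_repo="cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json",
    repo_id="some-org/benchmark-results",  # hypothetical repo_id
    repo_type="dataset",                   # assumption: results are stored in a dataset repo
    commit_message="Upload cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
)
```
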
cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -3,7 +3,7 @@
  "name": "cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base",
  "backend": {
  "name": "pytorch",
- "version": "2.4.0+rocm6.1",
+ "version": "2.3.1+rocm5.7",
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
  "task": "multiple-choice",
  "library": "transformers",
@@ -65,7 +65,7 @@
  "name": "process",
  "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
  "device_isolation": true,
- "device_isolation_action": "error",
+ "device_isolation_action": "warn",
  "numactl": false,
  "numactl_kwargs": {},
  "start_method": "spawn"
@@ -95,7 +95,7 @@
  "optimum_benchmark_commit": null,
  "transformers_version": "4.44.2",
  "transformers_commit": null,
- "accelerate_version": "0.33.0",
+ "accelerate_version": "0.34.0",
  "accelerate_commit": null,
  "diffusers_version": "0.30.2",
  "diffusers_commit": null,
@@ -111,24 +111,24 @@
  "load": {
  "memory": {
  "unit": "MB",
- "max_ram": 1389.07648,
- "max_global_vram": 11.247616,
- "max_process_vram": 0.0,
+ "max_ram": 1031.069696,
+ "max_global_vram": 68702.69952,
+ "max_process_vram": 44370.382848,
  "max_reserved": 555.74528,
  "max_allocated": 499.37152
  },
  "latency": {
  "unit": "s",
  "count": 1,
- "total": 7.76156982421875,
- "mean": 7.76156982421875,
+ "total": 7.6900244140625,
+ "mean": 7.6900244140625,
  "stdev": 0.0,
- "p50": 7.76156982421875,
- "p90": 7.76156982421875,
- "p95": 7.76156982421875,
- "p99": 7.76156982421875,
+ "p50": 7.6900244140625,
+ "p90": 7.6900244140625,
+ "p95": 7.6900244140625,
+ "p99": 7.6900244140625,
  "values": [
- 7.76156982421875
+ 7.6900244140625
  ]
  },
  "throughput": null,
@@ -138,150 +138,152 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 1514.479616,
- "max_global_vram": 11.300864,
- "max_process_vram": 0.0,
+ "max_ram": 1148.645376,
+ "max_global_vram": 68702.69952,
+ "max_process_vram": 218992.996352,
  "max_reserved": 555.74528,
  "max_allocated": 499.5072
  },
  "latency": {
  "unit": "s",
- "count": 122,
- "total": 0.9962058074474334,
- "mean": 0.008165621372519947,
- "stdev": 0.0012362802294811678,
- "p50": 0.008042850494384766,
- "p90": 0.008504481124877929,
- "p95": 0.008635712385177612,
- "p99": 0.00927870672225952,
+ "count": 124,
+ "total": 1.000955594778061,
+ "mean": 0.00807222253853275,
+ "stdev": 0.003566930112279063,
+ "p50": 0.007740370035171509,
+ "p90": 0.008459327030181884,
+ "p95": 0.008549190759658813,
+ "p99": 0.016620282993316644,
  "values": [
- 0.008615649223327637,
- 0.008395489692687988,
- 0.008390369415283204,
- 0.008412448883056641,
- 0.008082690238952637,
- 0.008065251350402832,
- 0.008061570167541504,
- 0.008023011207580566,
- 0.008043490409851074,
- 0.008051651000976563,
- 0.008024930000305176,
- 0.008010850906372071,
- 0.008007810592651368,
- 0.008009571075439453,
- 0.008003810882568359,
- 0.008026209831237793,
- 0.008223649978637695,
- 0.008002850532531738,
- 0.00804284954071045,
- 0.008036931037902833,
- 0.00805405044555664,
- 0.008037570953369141,
- 0.00810157012939453,
- 0.00802285099029541,
- 0.008046690940856933,
- 0.008036770820617677,
- 0.008031331062316894,
- 0.008065410614013672,
- 0.008055971145629883,
- 0.0080222110748291,
- 0.0080474910736084,
- 0.008045089721679688,
- 0.008008931159973144,
- 0.008004131317138672,
- 0.008023650169372558,
- 0.008102371215820312,
- 0.008016290664672852,
- 0.008058691024780273,
- 0.008017411231994629,
- 0.008049891471862794,
- 0.008042370796203614,
- 0.008070369720458985,
- 0.00804141139984131,
- 0.008074529647827148,
- 0.008068930625915528,
- 0.008051969528198243,
- 0.008049731254577636,
- 0.00801165008544922,
- 0.008041730880737305,
- 0.008192290306091308,
- 0.008025569915771484,
- 0.008024611473083497,
- 0.008004130363464356,
- 0.008021410942077636,
- 0.007995010852813721,
- 0.00801980972290039,
- 0.007942211151123047,
- 0.008109729766845703,
- 0.007911651134490967,
- 0.00846044921875,
- 0.008397089958190917,
- 0.00839309024810791,
- 0.00808141040802002,
- 0.00800653076171875,
- 0.008027810096740723,
- 0.008066850662231445,
- 0.008055331230163575,
- 0.008064129829406737,
- 0.019301849365234373,
- 0.0025417509078979493,
- 0.003428147077560425,
- 0.008041729927062988,
- 0.008042210578918458,
- 0.008014370918273925,
- 0.008033571243286132,
- 0.008012290954589843,
- 0.008019009590148925,
- 0.007991971015930176,
- 0.007999011039733887,
- 0.008046530723571777,
- 0.008039971351623536,
- 0.008042851448059081,
- 0.008054370880126954,
- 0.00806333065032959,
- 0.008008451461791992,
- 0.008017730712890626,
- 0.008009890556335449,
- 0.00801548957824707,
- 0.008000770568847656,
- 0.008029730796813964,
- 0.008006051063537597,
- 0.008036770820617677,
- 0.007991331100463867,
- 0.00798141098022461,
- 0.008216130256652831,
- 0.008509248733520507,
- 0.008441889762878418,
- 0.008475170135498047,
- 0.008466368675231933,
- 0.008503329277038575,
- 0.00804141139984131,
- 0.008024450302124023,
- 0.009330685615539552,
- 0.008090049743652343,
- 0.008010531425476074,
- 0.008025250434875487,
- 0.008060450553894044,
- 0.00908204746246338,
- 0.009034847259521485,
- 0.009017566680908203,
- 0.00908316707611084,
- 0.008636768341064454,
- 0.00850460910797119,
- 0.00855820941925049,
- 0.00845884895324707,
- 0.00849772834777832,
- 0.008487488746643066,
- 0.008582689285278321,
- 0.00854956817626953,
- 0.008061731338500976,
- 0.008013089179992676,
- 0.008040611267089843
+ 0.00851772689819336,
+ 0.008453726768493652,
+ 0.015384099960327148,
+ 0.003182547092437744,
+ 0.008228768348693847,
+ 0.008207326889038085,
+ 0.007884768962860108,
+ 0.0077406101226806644,
+ 0.00764093017578125,
+ 0.007619329929351807,
+ 0.007591169834136963,
+ 0.007517889976501465,
+ 0.007517570972442627,
+ 0.007464611053466797,
+ 0.007456450939178467,
+ 0.007446209907531739,
+ 0.0074393310546875,
+ 0.007457571029663086,
+ 0.007472290992736817,
+ 0.0074210910797119145,
+ 0.007447010993957519,
+ 0.007367650985717774,
+ 0.00742733097076416,
+ 0.007370370864868164,
+ 0.01160987377166748,
+ 0.008309087753295898,
+ 0.008166208267211914,
+ 0.008127009391784668,
+ 0.00817404842376709,
+ 0.008165727615356446,
+ 0.008128607749938965,
+ 0.008145407676696777,
+ 0.008169407844543458,
+ 0.008159808158874512,
+ 0.00819644832611084,
+ 0.0081430082321167,
+ 0.008157407760620117,
+ 0.008161249160766602,
+ 0.008207167625427246,
+ 0.010982996940612793,
+ 0.008545247077941895,
+ 0.008217887878417969,
+ 0.008247008323669433,
+ 0.016989532470703124,
+ 0.0061417360305786135,
+ 0.00880556583404541,
+ 0.008549886703491211,
+ 0.008483806610107421,
+ 0.008439167022705078,
+ 0.008466366767883302,
+ 0.00851628589630127,
+ 0.008461727142333984,
+ 0.008438687324523925,
+ 0.008299487113952637,
+ 0.008359807014465332,
+ 0.008265727996826172,
+ 0.008273567199707031,
+ 0.008260767936706543,
+ 0.008260287284851075,
+ 0.008246527671813966,
+ 0.007683810234069824,
+ 0.0076154909133911135,
+ 0.00746605110168457,
+ 0.007571809768676758,
+ 0.007495650768280029,
+ 0.007429891109466553,
+ 0.007495649814605713,
+ 0.007470530986785889,
+ 0.0075154900550842285,
+ 0.007436770915985108,
+ 0.007534369945526123,
+ 0.007440451145172119,
+ 0.007617889881134033,
+ 0.007469730854034424,
+ 0.007565730094909668,
+ 0.007740129947662353,
+ 0.007682370185852051,
+ 0.007647329807281494,
+ 0.00757213020324707,
+ 0.007619649887084961,
+ 0.007486851215362549,
+ 0.007480610847473144,
+ 0.00749229097366333,
+ 0.007492930889129639,
+ 0.007509089946746826,
+ 0.0074572510719299314,
+ 0.007632929801940918,
+ 0.007506050109863281,
+ 0.007494690895080566,
+ 0.00789436912536621,
+ 0.008231488227844239,
+ 0.007648449897766113,
+ 0.007699170112609863,
+ 0.007676770210266113,
+ 0.042911670684814454,
+ 0.002612468957901001,
+ 0.0024998300075531007,
+ 0.002457911014556885,
+ 0.0024388699531555174,
+ 0.00244783091545105,
+ 0.005583178043365478,
+ 0.008260766983032226,
+ 0.008216927528381348,
+ 0.007931328773498536,
+ 0.008147968292236327,
+ 0.008182687759399414,
+ 0.00820268726348877,
+ 0.008158687591552735,
+ 0.00816316795349121,
+ 0.008184927940368653,
+ 0.008196767807006835,
+ 0.008193086624145508,
+ 0.00818988800048828,
+ 0.00819388771057129,
+ 0.00815548801422119,
+ 0.00819996738433838,
+ 0.008195648193359375,
+ 0.008007168769836426,
+ 0.007720929145812988,
+ 0.007697249889373779,
+ 0.00771357011795044,
+ 0.007247490882873535,
+ 0.007275651931762695,
+ 0.007265091896057129
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 122.46465447998057
+ "value": 123.88161937143092
  },
  "energy": null,
  "efficiency": null