IlyasMoutawwakil committed (verified)
Commit ae4b4af · 1 Parent(s): b3403bd

Upload cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub

cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -3,7 +3,7 @@
  "name": "cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base",
  "backend": {
  "name": "pytorch",
- "version": "2.2.2+rocm5.7",
+ "version": "2.4.0+rocm6.1",
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
  "task": "multiple-choice",
  "library": "transformers",
@@ -71,12 +71,12 @@
  "start_method": "spawn"
  },
  "environment": {
- "cpu": " AMD EPYC 7643 48-Core Processor",
- "cpu_count": 96,
- "cpu_ram_mb": 1082028.982272,
+ "cpu": " AMD EPYC 7763 64-Core Processor",
+ "cpu_count": 128,
+ "cpu_ram_mb": 1082015.256576,
  "system": "Linux",
  "machine": "x86_64",
- "platform": "Linux-5.15.0-84-generic-x86_64-with-glibc2.35",
+ "platform": "Linux-5.15.0-101-generic-x86_64-with-glibc2.35",
  "processor": "x86_64",
  "python_version": "3.10.12",
  "gpu": [
@@ -85,16 +85,16 @@
  "gpu_count": 1,
  "gpu_vram_mb": 68702699520,
  "optimum_benchmark_version": "0.4.0",
- "optimum_benchmark_commit": "65fa416fd503cfe9a2be7637ee30c70a4a1f96f1",
- "transformers_version": "4.43.3",
+ "optimum_benchmark_commit": null,
+ "transformers_version": "4.44.2",
  "transformers_commit": null,
  "accelerate_version": "0.33.0",
  "accelerate_commit": null,
- "diffusers_version": "0.29.2",
+ "diffusers_version": "0.30.1",
  "diffusers_commit": null,
  "optimum_version": null,
  "optimum_commit": null,
- "timm_version": "1.0.8",
+ "timm_version": "1.0.9",
  "timm_commit": null,
  "peft_version": null,
  "peft_commit": null
@@ -104,24 +104,24 @@
  "load": {
  "memory": {
  "unit": "MB",
- "max_ram": 903.59808,
- "max_global_vram": 841.764864,
- "max_process_vram": 42509.725696,
+ "max_ram": 1385.721856,
+ "max_global_vram": 0.0,
+ "max_process_vram": 0.0,
  "max_reserved": 555.74528,
  "max_allocated": 499.37152
  },
  "latency": {
  "unit": "s",
  "count": 1,
- "total": 6.7452548828125,
- "mean": 6.7452548828125,
+ "total": 7.55107666015625,
+ "mean": 7.55107666015625,
  "stdev": 0.0,
- "p50": 6.7452548828125,
- "p90": 6.7452548828125,
- "p95": 6.7452548828125,
- "p99": 6.7452548828125,
+ "p50": 7.55107666015625,
+ "p90": 7.55107666015625,
+ "p95": 7.55107666015625,
+ "p99": 7.55107666015625,
  "values": [
- 6.7452548828125
+ 7.55107666015625
  ]
  },
  "throughput": null,
@@ -131,159 +131,151 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 1013.16608,
- "max_global_vram": 897.65888,
- "max_process_vram": 199013.900288,
+ "max_ram": 1511.600128,
+ "max_global_vram": 0.0,
+ "max_process_vram": 0.0,
  "max_reserved": 555.74528,
  "max_allocated": 499.5072
  },
  "latency": {
  "unit": "s",
- "count": 131,
- "total": 0.9981487026214597,
- "mean": 0.007619455745201984,
- "stdev": 0.0011736739847574798,
- "p50": 0.007268142223358154,
- "p90": 0.008511658668518066,
- "p95": 0.009672695636749266,
- "p99": 0.009753864192962647,
+ "count": 123,
+ "total": 0.9967537579536438,
+ "mean": 0.008103689089054015,
+ "stdev": 0.0003547586862852718,
+ "p50": 0.00796621322631836,
+ "p90": 0.008386563682556152,
+ "p95": 0.008537842750549317,
+ "p99": 0.009016686096191407,
  "values": [
- 0.00783549976348877,
- 0.0080446195602417,
- 0.00822589874267578,
- 0.009021898269653321,
- 0.007987659931182861,
- 0.007968299865722657,
- 0.007835020065307617,
- 0.007684461116790772,
- 0.007599660873413086,
- 0.0075449409484863285,
- 0.007492140769958496,
- 0.007435021877288818,
- 0.0073343820571899416,
- 0.007268142223358154,
- 0.007241742134094238,
- 0.007224621772766113,
- 0.007242221832275391,
- 0.007271182060241699,
- 0.007264461994171143,
- 0.00717454195022583,
- 0.007216782093048096,
- 0.007193902015686035,
- 0.007215662002563477,
- 0.007188782215118408,
- 0.007214861869812012,
- 0.007194382190704346,
- 0.007210701942443848,
- 0.007203661918640137,
- 0.007210542201995849,
- 0.007234861850738525,
- 0.007246700763702393,
- 0.007201901912689209,
- 0.0072334218025207515,
- 0.0071809420585632324,
- 0.00718110179901123,
- 0.007184301853179932,
- 0.007190701961517334,
- 0.007203981876373291,
- 0.007175821781158448,
- 0.007165421962738037,
- 0.007188782215118408,
- 0.007188461780548096,
- 0.007165262222290039,
- 0.0071735820770263675,
- 0.0071950221061706544,
- 0.0071687822341918945,
- 0.007237261772155762,
- 0.007184142112731933,
- 0.007231502056121826,
- 0.007222861766815186,
- 0.007188782215118408,
- 0.007207662105560303,
- 0.007198222160339355,
- 0.007156622886657715,
- 0.007180302143096924,
- 0.017212915420532226,
- 0.002491672992706299,
- 0.006976462841033935,
- 0.00718110179901123,
- 0.007159341812133789,
- 0.00729406213760376,
- 0.007182062149047852,
- 0.007207181930541992,
- 0.007177581787109375,
- 0.007211182117462158,
- 0.007158701896667481,
- 0.007223182201385498,
- 0.00720382308959961,
- 0.007201422214508057,
- 0.007339022159576416,
- 0.007197102069854736,
- 0.007187022209167481,
- 0.007216621875762939,
- 0.007187501907348633,
- 0.007179982185363769,
- 0.007189422130584717,
- 0.0072163019180297855,
- 0.0071881418228149415,
- 0.007200302124023438,
- 0.007161581993103027,
- 0.0072115020751953125,
- 0.007195981979370117,
- 0.007430060863494873,
- 0.007272781848907471,
- 0.00717502212524414,
- 0.007434381008148193,
- 0.007579020977020264,
- 0.007367181777954102,
- 0.0073110218048095706,
- 0.007270542144775391,
- 0.007547660827636719,
- 0.007506221771240234,
- 0.007527660846710205,
- 0.007535661220550537,
- 0.007579340934753418,
- 0.007557100772857666,
- 0.007595981121063232,
- 0.008511658668518066,
- 0.009408295631408692,
- 0.009484616279602051,
- 0.009667976379394531,
- 0.009560935974121094,
- 0.009694696426391602,
- 0.009714695930480957,
- 0.009707976341247558,
- 0.009645735740661621,
- 0.009677414894104003,
- 0.009736616134643555,
- 0.009761256217956543,
- 0.00840541934967041,
- 0.007590701103210449,
- 0.007606541156768799,
- 0.007584620952606201,
- 0.007585101127624512,
- 0.007579501152038574,
- 0.007595661163330078,
- 0.007611340999603271,
- 0.007593901157379151,
- 0.0075814208984375,
- 0.007574060916900635,
- 0.007596621036529541,
- 0.007581261157989502,
- 0.0075737409591674805,
- 0.007580300807952881,
- 0.007567501068115235,
- 0.007583341121673584,
- 0.007603180885314941,
- 0.00757054090499878,
- 0.0076091008186340335,
- 0.007569419860839844,
- 0.007560620784759522
+ 0.008061893463134765,
+ 0.00808845329284668,
+ 0.008280611991882325,
+ 0.008400932312011718,
+ 0.008691651344299316,
+ 0.008391331672668458,
+ 0.008212451934814452,
+ 0.008326851844787597,
+ 0.008269251823425293,
+ 0.008128613471984862,
+ 0.00806605339050293,
+ 0.00798973321914673,
+ 0.00791309404373169,
+ 0.007949573993682861,
+ 0.00793837308883667,
+ 0.00796621322631836,
+ 0.00798973321914673,
+ 0.007990533828735352,
+ 0.00803005313873291,
+ 0.007999173164367676,
+ 0.007994853019714355,
+ 0.00797613286972046,
+ 0.01107484245300293,
+ 0.008219492912292481,
+ 0.007853253841400147,
+ 0.007894853115081786,
+ 0.007938852787017822,
+ 0.007951333045959472,
+ 0.0081126127243042,
+ 0.00794109296798706,
+ 0.007932612895965576,
+ 0.007983333110809326,
+ 0.007968293190002442,
+ 0.00826941204071045,
+ 0.008364770889282227,
+ 0.008333731651306152,
+ 0.008367491722106933,
+ 0.008293891906738281,
+ 0.008316770553588867,
+ 0.008212773323059083,
+ 0.008326691627502441,
+ 0.008352292060852052,
+ 0.008540130615234376,
+ 0.008251651763916016,
+ 0.008432770729064941,
+ 0.008453731536865234,
+ 0.008568131446838378,
+ 0.008346851348876954,
+ 0.00851725196838379,
+ 0.008501412391662598,
+ 0.008172772407531738,
+ 0.008183012008666992,
+ 0.008176932334899903,
+ 0.008033093452453613,
+ 0.007949892997741698,
+ 0.008103973388671875,
+ 0.007926692962646484,
+ 0.007929732799530029,
+ 0.007948293209075927,
+ 0.008366532325744629,
+ 0.008219812393188477,
+ 0.008081091880798339,
+ 0.007943492889404297,
+ 0.007917893886566161,
+ 0.007911814212799071,
+ 0.007927652835845947,
+ 0.00796893310546875,
+ 0.007947813034057618,
+ 0.007973252773284912,
+ 0.007892133235931397,
+ 0.007931812763214112,
+ 0.007914533138275146,
+ 0.00793613290786743,
+ 0.007927814006805419,
+ 0.007924774169921874,
+ 0.007902052879333496,
+ 0.00792205286026001,
+ 0.007941412925720214,
+ 0.007900932788848876,
+ 0.007922852993011474,
+ 0.007946212768554688,
+ 0.007977413177490235,
+ 0.00794269323348999,
+ 0.007994533061981201,
+ 0.007934853076934815,
+ 0.00799197292327881,
+ 0.007916452884674073,
+ 0.00795357322692871,
+ 0.007965572834014892,
+ 0.007916773796081543,
+ 0.007923813819885253,
+ 0.007902692794799805,
+ 0.007959332942962647,
+ 0.007955973148345947,
+ 0.007925413131713866,
+ 0.007989412784576416,
+ 0.007963172912597656,
+ 0.007908133029937744,
+ 0.007929412841796876,
+ 0.007917092800140382,
+ 0.007944293022155761,
+ 0.007915013790130616,
+ 0.007966214179992676,
+ 0.007963014125823975,
+ 0.007981733798980713,
+ 0.007923813819885253,
+ 0.007939173221588134,
+ 0.007893892765045166,
+ 0.007903013229370117,
+ 0.007875973224639893,
+ 0.007905253887176514,
+ 0.007916133880615234,
+ 0.00791437292098999,
+ 0.00790621280670166,
+ 0.007886693000793457,
+ 0.007926692962646484,
+ 0.007902853965759278,
+ 0.007952453136444092,
+ 0.009024289131164551,
+ 0.008989729881286622,
+ 0.00879180908203125,
+ 0.008328612327575684,
+ 0.008312931060791015
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 131.2429697658794
+ "value": 123.40058817788814
  },
  "energy": null,
  "efficiency": null