IlyasMoutawwakil committed
Commit 3ca56d8
Parent: 3d4e58c

Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub
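For context, a minimal sketch (not part of this commit) of how a result file like this one could be pushed to the Hub with huggingface_hub; the repo_id below is a placeholder, not the actual destination repo.

# Sketch only: uploading a benchmark result file with huggingface_hub.
# The repo_id is a placeholder; the real destination repo is not shown on this page.
from huggingface_hub import HfApi

api = HfApi()  # reads the token from `huggingface-cli login` or the HF_TOKEN env var
api.upload_file(
    path_or_fileobj="benchmark.json",  # local file produced by optimum-benchmark
    path_in_repo="cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json",
    repo_id="<namespace>/<benchmark-repo>",  # placeholder
    repo_type="dataset",  # assumption: results are stored in a dataset repo
    commit_message="Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
)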

cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -6,19 +6,17 @@
   "version": "2.2.2+rocm5.7",
   "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
   "task": "text-classification",
- "model": "FacebookAI/roberta-base",
   "library": "transformers",
+ "model": "FacebookAI/roberta-base",
+ "processor": "FacebookAI/roberta-base",
   "device": "cuda",
   "device_ids": "0",
   "seed": 42,
   "inter_op_num_threads": null,
   "intra_op_num_threads": null,
- "hub_kwargs": {
- "revision": "main",
- "force_download": false,
- "local_files_only": false,
- "trust_remote_code": false
- },
+ "model_kwargs": {},
+ "processor_kwargs": {},
+ "hub_kwargs": {},
   "no_weights": true,
   "device_map": null,
   "torch_dtype": null,
@@ -85,7 +83,7 @@
   "gpu_count": 1,
   "gpu_vram_mb": 68702699520,
   "optimum_benchmark_version": "0.2.0",
- "optimum_benchmark_commit": "3e2eebdc0f80ae6deeb2e1faad3e889ed5a4df2d",
+ "optimum_benchmark_commit": "6fd377459e287bb09e9383ba2516b1b2a271a562",
   "transformers_version": "4.40.2",
   "transformers_commit": null,
   "accelerate_version": "0.30.1",
@@ -104,167 +102,174 @@
   "forward": {
   "memory": {
   "unit": "MB",
- "max_ram": 1006.084096,
+ "max_ram": 1007.775744,
   "max_global_vram": 897.14688,
- "max_process_vram": 237518.712832,
+ "max_process_vram": 207166.001152,
   "max_reserved": 555.74528,
   "max_allocated": 499.443712
   },
   "latency": {
   "unit": "s",
- "count": 139,
- "total": 0.9977614045143125,
- "mean": 0.0071781396008223945,
- "stdev": 0.00037241223977587106,
- "p50": 0.0069807348251342775,
- "p90": 0.007632312965393066,
- "p95": 0.007899845886230468,
- "p99": 0.00824808925628662,
+ "count": 146,
+ "total": 0.9992842464447022,
+ "mean": 0.0068444126468815225,
+ "stdev": 0.00033736598203322157,
+ "p50": 0.006653344392776489,
+ "p90": 0.0071704633235931394,
+ "p95": 0.007282543063163757,
+ "p99": 0.007604917860031129,
   "values": [
- 0.007517210006713867,
- 0.007071774005889892,
- 0.0069544949531555175,
- 0.007124413967132568,
- 0.00816184425354004,
- 0.008229043006896973,
- 0.008213204383850098,
- 0.008209362983703613,
- 0.007926486015319823,
- 0.007580090045928955,
- 0.0075224900245666505,
- 0.0074044098854064945,
- 0.00734713077545166,
- 0.0073431310653686525,
- 0.007378170967102051,
- 0.007334331035614014,
- 0.007396410942077636,
- 0.00734201192855835,
- 0.007269213199615479,
- 0.007616249084472657,
- 0.007276092052459717,
- 0.007333531856536866,
- 0.007116734027862549,
- 0.006879294872283935,
- 0.006884416103363037,
- 0.0068624958992004395,
- 0.00707081413269043,
- 0.006875454902648926,
- 0.007191133022308349,
- 0.006972734928131104,
- 0.006916094779968262,
- 0.006898335933685303,
- 0.006901216030120849,
- 0.006880415916442871,
- 0.006888895988464355,
- 0.009095754623413086,
- 0.00825976276397705,
- 0.006931135177612305,
- 0.006879776000976562,
- 0.006910816192626953,
- 0.0069144959449768065,
- 0.006955134868621826,
- 0.0072356128692626955,
- 0.0069383358955383305,
- 0.006967295169830323,
- 0.006934335231781006,
- 0.006948414802551269,
- 0.006955295085906982,
- 0.006953215122222901,
- 0.0069807348251342775,
- 0.006967134952545166,
- 0.006957534790039062,
- 0.006950174808502197,
- 0.006950815200805664,
- 0.006961054801940918,
- 0.006933216094970703,
- 0.006942015171051025,
- 0.0069709749221801755,
- 0.0069684147834777836,
- 0.006953054904937744,
- 0.0069564151763916015,
- 0.006941534996032715,
- 0.0069404149055480955,
- 0.006955134868621826,
- 0.006934335231781006,
- 0.0069709749221801755,
- 0.0069682540893554685,
- 0.007663289070129395,
- 0.006988414764404297,
- 0.006953855037689209,
- 0.006974174976348877,
- 0.006964415073394775,
- 0.00698713493347168,
- 0.006940255165100098,
- 0.0069752950668334965,
- 0.006982494831085205,
- 0.006957214832305908,
- 0.006957375049591065,
- 0.006972575187683105,
- 0.006966495037078858,
- 0.006954975128173828,
- 0.00692601490020752,
- 0.00697129487991333,
- 0.006966014862060547,
- 0.006943934917449951,
- 0.006915295124053955,
- 0.006927454948425293,
- 0.0069684147834777836,
- 0.006945375919342041,
- 0.006923935890197754,
- 0.006936735153198243,
- 0.006957534790039062,
- 0.006940094947814941,
- 0.0069655351638793946,
- 0.0069880948066711425,
- 0.006982813835144043,
- 0.007026334762573242,
- 0.006950174808502197,
- 0.0069807348251342775,
- 0.0069636149406433106,
- 0.00696521520614624,
- 0.00692985486984253,
- 0.006981214046478271,
- 0.006934014797210694,
- 0.007089533805847168,
- 0.007843926906585693,
- 0.00788088607788086,
- 0.007802327156066894,
- 0.007369051933288575,
- 0.007359131813049317,
- 0.007342331886291504,
- 0.007254332065582275,
- 0.007254012107849121,
- 0.007870165824890137,
- 0.007896885871887208,
- 0.00788792610168457,
- 0.007624568939208984,
- 0.007461050987243653,
- 0.007478010177612304,
- 0.007508090019226074,
- 0.007497529983520507,
- 0.007411931037902832,
- 0.007353210926055908,
- 0.007398489952087402,
- 0.007403450965881348,
- 0.007517048835754395,
- 0.007568728923797607,
- 0.007441530227661133,
- 0.006974494934082031,
- 0.007036894798278809,
- 0.006936416149139404,
- 0.007026334762573242,
- 0.007043614864349366,
- 0.006990815162658691,
- 0.007006333827972412,
- 0.006991134166717529,
- 0.006990654945373535,
- 0.007054173946380615,
- 0.0069980149269104
+ 0.006918064117431641,
+ 0.007029102802276611,
+ 0.007152944087982178,
+ 0.006964622974395752,
+ 0.007236303806304932,
+ 0.007088462829589844,
+ 0.007127984046936035,
+ 0.007232143878936768,
+ 0.007292782783508301,
+ 0.007223503112792969,
+ 0.00718798303604126,
+ 0.007133743762969971,
+ 0.007087663173675537,
+ 0.00706382417678833,
+ 0.007042063236236572,
+ 0.007122543811798096,
+ 0.007009744167327881,
+ 0.007002543926239013,
+ 0.007245742797851563,
+ 0.006998383998870849,
+ 0.006989583969116211,
+ 0.007004623889923096,
+ 0.007168943881988525,
+ 0.006998544216156006,
+ 0.006997104167938232,
+ 0.006998223781585693,
+ 0.00698414421081543,
+ 0.006962543964385987,
+ 0.006985583782196045,
+ 0.006987984180450439,
+ 0.00703934383392334,
+ 0.0070198230743408205,
+ 0.0070985441207885745,
+ 0.00706894302368164,
+ 0.007102863788604736,
+ 0.007087983131408691,
+ 0.007055503845214844,
+ 0.007051183223724365,
+ 0.007036464214324951,
+ 0.007053584098815918,
+ 0.007057263851165772,
+ 0.007028943061828614,
+ 0.007054384231567383,
+ 0.007629901885986328,
+ 0.009487338066101074,
+ 0.006663185119628906,
+ 0.006615503787994385,
+ 0.006653104782104492,
+ 0.006624144077301025,
+ 0.006624625205993652,
+ 0.006623505115509033,
+ 0.006619823932647705,
+ 0.006612464904785156,
+ 0.0066391839981079104,
+ 0.0066057448387146,
+ 0.0066187047958374025,
+ 0.006615824222564697,
+ 0.006615345001220703,
+ 0.006597905158996582,
+ 0.006640625,
+ 0.0066175851821899416,
+ 0.0066251039505004885,
+ 0.006625424861907959,
+ 0.00663630485534668,
+ 0.0066390252113342286,
+ 0.006674705028533935,
+ 0.006625264167785644,
+ 0.00732990312576294,
+ 0.006650064945220947,
+ 0.006653584003448486,
+ 0.006635984897613525,
+ 0.006619344234466553,
+ 0.006628465175628662,
+ 0.006609745025634765,
+ 0.006622223854064941,
+ 0.006624145030975342,
+ 0.006631184101104737,
+ 0.006604625225067139,
+ 0.006635664939880371,
+ 0.006852143764495849,
+ 0.006651504039764404,
+ 0.006669425010681153,
+ 0.00663102388381958,
+ 0.006626224994659423,
+ 0.006619345188140869,
+ 0.006622543811798096,
+ 0.006588144779205322,
+ 0.006623504161834717,
+ 0.006612945079803467,
+ 0.006645744800567627,
+ 0.006630224227905273,
+ 0.006673425197601318,
+ 0.006667984008789062,
+ 0.0066297450065612796,
+ 0.006641263961791992,
+ 0.00665102481842041,
+ 0.006656464099884033,
+ 0.006607824802398681,
+ 0.006591344833374023,
+ 0.006606544017791748,
+ 0.006615345001220703,
+ 0.006618384838104248,
+ 0.006600304126739502,
+ 0.006599024772644043,
+ 0.0066276640892028804,
+ 0.006648145198822022,
+ 0.006631824016571045,
+ 0.006607824802398681,
+ 0.006624625205993652,
+ 0.006631824016571045,
+ 0.006592784881591797,
+ 0.006606385231018066,
+ 0.0066156649589538576,
+ 0.006617744922637939,
+ 0.006616623878479004,
+ 0.006602224826812744,
+ 0.007171982765197754,
+ 0.007283183097839355,
+ 0.007023663997650147,
+ 0.006892623901367187,
+ 0.006971663951873779,
+ 0.007280622959136963,
+ 0.007574381828308105,
+ 0.007570542812347412,
+ 0.007541741847991943,
+ 0.007143663883209229,
+ 0.00696062421798706,
+ 0.007090864181518555,
+ 0.007088624000549317,
+ 0.00674430513381958,
+ 0.0067118239402771,
+ 0.0067790250778198245,
+ 0.0067556648254394535,
+ 0.006706863880157471,
+ 0.0067460651397705075,
+ 0.006621904850006103,
+ 0.0065900650024414064,
+ 0.006601103782653809,
+ 0.006603344917297364,
+ 0.006604625225067139,
+ 0.006611663818359375,
+ 0.006619665145874023,
+ 0.006598865032196045,
+ 0.006625583171844482,
+ 0.006638384819030761,
+ 0.006608784198760986
   ]
   },
   "throughput": {
   "unit": "samples/s",
- "value": 139.3118629074072
+ "value": 146.1045748689077
   },
   "energy": null,
   "efficiency": null