IlyasMoutawwakil committed
Commit 2442020
1 Parent(s): 3af6d14

Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub

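As the commit message says, the report was pushed with the huggingface_hub client. A minimal sketch of such an upload is shown below; the dataset repo id and local file path are illustrative assumptions, not values taken from this commit.

from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` or the HF_TOKEN env var

# Upload the local benchmark report into a results dataset repo
# (repo_id and path_or_fileobj are assumed here for illustration).
api.upload_file(
    path_or_fileobj="benchmark.json",
    path_in_repo="cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json",
    repo_id="<user-or-org>/<benchmark-results-dataset>",
    repo_type="dataset",
    commit_message="Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
)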
cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -11,7 +11,7 @@
 "model": "FacebookAI/roberta-base",
 "processor": "FacebookAI/roberta-base",
 "device": "cuda",
- "device_ids": "6",
+ "device_ids": "5",
 "seed": 42,
 "inter_op_num_threads": null,
 "intra_op_num_threads": null,
@@ -105,30 +105,32 @@
 "timm_commit": null,
 "peft_version": "0.13.0",
 "peft_commit": null
- }
+ },
+ "print_report": true,
+ "log_report": true
 },
 "report": {
 "load": {
 "memory": {
 "unit": "MB",
- "max_ram": 1105.825792,
+ "max_ram": 1106.812928,
 "max_global_vram": 68702.69952,
- "max_process_vram": 4162.883584,
+ "max_process_vram": 4501.004288,
 "max_reserved": 555.74528,
 "max_allocated": 499.374592
 },
 "latency": {
 "unit": "s",
 "values": [
- 0.08647783660888672
+ 0.08644545745849609
 ],
 "count": 1,
- "total": 0.08647783660888672,
- "mean": 0.08647783660888672,
- "p50": 0.08647783660888672,
- "p90": 0.08647783660888672,
- "p95": 0.08647783660888672,
- "p99": 0.08647783660888672,
+ "total": 0.08644545745849609,
+ "mean": 0.08644545745849609,
+ "p50": 0.08644545745849609,
+ "p90": 0.08644545745849609,
+ "p95": 0.08644545745849609,
+ "p99": 0.08644545745849609,
 "stdev": 0,
 "stdev_": 0
 },
@@ -139,172 +141,169 @@
 "forward": {
 "memory": {
 "unit": "MB",
- "max_ram": 1228.632064,
+ "max_ram": 1229.778944,
 "max_global_vram": 68702.69952,
- "max_process_vram": 234972.008448,
+ "max_process_vram": 230355.349504,
 "max_reserved": 555.74528,
 "max_allocated": 499.442688
 },
 "latency": {
 "unit": "s",
 "values": [
- 0.007502727031707764,
- 0.007381126880645752,
- 0.007342886924743652,
- 0.007351847171783448,
- 0.00735936689376831,
- 0.007281766891479492,
- 0.007329287052154541,
- 0.007330566883087158,
- 0.007325286865234375,
- 0.007322086811065674,
- 0.007067047119140625,
- 0.006927525997161866,
- 0.0069176068305969235,
- 0.006586246013641358,
- 0.006952966213226318,
- 0.006857767105102539,
- 0.0069971270561218265,
- 0.006864165782928467,
- 0.0068582468032836914,
- 0.006636166095733643,
- 0.006637286186218261,
- 0.006654406070709228,
- 0.006707046985626221,
- 0.006612805843353271,
- 0.006650725841522217,
- 0.006631046772003174,
- 0.006625926017761231,
- 0.006670405864715576,
- 0.006661126136779785,
- 0.006646246910095215,
- 0.0066553659439086916,
- 0.006701926231384278,
- 0.006618247032165527,
- 0.006624485969543457,
- 0.006669285774230957,
- 0.006453606128692627,
- 0.006443686008453369,
- 0.006429446220397949,
- 0.006464486122131348,
- 0.00644416618347168,
- 0.006443046092987061,
- 0.006438886165618897,
- 0.006692326068878174,
- 0.006661927223205567,
- 0.0066622462272644045,
- 0.006647046089172363,
- 0.006459365844726563,
- 0.0066380867958068844,
- 0.006442726135253906,
- 0.006465126037597657,
- 0.006449925899505615,
- 0.006585926055908203,
- 0.006479045867919922,
- 0.006441286087036133,
- 0.0064638471603393555,
- 0.006429605960845948,
- 0.006668806076049805,
- 0.006459206104278564,
- 0.006424645900726319,
- 0.006429446220397949,
- 0.006436806201934814,
- 0.006429286003112793,
- 0.006486405849456787,
- 0.00647856616973877,
- 0.006625926017761231,
- 0.0064616060256958005,
- 0.006437925815582275,
- 0.006450885772705078,
- 0.006469446182250977,
- 0.006455845832824707,
- 0.006476326942443848,
- 0.006513606071472168,
- 0.006476645946502686,
- 0.006500006198883057,
- 0.006466725826263428,
- 0.006484325885772705,
- 0.00648080587387085,
- 0.0066380867958068844,
- 0.006486405849456787,
- 0.0064609661102294925,
- 0.006700806140899658,
- 0.0069782471656799315,
- 0.006737765789031983,
- 0.0068553671836853024,
- 0.007530247211456299,
- 0.007588807106018066,
- 0.007604327201843262,
- 0.0075329670906066895,
- 0.007506247997283936,
- 0.007552647113800049,
- 0.007587367057800293,
- 0.007572007179260254,
- 0.007553926944732666,
- 0.007510247230529785,
- 0.00755200719833374,
- 0.007524006843566895,
- 0.007534407138824463,
- 0.007526566982269287,
- 0.007514566898345947,
- 0.007517446994781494,
- 0.0075065670013427736,
- 0.007571207046508789,
- 0.007514886856079102,
- 0.0075041670799255375,
- 0.007524167060852051,
- 0.0075022468566894535,
- 0.007552166938781738,
- 0.007553446769714356,
- 0.007527687072753906,
- 0.0075067272186279295,
- 0.007535847187042236,
- 0.007523847103118896,
- 0.007527846813201905,
- 0.007490567207336426,
- 0.007560166835784912,
- 0.0075115270614624025,
- 0.0075243268013000485,
- 0.007529128074645996,
- 0.007511046886444092,
- 0.007493927001953125,
- 0.007481767177581787,
- 0.0075065670013427736,
- 0.007503847122192383,
- 0.007530086994171142,
- 0.007512327194213867,
- 0.007504006862640381,
- 0.007132647037506104,
- 0.007141286849975586,
- 0.006972167015075683,
- 0.007310565948486328,
- 0.007252326965332031,
- 0.007018406867980957,
- 0.007215366840362549,
- 0.007110567092895508,
- 0.007154885768890381,
- 0.007021606922149658,
- 0.006906726837158203,
- 0.007372966766357422,
- 0.00736288595199585,
- 0.007379206180572509,
- 0.007348485946655273,
- 0.007104325771331787,
- 0.006861766815185547
+ 0.00786269187927246,
+ 0.007625093936920166,
+ 0.007648773193359375,
+ 0.007510054111480713,
+ 0.007558534145355225,
+ 0.007559174060821533,
+ 0.007542532920837402,
+ 0.007406854152679444,
+ 0.007292134761810303,
+ 0.007544293880462647,
+ 0.0075194940567016606,
+ 0.007346694946289063,
+ 0.007280294895172119,
+ 0.0070863752365112305,
+ 0.0072391748428344725,
+ 0.007056775093078613,
+ 0.00665805721282959,
+ 0.006823336124420166,
+ 0.006839655876159668,
+ 0.006652457237243652,
+ 0.006673417091369629,
+ 0.006634056091308594,
+ 0.015959625244140626,
+ 0.007784933090209961,
+ 0.0068436570167541505,
+ 0.006670055866241455,
+ 0.006703816890716553,
+ 0.006674057006835938,
+ 0.006636456966400146,
+ 0.006674376964569092,
+ 0.006665896892547607,
+ 0.006860616207122802,
+ 0.006884615898132324,
+ 0.0067439770698547365,
+ 0.006737095832824707,
+ 0.006877255916595459,
+ 0.006686697006225586,
+ 0.0066943769454956056,
+ 0.006840136051177979,
+ 0.006896615982055664,
+ 0.006785416126251221,
+ 0.006655177116394043,
+ 0.006691176891326905,
+ 0.006650537014007568,
+ 0.007569893836975098,
+ 0.0074986939430236816,
+ 0.007269895076751709,
+ 0.00746877384185791,
+ 0.007498213768005371,
+ 0.007530693054199219,
+ 0.007342215061187744,
+ 0.007987971782684327,
+ 0.007159335136413574,
+ 0.007427333831787109,
+ 0.007236614227294922,
+ 0.007094855785369873,
+ 0.00723725414276123,
+ 0.007346214771270752,
+ 0.0073358149528503416,
+ 0.007063974857330322,
+ 0.007230374813079834,
+ 0.007039334774017334,
+ 0.007038536071777343,
+ 0.007324773788452149,
+ 0.00714925479888916,
+ 0.007012455940246582,
+ 0.007372614860534668,
+ 0.007240774154663086,
+ 0.0070567760467529295,
+ 0.00738573408126831,
+ 0.0072687740325927735,
+ 0.007611333847045898,
+ 0.007585573196411133,
+ 0.007543653964996338,
+ 0.007515974044799804,
+ 0.007475813865661621,
+ 0.00718093490600586,
+ 0.007153894901275635,
+ 0.007159655094146729,
+ 0.007019336223602295,
+ 0.007136616230010986,
+ 0.007511973857879639,
+ 0.0075678138732910155,
+ 0.007555333137512207,
+ 0.007608774185180664,
+ 0.007562693119049072,
+ 0.007561254024505615,
+ 0.007553893089294434,
+ 0.007530694007873535,
+ 0.0071700549125671385,
+ 0.006874855995178223,
+ 0.007027975082397461,
+ 0.007218854904174805,
+ 0.007025094985961914,
+ 0.007615653991699218,
+ 0.007513733863830566,
+ 0.007581254005432129,
+ 0.007377574920654297,
+ 0.0071700549125671385,
+ 0.007388615131378174,
+ 0.007245093822479248,
+ 0.007039815902709961,
+ 0.007196935176849365,
+ 0.006930215835571289,
+ 0.0067084569931030276,
+ 0.006732135772705078,
+ 0.006740937232971192,
+ 0.006730535984039307,
+ 0.006701577186584473,
+ 0.00668557596206665,
+ 0.006853096008300781,
+ 0.006802056789398193,
+ 0.006728456974029541,
+ 0.0067393360137939454,
+ 0.006723816871643066,
+ 0.0068078160285949705,
+ 0.006782536029815674,
+ 0.006732457160949707,
+ 0.006773416042327881,
+ 0.006743337154388428,
+ 0.006726377010345459,
+ 0.00674669599533081,
+ 0.006764616966247559,
+ 0.0067436561584472655,
+ 0.0067503762245178225,
+ 0.006740777015686035,
+ 0.006725416183471679,
+ 0.006758216857910156,
+ 0.006732776165008545,
+ 0.006680777072906494,
+ 0.006727175235748291,
+ 0.006689416885375977,
+ 0.006753736019134522,
+ 0.006716136932373047,
+ 0.00671725606918335,
+ 0.00669901704788208,
+ 0.006740456104278565,
+ 0.006715657234191895,
+ 0.0067343759536743165,
+ 0.006710696220397949
 ],
- "count": 143,
- "total": 0.9994489393234255,
- "mean": 0.006989153421842135,
- "p50": 0.006927525997161866,
- "p90": 0.007530215167999268,
- "p95": 0.007553366804122925,
- "p99": 0.007588202285766601,
- "stdev": 0.0004424341285815792,
- "stdev_": 6.3302964161425805
+ "count": 140,
+ "total": 0.9999337997436525,
+ "mean": 0.00714238428388323,
+ "p50": 0.007039575338363647,
+ "p90": 0.007561397933959961,
+ "p95": 0.007611549854278564,
+ "p99": 0.007939112620353697,
+ "stdev": 0.0008265530341704439,
+ "stdev_": 11.572508581420891
 },
 "throughput": {
 "unit": "samples/s",
- "value": 143.07884512519823
+ "value": 140.00926864947567
 },
 "energy": null,
 "efficiency": null