Commit 1e91be2 (verified) · 1 parent: 2b9794f · committed by IlyasMoutawwakil

Upload cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub

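The commit message states that the file was pushed with the huggingface_hub client. As a rough illustration only, an upload along these lines could produce such a commit; the local path, the target repo_id, the repo_type, and the token handling below are assumptions and are not shown anywhere on this page.

from huggingface_hub import HfApi

api = HfApi()  # assumes a token is already configured via `huggingface-cli login` or the HF_TOKEN env var

# Hypothetical local path and repo_id; neither appears in this commit.
api.upload_file(
    path_or_fileobj="benchmark.json",
    path_in_repo="cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json",
    repo_id="<namespace>/<results-repo>",  # placeholder: the receiving repo is not named here
    repo_type="dataset",                   # assumption: benchmark result repos are often datasets
    commit_message=(
        "Upload cuda_inference_transformers_multiple-choice_FacebookAI/"
        "roberta-base/benchmark.json with huggingface_hub"
    ),
)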
cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -85,7 +85,7 @@
  "gpu_count": 1,
  "gpu_vram_mb": 68702699520,
  "optimum_benchmark_version": "0.2.0",
- "optimum_benchmark_commit": "ebd20fcf042acf5db8d8956e7057fa93c82e14ab",
+ "optimum_benchmark_commit": "2d3261cf9b992810a685ce736f41254a91cd091e",
  "transformers_version": "4.40.2",
  "transformers_commit": null,
  "accelerate_version": "0.30.1",
@@ -104,160 +104,161 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 1006.546944,
- "max_global_vram": 898.469888,
- "max_process_vram": 236416.294912,
+ "max_ram": 1005.867008,
+ "max_global_vram": 898.47808,
+ "max_process_vram": 218933.284864,
  "max_reserved": 555.74528,
  "max_allocated": 499.507712
  },
  "latency": {
  "unit": "s",
- "count": 132,
- "total": 0.9964992427825929,
- "mean": 0.007549236687746916,
- "stdev": 0.0004251598062794823,
- "p50": 0.0072824816703796385,
- "p90": 0.0081010103225708,
- "p95": 0.008159554147720337,
- "p99": 0.009257718667984008,
+ "count": 133,
+ "total": 0.9991743364334102,
+ "mean": 0.007512588995739929,
+ "stdev": 0.0003207846587929703,
+ "p50": 0.007383348941802978,
+ "p90": 0.00788853187561035,
+ "p95": 0.00823525161743164,
+ "p99": 0.008366124954223633,
  "values": [
- 0.00929824161529541,
- 0.009455041885375976,
- 0.008008001327514648,
- 0.00802848243713379,
- 0.007899840831756592,
- 0.007907201766967773,
- 0.007836321830749511,
- 0.007506721019744873,
- 0.007251041889190674,
- 0.007262081146240234,
- 0.007232000827789306,
- 0.00723872184753418,
- 0.007238881111145019,
- 0.007213602066040039,
- 0.007223361015319825,
- 0.00723104190826416,
- 0.007251040935516357,
- 0.007262241840362549,
- 0.007239840984344483,
- 0.007227042198181152,
- 0.007244640827178955,
- 0.007234401226043701,
- 0.007252322196960449,
- 0.007602880954742432,
- 0.007626242160797119,
- 0.007635361194610596,
- 0.007591842174530029,
- 0.00734912109375,
- 0.008022722244262695,
- 0.009167522430419921,
- 0.008101442337036132,
- 0.008074721336364747,
- 0.008103841781616212,
- 0.008073921203613282,
- 0.008097122192382813,
- 0.008123842239379883,
- 0.008085761070251465,
- 0.008103201866149903,
- 0.008103522300720215,
- 0.008065920829772949,
- 0.008149762153625489,
- 0.00805648136138916,
- 0.00769008207321167,
- 0.007865121841430664,
- 0.007795361042022705,
- 0.008024962425231933,
- 0.007694080829620361,
- 0.007764801979064941,
- 0.007459681034088134,
- 0.007646242141723633,
- 0.007608961105346679,
- 0.007506562232971191,
- 0.007235520839691162,
- 0.007227042198181152,
- 0.007243521213531494,
- 0.00721296215057373,
- 0.007234880924224854,
- 0.007266561985015869,
- 0.007251040935516357,
- 0.007202881813049317,
- 0.007246080875396728,
- 0.007244161128997803,
- 0.00726928186416626,
- 0.007250561237335205,
- 0.00722720193862915,
- 0.007222880840301513,
- 0.0072244820594787595,
- 0.0073214411735534664,
- 0.0072648019790649415,
- 0.007244640827178955,
- 0.007244162082672119,
- 0.0072377610206604,
- 0.007254880905151367,
- 0.0072526421546936035,
- 0.007266880989074707,
- 0.007251522064208984,
- 0.0072356810569763185,
- 0.007242722034454346,
- 0.007298241138458252,
- 0.007282882213592529,
- 0.007282081127166748,
- 0.007229762077331543,
- 0.007250561237335205,
- 0.0072721619606018065,
- 0.00723504114151001,
- 0.0072411208152770995,
- 0.007263842105865478,
- 0.007270720958709717,
- 0.007275042057037354,
- 0.007253760814666748,
- 0.007273441791534424,
- 0.007252800941467285,
- 0.007195042133331299,
- 0.007222880840301513,
- 0.007238081932067871,
- 0.007244161128997803,
- 0.007227200984954834,
- 0.007229762077331543,
- 0.007199201107025146,
- 0.007176161766052246,
- 0.007939520835876466,
- 0.008036481857299804,
- 0.00759968090057373,
- 0.007595042228698731,
- 0.007559361934661865,
- 0.00756256103515625,
- 0.0075352020263671875,
- 0.007578560829162598,
- 0.007578882217407226,
- 0.007845601081848145,
- 0.00817152214050293,
- 0.008226241111755371,
- 0.008213281631469727,
- 0.007631522178649902,
- 0.007635041236877442,
- 0.007659361839294433,
- 0.007664000988006592,
- 0.007651202201843262,
- 0.007583200931549072,
- 0.0076104021072387696,
- 0.0074228811264038085,
- 0.007220481872558594,
- 0.007220160961151123,
- 0.007221762180328369,
- 0.007217761039733887,
- 0.0072171220779418946,
- 0.0072377610206604,
- 0.007397602081298828,
- 0.008137600898742676,
- 0.00807984161376953,
- 0.008172640800476074,
- 0.0075536007881164555
+ 0.007864148139953613,
+ 0.0074025487899780276,
+ 0.007426389217376709,
+ 0.007471028804779053,
+ 0.007604628086090088,
+ 0.00754750919342041,
+ 0.007544628143310547,
+ 0.007523348808288574,
+ 0.007529748916625977,
+ 0.007440468788146973,
+ 0.007399028778076172,
+ 0.007434069156646728,
+ 0.0074454288482666015,
+ 0.00738398790359497,
+ 0.007416308879852295,
+ 0.007322868824005127,
+ 0.007299028873443604,
+ 0.007541429042816162,
+ 0.007322709083557129,
+ 0.007327668190002442,
+ 0.007355188846588135,
+ 0.007377589225769043,
+ 0.0073507089614868164,
+ 0.007509909152984619,
+ 0.00743582820892334,
+ 0.007391829013824463,
+ 0.007398068904876709,
+ 0.007324628829956055,
+ 0.008200307846069337,
+ 0.009608466148376465,
+ 0.007309908866882324,
+ 0.007427029132843018,
+ 0.007235349178314209,
+ 0.007300148963928223,
+ 0.007340788841247559,
+ 0.00733790922164917,
+ 0.007352309226989746,
+ 0.007592948913574218,
+ 0.0073459091186523435,
+ 0.007322709083557129,
+ 0.007298708915710449,
+ 0.007339669227600098,
+ 0.007357268810272217,
+ 0.007337109088897705,
+ 0.007375349044799804,
+ 0.007324628829956055,
+ 0.007375188827514648,
+ 0.007324789047241211,
+ 0.007515509128570557,
+ 0.007329588890075684,
+ 0.007327029228210449,
+ 0.007357109069824219,
+ 0.007318229198455811,
+ 0.007333428859710694,
+ 0.007320629119873047,
+ 0.007268788814544677,
+ 0.007351189136505127,
+ 0.007389109134674072,
+ 0.007321428775787353,
+ 0.007313748836517334,
+ 0.007359669208526611,
+ 0.0073102288246154785,
+ 0.00734830904006958,
+ 0.007323348999023437,
+ 0.007827668190002442,
+ 0.008315667152404786,
+ 0.008324148178100586,
+ 0.008355027198791503,
+ 0.007858707904815673,
+ 0.007389429092407226,
+ 0.007378549098968506,
+ 0.0073335890769958495,
+ 0.007348789215087891,
+ 0.007342389106750488,
+ 0.0073662281036376955,
+ 0.007346068859100342,
+ 0.007329109191894531,
+ 0.007338869094848633,
+ 0.007419508934020996,
+ 0.007401109218597412,
+ 0.007402547836303711,
+ 0.007378708839416504,
+ 0.007356948852539062,
+ 0.007383988857269287,
+ 0.007412629127502441,
+ 0.007367029190063477,
+ 0.007411027908325195,
+ 0.007383348941802978,
+ 0.007605908870697021,
+ 0.00760126781463623,
+ 0.007392148971557617,
+ 0.00735870885848999,
+ 0.007385428905487061,
+ 0.007335669040679932,
+ 0.007379188060760498,
+ 0.007363829135894775,
+ 0.007375988960266113,
+ 0.007335349082946778,
+ 0.007362709045410156,
+ 0.0073617491722106935,
+ 0.007352309226989746,
+ 0.007332949161529541,
+ 0.007377267837524414,
+ 0.007369268894195557,
+ 0.00733246898651123,
+ 0.007397109031677246,
+ 0.008050228118896484,
+ 0.008300626754760742,
+ 0.0081115083694458,
+ 0.007626388072967529,
+ 0.007740468978881836,
+ 0.007899669170379639,
+ 0.007881107807159424,
+ 0.007889907836914062,
+ 0.008287667274475097,
+ 0.008371347427368165,
+ 0.008131987571716308,
+ 0.007883028030395508,
+ 0.007894067764282227,
+ 0.007843987941741943,
+ 0.007831668853759766,
+ 0.007756467819213867,
+ 0.007754067897796631,
+ 0.007639029026031494,
+ 0.007387349128723145,
+ 0.007381109237670899,
+ 0.007404308795928955,
+ 0.007370549201965332,
+ 0.007364949226379394,
+ 0.007338708877563477,
+ 0.007425908088684082,
+ 0.007412468910217285,
+ 0.007420787811279297
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 132.46372333551142
+ "value": 133.10990399808344
  },
  "energy": null,
  "efficiency": null