IlyasMoutawwakil (HF staff) committed on
Commit 452f62a · verified · 1 Parent(s): 58bfff6

Upload cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub
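The commit message indicates the file was pushed with the huggingface_hub client. A minimal sketch of how such an upload is typically done with that library, assuming a dataset repo and a local result file (the repo_id and local path below are illustrative placeholders, not taken from this commit):

    # Hypothetical upload sketch; repo_id and the local file path are assumptions.
    from huggingface_hub import HfApi

    api = HfApi()  # uses the token saved by `huggingface-cli login` by default
    api.upload_file(
        path_or_fileobj="benchmark.json",  # local result file (assumed location)
        path_in_repo="cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json",
        repo_id="<namespace>/<results-repo>",  # placeholder, not the actual repo
        repo_type="dataset",                   # assumed: results are stored in a dataset repo
        commit_message="Upload cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
    )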

cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -85,7 +85,7 @@
  "gpu_count": 1,
  "gpu_vram_mb": 68702699520,
  "optimum_benchmark_version": "0.3.0",
- "optimum_benchmark_commit": "57f6495c03ea0fa48e157048c97add150dcd765c",
+ "optimum_benchmark_commit": "2a75c0bc0d007cc875fa0f75ca41d02e46f917be",
  "transformers_version": "4.42.3",
  "transformers_commit": null,
  "accelerate_version": "0.31.0",
@@ -104,164 +104,147 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 1005.883392,
- "max_global_vram": 898.473984,
- "max_process_vram": 175601.803264,
+ "max_ram": 1004.384256,
+ "max_global_vram": 898.486272,
+ "max_process_vram": 200689.958912,
  "max_reserved": 555.74528,
  "max_allocated": 499.507712
  },
  "latency": {
  "unit": "s",
- "count": 136,
- "total": 0.9973448486328123,
- "mean": 0.007333418004653033,
- "stdev": 0.0002452375024910205,
- "p50": 0.0072775003910064695,
- "p90": 0.007556140184402466,
- "p95": 0.007932978630065918,
- "p99": 0.008247482204437256,
+ "count": 119,
+ "total": 0.9991010742187496,
+ "mean": 0.00839580734637605,
+ "stdev": 0.0005026137789401673,
+ "p50": 0.008358707427978515,
+ "p90": 0.008833650970458984,
+ "p95": 0.009058433723449706,
+ "p99": 0.009747294559478759,
  "values": [
- 0.007991658210754395,
- 0.007382541179656982,
- 0.007444620132446289,
- 0.007689579010009766,
- 0.007338059902191162,
- 0.007279500961303711,
- 0.00726669979095459,
- 0.007239020824432373,
- 0.007247660160064697,
- 0.007199660778045654,
- 0.007185900211334228,
- 0.007147181034088135,
- 0.0071585412025451664,
- 0.007118220806121826,
- 0.0071681408882141116,
- 0.0071425409317016604,
- 0.007184940814971924,
- 0.007171501159667968,
- 0.0071511797904968265,
- 0.007159501075744629,
- 0.007126380920410156,
- 0.007233420848846436,
- 0.0071494197845458985,
- 0.0071358208656311034,
- 0.00718350076675415,
- 0.007194221019744873,
- 0.007153100967407226,
- 0.007169421195983887,
- 0.007136940956115723,
- 0.0071663808822631835,
- 0.007172461032867432,
- 0.0071308608055114745,
- 0.0071425409317016604,
- 0.007142701148986816,
- 0.007135980129241944,
- 0.007131980895996094,
- 0.007259500980377197,
- 0.007183020114898682,
- 0.007249900817871094,
- 0.007343820095062256,
- 0.007303979873657227,
- 0.007249101161956787,
- 0.007249101161956787,
- 0.008466217041015625,
- 0.0073807802200317385,
- 0.007267021179199219,
- 0.0072534198760986325,
- 0.0072252612113952635,
- 0.007251339912414551,
- 0.00721566104888916,
- 0.007277740001678467,
- 0.007232301235198975,
- 0.007254860877990723,
- 0.007266221046447754,
- 0.007273260116577148,
- 0.0072441411018371584,
- 0.007243020057678223,
- 0.007234220981597901,
- 0.007507500171661377,
- 0.007261899948120118,
- 0.007274381160736084,
- 0.007227340221405029,
- 0.007277901172637939,
- 0.007247340202331543,
- 0.007251020908355713,
- 0.007346380233764649,
- 0.007283979892730713,
- 0.007475019931793213,
- 0.007271181106567383,
- 0.0072678208351135256,
- 0.007290219783782959,
- 0.007248140811920166,
- 0.0072649397850036625,
- 0.007254061222076416,
- 0.0072670202255249025,
- 0.007279500961303711,
- 0.0072319798469543456,
- 0.007229741096496582,
- 0.007288619995117187,
- 0.007283181190490723,
- 0.0072913398742675785,
- 0.007268300056457519,
- 0.007295501232147217,
- 0.007283339977264405,
- 0.007267180919647217,
- 0.007245259761810303,
- 0.007284141063690185,
- 0.007304780960083008,
- 0.007285099983215332,
- 0.007305740833282471,
- 0.007313260078430176,
- 0.007315499782562256,
- 0.007348461151123047,
- 0.008228137969970703,
- 0.008257898330688476,
- 0.007913418769836426,
- 0.008070697784423828,
- 0.008225737571716308,
- 0.008199817657470703,
- 0.007908298969268798,
- 0.0077406191825866695,
- 0.007847339153289794,
- 0.007714539051055908,
- 0.007604780197143554,
- 0.0072251009941101075,
- 0.007286059856414795,
- 0.007297101020812988,
- 0.007288461208343506,
- 0.007288459777832031,
- 0.00728334093093872,
- 0.00732013988494873,
- 0.007303659915924072,
- 0.007271501064300537,
- 0.007290540218353271,
- 0.0073009409904479985,
- 0.007292780876159668,
- 0.007309579849243164,
- 0.0072844610214233396,
- 0.007316619873046875,
- 0.007289420127868653,
- 0.007294061183929443,
- 0.007288780212402344,
- 0.007314539909362793,
- 0.007316141128540039,
- 0.007300300121307373,
- 0.007292140007019043,
- 0.007285261154174804,
- 0.007308300018310547,
- 0.007240780830383301,
- 0.007253260135650635,
- 0.007298861026763916,
- 0.007277260780334473,
- 0.00732174015045166,
- 0.00731118106842041,
- 0.007331820011138916,
- 0.007318220138549805
+ 0.008812787055969239,
+ 0.009042866706848144,
+ 0.00836126708984375,
+ 0.008697266578674317,
+ 0.00883022689819336,
+ 0.00881646728515625,
+ 0.008869747161865234,
+ 0.009599824905395508,
+ 0.009073266983032227,
+ 0.008847347259521484,
+ 0.008983345985412597,
+ 0.00831038761138916,
+ 0.009056785583496093,
+ 0.008201587677001953,
+ 0.00823822784423828,
+ 0.008279827117919922,
+ 0.008644307136535644,
+ 0.009039346694946288,
+ 0.009092466354370117,
+ 0.009153906822204589,
+ 0.008589587211608886,
+ 0.008672627449035644,
+ 0.012228462219238281,
+ 0.008627026557922364,
+ 0.008368626594543457,
+ 0.008355507850646973,
+ 0.00834590721130371,
+ 0.008354388236999511,
+ 0.008410226821899415,
+ 0.00839982795715332,
+ 0.008357267379760742,
+ 0.008395828247070313,
+ 0.008325587272644043,
+ 0.008339187622070313,
+ 0.00835694694519043,
+ 0.00832766819000244,
+ 0.00835790729522705,
+ 0.008335827827453613,
+ 0.008331506729125977,
+ 0.008327828407287598,
+ 0.008283666610717774,
+ 0.00830750846862793,
+ 0.008365107536315917,
+ 0.008328627586364747,
+ 0.008363348007202148,
+ 0.008369426727294921,
+ 0.008362868309020995,
+ 0.008333746910095214,
+ 0.00840910816192627,
+ 0.008337427139282226,
+ 0.008293428421020509,
+ 0.00829678726196289,
+ 0.008268788337707519,
+ 0.008338228225708007,
+ 0.007617268085479736,
+ 0.0078177490234375,
+ 0.007638868808746338,
+ 0.007815988063812255,
+ 0.007918229103088379,
+ 0.007661268234252929,
+ 0.007821268081665039,
+ 0.008273588180541993,
+ 0.008361268043518066,
+ 0.008364147186279296,
+ 0.00832222843170166,
+ 0.00845950698852539,
+ 0.008381587982177734,
+ 0.00804270839691162,
+ 0.008068147659301758,
+ 0.008088308334350586,
+ 0.007950708866119385,
+ 0.008092307090759277,
+ 0.008368947982788085,
+ 0.008351187705993652,
+ 0.008359188079833985,
+ 0.008330548286437988,
+ 0.008358707427978515,
+ 0.00835182762145996,
+ 0.008415987014770508,
+ 0.008100148200988769,
+ 0.007650228977203369,
+ 0.007685268878936767,
+ 0.007617588996887207,
+ 0.008360466957092284,
+ 0.008371667861938476,
+ 0.008376947402954102,
+ 0.00836606788635254,
+ 0.008382387161254883,
+ 0.008580307006835937,
+ 0.008364788055419922,
+ 0.008341426849365235,
+ 0.008331188201904297,
+ 0.00781198787689209,
+ 0.007510708808898926,
+ 0.008290388107299805,
+ 0.008382387161254883,
+ 0.008420948028564454,
+ 0.008390867233276367,
+ 0.008374387741088868,
+ 0.008369426727294921,
+ 0.00837918758392334,
+ 0.008399348258972168,
+ 0.008442867279052734,
+ 0.008419668197631836,
+ 0.008329266548156739,
+ 0.00977966594696045,
+ 0.008498706817626954,
+ 0.0084119873046875,
+ 0.008668466567993164,
+ 0.008316147804260253,
+ 0.008372146606445313,
+ 0.008405587196350098,
+ 0.008348627090454102,
+ 0.008331027984619141,
+ 0.008405266761779785,
+ 0.008026867866516114,
+ 0.00793182897567749,
+ 0.007958548069000244,
+ 0.007947668075561524
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 136.36206191512647
+ "value": 119.10706841452694
  },
  "energy": null,
  "efficiency": null