Upload cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub
cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json
CHANGED
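The commit title says the file was pushed with the huggingface_hub client. As a minimal sketch (not taken from this repository's tooling), an upload like this is typically done with HfApi.upload_file; the repo_id and repo_type below are placeholders, and only path_in_repo mirrors the path shown above.

```python
# Sketch: pushing a benchmark result file with huggingface_hub.
# repo_id and repo_type are placeholders (assumptions), not read from this commit page.
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from the environment or a cached login
api.upload_file(
    path_or_fileobj="benchmark.json",  # local result produced by the benchmark run
    path_in_repo="cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json",
    repo_id="<namespace>/<benchmark-results>",  # placeholder
    repo_type="dataset",                        # assumption: results are stored in a dataset repo
    commit_message="Upload cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
)
```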
@@ -104,7 +104,7 @@
         "forward": {
             "memory": {
                 "unit": "MB",
-                "max_ram":
+                "max_ram": 907.968512,
                 "max_global_vram": 1195.900928,
                 "max_process_vram": 0.0,
                 "max_reserved": 555.74528,
@@ -112,166 +112,171 @@
             },
             "latency": {
                 "unit": "s",
-                "count":
-                "total":
-                "mean": 0.
-                "stdev": 0.
-                "p50": 0.
-                "p90": 0.
-                "p95": 0.
-                "p99": 0.
+                "count": 140,
+                "total": 1.002176672458649,
+                "mean": 0.007158404803276062,
+                "stdev": 0.0002322019430714897,
+                "p50": 0.0070763518810272215,
+                "p90": 0.007282687902450562,
+                "p95": 0.007918489789962768,
+                "p99": 0.008064624919891357,
                 "values": [
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.007442431926727295,
-                    0.007521279811859131,
-                    0.007543807983398438,
-                    0.007640063762664795,
-                    0.007614463806152344,
-                    0.007633920192718506,
-                    0.00760319995880127,
-                    0.0075939841270446775,
-                    0.007547904014587403,
-                    0.007614463806152344,
-                    0.007571519851684571,
-                    0.007658527851104736,
-                    0.007490560054779053,
-                    0.007513088226318359,
-                    0.007529471874237061,
-                    0.007581696033477783,
-                    0.00743833589553833,
-                    0.007489535808563232,
-                    0.007521279811859131,
-                    0.007511040210723877,
-                    0.007486464023590088,
-                    0.007497727870941162,
-                    0.007588863849639893,
-                    0.0076943039894104,
-                    0.007591936111450195,
-                    0.007612415790557861,
-                    0.0075642881393432615,
-                    0.007519231796264648,
-                    0.007497727870941162,
-                    0.007472127914428711,
-                    0.007401472091674805,
-                    0.007316480159759522,
-                    0.0072837119102478025,
-                    0.0074670081138610836,
-                    0.007444479942321777,
-                    0.0071833600997924804,
-                    0.007292928218841553,
-                    0.007162879943847656,
-                    0.0071792640686035155,
-                    0.007414783954620361,
-                    0.007523327827453613,
-                    0.007539711952209473,
-                    0.007636991977691651,
-                    0.00774451208114624,
-                    0.007418879985809326,
-                    0.007433248043060303,
-                    0.007507967948913574,
-                    0.007435264110565186,
-                    0.0074332160949707035,
-                    0.0073697280883789065,
-                    0.007324672222137451,
-                    0.007208960056304932,
-                    0.007301119804382325,
-                    0.00733900785446167,
-                    0.007244800090789795,
-                    0.007244800090789795,
-                    0.0070891518592834475,
-                    0.007116799831390381,
-                    0.007116799831390381,
-                    0.007112703800201416,
-                    0.00709222412109375,
-                    0.0070830078125,
-                    0.007282688140869141,
-                    0.007129087924957276,
-                    0.0071157760620117185,
-                    0.007137279987335205,
-                    0.007355391979217529,
-                    0.007311295986175537,
-                    0.007135231971740722,
-                    0.007270400047302246,
-                    0.007109663963317871,
-                    0.007104512214660645,
-                    0.007124991893768311,
-                    0.007137279987335205,
-                    0.007124991893768311,
-                    0.00710041618347168,
-                    0.007071680068969727,
-                    0.0071157760620117185,
+                    0.008255488395690918,
+                    0.007997439861297608,
+                    0.008054816246032714,
+                    0.008065024375915527,
+                    0.007953407764434815,
+                    0.007935999870300293,
+                    0.00791756820678711,
+                    0.008064000129699708,
+                    0.007294976234436036,
+                    0.007126016139984131,
+                    0.007120895862579346,
+                    0.007121920108795166,
+                    0.00707583999633789,
+                    0.007077888011932373,
+                    0.007095295906066895,
+                    0.00709939193725586,
+                    0.007076863765716553,
+                    0.007070720195770264,
+                    0.007146495819091797,
+                    0.007126016139984131,
+                    0.007103487968444824,
+                    0.007063551902770996,
+                    0.007085055828094483,
+                    0.007077888011932373,
+                    0.007080959796905518,
+                    0.0070348801612854,
+                    0.007065599918365479,
+                    0.007108607769012451,
+                    0.007077888011932373,
                     0.00710041618347168,
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
+                    0.007068672180175781,
+                    0.007063551902770996,
+                    0.007067647933959961,
+                    0.0070594558715820314,
+                    0.007036928176879883,
+                    0.0070830078125,
+                    0.0070830078125,
+                    0.007291903972625732,
+                    0.00739737606048584,
+                    0.007251967906951904,
+                    0.007280640125274658,
+                    0.007278592109680176,
+                    0.007280640125274658,
+                    0.007262207984924316,
+                    0.007255040168762207,
+                    0.0070860800743103025,
+                    0.007085055828094483,
+                    0.007054336071014404,
+                    0.007067647933959961,
+                    0.007070720195770264,
+                    0.0070974078178405765,
+                    0.007058432102203369,
+                    0.007044095993041993,
+                    0.007058432102203369,
+                    0.007072768211364746,
+                    0.0070522880554199216,
+                    0.007080959796905518,
                     0.0071188478469848635,
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
+                    0.007061503887176514,
+                    0.00704204797744751,
+                    0.007077888011932373,
+                    0.007070720195770264,
+                    0.0070553598403930665,
+                    0.0071198720932006835,
+                    0.007075808048248291,
+                    0.007046144008636474,
+                    0.00709830379486084,
+                    0.007028736114501953,
+                    0.00704307222366333,
+                    0.007060480117797851,
+                    0.007030784130096435,
+                    0.007015423774719238,
+                    0.007046144008636474,
+                    0.007117824077606201,
+                    0.00714035177230835,
+                    0.007070720195770264,
+                    0.0070553598403930665,
+                    0.0074741759300231934,
+                    0.00728166389465332,
+                    0.0073134078979492185,
+                    0.007221248149871826,
+                    0.0072540159225463864,
+                    0.007214079856872559,
+                    0.007251967906951904,
+                    0.007216127872467041,
+                    0.007165952205657959,
+                    0.007046144008636474,
+                    0.007024640083312988,
+                    0.007045119762420654,
+                    0.007103487968444824,
+                    0.007070720195770264,
+                    0.007071807861328125,
+                    0.007046144008636474,
+                    0.007098368167877197,
+                    0.007095263957977295,
+                    0.007014400005340577,
+                    0.007057407855987549,
+                    0.0070860800743103025,
+                    0.007060480117797851,
+                    0.0072130560874938965,
+                    0.007073791980743408,
+                    0.007064576148986816,
+                    0.007061503887176514,
+                    0.007072768211364746,
+                    0.007088128089904785,
                     0.007087103843688965,
-                    0.
-                    0.
-                    0.
+                    0.007066624164581299,
+                    0.007106560230255127,
+                    0.007058432102203369,
+                    0.007038976192474365,
+                    0.007464992046356201,
+                    0.007074816226959229,
+                    0.007065599918365479,
                     0.007097343921661377,
-                    0.
+                    0.007038976192474365,
+                    0.00704307222366333,
+                    0.007067647933959961,
+                    0.007045119762420654,
+                    0.007027711868286133,
+                    0.007053311824798584,
+                    0.007088191986083984,
+                    0.007095295906066895,
+                    0.007035967826843262,
+                    0.007046144008636474,
+                    0.007071743965148926,
+                    0.0070594558715820314,
+                    0.007049215793609619,
+                    0.007030784130096435,
+                    0.007044095993041993,
+                    0.007073791980743408,
+                    0.007073791980743408,
+                    0.007023615837097168,
+                    0.007038976192474365,
+                    0.0070522880554199216,
+                    0.007087103843688965,
+                    0.007069695949554444,
+                    0.007039999961853028,
+                    0.007267327785491944,
+                    0.007243743896484375,
+                    0.007123968124389648
                 ]
             },
             "throughput": {
                 "unit": "samples/s",
-                "value":
+                "value": 139.69592772154314
             },
             "energy": {
                 "unit": "kWh",
-                "cpu": 8.
-                "ram": 4.
-                "gpu": 1.
-                "total": 3.
+                "cpu": 8.619212321997074e-08,
+                "ram": 4.709335230275072e-08,
+                "gpu": 1.6702911913043247e-07,
+                "total": 3.0031459465315394e-07
             },
             "efficiency": {
                 "unit": "samples/kWh",
-                "value":
+                "value": 3329841.4988953248
             }
         }
     }
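For readers checking the new numbers, the aggregate fields are mutually consistent. Below is a small sketch that recomputes them from the "+" side of the diff; the relationships used (mean = total / count, throughput = count / total, energy total = cpu + ram + gpu, efficiency = 1 / energy total, which suggests the energy figures are per sample) are inferred from the values themselves, not from a documented formula.

```python
# Recompute the derived fields of the new forward-pass report from its raw values.
count = 140
total_latency_s = 1.002176672458649

mean_s = total_latency_s / count        # ~0.007158404803276062 (matches "mean")
throughput = count / total_latency_s    # ~139.69592772154314 samples/s (matches "value")

energy_kwh = {
    "cpu": 8.619212321997074e-08,
    "ram": 4.709335230275072e-08,
    "gpu": 1.6702911913043247e-07,
}
total_energy_kwh = sum(energy_kwh.values())  # ~3.0031459465315394e-07 (matches "total")
efficiency = 1 / total_energy_kwh            # ~3329841.49 samples/kWh (matches "value")

print(mean_s, throughput, total_energy_kwh, efficiency)
```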