Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub
cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json (CHANGED)
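
The commit title indicates the file was pushed with the huggingface_hub client. Below is a minimal sketch of the kind of call that produces such a commit, assuming a local copy of benchmark.json; the repo_id is a placeholder, since the target repository is not named on this page.

from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="benchmark.json",  # local file to push
    path_in_repo="cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json",
    repo_id="<org>/<benchmark-results>",  # placeholder: actual repo not shown on this page
    repo_type="dataset",                  # assumed: benchmark results are typically stored in a dataset repo
    commit_message="Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
)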
@@ -83,7 +83,7 @@
         "gpu_count": 1,
         "gpu_vram_mb": 68702699520,
         "optimum_benchmark_version": "0.2.0",
-        "optimum_benchmark_commit": "
+        "optimum_benchmark_commit": "77e62a3eef699bce70248c56d35c703369938b85",
         "transformers_version": "4.40.2",
         "transformers_commit": null,
         "accelerate_version": "0.30.1",
@@ -102,166 +102,167 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 1008.
-            "max_global_vram":
-            "max_process_vram":
+            "max_ram": 1008.959488,
+            "max_global_vram": 2522.836992,
+            "max_process_vram": 226898.518016,
             "max_reserved": 555.74528,
             "max_allocated": 499.443712
         },
         "latency": {
             "unit": "s",
-            "count":
-            "total": 0.
-            "mean": 0.
-            "stdev": 0.
-            "p50": 0.
-            "p90": 0.
-            "p95": 0.
-            "p99": 0.
+            "count": 139,
+            "total": 0.9981516461372376,
+            "mean": 0.0071809470945125,
+            "stdev": 0.0003183605302359094,
+            "p50": 0.007091347217559814,
+            "p90": 0.007381938934326172,
+            "p95": 0.007519570779800414,
+            "p99": 0.007941451139450073,
             "values": [
-                0.
-                … (137 more truncated entries)
+                0.007919826030731202,
+                0.007084307193756104,
+                0.007131347179412842,
+                0.007205427169799805,
+                0.007233266830444336,
+                0.007191027164459228,
+                0.007468945980072021,
+                0.007454866886138916,
+                0.007448145866394043,
+                0.007505746841430664,
+                0.00739422607421875,
+                0.007310067176818848,
+                0.007332306861877441,
+                0.0072875070571899415,
+                0.007303826808929444,
+                0.007191826820373535,
+                0.00722318696975708,
+                0.0071572670936584475,
+                0.0071598272323608395,
+                0.007091347217559814,
+                0.0070843081474304195,
+                0.007174868106842041,
+                0.007135988235473633,
+                0.007057907104492188,
+                0.007338547229766846,
+                0.007123188018798828,
+                0.007170707225799561,
+                0.007203987121582031,
+                0.00716206693649292,
+                0.007123666763305664,
+                0.0071027069091796876,
+                0.007140787124633789,
+                0.007090867042541504,
+                0.007150866985321045,
+                0.007123666763305664,
+                0.007299187183380127,
+                0.007139347076416016,
+                0.007074546813964844,
+                0.00709486722946167,
+                0.007080627918243408,
+                0.007022387981414795,
+                0.007189427852630615,
+                0.010238061904907227,
+                0.007643986225128174,
+                0.007075346946716308,
+                0.007070868015289306,
+                0.007056628227233887,
+                0.007053427219390869,
+                0.007045746803283692,
+                0.0070311870574951175,
+                0.006975828170776367,
+                0.007052146911621094,
+                0.00706318712234497,
+                0.007262227058410645,
+                0.007053906917572021,
+                0.007051348209381103,
+                0.007378867149353027,
+                0.007138707160949707,
+                0.007047347068786621,
+                0.007018708229064942,
+                0.007068787097930908,
+                0.006990386962890625,
+                0.007085906982421875,
+                0.007226867198944092,
+                0.007213266849517822,
+                0.007223027229309082,
+                0.0070721468925476075,
+                0.007887666225433349,
+                0.007074387073516846,
+                0.006992147922515869,
+                0.007117907047271729,
+                0.007012466907501221,
+                0.007091506958007813,
+                0.007059826850891114,
+                0.007017268180847168,
+                0.00707470703125,
+                0.007002226829528809,
+                0.007013906955718994,
+                0.007130387783050537,
+                0.007081266880035401,
+                0.006995666980743409,
+                0.007040787220001221,
+                0.007078708171844483,
+                0.007096628189086914,
+                0.007026226997375489,
+                0.0070630269050598144,
+                0.007061906814575195,
+                0.007101906776428223,
+                0.007059668064117432,
+                0.007103346824645996,
+                0.007100626945495605,
+                0.007048787117004395,
+                0.007042226791381836,
+                0.007047028064727783,
+                0.007106067180633545,
+                0.007049586772918701,
+                0.007069427013397217,
+                0.007051667213439941,
+                0.007094868183135986,
+                0.007091667175292969,
+                0.007081747055053711,
+                0.007106866836547852,
+                0.007089587211608886,
+                0.007068147182464599,
+                0.006969107151031494,
+                0.007127027988433838,
+                0.007092628002166748,
+                0.007051667213439941,
+                0.007000786781311035,
+                0.0069948678016662595,
+                0.007072627067565918,
+                0.0070790271759033204,
+                0.007019987106323242,
+                0.007085426807403565,
+                0.007112627029418945,
+                0.0070659079551696775,
+                0.007070547103881836,
+                0.00706270694732666,
+                0.007078707218170166,
+                0.0070316681861877445,
+                0.007048947811126709,
+                0.007074546813964844,
+                0.007846705913543701,
+                0.007954705238342285,
+                0.0077140660285949705,
+                0.007284467220306397,
+                0.007434707164764404,
+                0.007435986042022705,
+                0.007326547145843506,
+                0.007276947021484375,
+                0.007210066795349121,
+                0.0072801470756530765,
+                0.007296466827392578,
+                0.007345586776733398,
+                0.007026226997375489,
+                0.00707710599899292,
+                0.0069875078201293945,
+                0.007039506912231445,
+                0.0070275068283081055
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value":
+            "value": 139.2573969475663
         },
         "energy": null,
         "efficiency": null
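
For reference, the summary fields in the new hunk are consistent with the raw "values" list: mean = total / count (0.9981516461372376 / 139 ≈ 0.0071809 s), throughput = count / total (139 / 0.9981516461372376 ≈ 139.257 samples/s), and the pXX entries are percentiles of the per-iteration latencies. Below is a minimal sketch that recomputes these from a local copy of the file; the exact JSON nesting of the "forward" section and the percentile interpolation used by optimum-benchmark are assumptions.

import json
import numpy as np

# Hypothetical local copy of the file changed in this commit.
path = "cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json"
with open(path) as f:
    benchmark = json.load(f)

# The "forward" section holds the measurements shown in the diff; its exact
# position in the JSON tree is assumed here, adjust if the layout differs.
latency = benchmark["forward"]["latency"]
values = np.asarray(latency["values"], dtype=float)

count = len(values)          # 139 after this commit
total = values.sum()         # ~0.998152 s
mean = total / count         # ~0.0071809 s, matches "mean"
throughput = count / total   # ~139.257 samples/s, matches "throughput.value"
p50, p90, p95, p99 = np.percentile(values, [50, 90, 95, 99])  # interpolation may differ from the stored pXX

print(f"count={count} total={total:.6f}s mean={mean * 1e3:.3f}ms throughput={throughput:.2f} samples/s")
print(f"p50={p50 * 1e3:.3f}ms p90={p90 * 1e3:.3f}ms p95={p95 * 1e3:.3f}ms p99={p99 * 1e3:.3f}ms")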