Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub
cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json
CHANGED
@@ -104,7 +104,7 @@
         "forward": {
             "memory": {
                 "unit": "MB",
-                "max_ram": 976.
+                "max_ram": 976.101376,
                 "max_global_vram": 1434.976256,
                 "max_process_vram": 0.0,
                 "max_reserved": 794.820608,
@@ -112,103 +112,104 @@
             },
             "latency": {
                 "unit": "s",
-                "count":
-                "total":
-                "mean": 0.
-                "stdev": 0.
-                "p50": 0.
-                "p90": 0.
-                "p95": 0.
-                "p99": 0.
+                "count": 73,
+                "total": 0.9979040594100952,
+                "mean": 0.0136699186220561,
+                "stdev": 0.0005464524057608967,
+                "p50": 0.013645824432373046,
+                "p90": 0.014329446220397949,
+                "p95": 0.014507212448120118,
+                "p99": 0.0151649076461792,
                 "values": [
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.014520319938659668,
-                    0.014205951690673829,
-                    0.0140830717086792,
-                    0.014090239524841308,
-                    0.013670399665832519,
-                    0.013530112266540528,
-                    0.01370019245147705,
-                    0.01365401554107666,
-                    0.014047103881835938,
-                    0.013808639526367187,
-                    0.01407590389251709,
-                    0.01400115203857422,
-                    0.013964320182800293,
-                    0.014272416114807129,
-                    0.014381055831909179,
-                    0.014013407707214355,
-                    0.013666208267211915,
-                    0.013516799926757812,
-                    0.013423616409301758,
-                    0.013727744102478028,
-                    0.01378598403930664,
-                    0.013485055923461914,
-                    0.013889535903930664,
-                    0.014347264289855957,
-                    0.014358528137207031,
-                    0.014246912002563476,
-                    0.014154751777648926,
-                    0.014056447982788087,
-                    0.013825023651123047,
-                    0.013985792160034179,
-                    0.013760512351989745,
-                    0.013303808212280274,
-                    0.013816831588745117,
-                    0.014284799575805664,
+                    0.014483455657958985,
+                    0.015059967994689942,
+                    0.014542847633361817,
+                    0.01434931182861328,
                     0.014353407859802245,
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.014000255584716797,
-                    0.013831040382385255,
-                    0.01376972770690918,
-                    0.013735936164855958,
-                    0.01377791976928711,
-                    0.01386393642425537,
-                    0.013793279647827148,
-                    0.013938688278198242,
-                    0.013905920028686524,
-                    0.013829119682312012,
-                    0.01377791976928711,
-                    0.013737983703613281,
-                    0.013727775573730469,
-                    0.013750271797180176,
-                    0.013698047637939453,
+                    0.013663167953491212,
+                    0.013631487846374512,
+                    0.013766655921936035,
+                    0.013785087585449218,
+                    0.013682687759399414,
+                    0.013818880081176758,
                     0.013744128227233888,
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
-                    0.
+                    0.013651007652282715,
+                    0.014218239784240723,
+                    0.013961215972900391,
+                    0.014144512176513671,
+                    0.014115839958190919,
+                    0.014417920112609863,
+                    0.014206975936889648,
+                    0.013717503547668456,
+                    0.013819904327392578,
+                    0.014043135643005371,
+                    0.014728192329406739,
+                    0.015434752464294434,
+                    0.014119935989379882,
+                    0.014223360061645507,
+                    0.01410044765472412,
+                    0.014164992332458496,
+                    0.01405951976776123,
+                    0.013999103546142578,
+                    0.014210047721862793,
+                    0.01406771183013916,
+                    0.013774847984313965,
+                    0.013598719596862792,
+                    0.013377535820007324,
+                    0.01334169578552246,
+                    0.014033920288085937,
+                    0.014249983787536622,
+                    0.014235648155212402,
+                    0.013860799789428712,
+                    0.013584383964538574,
+                    0.013645824432373046,
+                    0.013635583877563476,
+                    0.013634559631347656,
+                    0.013126655578613282,
+                    0.013320192337036133,
+                    0.013120512008666992,
+                    0.013149184226989746,
+                    0.013128704071044921,
+                    0.013110239982604981,
+                    0.013138943672180176,
+                    0.013129728317260742,
+                    0.013129728317260742,
+                    0.01316659164428711,
+                    0.013147135734558106,
+                    0.013136896133422851,
+                    0.013124608039855956,
+                    0.01315225601196289,
+                    0.013107199668884278,
+                    0.013114303588867187,
+                    0.013097984313964844,
+                    0.013146112442016602,
+                    0.013190143585205078,
+                    0.013133824348449707,
+                    0.013179903984069824,
+                    0.013107199668884278,
+                    0.013115391731262208,
+                    0.013211647987365722,
+                    0.013022144317626954,
+                    0.013003775596618652,
+                    0.013151231765747071,
+                    0.013010944366455078,
+                    0.012971967697143554
                 ]
             },
             "throughput": {
                 "unit": "samples/s",
-                "value":
+                "value": 73.15332502320261
             },
             "energy": {
                 "unit": "kWh",
-                "cpu": 1.
-                "ram": 8.
-                "gpu": 3.
-                "total": 5.
+                "cpu": 1.5623979324214215e-07,
+                "ram": 8.50517201407046e-08,
+                "gpu": 3.323054606493631e-07,
+                "total": 5.735969740322098e-07
             },
             "efficiency": {
                 "unit": "samples/kWh",
-                "value":
+                "value": 1743384.3713824158
             }
         }
     }
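For context, the commit title indicates this file was pushed with the huggingface_hub client library. A minimal sketch of such a single-file upload is shown below; the repo_id, repo_type, local path, and token handling are assumptions for illustration, not details taken from this commit.

from huggingface_hub import HfApi

# Hypothetical sketch: push one benchmark result file to a Hub repository.
# repo_id and repo_type are placeholders; the target repo is not named in this diff.
api = HfApi()  # uses the cached login or the HF_TOKEN environment variable
api.upload_file(
    path_or_fileobj="benchmark.json",  # local file produced by the benchmark run (assumed name)
    path_in_repo="cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json",
    repo_id="<namespace>/<repo-name>",  # placeholder for the target repository
    repo_type="dataset",                # assumption: benchmark dumps are commonly stored in dataset repos
    commit_message="Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub",
)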