IlyasMoutawwakil (HF staff) committed on
Commit 2438bbd · verified · 1 Parent(s): 4d9f887

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub

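The upload itself is a single huggingface_hub call. A minimal sketch of how a result file like this one is typically pushed (the repo_id, local path, and repo_type below are illustrative assumptions, not taken from this commit):

# Sketch: push a local benchmark.json to a Hub repo with huggingface_hub.
# repo_id and repo_type are placeholders/assumptions, not read from this commit.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="benchmark.json",  # local result file produced by the benchmark run
    path_in_repo="cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json",
    repo_id="<namespace>/<benchmark-repo>",  # placeholder: target repo on the Hub
    repo_type="dataset",                     # assumption: results stored in a dataset repo
    commit_message="Upload benchmark.json with huggingface_hub",
)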
cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
@@ -102,7 +102,7 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 975.519744,
+ "max_ram": 975.7696,
  "max_global_vram": 1434.976256,
  "max_process_vram": 0.0,
  "max_reserved": 794.820608,
@@ -110,100 +110,99 @@
  },
  "latency": {
  "unit": "s",
- "count": 69,
- "total": 0.9981963205337527,
- "mean": 0.014466613341068876,
- "stdev": 0.0004957345187993836,
- "p50": 0.014575615882873535,
- "p90": 0.014918886184692384,
- "p95": 0.015037235069274903,
- "p99": 0.015897436599731446,
+ "count": 68,
+ "total": 1.0103604774475095,
+ "mean": 0.014858242315404557,
+ "stdev": 0.00043792917038431076,
+ "p50": 0.014870016098022461,
+ "p90": 0.01526097936630249,
+ "p95": 0.01581112298965454,
+ "p99": 0.016236451740264893,
  "values": [
- 0.01458892822265625,
- 0.016097280502319337,
- 0.01580339241027832,
- 0.01495961570739746,
- 0.014659584045410156,
- 0.014672896385192872,
- 0.014619647979736328,
- 0.01456332778930664,
- 0.014627840042114258,
- 0.01491155242919922,
- 0.01460223960876465,
- 0.014655488014221191,
- 0.01476198387145996,
- 0.014722047805786133,
- 0.014664704322814942,
- 0.014731264114379883,
- 0.014450688362121582,
- 0.014575615882873535,
- 0.014565376281738282,
- 0.014609408378601075,
- 0.014919679641723632,
+ 0.015941632270812987,
+ 0.01646080017089844,
+ 0.016125951766967773,
+ 0.015928319931030274,
+ 0.015276032447814942,
+ 0.014929920196533204,
+ 0.01480294418334961,
+ 0.014877696037292481,
+ 0.01500057601928711,
+ 0.015081472396850586,
+ 0.014862336158752442,
+ 0.01507532787322998,
  0.015058943748474121,
- 0.014622719764709472,
- 0.014826496124267579,
+ 0.0149749755859375,
+ 0.015010815620422363,
+ 0.015012864112854005,
+ 0.014816255569458007,
+ 0.014947327613830566,
+ 0.014879808425903321,
+ 0.01496985626220703,
+ 0.01517568016052246,
+ 0.015171584129333495,
+ 0.014928895950317383,
+ 0.015122431755065918,
+ 0.014923775672912597,
+ 0.014797823905944824,
+ 0.01488691234588623,
+ 0.015140864372253418,
+ 0.014853119850158691,
+ 0.014804991722106933,
+ 0.014820351600646972,
+ 0.014896127700805664,
+ 0.01537331199645996,
+ 0.015254528045654296,
+ 0.014888959884643555,
+ 0.014962752342224122,
+ 0.014966815948486327,
+ 0.01467084789276123,
+ 0.014818304061889649,
+ 0.01455513572692871,
+ 0.014645248413085938,
+ 0.01516543960571289,
+ 0.015056832313537598,
+ 0.01559347152709961,
+ 0.014362624168395996,
+ 0.014095359802246094,
+ 0.01444863986968994,
+ 0.014469120025634765,
+ 0.014458880424499512,
+ 0.01468518352508545,
+ 0.014459903717041016,
+ 0.014443519592285157,
+ 0.01440665626525879,
+ 0.01442307186126709,
  0.014394368171691894,
- 0.0146627197265625,
- 0.014698495864868164,
- 0.014749695777893066,
- 0.014746623992919922,
- 0.014691328048706055,
- 0.014918687820434571,
- 0.014718976020812988,
- 0.01486847972869873,
- 0.01511526393890381,
- 0.015004672050476075,
- 0.014763008117675782,
- 0.014643199920654297,
- 0.014478336334228516,
- 0.014529536247253418,
- 0.014470080375671386,
- 0.014322688102722168,
- 0.014534655570983887,
- 0.014621696472167968,
- 0.014914560317993163,
- 0.014568448066711426,
- 0.014261247634887696,
- 0.01415167999267578,
- 0.01416806411743164,
- 0.014111743927001954,
- 0.014034943580627441,
- 0.01347382354736328,
- 0.013394944190979004,
- 0.013392895698547362,
- 0.013431808471679688,
- 0.01335910415649414,
- 0.01358131217956543,
- 0.014057472229003906,
- 0.013873151779174805,
- 0.01448140811920166,
- 0.014158847808837891,
- 0.014014464378356933,
- 0.014082048416137695,
- 0.014221311569213867,
- 0.014028767585754394,
- 0.014027775764465332,
- 0.014048255920410157,
- 0.01406668758392334,
- 0.014090239524841308,
- 0.013998080253601074
+ 0.014511103630065919,
+ 0.014970879554748535,
+ 0.014405632019042968,
+ 0.014356479644775391,
+ 0.014377984046936035,
+ 0.014427136421203614,
+ 0.014496767997741699,
+ 0.01437388801574707,
+ 0.014452735900878906,
+ 0.014426143646240234,
+ 0.014504960060119629,
+ 0.014502911567687989,
+ 0.01439846420288086
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 69.12467876369703
+ "value": 67.3027117725245
  },
  "energy": {
  "unit": "kWh",
- "cpu": 1.6693196059765765e-07,
- "ram": 9.118821385591601e-08,
- "gpu": 3.349181083098603e-07,
- "total": 5.93038282763434e-07
+ "cpu": 1.7039785545969766e-07,
+ "ram": 9.314138673809273e-08,
+ "gpu": 3.465240867428548e-07,
+ "total": 6.100633289406452e-07
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 1686231.7814293704
+ "value": 1639174.08662551
  }
  }
  }
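The derived fields in the new version are internally consistent with its raw measurements: mean = total / count, throughput (samples/s) = count / total, and efficiency (samples/kWh) = 1 / total energy. A quick check using only the numbers shown in the diff above:

# Recompute the derived fields of the new version from its own summary numbers.
count = 68                              # latency "count"
total_latency = 1.0103604774475095      # latency "total" (s)
total_energy = 6.100633289406452e-07    # energy "total" (kWh)

print(total_latency / count)   # ~0.0148582 s        -> matches latency "mean"
print(count / total_latency)   # ~67.3027 samples/s  -> matches throughput "value"
print(1 / total_energy)        # ~1639174.09 samples/kWh -> matches efficiency "value"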