Commit 0366bd0 (verified) · Parent(s): 0ad1b88
Committed by IlyasMoutawwakil (HF staff)

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub

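For context, files like this one are typically pushed with the huggingface_hub client; a minimal sketch of such an upload is below. The repo_id shown is a placeholder (the target repository is not named in this commit view), and this is not necessarily the exact call that produced the commit.

from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` by default
api.upload_file(
    path_or_fileobj="benchmark.json",  # local result of the benchmark run
    path_in_repo="cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json",
    repo_id="IlyasMoutawwakil/benchmarks",  # placeholder, not taken from this page
    repo_type="dataset",  # assumption: benchmark results are usually stored in a dataset repo
    commit_message="Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub",
)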
cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
@@ -17,7 +17,6 @@
  "intra_op_num_threads": null,
  "model_kwargs": {},
  "processor_kwargs": {},
- "hub_kwargs": {},
  "no_weights": true,
  "device_map": null,
  "torch_dtype": null,
@@ -105,7 +104,7 @@
  "load": {
  "memory": {
  "unit": "MB",
- "max_ram": 705.531904,
+ "max_ram": 707.854336,
  "max_global_vram": 1403.518976,
  "max_process_vram": 0.0,
  "max_reserved": 773.849088,
@@ -114,31 +113,31 @@
  "latency": {
  "unit": "s",
  "count": 1,
- "total": 7.15669384765625,
- "mean": 7.15669384765625,
+ "total": 7.1304541015625,
+ "mean": 7.1304541015625,
  "stdev": 0.0,
- "p50": 7.15669384765625,
- "p90": 7.15669384765625,
- "p95": 7.15669384765625,
- "p99": 7.15669384765625,
+ "p50": 7.1304541015625,
+ "p90": 7.1304541015625,
+ "p95": 7.1304541015625,
+ "p99": 7.1304541015625,
  "values": [
- 7.15669384765625
+ 7.1304541015625
  ]
  },
  "throughput": null,
  "energy": {
  "unit": "kWh",
- "cpu": 1.0540606247054208e-06,
- "ram": 5.620494452602998e-07,
- "gpu": 1.754445847999657e-06,
- "total": 3.3705559179653775e-06
+ "cpu": 1.0721758008003235e-06,
+ "ram": 5.714629080222266e-07,
+ "gpu": 0.0,
+ "total": 1.6436387088225501e-06
  },
  "efficiency": null
  },
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 987.81184,
+ "max_ram": 990.183424,
  "max_global_vram": 1434.976256,
  "max_process_vram": 0.0,
  "max_reserved": 794.820608,
@@ -146,106 +145,104 @@
  },
  "latency": {
  "unit": "s",
- "count": 75,
- "total": 1.0044241905212405,
- "mean": 0.013392322540283203,
- "stdev": 0.00044885281710697786,
- "p50": 0.013298687934875488,
- "p90": 0.013793074989318848,
- "p95": 0.014447513580322265,
- "p99": 0.015130890102386477,
+ "count": 73,
+ "total": 0.9994639692306517,
+ "mean": 0.013691287249734957,
+ "stdev": 0.0004417032779150381,
+ "p50": 0.013766655921936035,
+ "p90": 0.014075903701782227,
+ "p95": 0.01423790111541748,
+ "p99": 0.015206686668395997,
  "values": [
- 0.014610431671142577,
- 0.015358976364135742,
- 0.014593024253845215,
- 0.01386297607421875,
- 0.013263872146606445,
+ 0.015686655998229982,
+ 0.015020031929016114,
+ 0.0146810884475708,
+ 0.013904864311218262,
+ 0.013902848243713378,
+ 0.013881343841552735,
+ 0.013773823738098144,
+ 0.013876223564147949,
+ 0.014090239524841308,
+ 0.013814784049987794,
+ 0.013935615539550781,
+ 0.014018560409545898,
+ 0.013759519577026367,
+ 0.01376460838317871,
+ 0.013948927879333496,
+ 0.013792256355285644,
+ 0.013752320289611816,
+ 0.013751296043395997,
+ 0.01379532814025879,
+ 0.013898783683776856,
+ 0.01415987205505371,
+ 0.013991935729980469,
+ 0.01380352020263672,
+ 0.014109760284423829,
+ 0.013810688018798829,
+ 0.013801471710205078,
+ 0.013797375679016113,
+ 0.013961215972900391,
+ 0.013742079734802246,
+ 0.013773823738098144,
+ 0.013766655921936035,
+ 0.013972479820251465,
+ 0.014252032279968262,
+ 0.014228480339050293,
+ 0.013799424171447755,
+ 0.013799424171447755,
+ 0.01389568042755127,
+ 0.013906944274902343,
+ 0.01376255989074707,
+ 0.013703167915344238,
+ 0.013477888107299805,
+ 0.013847552299499511,
+ 0.013816831588745117,
+ 0.013988863945007325,
+ 0.013900768280029297,
+ 0.013385727882385253,
+ 0.0132925443649292,
+ 0.013285375595092774,
+ 0.013275135993957519,
+ 0.013244416236877441,
+ 0.013229056358337403,
+ 0.01326796817779541,
+ 0.013247488021850586,
+ 0.013264896392822266,
+ 0.013301728248596191,
+ 0.013280256271362305,
  0.01324339199066162,
- 0.01316864013671875,
- 0.013314047813415527,
- 0.013504511833190918,
+ 0.013282303810119628,
+ 0.013283328056335449,
+ 0.013203455924987792,
+ 0.013224960327148438,
+ 0.013280256271362305,
+ 0.01325772762298584,
+ 0.013288448333740235,
+ 0.013290495872497558,
  0.0132741117477417,
- 0.013221887588500977,
- 0.013420543670654296,
- 0.013306879997253418,
- 0.013277183532714844,
- 0.013376511573791505,
- 0.01326694393157959,
- 0.013125632286071777,
- 0.013137920379638672,
- 0.013150208473205567,
- 0.013181952476501465,
- 0.013496319770812988,
- 0.015050751686096191,
- 0.013369343757629394,
- 0.013186079978942871,
- 0.013390912055969238,
- 0.013197248458862304,
- 0.013191167831420898,
- 0.013112319946289062,
- 0.013355008125305176,
- 0.013132800102233886,
- 0.013170687675476075,
- 0.013210623741149903,
- 0.013244416236877441,
- 0.013585408210754395,
- 0.013557760238647461,
- 0.013285375595092774,
- 0.013202431678771973,
- 0.01328435230255127,
- 0.013121567726135253,
- 0.01303756809234619,
- 0.013142016410827637,
- 0.013000703811645508,
- 0.012841983795166016,
- 0.013697952270507812,
- 0.013829119682312012,
- 0.013928447723388672,
- 0.014385151863098144,
- 0.013739007949829102,
- 0.013446144104003906,
- 0.013346783638000488,
- 0.013419520378112794,
- 0.013304767608642578,
- 0.013288479804992676,
- 0.013480959892272949,
- 0.013277248382568359,
- 0.01337241554260254,
- 0.013503487586975099,
- 0.013386752128601074,
- 0.01356492805480957,
- 0.01339187240600586,
- 0.013298687934875488,
- 0.013277183532714844,
- 0.013320192337036133,
- 0.013287424087524414,
- 0.013242367744445802,
- 0.01334988784790039,
- 0.013298720359802245,
- 0.013316096305847168,
- 0.013289471626281739,
- 0.013337599754333495,
- 0.013344767570495606,
- 0.012733440399169921,
- 0.012661696434020996,
- 0.012660736083984376,
- 0.012816384315490722
+ 0.013094911575317383,
+ 0.013327360153198242,
+ 0.013232128143310547,
+ 0.013281279563903809,
+ 0.013234175682067872,
+ 0.013206527709960938,
+ 0.013264896392822266
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 74.66964725439276
+ "value": 73.03915123242766
  },
  "energy": {
  "unit": "kWh",
- "cpu": 1.5837208650730275e-07,
- "ram": 8.649198190669645e-08,
- "gpu": 3.696521475733336e-07,
- "total": 6.145162159873328e-07
+ "cpu": 1.5251248480587606e-07,
+ "ram": 8.329012690816351e-08,
+ "gpu": 3.54273787692301e-07,
+ "total": 5.900763994063405e-07
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 1627296.357661314
+ "value": 1694695.8071972923
  }
  }
  }
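For readers of the updated file, the forward-pass summary can be re-derived from the raw per-iteration latencies as a sanity check. The sketch below is illustrative only: the top-level nesting of benchmark.json is not visible in this diff, so the hypothetical find_section helper searches for the "forward" section rather than assuming a fixed path.

import json

with open("benchmark.json") as f:
    report = json.load(f)

def find_section(node, key):
    # Hypothetical helper: the exact top-level layout is not shown in the diff,
    # so recursively look for a dict entry named `key`.
    if isinstance(node, dict):
        if key in node and isinstance(node[key], dict):
            return node[key]
        for value in node.values():
            found = find_section(value, key)
            if found is not None:
                return found
    return None

forward = find_section(report, "forward")
latencies = forward["latency"]["values"]  # per-iteration forward latencies, in seconds

count = len(latencies)       # 73 in this version of the file
total = sum(latencies)       # ~0.9995 s, matches "total"
mean = total / count         # ~0.013691 s, matches "mean"
throughput = count / total   # ~73.04 samples/s, matches "throughput.value"
print(f"count={count} total={total:.4f}s mean={mean:.6f}s throughput={throughput:.2f} samples/s")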