IlyasMoutawwakil committed (verified)
Commit 02726ca · Parent: f05781e

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub

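The commit message says the file was pushed "with huggingface_hub". As a minimal sketch of how such an upload is typically done with the library's `upload_file` helper — the repository id, repo type, and token handling below are assumptions for illustration and are not shown anywhere in this commit:

```python
from huggingface_hub import HfApi

api = HfApi()  # token is read from HF_TOKEN or the local `huggingface-cli login` cache

api.upload_file(
    path_or_fileobj="benchmark.json",  # local result file produced by the benchmark run
    path_in_repo=(
        "cuda_inference_transformers_token-classification_"
        "microsoft/deberta-v3-base/benchmark.json"
    ),
    repo_id="<namespace>/<benchmark-results-repo>",  # hypothetical placeholder
    repo_type="dataset",                             # assumption; could also be a model repo
    commit_message="Upload benchmark.json with huggingface_hub",
)
```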
cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
@@ -104,7 +104,7 @@
   "forward": {
     "memory": {
       "unit": "MB",
-      "max_ram": 976.101376,
+      "max_ram": 976.564224,
       "max_global_vram": 1434.976256,
       "max_process_vram": 0.0,
       "max_reserved": 794.820608,
@@ -113,103 +113,103 @@
     "latency": {
       "unit": "s",
       "count": 73,
-      "total": 0.9979040594100952,
-      "mean": 0.0136699186220561,
-      "stdev": 0.0005464524057608967,
-      "p50": 0.013645824432373046,
-      "p90": 0.014329446220397949,
-      "p95": 0.014507212448120118,
-      "p99": 0.0151649076461792,
+      "total": 0.9996178541183476,
+      "mean": 0.013693395261895166,
+      "stdev": 0.0007549113929564292,
+      "p50": 0.013345791816711425,
+      "p90": 0.01502146587371826,
+      "p95": 0.015442329788208004,
+      "p99": 0.016295403060913087,
       "values": [
-        0.014483455657958985,
-        0.015059967994689942,
-        0.014542847633361817,
-        0.01434931182861328,
-        0.014353407859802245,
-        0.013663167953491212,
-        0.013631487846374512,
-        0.013766655921936035,
-        0.013785087585449218,
-        0.013682687759399414,
-        0.013818880081176758,
-        0.013744128227233888,
-        0.013651007652282715,
-        0.014218239784240723,
-        0.013961215972900391,
-        0.014144512176513671,
-        0.014115839958190919,
-        0.014417920112609863,
-        0.014206975936889648,
-        0.013717503547668456,
-        0.013819904327392578,
-        0.014043135643005371,
-        0.014728192329406739,
-        0.015434752464294434,
-        0.014119935989379882,
-        0.014223360061645507,
-        0.01410044765472412,
-        0.014164992332458496,
-        0.01405951976776123,
-        0.013999103546142578,
-        0.014210047721862793,
-        0.01406771183013916,
-        0.013774847984313965,
-        0.013598719596862792,
-        0.013377535820007324,
-        0.01334169578552246,
-        0.014033920288085937,
-        0.014249983787536622,
-        0.014235648155212402,
-        0.013860799789428712,
-        0.013584383964538574,
-        0.013645824432373046,
-        0.013635583877563476,
-        0.013634559631347656,
-        0.013126655578613282,
-        0.013320192337036133,
+        0.015071231842041016,
+        0.015310848236083984,
+        0.015033344268798828,
+        0.01580031967163086,
+        0.016675840377807616,
+        0.015138815879821778,
+        0.014973952293395995,
+        0.0161474552154541,
+        0.01563955211639404,
+        0.013751296043395997,
+        0.013321215629577637,
+        0.0133570556640625,
+        0.013167615890502929,
+        0.013322239875793456,
+        0.01335910415649414,
+        0.013535231590270995,
+        0.013378560066223144,
+        0.013334527969360351,
+        0.013396991729736327,
+        0.01406156826019287,
+        0.01384447956085205,
+        0.01386905574798584,
+        0.013784064292907714,
+        0.013758463859558106,
+        0.01377280044555664,
+        0.01344819164276123,
+        0.013325311660766602,
+        0.013271072387695312,
+        0.013255680084228515,
+        0.013198335647583008,
+        0.013355008125305176,
+        0.013322239875793456,
+        0.013296640396118165,
+        0.013281279563903809,
         0.013120512008666992,
-        0.013149184226989746,
-        0.013128704071044921,
-        0.013110239982604981,
-        0.013138943672180176,
-        0.013129728317260742,
-        0.013129728317260742,
-        0.01316659164428711,
-        0.013147135734558106,
-        0.013136896133422851,
-        0.013124608039855956,
-        0.01315225601196289,
-        0.013107199668884278,
-        0.013114303588867187,
-        0.013097984313964844,
-        0.013146112442016602,
-        0.013190143585205078,
-        0.013133824348449707,
-        0.013179903984069824,
-        0.013107199668884278,
-        0.013115391731262208,
-        0.013211647987365722,
-        0.013022144317626954,
-        0.013003775596618652,
-        0.013151231765747071,
-        0.013010944366455078,
-        0.012971967697143554
+        0.013301759719848634,
+        0.013318143844604492,
+        0.01345740795135498,
+        0.013365247726440429,
+        0.013345791816711425,
+        0.013318143844604492,
+        0.01386291217803955,
+        0.014149632453918457,
+        0.014183423995971679,
+        0.014205951690673829,
+        0.013743103981018067,
+        0.013339648246765137,
+        0.0133570556640625,
+        0.01334169578552246,
+        0.013328384399414063,
+        0.01320143985748291,
+        0.013280256271362305,
+        0.01327616024017334,
+        0.013327360153198242,
+        0.013291520118713379,
+        0.013339648246765137,
+        0.013291520118713379,
+        0.01336729621887207,
+        0.013201408386230469,
+        0.013296640396118165,
+        0.013306879997253418,
+        0.013668352127075196,
+        0.013359231948852539,
+        0.013301759719848634,
+        0.013303808212280274,
+        0.0133089599609375,
+        0.013295616149902344,
+        0.013373439788818359,
+        0.013342720031738281,
+        0.013245599746704102,
+        0.013308927536010743,
+        0.013254591941833496,
+        0.013376511573791505
       ]
     },
     "throughput": {
       "unit": "samples/s",
-      "value": 73.15332502320261
+      "value": 73.02790731402578
     },
     "energy": {
       "unit": "kWh",
-      "cpu": 1.5623979324214215e-07,
-      "ram": 8.50517201407046e-08,
-      "gpu": 3.323054606493631e-07,
-      "total": 5.735969740322098e-07
+      "cpu": 1.6007624320439749e-07,
+      "ram": 8.750631552024095e-08,
+      "gpu": 3.419034266756745e-07,
+      "total": 5.894859854003129e-07
     },
     "efficiency": {
       "unit": "samples/kWh",
-      "value": 1743384.3713824158
+      "value": 1696393.170943516
     }
   }
 }
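For readers checking the numbers, a short sketch (not part of the commit) of how the derived throughput and efficiency fields appear to follow from the raw latency and energy totals in the new (+) version of the file; the per-sample interpretation of the energy total is an assumption consistent with the arithmetic:

```python
# Arithmetic check against the new figures in this diff.
count = 73                                # "count" in the latency block (73 forward passes)
total_latency_s = 0.9996178541183476      # "total" in the latency block
total_energy_kwh = 5.894859854003129e-07  # "total" in the energy block (assumed per forward pass)

throughput = count / total_latency_s      # ~73.0279 samples/s, matches "throughput.value"
efficiency = 1.0 / total_energy_kwh       # ~1.6964e6 samples/kWh, matches "efficiency.value"
mean_latency = total_latency_s / count    # ~0.0136934 s, matches "mean" up to float rounding

print(throughput, efficiency, mean_latency)
```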