IlyasMoutawwakil (HF staff) committed verified commit 83737b1 · 1 parent: c43cf93

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub
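The commit message above corresponds to a programmatic upload. Below is a minimal sketch of how a single benchmark file is typically pushed with the huggingface_hub client; the repo_id and repo_type are placeholders, not values taken from this commit:

from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` by default

# repo_id and repo_type are placeholders for illustration only.
api.upload_file(
    path_or_fileobj="benchmark.json",
    path_in_repo="cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json",
    repo_id="your-username/your-benchmark-repo",
    repo_type="dataset",
    commit_message="Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub",
)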
cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
@@ -86,7 +86,7 @@
  "gpu_vram_mb": 24146608128,
  "optimum_benchmark_version": "0.2.1",
  "optimum_benchmark_commit": null,
- "transformers_version": "4.41.0",
+ "transformers_version": "4.41.1",
  "transformers_commit": null,
  "accelerate_version": "0.30.1",
  "accelerate_commit": null,
@@ -104,7 +104,7 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 975.536128,
+ "max_ram": 975.716352,
  "max_global_vram": 1434.976256,
  "max_process_vram": 0.0,
  "max_reserved": 794.820608,
@@ -112,102 +112,101 @@
  },
  "latency": {
  "unit": "s",
- "count": 71,
- "total": 1.0081064615249633,
- "mean": 0.014198682556689627,
- "stdev": 0.0005068508079613693,
- "p50": 0.014232576370239258,
- "p90": 0.014790656089782715,
- "p95": 0.015072256088256835,
- "p99": 0.015660134506225585,
+ "count": 70,
+ "total": 0.9995849256515507,
+ "mean": 0.014279784652165005,
+ "stdev": 0.0003427474074356825,
+ "p50": 0.014211584091186523,
+ "p90": 0.01476638708114624,
+ "p95": 0.014904510402679443,
+ "p99": 0.014988236980438233,
  "values": [
- 0.015837183952331545,
- 0.015584256172180176,
- 0.015238143920898438,
- 0.014712832450866698,
- 0.014153727531433105,
- 0.014154751777648926,
- 0.014261247634887696,
- 0.014422016143798828,
- 0.014307295799255371,
- 0.014298111915588378,
- 0.014348287582397461,
- 0.0142807035446167,
- 0.014226431846618653,
- 0.01419264030456543,
- 0.014170111656188965,
- 0.014221343994140625,
- 0.01425920009613037,
- 0.014794783592224121,
- 0.01511526393890381,
- 0.014381055831909179,
- 0.014232576370239258,
- 0.014234623908996581,
- 0.014407679557800293,
- 0.014252032279968262,
- 0.014178272247314452,
- 0.014232576370239258,
- 0.014364671707153321,
- 0.014569472312927247,
- 0.014782464027404785,
- 0.015029248237609863,
- 0.014739456176757813,
- 0.014130175590515137,
- 0.014137344360351562,
+ 0.014879712104797363,
+ 0.014842880249023438,
+ 0.014755935668945312,
+ 0.014850048065185547,
+ 0.014171135902404786,
+ 0.013964287757873535,
+ 0.014027775764465332,
+ 0.013940735816955567,
+ 0.014410752296447754,
+ 0.014499839782714843,
+ 0.014053376197814941,
+ 0.013979647636413574,
+ 0.013922304153442382,
+ 0.013906944274902343,
  0.01396735954284668,
- 0.0142673921585083,
- 0.01467903995513916,
- 0.0148602876663208,
- 0.014790656089782715,
- 0.014523391723632812,
- 0.014568448066711426,
- 0.014393343925476074,
- 0.01427353572845459,
- 0.014315520286560059,
- 0.014294015884399413,
- 0.013766624450683594,
- 0.013677568435668945,
- 0.013612031936645508,
- 0.013603839874267578,
- 0.013591551780700683,
- 0.013608960151672364,
- 0.013609984397888183,
- 0.013599743843078613,
- 0.013558783531188966,
- 0.013619199752807617,
- 0.013562879562377929,
- 0.01359769630432129,
- 0.01364684772491455,
- 0.013733887672424316,
- 0.013713408470153808,
- 0.013598719596862792,
- 0.013661184310913087,
- 0.013631487846374512,
- 0.013576191902160644,
- 0.013645824432373046,
- 0.01355673599243164,
- 0.013611007690429687,
- 0.013553664207458497,
- 0.014756863594055175,
- 0.014309375762939454,
- 0.014287872314453125,
- 0.014231552124023437
+ 0.014221311569213867,
+ 0.014520319938659668,
+ 0.014575615882873535,
+ 0.014973952293395995,
+ 0.014400511741638184,
+ 0.013952159881591797,
+ 0.014035072326660156,
+ 0.014115839958190919,
+ 0.014444543838500976,
+ 0.014616576194763184,
+ 0.014757887840270996,
+ 0.01466982364654541,
+ 0.014503935813903808,
+ 0.014529696464538574,
+ 0.01447116756439209,
+ 0.014598143577575684,
+ 0.01397555160522461,
+ 0.013925375938415528,
+ 0.014618623733520507,
+ 0.01467084789276123,
+ 0.01448140811920166,
+ 0.014569472312927247,
+ 0.014924799919128418,
+ 0.014369791984558105,
+ 0.014347295761108398,
+ 0.014313376426696778,
+ 0.014220288276672363,
+ 0.014478336334228516,
+ 0.014524383544921875,
+ 0.014963711738586426,
+ 0.015020031929016114,
+ 0.014154751777648926,
+ 0.01418943977355957,
+ 0.014354432106018066,
+ 0.014154751777648926,
+ 0.014267264366149902,
+ 0.014025728225708007,
+ 0.014110719680786133,
+ 0.014129119873046875,
+ 0.014379008293151856,
+ 0.014202879905700684,
+ 0.014017536163330077,
+ 0.01397152042388916,
+ 0.013832223892211915,
+ 0.014138367652893067,
+ 0.013792256355285644,
+ 0.01376460838317871,
+ 0.013728768348693847,
+ 0.01386188793182373,
+ 0.013935615539550781,
+ 0.014032896041870118,
+ 0.014019552230834962,
+ 0.013891551971435547,
+ 0.013945856094360352,
+ 0.013721599578857421
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 70.42906945819813
+ "value": 70.02906726946941
  },
  "energy": {
  "unit": "kWh",
- "cpu": 1.6723475499145676e-07,
- "ram": 9.13535853330141e-08,
- "gpu": 3.641747827323887e-07,
- "total": 6.227631230568596e-07
+ "cpu": 1.670953691189672e-07,
+ "ram": 9.112273394293878e-08,
+ "gpu": 3.571260573055578e-07,
+ "total": 6.153441603674637e-07
  },
  "efficiency": {
  "unit": "samples/kWh",
- "value": 1605746.973410142
+ "value": 1625106.8335528402
  }
  }
  }
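As a rough sanity check, the summary fields in the updated report can be recomputed from the raw values list. The sketch below assumes the file has been downloaded locally as benchmark.json and that the "forward" section sits directly at the top level of the JSON (the diff truncates the enclosing structure); the std-dev estimator used by optimum-benchmark may differ from the one shown.

import json
import statistics

# Path and nesting are assumptions for illustration; the diff above only shows
# the "forward" section, not the keys that wrap it in the full report.
with open("benchmark.json") as f:
    report = json.load(f)

latency = report["forward"]["latency"]
values = latency["values"]

count = len(values)                # expected: 70
total = sum(values)                # expected: ~0.999585 s
mean = total / count               # expected: ~0.0142798 s
stdev = statistics.pstdev(values)  # population std-dev assumed; estimator may differ
throughput = count / total         # expected: ~70.029 samples/s

print(f"count={count} total={total:.6f}s mean={mean:.6f}s "
      f"stdev={stdev:.6f}s throughput={throughput:.3f} samples/s")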