IlyasMoutawwakil committed (verified) · Commit 6e82f1c · Parent: c782f4f

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub

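The commit message refers to an upload done with huggingface_hub. As a minimal sketch (not the exact command used for this commit), such a file can be pushed with `HfApi.upload_file`; the `repo_id` and local path below are placeholders, not the actual values behind this commit:

```python
# Hedged sketch: push a locally generated benchmark.json to the same path in a Hub repo.
# repo_id, repo_type and the local path are assumptions, not taken from this commit.
from huggingface_hub import HfApi

api = HfApi()  # uses the token from `huggingface-cli login` or the HF_TOKEN env var
api.upload_file(
    path_or_fileobj="benchmark.json",  # local result written by optimum-benchmark
    path_in_repo="cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json",
    repo_id="<user>/<benchmark-results-repo>",  # placeholder repo id
    repo_type="dataset",                        # assumption: results stored in a dataset repo
    commit_message="Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub",
)
```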
cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
@@ -3,7 +3,7 @@
  "name": "cuda_inference_transformers_token-classification_microsoft/deberta-v3-base",
  "backend": {
  "name": "pytorch",
- "version": "2.3.1+rocm5.7",
+ "version": "2.4.0+rocm6.1",
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
  "task": "token-classification",
  "library": "transformers",
@@ -11,7 +11,7 @@
  "model": "microsoft/deberta-v3-base",
  "processor": "microsoft/deberta-v3-base",
  "device": "cuda",
- "device_ids": "4",
+ "device_ids": "5",
  "seed": 42,
  "inter_op_num_threads": null,
  "intra_op_num_threads": null,
@@ -111,24 +111,24 @@
  "load": {
  "memory": {
  "unit": "MB",
- "max_ram": 1017.208832,
- "max_global_vram": 1060.777984,
- "max_process_vram": 45025.11616,
+ "max_ram": 1377.009664,
+ "max_global_vram": 11.599872,
+ "max_process_vram": 0.0,
  "max_reserved": 773.849088,
  "max_allocated": 736.603648
  },
  "latency": {
  "unit": "s",
  "count": 1,
- "total": 7.71534228515625,
- "mean": 7.71534228515625,
+ "total": 8.190462890625,
+ "mean": 8.190462890625,
  "stdev": 0.0,
- "p50": 7.71534228515625,
- "p90": 7.71534228515625,
- "p95": 7.71534228515625,
- "p99": 7.71534228515625,
+ "p50": 8.190462890625,
+ "p90": 8.190462890625,
+ "p95": 8.190462890625,
+ "p99": 8.190462890625,
  "values": [
- 7.71534228515625
+ 8.190462890625
  ]
  },
  "throughput": null,
@@ -138,89 +138,91 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 1177.714688,
- "max_global_vram": 1142.390784,
- "max_process_vram": 297258.954752,
+ "max_ram": 1540.829184,
+ "max_global_vram": 12.058624,
+ "max_process_vram": 0.0,
  "max_reserved": 773.849088,
  "max_allocated": 745.086976
  },
  "latency": {
  "unit": "s",
- "count": 61,
- "total": 1.0097399120330806,
- "mean": 0.016553113312017725,
- "stdev": 0.011588046137302705,
- "p50": 0.01475994873046875,
- "p90": 0.015008427619934083,
- "p95": 0.015059787750244141,
- "p99": 0.06291684494018544,
+ "count": 63,
+ "total": 1.0006453371047972,
+ "mean": 0.01588325931912377,
+ "stdev": 0.0009837273592098717,
+ "p50": 0.015775140762329103,
+ "p90": 0.01609520263671875,
+ "p95": 0.016193010520935056,
+ "p99": 0.018949126205444353,
  "values": [
- 0.01521962833404541,
- 0.01500634765625,
- 0.014770190238952636,
- 0.014736108779907227,
- 0.014700909614562989,
- 0.01472362995147705,
- 0.014717069625854493,
- 0.014766828536987305,
- 0.014766030311584473,
- 0.014822989463806153,
- 0.014778188705444336,
- 0.014818989753723144,
- 0.014764429092407227,
- 0.014746829986572266,
- 0.014748429298400879,
- 0.014832268714904786,
- 0.014717709541320801,
- 0.014769389152526855,
- 0.014758350372314454,
- 0.014705549240112305,
- 0.014747630119323731,
- 0.01474026870727539,
- 0.014983469009399415,
- 0.014733389854431152,
- 0.014773070335388183,
- 0.10393405151367187,
- 0.014938029289245605,
- 0.014758668899536132,
- 0.014791149139404297,
- 0.014778349876403809,
- 0.014675470352172851,
- 0.014684909820556641,
- 0.014213871955871583,
- 0.0144543514251709,
- 0.014863788604736328,
- 0.014828429222106934,
- 0.01473738956451416,
- 0.01477514934539795,
- 0.01495802879333496,
- 0.014752589225769042,
- 0.01459867000579834,
- 0.014703149795532227,
- 0.014664270401000977,
- 0.01503578758239746,
- 0.01474170970916748,
- 0.01469770908355713,
- 0.014707630157470702,
- 0.014667790412902832,
- 0.015008427619934083,
- 0.035572040557861326,
- 0.014434029579162597,
- 0.014114192008972168,
- 0.014060751914978027,
- 0.015059787750244141,
- 0.014809228897094727,
- 0.014766989707946777,
- 0.01475994873046875,
- 0.0147295503616333,
- 0.015059787750244141,
- 0.014786190032958984,
- 0.01477034854888916
+ 0.01614185905456543,
+ 0.016014339447021485,
+ 0.016331939697265624,
+ 0.016006980895996094,
+ 0.015432901382446289,
+ 0.015956899642944337,
+ 0.015893699645996094,
+ 0.015879940032958985,
+ 0.015903618812561034,
+ 0.016152259826660156,
+ 0.0161021785736084,
+ 0.016028579711914064,
+ 0.0159512996673584,
+ 0.016067298889160156,
+ 0.02321927261352539,
+ 0.016217378616333007,
+ 0.01569849967956543,
+ 0.01587273979187012,
+ 0.015801700592041015,
+ 0.01574666118621826,
+ 0.01574697971343994,
+ 0.01571738052368164,
+ 0.015769700050354003,
+ 0.01574026107788086,
+ 0.01585706043243408,
+ 0.01584202003479004,
+ 0.015812740325927735,
+ 0.015872260093688965,
+ 0.015771619796752928,
+ 0.015911620140075685,
+ 0.015861379623413087,
+ 0.015814339637756348,
+ 0.015850020408630372,
+ 0.015679780960083007,
+ 0.01571130084991455,
+ 0.01572122097015381,
+ 0.015709060668945313,
+ 0.01575834083557129,
+ 0.015686500549316407,
+ 0.015741540908813478,
+ 0.015740740776062013,
+ 0.0158583402633667,
+ 0.01619753837585449,
+ 0.01575193977355957,
+ 0.01576393985748291,
+ 0.01572778129577637,
+ 0.015752579689025877,
+ 0.015782659530639648,
+ 0.015775140762329103,
+ 0.015734339714050295,
+ 0.015242182731628418,
+ 0.01505722427368164,
+ 0.014731624603271484,
+ 0.01424954605102539,
+ 0.015184421539306641,
+ 0.015679620742797853,
+ 0.015643780708312988,
+ 0.015804100036621093,
+ 0.015719619750976563,
+ 0.015759461402893065,
+ 0.015775139808654786,
+ 0.015917380332946776,
+ 0.01580105972290039
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 60.411596365620845
+ "value": 62.95936998245565
  },
  "energy": null,
  "efficiency": null