IlyasMoutawwakil committed
Commit 7e1d12c · verified · 1 Parent(s): 4f0ee27

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub

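The commit message above notes that the file was pushed with huggingface_hub. As a rough illustration only, here is a minimal sketch of such an upload using the library's standard HfApi.upload_file call; the repo_id and repo_type below are placeholders and assumptions, not values taken from this commit:

```python
from huggingface_hub import HfApi

api = HfApi()  # assumes a valid Hugging Face token is already configured locally
api.upload_file(
    # local benchmark result file produced by optimum-benchmark
    path_or_fileobj="benchmark.json",
    # mirrors the path shown in this commit
    path_in_repo="cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json",
    repo_id="<namespace>/<benchmark-results-repo>",  # placeholder, not from this commit
    repo_type="dataset",  # assumption; adjust to the actual repo type
    commit_message="Upload benchmark.json with huggingface_hub",
)
```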
cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
@@ -3,7 +3,7 @@
  "name": "cuda_inference_transformers_token-classification_microsoft/deberta-v3-base",
  "backend": {
  "name": "pytorch",
- "version": "2.3.1+rocm5.7",
+ "version": "2.4.1+rocm6.1",
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
  "task": "token-classification",
  "library": "transformers",
@@ -11,7 +11,7 @@
  "model": "microsoft/deberta-v3-base",
  "processor": "microsoft/deberta-v3-base",
  "device": "cuda",
- "device_ids": "5",
+ "device_ids": "4",
  "seed": 42,
  "inter_op_num_threads": null,
  "intra_op_num_threads": null,
@@ -111,24 +111,24 @@
  "load": {
  "memory": {
  "unit": "MB",
- "max_ram": 1016.7296,
+ "max_ram": 1377.185792,
  "max_global_vram": 68702.69952,
- "max_process_vram": 44680.245248,
+ "max_process_vram": 0.0,
  "max_reserved": 773.849088,
  "max_allocated": 736.603648
  },
  "latency": {
  "unit": "s",
  "count": 1,
- "total": 7.7063857421875,
- "mean": 7.7063857421875,
+ "total": 8.065130859375,
+ "mean": 8.065130859375,
  "stdev": 0.0,
- "p50": 7.7063857421875,
- "p90": 7.7063857421875,
- "p95": 7.7063857421875,
- "p99": 7.7063857421875,
+ "p50": 8.065130859375,
+ "p90": 8.065130859375,
+ "p95": 8.065130859375,
+ "p99": 8.065130859375,
  "values": [
- 7.7063857421875
+ 8.065130859375
  ]
  },
  "throughput": null,
@@ -138,104 +138,98 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 1177.337856,
+ "max_ram": 1541.083136,
  "max_global_vram": 68702.69952,
- "max_process_vram": 289552.941056,
+ "max_process_vram": 0.0,
  "max_reserved": 773.849088,
  "max_allocated": 745.086976
  },
  "latency": {
  "unit": "s",
- "count": 76,
- "total": 1.0028019676208497,
- "mean": 0.013194762731853285,
- "stdev": 0.0019164331630223362,
- "p50": 0.012694033622741699,
- "p90": 0.01402186918258667,
- "p95": 0.014301587820053101,
- "p99": 0.02423795175552368,
+ "count": 70,
+ "total": 1.002358015060425,
+ "mean": 0.014319400215148928,
+ "stdev": 0.0006803642423436568,
+ "p50": 0.014038350105285645,
+ "p90": 0.015039194393157958,
+ "p95": 0.015492024374008177,
+ "p99": 0.01647180494308472,
  "values": [
- 0.014042828559875488,
- 0.01340139102935791,
- 0.013436271667480468,
- 0.012806994438171387,
- 0.01258315372467041,
- 0.012446354866027833,
- 0.012316274642944335,
- 0.012202515602111816,
- 0.012153236389160157,
- 0.012184435844421387,
- 0.012258835792541504,
- 0.012274194717407226,
- 0.012225715637207032,
- 0.024384231567382814,
- 0.015178984642028808,
- 0.012629714965820313,
- 0.012761874198913574,
- 0.012524595260620117,
- 0.01251003360748291,
- 0.012561875343322754,
- 0.012767953872680664,
- 0.012549554824829102,
- 0.012528915405273438,
- 0.012670674324035645,
- 0.012992113113403321,
- 0.0125346736907959,
- 0.012563315391540528,
- 0.012536754608154297,
- 0.012779634475708007,
- 0.012817554473876953,
- 0.012728914260864258,
- 0.013284751892089844,
- 0.012474354743957519,
- 0.012747153282165528,
- 0.012565875053405761,
- 0.01253403377532959,
- 0.012576913833618164,
- 0.012802193641662598,
- 0.012573554039001464,
- 0.01253883457183838,
- 0.012569713592529296,
- 0.012747794151306153,
- 0.012542513847351075,
- 0.0125761137008667,
- 0.012591634750366211,
- 0.012813873291015625,
- 0.012598194122314454,
- 0.01255355453491211,
- 0.012564594268798829,
- 0.012828912734985352,
- 0.012583314895629883,
- 0.012591954231262206,
- 0.01259947395324707,
- 0.024189191818237303,
- 0.012722993850708008,
- 0.012642514228820801,
- 0.012590675354003907,
- 0.012821714401245117,
- 0.0132151517868042,
- 0.014285227775573731,
- 0.013914190292358398,
- 0.013520111083984375,
- 0.013495791435241699,
- 0.014056428909301758,
- 0.014247149467468261,
- 0.01435066795349121,
- 0.013777230262756348,
- 0.013687311172485351,
- 0.014000909805297852,
- 0.013314830780029296,
- 0.013548590660095216,
- 0.012729714393615723,
- 0.012974513053894044,
- 0.012716114044189453,
- 0.012702834129333497,
- 0.012685233116149903
+ 0.015818822860717775,
+ 0.015087306022644043,
+ 0.015037066459655761,
+ 0.015058345794677734,
+ 0.014903467178344726,
+ 0.014866506576538086,
+ 0.014689066886901855,
+ 0.014578508377075196,
+ 0.014709226608276367,
+ 0.014711788177490234,
+ 0.01469018840789795,
+ 0.01792521667480469,
+ 0.014794986724853515,
+ 0.014096910476684571,
+ 0.014523788452148438,
+ 0.014444428443908692,
+ 0.014253389358520508,
+ 0.013838669776916505,
+ 0.013970829963684083,
+ 0.013931949615478516,
+ 0.013970990180969239,
+ 0.013987629890441895,
+ 0.014196589469909668,
+ 0.013953709602355957,
+ 0.013974989891052246,
+ 0.013943790435791015,
+ 0.01396714973449707,
+ 0.013725070953369141,
+ 0.013719310760498047,
+ 0.013715630531311035,
+ 0.013945550918579101,
+ 0.014379308700561524,
+ 0.013732431411743164,
+ 0.014287788391113282,
+ 0.013757070541381837,
+ 0.014443147659301757,
+ 0.013744271278381348,
+ 0.013926989555358886,
+ 0.013734190940856934,
+ 0.01461402702331543,
+ 0.014472747802734375,
+ 0.014646347999572753,
+ 0.014762507438659669,
+ 0.01481098747253418,
+ 0.014607148170471191,
+ 0.014412748336791991,
+ 0.014089070320129394,
+ 0.013937549591064453,
+ 0.013956589698791504,
+ 0.013929229736328125,
+ 0.013915630340576171,
+ 0.013934670448303222,
+ 0.013932270050048828,
+ 0.01398507022857666,
+ 0.01409818935394287,
+ 0.013771950721740723,
+ 0.013763310432434082,
+ 0.01372123146057129,
+ 0.013732110977172852,
+ 0.013730510711669922,
+ 0.013755471229553222,
+ 0.01371915054321289,
+ 0.01374747085571289,
+ 0.013767951011657715,
+ 0.013760910987854005,
+ 0.014479787826538085,
+ 0.015697863578796385,
+ 0.015665543556213378,
+ 0.015279945373535156,
+ 0.014623948097229004
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 75.7876454713289
+ "value": 69.8353272466028
  },
  "energy": null,
  "efficiency": null