IlyasMoutawwakil HF staff committed on
Commit
92ea166
·
verified ·
1 Parent(s): 3fbaf7c

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub

Browse files
cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
@@ -85,7 +85,7 @@
85
  "gpu_count": 1,
86
  "gpu_vram_mb": 68702699520,
87
  "optimum_benchmark_version": "0.3.0",
88
- "optimum_benchmark_commit": "57f6495c03ea0fa48e157048c97add150dcd765c",
89
  "transformers_version": "4.42.3",
90
  "transformers_commit": null,
91
  "accelerate_version": "0.31.0",
@@ -104,102 +104,104 @@
104
  "forward": {
105
  "memory": {
106
  "unit": "MB",
107
- "max_ram": 1028.235264,
108
- "max_global_vram": 1122.873344,
109
- "max_process_vram": 229406.285824,
110
  "max_reserved": 773.849088,
111
  "max_allocated": 745.087488
112
  },
113
  "latency": {
114
  "unit": "s",
115
- "count": 74,
116
- "total": 0.9997663888931275,
117
- "mean": 0.013510356606663883,
118
- "stdev": 0.0005014043483774389,
119
- "p50": 0.01343504285812378,
120
- "p90": 0.013944986152648926,
121
- "p95": 0.014056968116760254,
122
- "p99": 0.01492750979423522,
123
  "values": [
124
- 0.013394164085388184,
125
- 0.013874635696411133,
126
- 0.014058631896972656,
127
- 0.013770797729492187,
128
- 0.013662479400634765,
129
- 0.013673998832702637,
130
- 0.013463923454284668,
131
- 0.013675759315490723,
132
- 0.013769678115844727,
133
- 0.013494161605834961,
134
- 0.013672879219055176,
135
- 0.013538320541381836,
136
- 0.013822957038879395,
137
- 0.013479601860046387,
138
- 0.013894474983215332,
139
- 0.013358485221862794,
140
- 0.013689679145812988,
141
- 0.013362805366516113,
142
- 0.013713357925415039,
143
- 0.014056072235107422,
144
- 0.01402119255065918,
145
- 0.016779224395751954,
146
- 0.013519441604614258,
147
- 0.013117209434509277,
148
- 0.013334164619445801,
149
- 0.01310664939880371,
150
- 0.013318486213684082,
151
- 0.013033049583435059,
152
- 0.01331720542907715,
153
- 0.013103770256042481,
154
- 0.013322484970092774,
155
- 0.013283125877380371,
156
- 0.013257685661315919,
157
- 0.013022809982299806,
158
- 0.01353640079498291,
159
- 0.01323544692993164,
160
- 0.01332264518737793,
161
- 0.013026651382446288,
162
- 0.013542001724243163,
163
- 0.013103609085083007,
164
- 0.013304085731506347,
165
- 0.013011931419372558,
166
- 0.013330486297607422,
167
- 0.013066969871520995,
168
- 0.013355765342712402,
169
- 0.013085370063781738,
170
- 0.013386484146118163,
171
- 0.013040410995483398,
172
- 0.013452082633972169,
173
- 0.013155609130859376,
174
- 0.013303924560546875,
175
- 0.01391911506652832,
176
- 0.014149991035461426,
177
- 0.013727437973022462,
178
- 0.014003753662109375,
179
- 0.013956073760986329,
180
- 0.013475603103637695,
181
- 0.01311448860168457,
182
- 0.013478001594543458,
183
- 0.013802797317504882,
184
- 0.014242629051208495,
185
- 0.01369239902496338,
186
- 0.013902154922485352,
187
- 0.013680719375610351,
188
- 0.013418003082275391,
189
- 0.013128409385681153,
190
- 0.013084568977355958,
191
- 0.013052570343017577,
192
- 0.013619440078735352,
193
- 0.013131768226623534,
194
- 0.013078968048095703,
195
- 0.013084568977355958,
196
- 0.013789517402648926,
197
- 0.013010170936584473
 
 
198
  ]
199
  },
200
  "throughput": {
201
  "unit": "samples/s",
202
- "value": 74.01729126133928
203
  },
204
  "energy": null,
205
  "efficiency": null
 
85
  "gpu_count": 1,
86
  "gpu_vram_mb": 68702699520,
87
  "optimum_benchmark_version": "0.3.0",
88
+ "optimum_benchmark_commit": "2a75c0bc0d007cc875fa0f75ca41d02e46f917be",
89
  "transformers_version": "4.42.3",
90
  "transformers_commit": null,
91
  "accelerate_version": "0.31.0",
 
104
  "forward": {
105
  "memory": {
106
  "unit": "MB",
107
+ "max_ram": 1028.399104,
108
+ "max_global_vram": 1122.865152,
109
+ "max_process_vram": 244172.865536,
110
  "max_reserved": 773.849088,
111
  "max_allocated": 745.087488
112
  },
113
  "latency": {
114
  "unit": "s",
115
+ "count": 76,
116
+ "total": 1.0025792407989498,
117
+ "mean": 0.013191832115775659,
118
+ "stdev": 0.000790626240655576,
119
+ "p50": 0.012864943981170655,
120
+ "p90": 0.014342621803283693,
121
+ "p95": 0.014600581407546998,
122
+ "p99": 0.015845580577850342,
123
  "values": [
124
+ 0.01539310073852539,
125
+ 0.012899824142456055,
126
+ 0.01292478370666504,
127
+ 0.012880463600158692,
128
+ 0.012773424148559571,
129
+ 0.012797904014587403,
130
+ 0.012578704833984374,
131
+ 0.012622224807739257,
132
+ 0.012654864311218261,
133
+ 0.012670225143432617,
134
+ 0.012578224182128906,
135
+ 0.01254974365234375,
136
+ 0.012712624549865723,
137
+ 0.012735664367675781,
138
+ 0.012594703674316407,
139
+ 0.012527504920959472,
140
+ 0.012499343872070313,
141
+ 0.012979984283447266,
142
+ 0.013120304107666016,
143
+ 0.013162384033203125,
144
+ 0.013270222663879394,
145
+ 0.01327342414855957,
146
+ 0.014731501579284668,
147
+ 0.017203020095825194,
148
+ 0.014605740547180175,
149
+ 0.014594382286071777,
150
+ 0.014277422904968261,
151
+ 0.014364301681518555,
152
+ 0.01434110164642334,
153
+ 0.013894702911376953,
154
+ 0.013113424301147461,
155
+ 0.012721424102783202,
156
+ 0.012693743705749511,
157
+ 0.012745104789733886,
158
+ 0.012914223670959473,
159
+ 0.012723343849182129,
160
+ 0.01272590446472168,
161
+ 0.012720944404602051,
162
+ 0.012911503791809081,
163
+ 0.012753265380859375,
164
+ 0.01276366424560547,
165
+ 0.012766544342041015,
166
+ 0.012821743965148926,
167
+ 0.012787823677062988,
168
+ 0.01315726375579834,
169
+ 0.012805264472961425,
170
+ 0.012788145065307618,
171
+ 0.012757104873657226,
172
+ 0.01278270435333252,
173
+ 0.013035344123840333,
174
+ 0.014598861694335937,
175
+ 0.014315662384033203,
176
+ 0.014344141960144044,
177
+ 0.014223182678222656,
178
+ 0.013690543174743653,
179
+ 0.014033422470092774,
180
+ 0.013344304084777832,
181
+ 0.013180624008178711,
182
+ 0.012780303955078126,
183
+ 0.012861264228820802,
184
+ 0.013120304107666016,
185
+ 0.012864784240722656,
186
+ 0.01284830379486084,
187
+ 0.01290558433532715,
188
+ 0.012782383918762207,
189
+ 0.012888143539428711,
190
+ 0.012867823600769043,
191
+ 0.012865103721618653,
192
+ 0.01318398380279541,
193
+ 0.012900303840637208,
194
+ 0.012855824470520019,
195
+ 0.012820943832397461,
196
+ 0.013116144180297851,
197
+ 0.012828463554382325,
198
+ 0.012854384422302247,
199
+ 0.012804143905639649
200
  ]
201
  },
202
  "throughput": {
203
  "unit": "samples/s",
204
+ "value": 75.80448198731504
205
  },
206
  "energy": null,
207
  "efficiency": null