IlyasMoutawwakil HF staff committed on
Commit
727e072
·
verified ·
1 Parent(s): c0142fb

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub

Browse files
cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
@@ -85,7 +85,7 @@
85
  "gpu_count": 1,
86
  "gpu_vram_mb": 68702699520,
87
  "optimum_benchmark_version": "0.3.0",
88
- "optimum_benchmark_commit": "748abd0c7ac21cfb1798768cad39007b466ce8e8",
89
  "transformers_version": "4.42.3",
90
  "transformers_commit": null,
91
  "accelerate_version": "0.31.0",
@@ -104,95 +104,108 @@
104
  "forward": {
105
  "memory": {
106
  "unit": "MB",
107
- "max_ram": 1028.460544,
108
- "max_global_vram": 1123.14368,
109
- "max_process_vram": 276662.41536,
110
  "max_reserved": 773.849088,
111
  "max_allocated": 745.087488
112
  },
113
  "latency": {
114
  "unit": "s",
115
- "count": 67,
116
- "total": 1.0090848541259767,
117
- "mean": 0.015060967972029504,
118
- "stdev": 0.0014870429124849275,
119
- "p50": 0.014308751106262207,
120
- "p90": 0.017886435699462893,
121
- "p95": 0.018507168769836425,
122
- "p99": 0.018923714866638183,
123
  "values": [
124
- 0.01903385543823242,
125
- 0.016030824661254883,
126
- 0.014022671699523925,
127
- 0.013920111656188965,
128
- 0.018259777069091796,
129
- 0.014048110961914063,
130
- 0.017115779876708984,
131
- 0.017829540252685547,
132
- 0.014253870964050293,
133
- 0.014158830642700196,
134
- 0.014098832130432129,
135
- 0.01418091106414795,
136
- 0.017971778869628908,
137
- 0.014257390975952149,
138
- 0.014470669746398926,
139
- 0.014515789985656738,
140
- 0.014157551765441895,
141
- 0.014105551719665528,
142
- 0.01875321578979492,
143
- 0.018263458251953125,
144
- 0.01469434928894043,
145
- 0.014303791046142577,
146
- 0.015027148246765137,
147
- 0.014399149894714355,
148
- 0.016383464813232423,
149
- 0.014485710144042969,
150
- 0.014212591171264648,
151
- 0.014007472038269042,
152
- 0.014445870399475098,
153
- 0.014500269889831543,
154
- 0.014097711563110352,
155
- 0.013969552040100098,
156
- 0.014308751106262207,
157
- 0.01423611068725586,
158
- 0.014012111663818359,
159
- 0.014119311332702637,
160
- 0.014296271324157716,
161
- 0.014033552169799804,
162
- 0.013967791557312011,
163
- 0.014141390800476075,
164
- 0.018611616134643554,
165
- 0.01423595142364502,
166
- 0.014150832176208496,
167
- 0.014101391792297363,
168
- 0.015172747611999512,
169
- 0.01693818283081055,
170
- 0.014188431739807128,
171
- 0.01400827121734619,
172
- 0.014094032287597657,
173
- 0.014092432022094727,
174
- 0.01391531276702881,
175
- 0.014048911094665528,
176
- 0.013854832649230956,
177
- 0.01419675064086914,
178
- 0.01448410987854004,
179
- 0.017009220123291015,
180
- 0.015424427032470702,
181
- 0.014780110359191894,
182
- 0.014669870376586914,
183
- 0.014519309997558594,
184
- 0.014515150070190429,
185
- 0.018866975784301757,
186
- 0.014893387794494629,
187
- 0.01589050483703613,
188
- 0.015142348289489746,
189
- 0.014810829162597657,
190
- 0.01737801933288574
 
 
 
 
 
 
 
 
 
 
 
 
 
191
  ]
192
  },
193
  "throughput": {
194
  "unit": "samples/s",
195
- "value": 66.39679480476629
196
  },
197
  "energy": null,
198
  "efficiency": null
 
85
  "gpu_count": 1,
86
  "gpu_vram_mb": 68702699520,
87
  "optimum_benchmark_version": "0.3.0",
88
+ "optimum_benchmark_commit": "19eeac52e408e408898ac1fac3d9abefced0131b",
89
  "transformers_version": "4.42.3",
90
  "transformers_commit": null,
91
  "accelerate_version": "0.31.0",
 
104
  "forward": {
105
  "memory": {
106
  "unit": "MB",
107
+ "max_ram": 1028.4032,
108
+ "max_global_vram": 1122.852864,
109
+ "max_process_vram": 229403.394048,
110
  "max_reserved": 773.849088,
111
  "max_allocated": 745.087488
112
  },
113
  "latency": {
114
  "unit": "s",
115
+ "count": 80,
116
+ "total": 1.0011890211105348,
117
+ "mean": 0.012514862763881685,
118
+ "stdev": 0.0006429843948469642,
119
+ "p50": 0.012178761959075928,
120
+ "p90": 0.013254919147491455,
121
+ "p95": 0.013309694814682006,
122
+ "p99": 0.014329231863021836,
123
  "values": [
124
+ 0.012785240173339844,
125
+ 0.013306038856506348,
126
+ 0.013267159461975098,
127
+ 0.013379158020019532,
128
+ 0.013194997787475586,
129
+ 0.013096439361572265,
130
+ 0.01311371898651123,
131
+ 0.013078518867492676,
132
+ 0.01315003776550293,
133
+ 0.013253559112548828,
134
+ 0.01318443775177002,
135
+ 0.013267958641052246,
136
+ 0.013268918991088867,
137
+ 0.0132017183303833,
138
+ 0.016255308151245117,
139
+ 0.012678680419921875,
140
+ 0.01222012233734131,
141
+ 0.012156441688537597,
142
+ 0.012147162437438965,
143
+ 0.012160282135009765,
144
+ 0.01214588165283203,
145
+ 0.012199321746826171,
146
+ 0.012087641716003418,
147
+ 0.012099481582641602,
148
+ 0.012152762413024902,
149
+ 0.012147642135620118,
150
+ 0.012128602027893067,
151
+ 0.01218876075744629,
152
+ 0.012124602317810058,
153
+ 0.012195322036743164,
154
+ 0.012121882438659668,
155
+ 0.012302680969238282,
156
+ 0.012087962150573731,
157
+ 0.012196601867675782,
158
+ 0.012106842041015625,
159
+ 0.012087162017822266,
160
+ 0.012161882400512695,
161
+ 0.012108121871948242,
162
+ 0.012102521896362305,
163
+ 0.012193881034851074,
164
+ 0.012126042366027831,
165
+ 0.012114521980285644,
166
+ 0.012175642013549804,
167
+ 0.012138201713562011,
168
+ 0.012116601943969726,
169
+ 0.012187642097473144,
170
+ 0.012132122039794921,
171
+ 0.01208684253692627,
172
+ 0.012225561141967774,
173
+ 0.012076762199401856,
174
+ 0.012105081558227539,
175
+ 0.013238199234008789,
176
+ 0.012900918960571288,
177
+ 0.012979958534240723,
178
+ 0.013027959823608399,
179
+ 0.01381723690032959,
180
+ 0.013689236640930176,
181
+ 0.013015479087829589,
182
+ 0.01295915985107422,
183
+ 0.013038199424743652,
184
+ 0.012882518768310546,
185
+ 0.012609881401062012,
186
+ 0.01219900131225586,
187
+ 0.012149082183837891,
188
+ 0.012119802474975586,
189
+ 0.012108121871948242,
190
+ 0.01211372184753418,
191
+ 0.012101881980895996,
192
+ 0.012181881904602052,
193
+ 0.012090361595153809,
194
+ 0.012109561920166016,
195
+ 0.012098201751708984,
196
+ 0.012190201759338378,
197
+ 0.0121049222946167,
198
+ 0.012116282463073731,
199
+ 0.012109561920166016,
200
+ 0.012130842208862305,
201
+ 0.012136281967163086,
202
+ 0.012258842468261718,
203
+ 0.012121240615844726
204
  ]
205
  },
206
  "throughput": {
207
  "unit": "samples/s",
208
+ "value": 79.90499127853272
209
  },
210
  "energy": null,
211
  "efficiency": null