IlyasMoutawwakil (HF staff) committed
Commit 8eb44c0 · verified · 1 Parent(s): 06ffce7

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub
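The commit message indicates the file was pushed programmatically with the `huggingface_hub` client. As a rough illustration (not the exact script used for this commit), such an upload can be done with `HfApi.upload_file`; the local path, `repo_id`, and `repo_type` below are placeholders or assumptions, since none of them are shown on this page:

```python
from huggingface_hub import HfApi

api = HfApi()  # uses the token from `huggingface-cli login` or the HF_TOKEN env var

api.upload_file(
    # Local file to push; placeholder path, the real source path is not shown here.
    path_or_fileobj="benchmark.json",
    # Destination path inside the repo, taken from the commit message.
    path_in_repo="cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json",
    # Placeholder repository id; the target repo is not named on this page.
    repo_id="<namespace>/<repo-name>",
    # Assumption: benchmark results like this are typically stored in a dataset repo.
    repo_type="dataset",
    commit_message="Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub",
)
```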

cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
@@ -85,7 +85,7 @@
  "gpu_count": 1,
  "gpu_vram_mb": 68702699520,
  "optimum_benchmark_version": "0.2.1",
- "optimum_benchmark_commit": "d920fe9626db1e7915f6d3574b5b54b0159cd100",
+ "optimum_benchmark_commit": "8320ce6f24eaa5099bd3b49d8d44c0c1368af14b",
  "transformers_version": "4.42.3",
  "transformers_commit": null,
  "accelerate_version": "0.31.0",
@@ -104,93 +104,95 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 1029.451776,
- "max_global_vram": 1122.840576,
- "max_process_vram": 229400.526848,
+ "max_ram": 1029.742592,
+ "max_global_vram": 1122.861056,
+ "max_process_vram": 241220.165632,
  "max_reserved": 773.849088,
  "max_allocated": 745.087488
  },
  "latency": {
  "unit": "s",
- "count": 65,
- "total": 1.0023763360977176,
- "mean": 0.015421174401503344,
- "stdev": 0.0009725486917060441,
- "p50": 0.015084140777587891,
- "p90": 0.016410122680664063,
- "p95": 0.017167145919799804,
- "p99": 0.01833729537963867,
+ "count": 67,
+ "total": 1.004321944236755,
+ "mean": 0.01498987976472769,
+ "stdev": 0.0006567642830083582,
+ "p50": 0.014843364715576172,
+ "p90": 0.015546917152404785,
+ "p95": 0.015827749347686767,
+ "p99": 0.017282242622375495,
  "values": [
- 0.016211980819702147,
- 0.017384937286376953,
- 0.01732221794128418,
- 0.01730653762817383,
- 0.02003037643432617,
- 0.01660957908630371,
- 0.016396299362182617,
- 0.016387819290161133,
- 0.016391979217529298,
- 0.016304140090942384,
- 0.01629981994628906,
- 0.016318059921264647,
- 0.016305259704589844,
- 0.016267019271850584,
- 0.016171499252319334,
- 0.015441740036010742,
- 0.015253741264343262,
- 0.014663182258605956,
- 0.014491501808166505,
- 0.014649742126464844,
- 0.014504461288452149,
- 0.014698862075805664,
- 0.014474861145019531,
- 0.014606382369995117,
- 0.014471342086791992,
- 0.014731500625610352,
- 0.014703981399536133,
- 0.01494446086883545,
- 0.014670381546020508,
- 0.014910861015319824,
- 0.014859980583190917,
- 0.015252620697021484,
- 0.015398059844970703,
- 0.015402701377868653,
- 0.015084140777587891,
- 0.014751661300659179,
- 0.01535166072845459,
- 0.015242059707641602,
- 0.01475918197631836,
- 0.014674860954284668,
- 0.01453982162475586,
- 0.014873101234436035,
- 0.014596302032470703,
- 0.016488779067993165,
- 0.015783820152282715,
- 0.01641933822631836,
- 0.015898059844970704,
- 0.015783980369567872,
- 0.01557886028289795,
- 0.015596940040588378,
- 0.015633581161499023,
- 0.015164939880371094,
- 0.014705262184143066,
- 0.014601740837097168,
- 0.014633102416992188,
- 0.01514750099182129,
- 0.01472606086730957,
- 0.01471710205078125,
- 0.014793421745300293,
- 0.014857420921325684,
- 0.015078540802001952,
- 0.014725419998168945,
- 0.01470334243774414,
- 0.015024460792541505,
- 0.014603981018066406
+ 0.014245445251464844,
+ 0.015460965156555175,
+ 0.01557264518737793,
+ 0.015479525566101075,
+ 0.015127365112304687,
+ 0.015136804580688477,
+ 0.01536784553527832,
+ 0.01539200496673584,
+ 0.015438725471496582,
+ 0.015319366455078125,
+ 0.015598085403442383,
+ 0.01576256561279297,
+ 0.015855685234069824,
+ 0.015352484703063964,
+ 0.015401605606079102,
+ 0.015087204933166504,
+ 0.01521888542175293,
+ 0.015342406272888184,
+ 0.015503684997558593,
+ 0.018612485885620117,
+ 0.015285285949707031,
+ 0.014542405128479003,
+ 0.01475440502166748,
+ 0.014465444564819336,
+ 0.014656965255737304,
+ 0.014487685203552246,
+ 0.01467088508605957,
+ 0.014509124755859375,
+ 0.014721124649047852,
+ 0.014514884948730468,
+ 0.014457284927368164,
+ 0.01455040454864502,
+ 0.014685444831848145,
+ 0.014502566337585449,
+ 0.014409124374389648,
+ 0.014844164848327637,
+ 0.014597125053405761,
+ 0.014504324913024903,
+ 0.014852484703063966,
+ 0.014507844924926757,
+ 0.014452804565429687,
+ 0.014865124702453613,
+ 0.014516645431518555,
+ 0.014874725341796876,
+ 0.016596965789794922,
+ 0.015326564788818359,
+ 0.015091205596923828,
+ 0.01598528480529785,
+ 0.015136164665222168,
+ 0.015256325721740722,
+ 0.015529765129089355,
+ 0.015398565292358399,
+ 0.01463376522064209,
+ 0.0147574462890625,
+ 0.014427845001220702,
+ 0.014506244659423828,
+ 0.014448325157165527,
+ 0.014458085060119629,
+ 0.014843364715576172,
+ 0.014482725143432617,
+ 0.014437125205993653,
+ 0.01451536464691162,
+ 0.014464324951171875,
+ 0.014994885444641113,
+ 0.014488965034484864,
+ 0.01456528377532959,
+ 0.014473284721374512
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 64.84590433673549
+ "value": 66.71167585700553
  },
  "energy": null,
  "efficiency": null