IlyasMoutawwakil committed (verified)
Commit: 2afbdb4
Parent(s): 6e63e3f

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub

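The commit message states the file was pushed with huggingface_hub. A minimal sketch of such an upload, assuming a hypothetical target repo_id (the repository is not shown in this commit):

from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="benchmark.json",  # local result file
    path_in_repo="cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json",
    repo_id="user/benchmark-results",  # hypothetical repo_id, not taken from this commit
    repo_type="dataset",               # assumption: results live in a dataset repo
    commit_message="Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub",
)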
cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
@@ -85,7 +85,7 @@
     "gpu_count": 1,
     "gpu_vram_mb": 68702699520,
     "optimum_benchmark_version": "0.2.1",
-    "optimum_benchmark_commit": "dd02f26cb819965cbf86e16d9ce013cddc3b86af",
+    "optimum_benchmark_commit": "f85421c91b2a2b1bc0a30ceba32f9580499bf023",
     "transformers_version": "4.42.3",
     "transformers_commit": null,
     "accelerate_version": "0.31.0",
@@ -104,96 +104,105 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 1028.022272,
-            "max_global_vram": 1122.852864,
-            "max_process_vram": 268783.734784,
+            "max_ram": 1028.100096,
+            "max_global_vram": 1122.865152,
+            "max_process_vram": 254019.608576,
             "max_reserved": 773.849088,
             "max_allocated": 745.087488
         },
         "latency": {
             "unit": "s",
-            "count": 68,
-            "total": 1.0107849054336548,
-            "mean": 0.014864483903436098,
-            "stdev": 0.0003872017533225853,
-            "p50": 0.01473390531539917,
-            "p90": 0.015467776298522948,
-            "p95": 0.015712207651138305,
-            "p99": 0.016107189245224,
+            "count": 77,
+            "total": 1.0008762855529783,
+            "mean": 0.012998393318869851,
+            "stdev": 0.00048766287357161916,
+            "p50": 0.012905718803405762,
+            "p90": 0.01331909294128418,
+            "p95": 0.013706323814392091,
+            "p99": 0.014730467872619621,
             "values": [
-                0.014805424690246581,
-                0.015896464347839357,
-                0.015785584449768065,
-                0.01577582359313965,
-                0.015550864219665528,
-                0.015594063758850097,
-                0.015474385261535644,
-                0.015342864036560058,
-                0.016535024642944336,
-                0.015099505424499511,
-                0.014473265647888183,
-                0.014701105117797852,
-                0.014919025421142578,
-                0.014784145355224609,
-                0.014535184860229491,
-                0.014651185035705566,
-                0.014759345054626465,
-                0.015059664726257324,
-                0.014775984764099121,
-                0.0149495849609375,
-                0.014709585189819336,
-                0.014911825180053712,
-                0.014657905578613281,
-                0.014816946029663085,
-                0.014672466278076172,
-                0.014790064811706543,
-                0.014650384902954101,
-                0.014638705253601074,
-                0.014822864532470703,
-                0.014621105194091797,
-                0.014539345741271973,
-                0.014598384857177734,
-                0.014576945304870605,
-                0.014799345016479492,
-                0.014570706367492676,
-                0.014556144714355469,
-                0.014572944641113281,
-                0.015010544776916504,
-                0.014599505424499512,
-                0.01461982536315918,
-                0.014587825775146484,
-                0.014562225341796875,
-                0.014602544784545899,
-                0.01496766471862793,
-                0.014579665184020997,
-                0.014552946090698243,
-                0.014530705451965333,
-                0.014574225425720216,
-                0.01454494571685791,
-                0.014970065116882324,
-                0.01460030460357666,
-                0.014638065338134765,
-                0.01464494514465332,
-                0.01465806484222412,
-                0.015000784873962402,
-                0.014724465370178222,
-                0.014936305046081544,
-                0.015199504852294921,
-                0.015066385269165039,
-                0.015464943885803223,
-                0.01487534523010254,
-                0.014795024871826172,
-                0.014756464958190918,
-                0.01494190502166748,
-                0.014643025398254395,
-                0.014743345260620117,
-                0.014703506469726563,
-                0.014715664863586426
+                0.012619158744812013,
+                0.013092918395996094,
+                0.013208276748657226,
+                0.013267156600952148,
+                0.012979957580566406,
+                0.012979957580566406,
+                0.012908597946166993,
+                0.012972437858581543,
+                0.012967477798461914,
+                0.012999797821044922,
+                0.012904758453369141,
+                0.01302987766265869,
+                0.01311131763458252,
+                0.01323195743560791,
+                0.013102197647094726,
+                0.0130237979888916,
+                0.015895149230957032,
+                0.013189557075500489,
+                0.012975478172302246,
+                0.013009878158569336,
+                0.012751479148864747,
+                0.012867798805236816,
+                0.012817399024963379,
+                0.013017397880554199,
+                0.012595479011535644,
+                0.01273483943939209,
+                0.012516119956970214,
+                0.012825878143310546,
+                0.012574199676513673,
+                0.012755799293518066,
+                0.012631158828735351,
+                0.012677079200744628,
+                0.01280203914642334,
+                0.012589078903198242,
+                0.012847477912902833,
+                0.012650839805603028,
+                0.012592920303344727,
+                0.012868277549743653,
+                0.012636439323425293,
+                0.012609720230102539,
+                0.012850198745727539,
+                0.01266347885131836,
+                0.012623958587646485,
+                0.012880438804626465,
+                0.012787158012390137,
+                0.012654998779296874,
+                0.012905718803405762,
+                0.01265403938293457,
+                0.012631158828735351,
+                0.01263275909423828,
+                0.014239953994750977,
+                0.014199633598327637,
+                0.014362673759460449,
+                0.013413235664367676,
+                0.01319915771484375,
+                0.013314996719360352,
+                0.012665558815002441,
+                0.012628119468688965,
+                0.013297877311706542,
+                0.012941078186035156,
+                0.013302996635437011,
+                0.013122838020324707,
+                0.01279883861541748,
+                0.013325237274169921,
+                0.012944917678833007,
+                0.01268843936920166,
+                0.012698518753051757,
+                0.01296331787109375,
+                0.013264436721801758,
+                0.013582996368408203,
+                0.013167158126831054,
+                0.013347637176513672,
+                0.013112277030944823,
+                0.012727958679199219,
+                0.013050678253173829,
+                0.012698198318481445,
+                0.012702519416809083
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 67.27445140351213
+            "value": 76.93258508713485
         },
         "energy": null,
         "efficiency": null