IlyasMoutawwakil (HF staff) committed
Commit caaaa54 · verified · 1 Parent(s): 01c53ed

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub
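As the commit message notes, the file was pushed with the huggingface_hub client. A minimal sketch of such an upload, assuming a placeholder `repo_id` and a dataset-type results repo (neither is shown in this commit view):

```python
# Sketch only: repo_id and repo_type are assumptions, not taken from this commit.
from huggingface_hub import HfApi

api = HfApi()  # reads the token saved by `huggingface-cli login` by default
api.upload_file(
    path_or_fileobj="benchmark.json",  # local benchmark result produced by optimum-benchmark
    path_in_repo="cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json",
    repo_id="org/benchmark-results",   # hypothetical target repository
    repo_type="dataset",               # assumption: results are stored in a dataset repo
    commit_message="Upload benchmark.json with huggingface_hub",
)
```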

cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
@@ -85,8 +85,8 @@
  "gpu_count": 1,
  "gpu_vram_mb": 68702699520,
  "optimum_benchmark_version": "0.2.1",
- "optimum_benchmark_commit": "1981150adc21fc49153cb832dbc6f0ecca02bc2a",
- "transformers_version": "4.41.0",
+ "optimum_benchmark_commit": "347e13ca9f7f904f55669603cfb9f0b6c7e8672c",
+ "transformers_version": "4.41.1",
  "transformers_commit": null,
  "accelerate_version": "0.30.1",
  "accelerate_commit": null,
@@ -104,99 +104,101 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 1027.723264,
+ "max_ram": 1027.145728,
  "max_global_vram": 2103.222272,
- "max_process_vram": 258937.07776,
+ "max_process_vram": 248110.108672,
  "max_reserved": 773.849088,
  "max_allocated": 745.087488
  },
  "latency": {
  "unit": "s",
- "count": 71,
- "total": 1.0006878757476811,
- "mean": 0.014094195433065925,
- "stdev": 0.00042216359736077654,
- "p50": 0.014068644523620605,
- "p90": 0.014357125282287598,
- "p95": 0.014371204853057862,
- "p99": 0.015403797626495354,
+ "count": 73,
+ "total": 1.0082166709899902,
+ "mean": 0.013811187273835483,
+ "stdev": 0.000726266533702998,
+ "p50": 0.013699226379394532,
+ "p90": 0.014690171813964844,
+ "p95": 0.01480133991241455,
+ "p99": 0.01564074661254883,
  "values": [
- 0.01427584457397461,
- 0.013833285331726074,
- 0.01394608497619629,
- 0.01395744514465332,
- 0.013907364845275879,
- 0.01390512466430664,
- 0.013843045234680176,
- 0.013790884971618652,
- 0.013685444831848144,
- 0.013641124725341796,
- 0.013612165451049804,
- 0.013642245292663575,
- 0.013675363540649414,
- 0.013719044685363769,
- 0.013625764846801757,
- 0.01363984489440918,
- 0.013572484970092773,
- 0.01410880470275879,
- 0.014357125282287598,
- 0.014143364906311035,
- 0.017177766799926757,
- 0.014643525123596192,
- 0.014084485054016114,
- 0.014051685333251953,
- 0.014180166244506836,
- 0.014014084815979004,
- 0.014068644523620605,
- 0.014202565193176269,
- 0.014010085105895995,
- 0.014104004859924316,
- 0.01404976463317871,
- 0.014049284934997559,
- 0.014024325370788574,
- 0.014073124885559082,
- 0.014280804634094238,
- 0.01406832504272461,
- 0.014107685089111328,
- 0.01408304500579834,
- 0.014287364959716798,
- 0.014061285018920898,
- 0.014041765213012695,
- 0.013979044914245606,
- 0.01404864501953125,
- 0.01404256534576416,
- 0.014362244606018067,
- 0.01404768466949463,
- 0.014043205261230469,
- 0.014115525245666503,
- 0.014072645187377929,
- 0.014357284545898437,
- 0.014060005187988282,
- 0.01405568504333496,
- 0.014053765296936036,
- 0.014052804946899413,
- 0.014360806465148927,
- 0.014097125053405761,
- 0.014080485343933106,
- 0.014071684837341308,
- 0.014059684753417968,
- 0.014391044616699218,
- 0.014117924690246583,
- 0.0141027250289917,
- 0.014149444580078125,
- 0.014258245468139648,
- 0.014101605415344238,
- 0.014132004737854004,
- 0.014193764686584473,
- 0.014082884788513184,
- 0.014380165100097656,
- 0.014151365280151366,
- 0.014120163917541504
+ 0.014002266883850098,
+ 0.014975869178771972,
+ 0.014791708946228027,
+ 0.014942428588867188,
+ 0.014267547607421875,
+ 0.014693947792053222,
+ 0.014613788604736328,
+ 0.013699226379394532,
+ 0.014227388381958007,
+ 0.013823387145996093,
+ 0.01403378677368164,
+ 0.014273628234863282,
+ 0.014263227462768554,
+ 0.01735043144226074,
+ 0.013865467071533202,
+ 0.01337618637084961,
+ 0.013642427444458007,
+ 0.013349945068359376,
+ 0.01353234577178955,
+ 0.013086105346679687,
+ 0.013451226234436035,
+ 0.013416345596313477,
+ 0.013434266090393067,
+ 0.013453466415405273,
+ 0.014650108337402344,
+ 0.01389346694946289,
+ 0.013848187446594239,
+ 0.013602106094360352,
+ 0.014038106918334962,
+ 0.013146745681762695,
+ 0.012907224655151367,
+ 0.01392514705657959,
+ 0.014095227241516113,
+ 0.014496667861938477,
+ 0.014800827980041504,
+ 0.014579548835754394,
+ 0.014675067901611329,
+ 0.0146185884475708,
+ 0.014670268058776855,
+ 0.013785627365112305,
+ 0.01404466724395752,
+ 0.01381154727935791,
+ 0.013808827400207519,
+ 0.01316130542755127,
+ 0.013231546401977538,
+ 0.012996025085449218,
+ 0.013317625999450683,
+ 0.013232026100158691,
+ 0.014802107810974121,
+ 0.014262428283691406,
+ 0.014729308128356934,
+ 0.013804346084594727,
+ 0.014038107872009277,
+ 0.013625946044921875,
+ 0.013758907318115235,
+ 0.013420825958251953,
+ 0.013226264953613282,
+ 0.013027545928955078,
+ 0.013315864562988281,
+ 0.013021465301513671,
+ 0.013030585289001464,
+ 0.013556026458740234,
+ 0.013006746292114258,
+ 0.013358745574951172,
+ 0.013021305084228515,
+ 0.013002744674682617,
+ 0.013051384925842286,
+ 0.012979705810546876,
+ 0.013578585624694824,
+ 0.013036664962768554,
+ 0.012989303588867188,
+ 0.013086265563964843,
+ 0.013582586288452148
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 70.95119439410732
+ "value": 72.40507135070449
  },
  "energy": null,
  "efficiency": null