IlyasMoutawwakil committed (verified) · Commit 2b7c9f7 · 1 Parent(s): 4fc1404

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub
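For context, a minimal sketch of how a result file like this is typically pushed with the huggingface_hub client, matching the "with huggingface_hub" wording of the commit message. The local path and repo id below are illustrative assumptions, not values taken from this commit.

```python
# Sketch: upload a benchmark.json to a Hub repo with huggingface_hub.
from huggingface_hub import HfApi

api = HfApi()  # uses the token from `huggingface-cli login` or the HF_TOKEN env var
api.upload_file(
    path_or_fileobj="benchmark.json",  # local result file (assumed name/location)
    path_in_repo="cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json",
    repo_id="optimum-benchmark/results",  # hypothetical repo id, not from this commit
    repo_type="dataset",                  # assumption: results live in a dataset repo
    commit_message="Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub",
)
```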

cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
@@ -83,7 +83,7 @@
  "gpu_count": 1,
  "gpu_vram_mb": 68702699520,
  "optimum_benchmark_version": "0.2.1",
- "optimum_benchmark_commit": "2516ce57a5b64eefeb78dc75a171e0cdff88823e",
+ "optimum_benchmark_commit": "0b24af9d7b7751f74b160dfade73ef78e10964d6",
  "transformers_version": "4.40.2",
  "transformers_commit": null,
  "accelerate_version": "0.30.1",
@@ -102,103 +102,104 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 1025.867776,
- "max_global_vram": 2103.230464,
- "max_process_vram": 276664.758272,
+ "max_ram": 1027.284992,
+ "max_global_vram": 3321.79456,
+ "max_process_vram": 288473.62048,
  "max_reserved": 773.849088,
  "max_allocated": 745.087488
  },
  "latency": {
  "unit": "s",
- "count": 75,
- "total": 1.006791043281555,
- "mean": 0.013423880577087403,
- "stdev": 0.0005688304933573595,
- "p50": 0.013220816612243653,
- "p90": 0.014104017448425293,
- "p95": 0.014426354026794434,
- "p99": 0.015122304134368904,
+ "count": 76,
+ "total": 0.9978523683547974,
+ "mean": 0.01312963642572102,
+ "stdev": 0.00042577367963565906,
+ "p50": 0.01304428482055664,
+ "p90": 0.013621084213256836,
+ "p95": 0.013887521743774413,
+ "p99": 0.014377961874008178,
  "values": [
- 0.013430736541748046,
- 0.013728017807006837,
- 0.01389329719543457,
- 0.013826738357543946,
- 0.013846258163452148,
- 0.013682416915893555,
- 0.014026737213134766,
- 0.013728817939758301,
- 0.013474576950073243,
- 0.01312385654449463,
- 0.012970417022705079,
- 0.013226897239685058,
- 0.013032496452331543,
- 0.013286577224731446,
- 0.01642802047729492,
- 0.01331745719909668,
- 0.013152815818786622,
- 0.01315265655517578,
- 0.012844977378845214,
- 0.013220816612243653,
- 0.012896495819091797,
- 0.013262097358703612,
- 0.012888497352600097,
- 0.013106897354125976,
- 0.013157456398010255,
- 0.014663538932800292,
- 0.013839858055114746,
- 0.014420977592468262,
- 0.013561138153076172,
- 0.013876338005065918,
- 0.013835376739501953,
- 0.014438899040222167,
- 0.014321297645568847,
- 0.013210577011108398,
- 0.013092657089233399,
- 0.012861456871032715,
- 0.013711696624755859,
- 0.012858576774597168,
- 0.012981136322021485,
- 0.01288193702697754,
- 0.012845935821533204,
- 0.013363536834716796,
- 0.012983217239379882,
- 0.012986256599426269,
- 0.013175697326660156,
- 0.012977775573730468,
- 0.012925457000732422,
- 0.013335697174072266,
- 0.013034577369689942,
- 0.013042096138000489,
- 0.013214097023010254,
- 0.013018257141113281,
- 0.01296129608154297,
- 0.013202896118164062,
- 0.01303201675415039,
- 0.013035536766052246,
- 0.013380496978759766,
- 0.013906417846679687,
- 0.014458738327026367,
- 0.014155537605285644,
- 0.013614418029785157,
- 0.014204177856445312,
- 0.013653457641601563,
- 0.013800017356872559,
- 0.013606577873229981,
- 0.01307793617248535,
- 0.013042896270751954,
- 0.013031856536865234,
- 0.013420177459716796,
- 0.013244016647338867,
- 0.013031215667724609,
- 0.013039695739746093,
- 0.013480977058410644,
- 0.01312049674987793,
- 0.013126096725463866
+ 0.013251005172729492,
+ 0.013822043418884277,
+ 0.01359516429901123,
+ 0.013831482887268067,
+ 0.013216604232788086,
+ 0.013096445083618164,
+ 0.012924924850463867,
+ 0.01281596565246582,
+ 0.012828764915466308,
+ 0.012809725761413574,
+ 0.012773726463317871,
+ 0.012936765670776368,
+ 0.012800125122070313,
+ 0.013037405014038085,
+ 0.012879805564880371,
+ 0.013169883728027344,
+ 0.012902525901794434,
+ 0.013016925811767579,
+ 0.014360281944274902,
+ 0.012977885246276856,
+ 0.012705406188964844,
+ 0.012685565948486329,
+ 0.012484286308288574,
+ 0.012833565711975097,
+ 0.012689406394958497,
+ 0.012856765747070313,
+ 0.012742526054382324,
+ 0.012595326423645019,
+ 0.013149404525756836,
+ 0.013547003746032715,
+ 0.013306043624877929,
+ 0.013110204696655274,
+ 0.012986205101013183,
+ 0.013364764213562011,
+ 0.013258523941040039,
+ 0.012827325820922851,
+ 0.012857404708862305,
+ 0.012651165962219239,
+ 0.012696765899658203,
+ 0.012854366302490235,
+ 0.012667325973510743,
+ 0.012664767265319824,
+ 0.012859326362609863,
+ 0.0127985258102417,
+ 0.012601886749267578,
+ 0.012876605033874512,
+ 0.012716606140136718,
+ 0.012619006156921386,
+ 0.012897725105285645,
+ 0.01338412380218506,
+ 0.013186844825744629,
+ 0.013353564262390137,
+ 0.013298684120178223,
+ 0.013263805389404297,
+ 0.01345244312286377,
+ 0.01328172492980957,
+ 0.013866361618041992,
+ 0.014136762619018554,
+ 0.013521883964538574,
+ 0.013483003616333008,
+ 0.01348828411102295,
+ 0.013174203872680664,
+ 0.013647004127502442,
+ 0.014431001663208008,
+ 0.01395100212097168,
+ 0.013499323844909667,
+ 0.013364124298095702,
+ 0.013561244010925294,
+ 0.01349100399017334,
+ 0.013509564399719239,
+ 0.013194844245910645,
+ 0.013051164627075196,
+ 0.013359804153442383,
+ 0.01260668659210205,
+ 0.01267228603363037,
+ 0.01267036533355713
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 74.49410729315139
+ "value": 76.16357129592677
  },
  "energy": null,
  "efficiency": null