IlyasMoutawwakil (HF staff) committed
Commit 5e9687b · verified · 1 Parent(s): 3c4ad4f

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub

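For context, the commit message above suggests the file was pushed programmatically with `huggingface_hub`. A minimal sketch of such an upload, assuming a dataset repository and a hypothetical `repo_id` (the actual target repository is not shown on this page):

from huggingface_hub import HfApi

api = HfApi()

# Sketch of the upload call; repo_id is hypothetical -- substitute the actual
# benchmark-results repository. repo_type="dataset" is an assumption.
api.upload_file(
    path_or_fileobj="benchmark.json",
    path_in_repo="cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json",
    repo_id="IlyasMoutawwakil/benchmark-results",  # hypothetical
    repo_type="dataset",
    commit_message="Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub",
)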
cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
@@ -85,7 +85,7 @@
  "gpu_count": 1,
  "gpu_vram_mb": 68702699520,
  "optimum_benchmark_version": "0.3.1",
- "optimum_benchmark_commit": "f6013cec1a849341c31271831560b1681406c092",
+ "optimum_benchmark_commit": "79990507b694d513bac81e140baff3af23a6bff7",
  "transformers_version": "4.42.3",
  "transformers_commit": null,
  "accelerate_version": "0.31.0",
@@ -104,94 +104,96 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 1028.108288,
- "max_global_vram": 64978.116608,
- "max_process_vram": 264852.103168,
+ "max_ram": 1028.263936,
+ "max_global_vram": 65394.655232,
+ "max_process_vram": 248112.136192,
  "max_reserved": 773.849088,
  "max_allocated": 745.087488
  },
  "latency": {
  "unit": "s",
- "count": 66,
- "total": 0.997156338691711,
- "mean": 0.015108429374116841,
- "stdev": 0.0007025511084803037,
- "p50": 0.01525020122528076,
- "p90": 0.01586619997024536,
- "p95": 0.015946518898010253,
- "p99": 0.016081830406188963,
+ "count": 68,
+ "total": 1.0015306053161621,
+ "mean": 0.014728391254649443,
+ "stdev": 0.0008095850224997028,
+ "p50": 0.015054892539978028,
+ "p90": 0.015497533416748047,
+ "p95": 0.01558668565750122,
+ "p99": 0.015783515853881835,
  "values": [
- 0.015133240699768066,
- 0.015873559951782225,
- 0.01587228012084961,
- 0.01577579975128174,
- 0.015500280380249023,
- 0.015417560577392578,
- 0.01534380054473877,
- 0.015630359649658202,
- 0.01555996036529541,
- 0.015339000701904297,
- 0.01579051971435547,
- 0.015777400016784668,
- 0.015718199729919433,
- 0.015349881172180176,
- 0.015246681213378906,
- 0.01557147979736328,
- 0.015253721237182617,
- 0.015604761123657227,
- 0.01586571979522705,
- 0.015523159980773926,
- 0.01561996078491211,
- 0.01586668014526367,
- 0.015598360061645508,
- 0.015540920257568359,
- 0.016215158462524415,
- 0.01601003837585449,
- 0.01597083854675293,
- 0.015978360176086427,
- 0.015482041358947754,
- 0.01526012134552002,
- 0.015226201057434082,
- 0.015274041175842286,
- 0.015157561302185058,
- 0.015412281036376953,
- 0.015101560592651367,
- 0.015045242309570312,
- 0.015132600784301758,
- 0.015475160598754882,
- 0.01515708065032959,
- 0.015099322319030762,
- 0.015007322311401367,
- 0.014998042106628418,
- 0.01512252140045166,
- 0.015760120391845704,
- 0.01513004207611084,
- 0.015213561058044434,
- 0.015163320541381836,
- 0.015075482368469239,
- 0.015135161399841309,
- 0.015663960456848144,
- 0.014507963180541992,
- 0.014221083641052247,
- 0.014338522911071778,
- 0.014132603645324707,
- 0.014817562103271485,
- 0.013550045967102051,
- 0.013943643569946288,
- 0.013854684829711915,
- 0.014125884056091309,
- 0.014327802658081056,
- 0.013808605194091796,
- 0.013520445823669434,
- 0.01378764533996582,
- 0.013968923568725585,
- 0.013462525367736816,
- 0.013747964859008789
+ 0.014492332458496094,
+ 0.01547601318359375,
+ 0.015495853424072266,
+ 0.015421612739562988,
+ 0.015231212615966798,
+ 0.015265453338623046,
+ 0.015182252883911132,
+ 0.015339693069458009,
+ 0.015589933395385742,
+ 0.015414572715759278,
+ 0.015406892776489258,
+ 0.01556561279296875,
+ 0.01558065414428711,
+ 0.015387212753295899,
+ 0.015357933044433593,
+ 0.015315532684326172,
+ 0.01545793342590332,
+ 0.015492173194885253,
+ 0.015444652557373047,
+ 0.015349132537841797,
+ 0.015372332572937011,
+ 0.015501453399658202,
+ 0.015328172683715821,
+ 0.0153238525390625,
+ 0.015689932823181152,
+ 0.015396814346313476,
+ 0.01575537300109863,
+ 0.015336812973022461,
+ 0.015378573417663573,
+ 0.015227692604064941,
+ 0.015012972831726075,
+ 0.015011053085327148,
+ 0.01493153190612793,
+ 0.015261611938476562,
+ 0.014924012184143066,
+ 0.015093293190002442,
+ 0.015840654373168946,
+ 0.015262892723083496,
+ 0.015016491889953614,
+ 0.014390253067016601,
+ 0.013436330795288085,
+ 0.013480812072753907,
+ 0.01383729076385498,
+ 0.013445932388305664,
+ 0.013407051086425781,
+ 0.014068812370300292,
+ 0.013450251579284669,
+ 0.014117451667785644,
+ 0.01521729278564453,
+ 0.014640332221984863,
+ 0.014249132156372071,
+ 0.01491169261932373,
+ 0.014937453269958496,
+ 0.014629291534423829,
+ 0.01459185218811035,
+ 0.014184332847595215,
+ 0.01363153076171875,
+ 0.014030571937561034,
+ 0.013419852256774903,
+ 0.013447370529174805,
+ 0.013357770919799805,
+ 0.013474732398986817,
+ 0.013832491874694824,
+ 0.01364193058013916,
+ 0.013627371788024902,
+ 0.013464810371398926,
+ 0.013421451568603516,
+ 0.013252970695495605
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 66.18821687138175
+ "value": 67.8960779022163
  },
  "energy": null,
  "efficiency": null