IlyasMoutawwakil committed (verified)
Commit 745a80f · Parent(s): 03b0ac9

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub
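For reference, an upload like the one recorded in this commit can be reproduced with huggingface_hub's upload_file API. A minimal sketch only; the repo_id and repo_type below are placeholders, not taken from this page:

# Hypothetical sketch of pushing this file with huggingface_hub;
# repo_id and repo_type are assumptions, not read from this commit.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="benchmark.json",
    path_in_repo="cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json",
    repo_id="IlyasMoutawwakil/benchmarks",  # hypothetical repo_id
    repo_type="dataset",                    # assumed; adjust to the actual repo type
    commit_message="Upload benchmark.json with huggingface_hub",
)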

cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
@@ -83,7 +83,7 @@
  "gpu_count": 1,
  "gpu_vram_mb": 68702699520,
  "optimum_benchmark_version": "0.2.0",
- "optimum_benchmark_commit": "b04fb3c909a5873eadf03d7b46ccfac63afcdf9e",
+ "optimum_benchmark_commit": "4ec62071e1c9b9c89fb7e3c044340b391a0c4120",
  "transformers_version": "4.40.2",
  "transformers_commit": null,
  "accelerate_version": "0.30.1",
@@ -102,101 +102,104 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 1026.789376,
- "max_global_vram": 2103.222272,
- "max_process_vram": 308167.258112,
+ "max_ram": 1027.383296,
+ "max_global_vram": 2103.238656,
+ "max_process_vram": 270757.351424,
  "max_reserved": 773.849088,
  "max_allocated": 745.087488
  },
  "latency": {
  "unit": "s",
- "count": 73,
- "total": 1.0102282466888428,
- "mean": 0.013838743105326614,
- "stdev": 0.0006583561937221686,
- "p50": 0.013646746635437012,
- "p90": 0.01467276496887207,
- "p95": 0.014999966049194334,
- "p99": 0.015418372955322265,
+ "count": 76,
+ "total": 1.0005363531112672,
+ "mean": 0.013164952014621936,
+ "stdev": 0.0005373820515728805,
+ "p50": 0.012960029125213622,
+ "p90": 0.01404643201828003,
+ "p95": 0.014325552701950073,
+ "p99": 0.014380473136901856,
  "values": [
- 0.014598909378051758,
- 0.015231071472167968,
- 0.01531363010406494,
- 0.015095870971679687,
- 0.014936029434204101,
- 0.014669309616088868,
- 0.014113147735595704,
- 0.014279707908630371,
- 0.013843387603759765,
- 0.013090585708618164,
- 0.013440506935119629,
- 0.01423554801940918,
- 0.013958106994628907,
- 0.015687711715698244,
- 0.014264987945556641,
- 0.014701309204101563,
- 0.013514745712280273,
- 0.01409666919708252,
- 0.014535069465637207,
- 0.014760828971862794,
- 0.014516828536987304,
- 0.01424914836883545,
- 0.01396130657196045,
- 0.014089147567749023,
- 0.013275865554809571,
- 0.013579227447509766,
- 0.013044666290283204,
- 0.013464027404785157,
- 0.0130133056640625,
- 0.013212185859680176,
- 0.01298210620880127,
- 0.013239546775817871,
- 0.013199706077575684,
- 0.012985786437988282,
- 0.01296146583557129,
- 0.014603549003601075,
- 0.014673628807067872,
- 0.014630108833312989,
- 0.013411066055297852,
- 0.014443388938903809,
- 0.014028827667236328,
- 0.013388986587524415,
- 0.014382908821105957,
- 0.014625788688659668,
- 0.013399706840515137,
- 0.01309570598602295,
- 0.013156346321105957,
- 0.013605627059936523,
- 0.013237626075744628,
- 0.013181305885314942,
- 0.013506107330322266,
- 0.013164345741271972,
- 0.013193145751953125,
- 0.013114107131958009,
- 0.013094745635986327,
- 0.013542746543884277,
- 0.013483707427978515,
- 0.014217308044433594,
- 0.014379549026489257,
- 0.013694586753845215,
- 0.014349148750305176,
- 0.013911706924438477,
- 0.01382498836517334,
- 0.013743706703186035,
- 0.013288986206054688,
- 0.013437947273254395,
- 0.013330905914306641,
- 0.013183707237243653,
- 0.01361026668548584,
- 0.013201306343078614,
- 0.013387065887451172,
- 0.013915388107299805,
- 0.013646746635437012
+ 0.013531230926513671,
+ 0.013420350074768066,
+ 0.013096670150756835,
+ 0.013136030197143555,
+ 0.013099710464477539,
+ 0.013080669403076171,
+ 0.012921628952026367,
+ 0.01285922908782959,
+ 0.012760668754577638,
+ 0.012913469314575196,
+ 0.012627708435058593,
+ 0.012856828689575195,
+ 0.012773468971252441,
+ 0.012740187644958496,
+ 0.012879229545593262,
+ 0.014271231651306152,
+ 0.012705148696899414,
+ 0.012814269065856934,
+ 0.012738908767700195,
+ 0.012676829338073731,
+ 0.01280402946472168,
+ 0.012692349433898926,
+ 0.01302194881439209,
+ 0.014400992393493652,
+ 0.01388595199584961,
+ 0.013851870536804199,
+ 0.014323872566223144,
+ 0.01420531177520752,
+ 0.01433059310913086,
+ 0.014038751602172852,
+ 0.013372190475463867,
+ 0.013416669845581054,
+ 0.013168350219726562,
+ 0.014016192436218261,
+ 0.013548190116882325,
+ 0.013823552131652832,
+ 0.013599711418151856,
+ 0.013284990310668945,
+ 0.013296669960021972,
+ 0.013383870124816894,
+ 0.01299842929840088,
+ 0.012655709266662598,
+ 0.012898749351501464,
+ 0.012667069435119629,
+ 0.012644828796386719,
+ 0.012820829391479492,
+ 0.012687389373779296,
+ 0.012612188339233398,
+ 0.012912030220031739,
+ 0.012702749252319337,
+ 0.012633788108825683,
+ 0.013129150390625,
+ 0.01267746925354004,
+ 0.012598909378051758,
+ 0.012836348533630372,
+ 0.012872029304504394,
+ 0.012740829467773437,
+ 0.012659708976745605,
+ 0.012693788528442383,
+ 0.013089308738708496,
+ 0.01275330924987793,
+ 0.012699548721313476,
+ 0.014054112434387207,
+ 0.01437363338470459,
+ 0.014370431900024414,
+ 0.013712031364440918,
+ 0.013651070594787597,
+ 0.013297149658203125,
+ 0.01338131046295166,
+ 0.013636031150817872,
+ 0.013003870010375977,
+ 0.012650428771972656,
+ 0.012644187927246094,
+ 0.012656509399414062,
+ 0.01303267002105713,
+ 0.012719227790832519
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 72.26089771224196
+ "value": 75.9592590150977
  },
  "energy": null,
  "efficiency": null