IlyasMoutawwakil committed
Commit 7aa0c3c · verified · 1 Parent(s): b7f7b88

Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub
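A minimal sketch of how a result file like this can be pushed with huggingface_hub; only HfApi.upload_file and its documented parameters are assumed, and the repo_id, repo_type, and local path below are placeholders, not details taken from this commit:

# Hypothetical upload sketch (placeholders marked in comments).
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="benchmark.json",  # local result file (placeholder path)
    path_in_repo="cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json",
    repo_id="<user>/<benchmark-results-repo>",  # placeholder, not the actual repo
    repo_type="dataset",                        # assumption about the target repo type
    commit_message="Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
)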

cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -85,7 +85,7 @@
  "gpu_count": 1,
  "gpu_vram_mb": 68702699520,
  "optimum_benchmark_version": "0.3.1",
- "optimum_benchmark_commit": "f6013cec1a849341c31271831560b1681406c092",
+ "optimum_benchmark_commit": "79990507b694d513bac81e140baff3af23a6bff7",
  "transformers_version": "4.42.3",
  "transformers_commit": null,
  "accelerate_version": "0.31.0",
@@ -104,165 +104,162 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 1010.221056,
- "max_global_vram": 66120.753152,
- "max_process_vram": 191984.836608,
+ "max_ram": 1010.212864,
+ "max_global_vram": 65423.0528,
+ "max_process_vram": 180607.22176,
  "max_reserved": 555.74528,
  "max_allocated": 499.443712
  },
  "latency": {
  "unit": "s",
- "count": 137,
- "total": 0.9987488837242129,
- "mean": 0.007290137837403013,
- "stdev": 0.00038295204075541623,
- "p50": 0.007151021003723145,
- "p90": 0.0076167799949646,
- "p95": 0.007964618873596192,
- "p99": 0.008891151275634763,
+ "count": 134,
+ "total": 0.9966200375556944,
+ "mean": 0.007437462966833542,
+ "stdev": 0.00029095250587293515,
+ "p50": 0.0072878458499908445,
+ "p90": 0.007878262424468995,
+ "p95": 0.008000855088233947,
+ "p99": 0.008069115495681763,
  "values": [
- 0.008811176300048828,
- 0.008134538650512696,
- 0.0075761399269104,
- 0.007533899784088135,
- 0.007252460956573487,
- 0.007017101764678955,
- 0.007030381202697754,
- 0.007050702095031738,
- 0.007081261157989502,
- 0.007067980766296386,
- 0.007025101184844971,
- 0.007070702075958252,
- 0.007098701000213623,
- 0.007371180057525635,
- 0.0072355008125305174,
- 0.007154701232910156,
- 0.007121581077575684,
- 0.0071094207763671875,
- 0.007231020927429199,
- 0.007102381229400634,
- 0.007137421131134033,
- 0.007139020919799804,
- 0.007163021087646484,
- 0.007151021003723145,
- 0.00713054084777832,
- 0.007229741096496582,
- 0.008936137199401855,
- 0.007369421005249024,
- 0.0072606210708618165,
- 0.007244460105895996,
- 0.007182540893554687,
- 0.007086701869964599,
- 0.007122702121734619,
- 0.007129900932312012,
- 0.007102381229400634,
- 0.007064940929412841,
- 0.007124141216278076,
- 0.007113901138305664,
- 0.0070811018943786625,
- 0.007092462062835693,
- 0.007017101764678955,
- 0.007039980888366699,
- 0.007104461193084717,
- 0.007101740837097168,
- 0.00709342098236084,
- 0.007082221984863281,
- 0.007083981037139892,
- 0.0072110209465026855,
- 0.007406859874725342,
- 0.0075563001632690426,
- 0.007664140224456787,
- 0.00994605255126953,
- 0.007038221836090088,
- 0.007110221862792969,
- 0.007115500926971436,
- 0.007024940967559815,
- 0.00721614122390747,
- 0.0070471811294555665,
- 0.007114700794219971,
- 0.007099341869354248,
- 0.007065101146697998,
- 0.007084461212158203,
- 0.007094060897827149,
- 0.007160301208496094,
- 0.0070899009704589845,
- 0.007077901840209961,
- 0.0072178997993469236,
- 0.007128781795501709,
- 0.007263980865478516,
- 0.007682700157165528,
- 0.007845899105072022,
- 0.007084781169891357,
- 0.007027982234954834,
- 0.0072558197975158695,
- 0.00709486198425293,
- 0.00710958194732666,
- 0.00707326078414917,
- 0.007076621055603027,
- 0.007141740798950196,
- 0.007152141094207764,
- 0.0071180610656738285,
- 0.007114221096038818,
- 0.007058221817016602,
- 0.007171660900115967,
- 0.0071190218925476076,
- 0.007126221179962158,
- 0.007110860824584961,
- 0.007457419872283936,
- 0.007624939918518066,
- 0.007598219871520996,
- 0.007554699897766113,
- 0.007588779926300049,
- 0.007450380802154541,
- 0.007496300220489502,
- 0.007515659809112549,
- 0.007611340045928955,
- 0.007451819896697998,
- 0.007417900085449219,
- 0.007430540084838867,
- 0.007424941062927246,
- 0.00723534107208252,
- 0.007268940925598145,
- 0.007059502124786377,
- 0.007108302116394043,
- 0.007358060836791992,
- 0.007954538822174072,
- 0.007504459857940674,
- 0.0070955009460449215,
- 0.0072606210708618165,
- 0.006970541000366211,
- 0.007347341060638428,
- 0.007056621074676514,
- 0.007183341026306153,
- 0.007155500888824463,
- 0.0071187009811401365,
- 0.0071750211715698245,
- 0.007053901195526123,
- 0.007156460762023926,
- 0.007122540950775146,
- 0.007121261119842529,
- 0.007108461856842041,
- 0.007194060802459717,
- 0.007162860870361328,
- 0.007119822025299072,
- 0.007139502048492431,
- 0.007167342185974121,
- 0.007177900791168213,
- 0.007269580841064453,
- 0.007260140895843506,
- 0.00721950101852417,
- 0.0072534198760986325,
- 0.008004939079284668,
- 0.008011658668518067,
- 0.008049419403076172,
- 0.007803500175476074,
- 0.007722698211669922,
- 0.007596940040588379
+ 0.0074550461769104,
+ 0.007312005996704102,
+ 0.007557926177978516,
+ 0.00743632698059082,
+ 0.007512325763702392,
+ 0.0074731259346008305,
+ 0.007392487049102783,
+ 0.007391366004943847,
+ 0.007355686187744141,
+ 0.00732848596572876,
+ 0.0073526458740234375,
+ 0.007905605792999268,
+ 0.007572967052459717,
+ 0.007637286186218261,
+ 0.007673447132110596,
+ 0.007738245964050293,
+ 0.0077817668914794925,
+ 0.007756165981292725,
+ 0.007776327133178711,
+ 0.0077814459800720215,
+ 0.007904487133026123,
+ 0.007867846965789795,
+ 0.007745125770568848,
+ 0.007675207138061523,
+ 0.008040166854858399,
+ 0.007928165912628174,
+ 0.007890407085418702,
+ 0.007948966979980468,
+ 0.007882726192474366,
+ 0.0078238468170166,
+ 0.007803205966949463,
+ 0.007690725803375244,
+ 0.007586565971374512,
+ 0.007456007003784179,
+ 0.007593605995178223,
+ 0.007675847053527832,
+ 0.007604805946350097,
+ 0.007756647109985352,
+ 0.007785605907440186,
+ 0.0075825672149658204,
+ 0.008931047439575195,
+ 0.007580007076263428,
+ 0.007248805999755859,
+ 0.007205125808715821,
+ 0.007192805767059326,
+ 0.007242725849151611,
+ 0.007249606132507324,
+ 0.007141446113586426,
+ 0.007217926025390625,
+ 0.00720000696182251,
+ 0.007207846164703369,
+ 0.007182405948638916,
+ 0.007218245983123779,
+ 0.007231205940246582,
+ 0.0072176060676574704,
+ 0.007235045909881592,
+ 0.007266886234283447,
+ 0.007187205791473389,
+ 0.00721440601348877,
+ 0.007221927165985107,
+ 0.007268645763397217,
+ 0.0071908860206604,
+ 0.007184326171875,
+ 0.007183685779571534,
+ 0.0072033658027648925,
+ 0.0072028861045837405,
+ 0.007207365989685058,
+ 0.007206406116485596,
+ 0.007260486125946045,
+ 0.007210886001586914,
+ 0.007201926231384277,
+ 0.007226406097412109,
+ 0.007235846042633057,
+ 0.007173925876617432,
+ 0.00724928617477417,
+ 0.007229286193847656,
+ 0.0071251258850097655,
+ 0.007170246124267578,
+ 0.007258885860443115,
+ 0.007256005764007568,
+ 0.007182247161865234,
+ 0.0071908860206604,
+ 0.007198406219482422,
+ 0.007227046012878418,
+ 0.007218565940856934,
+ 0.007256165981292724,
+ 0.007230885982513428,
+ 0.007251046180725098,
+ 0.007203045845031738,
+ 0.007193925857543945,
+ 0.007224166870117187,
+ 0.007226565837860108,
+ 0.0071643261909484866,
+ 0.007330885887145996,
+ 0.008053767204284667,
+ 0.008050887107849122,
+ 0.008060646057128907,
+ 0.008054567337036132,
+ 0.008073287010192871,
+ 0.007979687213897704,
+ 0.00756864595413208,
+ 0.00760720682144165,
+ 0.007560805797576904,
+ 0.007594406127929687,
+ 0.0074830460548400875,
+ 0.007582726001739502,
+ 0.007500166893005371,
+ 0.007600165843963623,
+ 0.00757408618927002,
+ 0.007360006809234619,
+ 0.007292325973510742,
+ 0.007320806026458741,
+ 0.007268005847930908,
+ 0.0072713661193847656,
+ 0.007276485919952393,
+ 0.007256327152252197,
+ 0.007274886131286621,
+ 0.007274725914001465,
+ 0.00731648588180542,
+ 0.007248486042022705,
+ 0.007273766040802002,
+ 0.007257445812225342,
+ 0.007293127059936524,
+ 0.007284485816955567,
+ 0.0072811260223388675,
+ 0.007284325122833252,
+ 0.007302725791931152,
+ 0.007239366054534912,
+ 0.0072792072296142575,
+ 0.007248805999755859,
+ 0.007309606075286865,
+ 0.007320645809173584,
+ 0.007291205883026123,
+ 0.007280645847320556
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 137.17161764340975
+ "value": 134.45445099483223
  },
  "energy": null,
  "efficiency": null