IlyasMoutawwakil HF staff committed on
Commit
715bdc3
1 Parent(s): b7fa4f6

Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub

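The commit message indicates the file was pushed programmatically with huggingface_hub. Below is a minimal sketch of such an upload; the local path, repo_id, and repo_type are illustrative assumptions, not taken from this commit.

# Sketch: upload a benchmark result file with huggingface_hub.
# repo_id, repo_type, and the local path are assumptions for illustration.
from huggingface_hub import HfApi

api = HfApi()  # uses the token from `huggingface-cli login` or the HF_TOKEN env var
api.upload_file(
    path_or_fileobj="benchmark.json",  # local result file (assumed path)
    path_in_repo="cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json",
    repo_id="IlyasMoutawwakil/benchmarks",  # hypothetical repository id
    repo_type="dataset",                    # assumed; could also be a model repo
    commit_message="Upload benchmark.json with huggingface_hub",
)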
cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -7,6 +7,7 @@
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
  "task": "text-classification",
  "library": "transformers",
+ "model_type": "roberta",
  "model": "FacebookAI/roberta-base",
  "processor": "FacebookAI/roberta-base",
  "device": "cuda",
@@ -85,10 +86,10 @@
  "gpu_count": 1,
  "gpu_vram_mb": 68702699520,
  "optimum_benchmark_version": "0.3.1",
- "optimum_benchmark_commit": "79990507b694d513bac81e140baff3af23a6bff7",
- "transformers_version": "4.42.3",
+ "optimum_benchmark_commit": "73dd36ea04ff1cc8259f4039b6f9d56dfb9eecd1",
+ "transformers_version": "4.42.4",
  "transformers_commit": null,
- "accelerate_version": "0.31.0",
+ "accelerate_version": "0.32.1",
  "accelerate_commit": null,
  "diffusers_version": "0.29.2",
  "diffusers_commit": null,
@@ -104,162 +105,160 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 1010.212864,
- "max_global_vram": 65423.0528,
- "max_process_vram": 180607.22176,
+ "max_ram": 1009.963008,
+ "max_global_vram": 897.159168,
+ "max_process_vram": 143424.16384,
  "max_reserved": 555.74528,
  "max_allocated": 499.443712
  },
  "latency": {
  "unit": "s",
- "count": 134,
- "total": 0.9966200375556944,
- "mean": 0.007437462966833542,
- "stdev": 0.00029095250587293515,
- "p50": 0.0072878458499908445,
- "p90": 0.007878262424468995,
- "p95": 0.008000855088233947,
- "p99": 0.008069115495681763,
+ "count": 132,
+ "total": 0.9987108578681946,
+ "mean": 0.007565991347486322,
+ "stdev": 0.000648445477705279,
+ "p50": 0.007365072965621948,
+ "p90": 0.008027419662475586,
+ "p95": 0.008628904771804807,
+ "p99": 0.00968346085548401,
  "values": [
- 0.0074550461769104,
- 0.007312005996704102,
- 0.007557926177978516,
- 0.00743632698059082,
- 0.007512325763702392,
- 0.0074731259346008305,
- 0.007392487049102783,
- 0.007391366004943847,
- 0.007355686187744141,
- 0.00732848596572876,
- 0.0073526458740234375,
- 0.007905605792999268,
- 0.007572967052459717,
- 0.007637286186218261,
- 0.007673447132110596,
- 0.007738245964050293,
- 0.0077817668914794925,
- 0.007756165981292725,
- 0.007776327133178711,
- 0.0077814459800720215,
- 0.007904487133026123,
- 0.007867846965789795,
- 0.007745125770568848,
- 0.007675207138061523,
- 0.008040166854858399,
- 0.007928165912628174,
- 0.007890407085418702,
- 0.007948966979980468,
- 0.007882726192474366,
- 0.0078238468170166,
- 0.007803205966949463,
- 0.007690725803375244,
- 0.007586565971374512,
- 0.007456007003784179,
- 0.007593605995178223,
- 0.007675847053527832,
- 0.007604805946350097,
- 0.007756647109985352,
- 0.007785605907440186,
- 0.0075825672149658204,
- 0.008931047439575195,
- 0.007580007076263428,
- 0.007248805999755859,
- 0.007205125808715821,
- 0.007192805767059326,
- 0.007242725849151611,
- 0.007249606132507324,
- 0.007141446113586426,
- 0.007217926025390625,
- 0.00720000696182251,
- 0.007207846164703369,
- 0.007182405948638916,
- 0.007218245983123779,
- 0.007231205940246582,
- 0.0072176060676574704,
- 0.007235045909881592,
- 0.007266886234283447,
- 0.007187205791473389,
- 0.00721440601348877,
- 0.007221927165985107,
- 0.007268645763397217,
- 0.0071908860206604,
- 0.007184326171875,
- 0.007183685779571534,
- 0.0072033658027648925,
- 0.0072028861045837405,
- 0.007207365989685058,
- 0.007206406116485596,
- 0.007260486125946045,
- 0.007210886001586914,
- 0.007201926231384277,
- 0.007226406097412109,
- 0.007235846042633057,
- 0.007173925876617432,
- 0.00724928617477417,
- 0.007229286193847656,
- 0.0071251258850097655,
- 0.007170246124267578,
- 0.007258885860443115,
- 0.007256005764007568,
- 0.007182247161865234,
- 0.0071908860206604,
- 0.007198406219482422,
- 0.007227046012878418,
- 0.007218565940856934,
- 0.007256165981292724,
- 0.007230885982513428,
- 0.007251046180725098,
- 0.007203045845031738,
- 0.007193925857543945,
- 0.007224166870117187,
- 0.007226565837860108,
- 0.0071643261909484866,
- 0.007330885887145996,
- 0.008053767204284667,
- 0.008050887107849122,
- 0.008060646057128907,
- 0.008054567337036132,
- 0.008073287010192871,
- 0.007979687213897704,
- 0.00756864595413208,
- 0.00760720682144165,
- 0.007560805797576904,
- 0.007594406127929687,
- 0.0074830460548400875,
- 0.007582726001739502,
- 0.007500166893005371,
- 0.007600165843963623,
- 0.00757408618927002,
- 0.007360006809234619,
- 0.007292325973510742,
- 0.007320806026458741,
- 0.007268005847930908,
- 0.0072713661193847656,
- 0.007276485919952393,
- 0.007256327152252197,
- 0.007274886131286621,
- 0.007274725914001465,
- 0.00731648588180542,
- 0.007248486042022705,
- 0.007273766040802002,
- 0.007257445812225342,
- 0.007293127059936524,
- 0.007284485816955567,
- 0.0072811260223388675,
- 0.007284325122833252,
- 0.007302725791931152,
- 0.007239366054534912,
- 0.0072792072296142575,
- 0.007248805999755859,
- 0.007309606075286865,
- 0.007320645809173584,
- 0.007291205883026123,
- 0.007280645847320556
+ 0.008104907989501952,
+ 0.007098674774169922,
+ 0.007117393970489502,
+ 0.007002035140991211,
+ 0.007052433967590332,
+ 0.007022994995117188,
+ 0.006911795139312744,
+ 0.006798195838928223,
+ 0.006813876152038574,
+ 0.0067884368896484375,
+ 0.007013875007629395,
+ 0.00761211109161377,
+ 0.007425392150878906,
+ 0.007508912086486816,
+ 0.007978827953338622,
+ 0.008759143829345704,
+ 0.00912746238708496,
+ 0.009481538772583007,
+ 0.011899603843688964,
+ 0.00969625759124756,
+ 0.008861542701721192,
+ 0.008522345542907714,
+ 0.008314987182617188,
+ 0.008096108436584473,
+ 0.00800602912902832,
+ 0.007962508201599121,
+ 0.007968268871307372,
+ 0.008155627250671387,
+ 0.00799354887008667,
+ 0.008004749298095703,
+ 0.008046828269958495,
+ 0.008011947631835938,
+ 0.008002348899841309,
+ 0.007996428966522216,
+ 0.008028107643127442,
+ 0.007992108821868897,
+ 0.007639951229095459,
+ 0.007613231182098388,
+ 0.007643950939178467,
+ 0.0076196308135986325,
+ 0.00767179012298584,
+ 0.007646831035614013,
+ 0.007673551082611084,
+ 0.007667311191558838,
+ 0.007630190849304199,
+ 0.007637391090393067,
+ 0.00761595106124878,
+ 0.009654977798461913,
+ 0.008021227836608887,
+ 0.007982988834381104,
+ 0.007995468139648437,
+ 0.007674510955810547,
+ 0.007641709804534912,
+ 0.0074823517799377445,
+ 0.007060914993286133,
+ 0.007268434047698975,
+ 0.007387312889099121,
+ 0.007289712905883789,
+ 0.0072927532196044925,
+ 0.007067793846130371,
+ 0.00710331392288208,
+ 0.007176593780517578,
+ 0.007128593921661377,
+ 0.0071940340995788575,
+ 0.00716443395614624,
+ 0.007294353008270264,
+ 0.007235312938690185,
+ 0.007306832790374756,
+ 0.0074775519371032715,
+ 0.00756139087677002,
+ 0.007283312797546387,
+ 0.007191954135894775,
+ 0.007258193016052246,
+ 0.007547951221466064,
+ 0.0077140297889709475,
+ 0.00785306978225708,
+ 0.007689071178436279,
+ 0.007312912940979004,
+ 0.00771819019317627,
+ 0.008011947631835938,
+ 0.007977869033813477,
+ 0.007961708068847656,
+ 0.00799786901473999,
+ 0.007973547935485839,
+ 0.007522672176361084,
+ 0.007375792026519775,
+ 0.007471951961517334,
+ 0.007464431762695313,
+ 0.007477550983428955,
+ 0.007468272209167481,
+ 0.007415631771087647,
+ 0.007365872859954834,
+ 0.007177873134613037,
+ 0.007157554149627685,
+ 0.007202514171600342,
+ 0.007902509212493896,
+ 0.00731691312789917,
+ 0.0073642730712890625,
+ 0.007148114204406739,
+ 0.007146674156188965,
+ 0.007108593940734863,
+ 0.00725963306427002,
+ 0.007147634029388427,
+ 0.007157554149627685,
+ 0.00701355504989624,
+ 0.007228913784027099,
+ 0.007201553821563721,
+ 0.007345552921295166,
+ 0.007167473793029785,
+ 0.007149553775787353,
+ 0.007117234230041504,
+ 0.0072209138870239255,
+ 0.007190033912658691,
+ 0.007167634010314942,
+ 0.007182034015655517,
+ 0.007157713890075684,
+ 0.007291633129119873,
+ 0.007169394016265869,
+ 0.00716443395614624,
+ 0.007147634983062744,
+ 0.007148754119873047,
+ 0.007134833812713623,
+ 0.007239153861999512,
+ 0.007191314220428467,
+ 0.007136754035949707,
+ 0.00715787410736084,
+ 0.007190834045410157,
+ 0.007168114185333252,
+ 0.007155313968658448,
+ 0.007120913028717041,
+ 0.007151473999023437,
+ 0.007156914234161377
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 134.45445099483223
+ "value": 132.17038641370291
  },
  "energy": null,
  "efficiency": null