IlyasMoutawwakil HF staff committed on
Commit
b6836b5
·
verified ·
1 Parent(s): 960467c

Upload cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub

Browse files
cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -6,19 +6,17 @@
6
  "version": "2.2.2+rocm5.7",
7
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
8
  "task": "multiple-choice",
9
- "model": "FacebookAI/roberta-base",
10
  "library": "transformers",
 
 
11
  "device": "cuda",
12
  "device_ids": "0",
13
  "seed": 42,
14
  "inter_op_num_threads": null,
15
  "intra_op_num_threads": null,
16
- "hub_kwargs": {
17
- "revision": "main",
18
- "force_download": false,
19
- "local_files_only": false,
20
- "trust_remote_code": false
21
- },
22
  "no_weights": true,
23
  "device_map": null,
24
  "torch_dtype": null,
@@ -85,7 +83,7 @@
85
  "gpu_count": 1,
86
  "gpu_vram_mb": 68702699520,
87
  "optimum_benchmark_version": "0.2.0",
88
- "optimum_benchmark_commit": "3e2eebdc0f80ae6deeb2e1faad3e889ed5a4df2d",
89
  "transformers_version": "4.40.2",
90
  "transformers_commit": null,
91
  "accelerate_version": "0.30.1",
@@ -104,163 +102,151 @@
104
  "forward": {
105
  "memory": {
106
  "unit": "MB",
107
- "max_ram": 1002.672128,
108
  "max_global_vram": 898.461696,
109
- "max_process_vram": 212847.214592,
110
  "max_reserved": 555.74528,
111
  "max_allocated": 499.507712
112
  },
113
  "latency": {
114
  "unit": "s",
115
- "count": 135,
116
- "total": 0.9953043928146366,
117
- "mean": 0.007372625131960269,
118
- "stdev": 0.0002775897221710371,
119
- "p50": 0.007234007835388184,
120
- "p90": 0.007742258644104004,
121
- "p95": 0.00789737606048584,
122
- "p99": 0.008220768394470215,
123
  "values": [
124
- 0.007573363780975342,
125
- 0.007371604919433594,
126
- 0.007414485931396484,
127
- 0.007681522846221924,
128
- 0.007745203018188477,
129
- 0.007781842231750488,
130
- 0.007726643085479736,
131
- 0.007690962791442871,
132
- 0.007676723003387451,
133
- 0.007592563152313232,
134
- 0.007554803848266602,
135
- 0.0074733648300170895,
136
- 0.007425685882568359,
137
- 0.007451125144958496,
138
- 0.007515923976898193,
139
- 0.007493844985961914,
140
- 0.007355286121368408,
141
- 0.007338965892791748,
142
- 0.007334486007690429,
143
- 0.007331126213073731,
144
- 0.007349047183990478,
145
- 0.007341365814208984,
146
- 0.007369525909423828,
147
- 0.007382006168365479,
148
- 0.007417204856872558,
149
- 0.007338325977325439,
150
- 0.00881879234313965,
151
- 0.007507444858551026,
152
- 0.007210807800292969,
153
- 0.0071824870109558105,
154
- 0.007553524017333985,
155
- 0.007258007049560547,
156
- 0.00720360803604126,
157
- 0.007156888008117676,
158
- 0.007173367977142334,
159
- 0.007167448043823242,
160
- 0.007160088062286377,
161
- 0.00717352819442749,
162
- 0.007171446800231933,
163
- 0.007380246162414551,
164
- 0.007350966930389404,
165
- 0.007216087818145752,
166
- 0.007368725776672363,
167
- 0.007415444850921631,
168
- 0.00718888807296753,
169
- 0.007505364894866943,
170
- 0.0072140078544616695,
171
- 0.007185526847839355,
172
- 0.007193048000335694,
173
- 0.007164087772369385,
174
- 0.007160568237304687,
175
- 0.007184566974639893,
176
- 0.007192247867584228,
177
- 0.007160568237304687,
178
- 0.007182007789611817,
179
- 0.007202328205108643,
180
- 0.007208406925201416,
181
- 0.007180247783660889,
182
- 0.007200088024139404,
183
- 0.007195127010345459,
184
- 0.007226326942443848,
185
- 0.007204407215118408,
186
- 0.0071919279098510746,
187
- 0.0071484088897705075,
188
- 0.007174967765808105,
189
- 0.007215287208557129,
190
- 0.0072202467918396,
191
- 0.007285206794738769,
192
- 0.007209368228912353,
193
- 0.007194326877593994,
194
- 0.007194007873535156,
195
- 0.007231447219848633,
196
- 0.007208407878875733,
197
- 0.0071919279098510746,
198
- 0.007226326942443848,
199
- 0.007217526912689209,
200
- 0.007189527034759522,
201
- 0.007163447856903076,
202
- 0.007223126888275147,
203
- 0.007239127159118652,
204
- 0.007218647003173828,
205
- 0.007192727088928223,
206
- 0.007189047813415527,
207
- 0.007239448070526123,
208
- 0.007219927787780762,
209
- 0.007238967895507813,
210
- 0.00723656702041626,
211
- 0.00719976806640625,
212
- 0.007233687877655029,
213
- 0.0072183279991149905,
214
- 0.007199128150939941,
215
- 0.007192407131195068,
216
- 0.007218966960906983,
217
- 0.007157368183135986,
218
- 0.007204886913299561,
219
- 0.007219286918640136,
220
- 0.007225846767425537,
221
- 0.007198488235473633,
222
- 0.007234007835388184,
223
- 0.007188087940216065,
224
- 0.007185047149658203,
225
- 0.007220567226409912,
226
- 0.007210648059844971,
227
- 0.007207608222961426,
228
- 0.007195927143096924,
229
- 0.007233366966247558,
230
- 0.007248726844787598,
231
- 0.007704562187194825,
232
- 0.008220876693725586,
233
- 0.00812919807434082,
234
- 0.007870161056518554,
235
- 0.007768561840057373,
236
- 0.00819751739501953,
237
- 0.00817127799987793,
238
- 0.008220558166503905,
239
- 0.007939599990844726,
240
- 0.007663763046264648,
241
- 0.007737842082977295,
242
- 0.0077632818222045895,
243
- 0.007756562232971192,
244
- 0.00787928009033203,
245
- 0.007720081806182861,
246
- 0.007459444999694824,
247
- 0.007413845062255859,
248
- 0.007419284820556641,
249
- 0.007229687213897705,
250
- 0.007282647132873535,
251
- 0.007280247211456299,
252
- 0.007341365814208984,
253
- 0.007198008060455322,
254
- 0.007263607025146485,
255
- 0.0073151268959045414,
256
- 0.007330166816711426,
257
- 0.0073029670715332035,
258
- 0.0073290448188781734
259
  ]
260
  },
261
  "throughput": {
262
  "unit": "samples/s",
263
- "value": 135.6368975909284
264
  },
265
  "energy": null,
266
  "efficiency": null
 
6
  "version": "2.2.2+rocm5.7",
7
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
8
  "task": "multiple-choice",
 
9
  "library": "transformers",
10
+ "model": "FacebookAI/roberta-base",
11
+ "processor": "FacebookAI/roberta-base",
12
  "device": "cuda",
13
  "device_ids": "0",
14
  "seed": 42,
15
  "inter_op_num_threads": null,
16
  "intra_op_num_threads": null,
17
+ "model_kwargs": {},
18
+ "processor_kwargs": {},
19
+ "hub_kwargs": {},
 
 
 
20
  "no_weights": true,
21
  "device_map": null,
22
  "torch_dtype": null,
 
83
  "gpu_count": 1,
84
  "gpu_vram_mb": 68702699520,
85
  "optimum_benchmark_version": "0.2.0",
86
+ "optimum_benchmark_commit": "6fd377459e287bb09e9383ba2516b1b2a271a562",
87
  "transformers_version": "4.40.2",
88
  "transformers_commit": null,
89
  "accelerate_version": "0.30.1",
 
102
  "forward": {
103
  "memory": {
104
  "unit": "MB",
105
+ "max_ram": 1002.31168,
106
  "max_global_vram": 898.461696,
107
+ "max_process_vram": 231091.269632,
108
  "max_reserved": 555.74528,
109
  "max_allocated": 499.507712
110
  },
111
  "latency": {
112
  "unit": "s",
113
+ "count": 123,
114
+ "total": 0.9991405701637269,
115
+ "mean": 0.008123094066371763,
116
+ "stdev": 0.0003438575333615109,
117
+ "p50": 0.008056300163269043,
118
+ "p90": 0.008664905738830567,
119
+ "p95": 0.008697706031799316,
120
+ "p99": 0.008725594520568848,
121
  "values": [
122
+ 0.009379337310791016,
123
+ 0.007979819774627686,
124
+ 0.00794462013244629,
125
+ 0.007712141036987304,
126
+ 0.008181900024414062,
127
+ 0.007844460010528565,
128
+ 0.007848141193389892,
129
+ 0.00782077980041504,
130
+ 0.008174220085144042,
131
+ 0.007902860164642334,
132
+ 0.007730541229248047,
133
+ 0.007736781120300293,
134
+ 0.007829579830169677,
135
+ 0.007646060943603515,
136
+ 0.007686861038208008,
137
+ 0.0077156610488891605,
138
+ 0.007826060771942138,
139
+ 0.007705581188201904,
140
+ 0.0077603011131286625,
141
+ 0.007706700801849365,
142
+ 0.007710381031036377,
143
+ 0.0077022209167480465,
144
+ 0.0077211008071899415,
145
+ 0.007701261043548584,
146
+ 0.007882700920104981,
147
+ 0.007902221202850342,
148
+ 0.007738380908966064,
149
+ 0.007801899909973145,
150
+ 0.007756781101226806,
151
+ 0.007743980884552002,
152
+ 0.00799149990081787,
153
+ 0.00871997833251953,
154
+ 0.008665417671203613,
155
+ 0.008617098808288575,
156
+ 0.008707178115844727,
157
+ 0.008635819435119629,
158
+ 0.008691818237304687,
159
+ 0.008727178573608399,
160
+ 0.008682538986206054,
161
+ 0.008481899261474609,
162
+ 0.008362700462341308,
163
+ 0.008280460357666015,
164
+ 0.008167180061340332,
165
+ 0.008245738983154297,
166
+ 0.008375658988952636,
167
+ 0.00835373878479004,
168
+ 0.008182379722595215,
169
+ 0.008333258628845215,
170
+ 0.008504138946533204,
171
+ 0.008241419792175292,
172
+ 0.007998859882354737,
173
+ 0.008177578926086426,
174
+ 0.008087180137634277,
175
+ 0.008133740425109863,
176
+ 0.008044939994812011,
177
+ 0.008245100021362304,
178
+ 0.008407500267028809,
179
+ 0.008687018394470215,
180
+ 0.008153420448303222,
181
+ 0.008081259727478028,
182
+ 0.008065740585327148,
183
+ 0.008028461456298827,
184
+ 0.007837581157684325,
185
+ 0.007972620010375976,
186
+ 0.008110699653625488,
187
+ 0.008139659881591797,
188
+ 0.008013259887695313,
189
+ 0.008459979057312012,
190
+ 0.00823853874206543,
191
+ 0.00822350025177002,
192
+ 0.008054539680480957,
193
+ 0.007997739791870117,
194
+ 0.00799374008178711,
195
+ 0.007977580070495606,
196
+ 0.007986380100250243,
197
+ 0.0077444610595703125,
198
+ 0.007669900894165039,
199
+ 0.0076569409370422364,
200
+ 0.007901899814605713,
201
+ 0.008142539978027344,
202
+ 0.00804557991027832,
203
+ 0.008056300163269043,
204
+ 0.008040940284729004,
205
+ 0.008080300331115722,
206
+ 0.008010540008544922,
207
+ 0.00820125961303711,
208
+ 0.008066060066223145,
209
+ 0.00814766025543213,
210
+ 0.00832701873779297,
211
+ 0.00824158000946045,
212
+ 0.008697897911071777,
213
+ 0.008719179153442383,
214
+ 0.00866285800933838,
215
+ 0.0086342191696167,
216
+ 0.008695979118347168,
217
+ 0.00869117832183838,
218
+ 0.008608299255371094,
219
+ 0.008632457733154297,
220
+ 0.00862493896484375,
221
+ 0.008618059158325196,
222
+ 0.008702539443969726,
223
+ 0.008569418907165527,
224
+ 0.00861645793914795,
225
+ 0.008311018943786622,
226
+ 0.007981900215148927,
227
+ 0.00814717960357666,
228
+ 0.008178700447082519,
229
+ 0.007945260047912598,
230
+ 0.007681100845336914,
231
+ 0.007849259853363037,
232
+ 0.007779341220855713,
233
+ 0.007766380786895752,
234
+ 0.007758699893951416,
235
+ 0.007849260807037354,
236
+ 0.007771501064300537,
237
+ 0.0077822208404541016,
238
+ 0.007825581073760987,
239
+ 0.007795660018920898,
240
+ 0.007934541225433349,
241
+ 0.008091340065002442,
242
+ 0.008132780075073242,
243
+ 0.007940939903259277,
244
+ 0.007934380054473876
 
 
 
 
 
 
 
 
 
 
 
 
245
  ]
246
  },
247
  "throughput": {
248
  "unit": "samples/s",
249
+ "value": 123.1058007982243
250
  },
251
  "energy": null,
252
  "efficiency": null