IlyasMoutawwakil committed (verified)
Commit fede54d · 1 Parent(s): 265319e

Upload cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub

cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -85,7 +85,7 @@
 "gpu_count": 1,
 "gpu_vram_mb": 68702699520,
 "optimum_benchmark_version": "0.2.0",
- "optimum_benchmark_commit": "40484fbc84921b66f5da6e6dc8e14a8bfa42b562",
+ "optimum_benchmark_commit": "e5cc8d8069420e159473795b6d1ea703cadf2a8b",
 "transformers_version": "4.40.2",
 "transformers_commit": null,
 "accelerate_version": "0.30.1",
@@ -104,165 +104,154 @@
 "forward": {
 "memory": {
 "unit": "MB",
- "max_ram": 1006.915584,
- "max_global_vram": 898.482176,
- "max_process_vram": 215132.389376,
+ "max_ram": 1006.071808,
+ "max_global_vram": 898.4576,
+ "max_process_vram": 214366.396416,
 "max_reserved": 555.74528,
 "max_allocated": 499.507712
 },
 "latency": {
 "unit": "s",
- "count": 137,
- "total": 0.994896844387055,
- "mean": 0.007262020761949303,
- "stdev": 0.0003254641019984611,
- "p50": 0.00717931604385376,
- "p90": 0.007353811073303223,
- "p95": 0.007884784126281739,
- "p99": 0.008091387405395507,
+ "count": 126,
+ "total": 1.0006671333312986,
+ "mean": 0.007941802645486497,
+ "stdev": 0.00046877798037668297,
+ "p50": 0.007846324682235718,
+ "p90": 0.008392879009246826,
+ "p95": 0.008419518232345581,
+ "p99": 0.00856007742881775,
 "values": [
- 0.007883952140808106,
- 0.00727531623840332,
- 0.007313076019287109,
- 0.00735035514831543,
- 0.0072877960205078125,
- 0.0072775559425354,
- 0.007311795234680176,
- 0.007308276176452637,
- 0.007298036098480225,
- 0.007299635887145996,
- 0.007288916110992432,
- 0.007302515029907227,
- 0.0073183560371398925,
- 0.007283316135406494,
- 0.007271796226501465,
- 0.007239956855773926,
- 0.0072553157806396484,
- 0.00721035623550415,
- 0.007209715843200684,
- 0.007218515872955323,
- 0.00725355577468872,
- 0.007202676773071289,
- 0.007190836906433106,
- 0.007202836990356445,
- 0.007213556766510009,
- 0.0071812372207641605,
- 0.007208277225494384,
- 0.007173397064208984,
- 0.007216596126556397,
- 0.007186676025390625,
- 0.0072431559562683104,
- 0.007223637104034424,
- 0.007229395866394043,
- 0.01025497817993164,
- 0.007618352890014648,
- 0.007136436939239502,
- 0.007122355937957764,
- 0.007117396831512452,
- 0.007145075798034668,
- 0.007150997161865234,
- 0.007149877071380616,
- 0.007143637180328369,
- 0.007161076068878174,
- 0.007134517192840576,
- 0.0071489157676696774,
- 0.007102197170257569,
- 0.007072277069091797,
- 0.007071796894073486,
- 0.00711051607131958,
- 0.007124116897583008,
- 0.0071545162200927735,
- 0.007125556945800781,
- 0.0071034770011901855,
- 0.007091796875,
- 0.007146516799926758,
- 0.007126195907592774,
- 0.007121236801147461,
- 0.0071127572059631345,
- 0.007133717060089111,
- 0.007138516902923584,
- 0.007118836879730224,
- 0.0071247568130493166,
- 0.0071573958396911625,
- 0.007129877090454101,
- 0.007066038131713867,
- 0.007132757186889649,
- 0.00717931604385376,
- 0.007050197124481201,
- 0.007128437042236328,
- 0.007146676063537597,
- 0.007127636909484863,
- 0.007110196113586426,
- 0.007182036876678467,
- 0.007165077209472656,
- 0.007180436134338379,
- 0.007125076770782471,
- 0.007114195823669433,
- 0.007155797004699707,
- 0.007171317100524903,
- 0.007346354961395264,
- 0.007423154830932617,
- 0.007156435966491699,
- 0.007122517108917236,
- 0.00717931604385376,
- 0.007167635917663574,
- 0.0071677970886230465,
- 0.007147157192230224,
- 0.007358994960784912,
- 0.007173717021942139,
- 0.007187477111816406,
- 0.007197876930236816,
- 0.007172436237335205,
- 0.007153716087341309,
- 0.007324275970458985,
- 0.0071972360610961916,
- 0.007142836093902588,
- 0.007166996955871582,
- 0.0071884369850158695,
- 0.007214675903320313,
- 0.007179155826568604,
- 0.007158515930175781,
- 0.007147316932678223,
- 0.007338514804840088,
- 0.007164435863494873,
- 0.007124276161193848,
- 0.0071597971916198735,
- 0.007154836177825928,
- 0.007161715984344483,
- 0.007114996910095215,
- 0.0072810759544372555,
- 0.007127636909484863,
- 0.0071905159950256346,
- 0.007179477214813232,
- 0.007176436901092529,
- 0.007197395801544189,
- 0.007166356086730957,
- 0.007196755886077881,
- 0.007166037082672119,
- 0.0071813960075378415,
- 0.007193716049194336,
- 0.007179636001586914,
- 0.0071743569374084475,
- 0.007176597118377686,
- 0.007307314872741699,
- 0.007214516162872314,
- 0.007210515975952149,
- 0.007222675800323487,
- 0.007187635898590088,
- 0.007524594783782959,
- 0.00799883222579956,
- 0.00807275104522705,
- 0.007998511791229249,
- 0.007551474094390869,
- 0.00754219388961792,
- 0.00788811206817627,
- 0.008090669631958008,
- 0.008091791152954102
+ 0.007999123096466065,
+ 0.0077431249618530274,
+ 0.007846164226531982,
+ 0.00815432071685791,
+ 0.008323439598083496,
+ 0.008191121101379395,
+ 0.008046642303466797,
+ 0.00802184295654297,
+ 0.007998003005981445,
+ 0.007946804046630859,
+ 0.007849844932556152,
+ 0.0078109650611877446,
+ 0.007744726181030274,
+ 0.007748244762420654,
+ 0.007703765869140625,
+ 0.00769032621383667,
+ 0.007721364974975586,
+ 0.007700085163116455,
+ 0.007697206020355224,
+ 0.007776565074920654,
+ 0.00773368501663208,
+ 0.00769496488571167,
+ 0.007708566188812256,
+ 0.007639126777648926,
+ 0.0076277670860290525,
+ 0.007627286911010742,
+ 0.007658805847167969,
+ 0.0077455248832702635,
+ 0.007829043865203857,
+ 0.007833524227142334,
+ 0.007741044998168945,
+ 0.011146292686462403,
+ 0.007900084018707275,
+ 0.007419928073883057,
+ 0.007449207782745361,
+ 0.0074624881744384764,
+ 0.007506008148193359,
+ 0.0074861679077148436,
+ 0.007468728065490723,
+ 0.007673205852508545,
+ 0.007405048847198487,
+ 0.007499607086181641,
+ 0.0074653677940368654,
+ 0.007466008186340332,
+ 0.007442967891693115,
+ 0.0074844069480896,
+ 0.007459447860717773,
+ 0.007444727897644043,
+ 0.007452407836914063,
+ 0.0074391279220581055,
+ 0.0077474451065063475,
+ 0.008600397109985352,
+ 0.007620245933532715,
+ 0.007506166934967041,
+ 0.007680885791778565,
+ 0.00843527889251709,
+ 0.008366639137268067,
+ 0.00832759952545166,
+ 0.008353038787841797,
+ 0.008355599403381347,
+ 0.008380879402160644,
+ 0.008368719100952148,
+ 0.008344240188598633,
+ 0.008373679161071778,
+ 0.008308400154113769,
+ 0.008381198883056641,
+ 0.008412558555603028,
+ 0.00838711929321289,
+ 0.008375438690185547,
+ 0.008416399002075196,
+ 0.008405678749084473,
+ 0.008333839416503907,
+ 0.00833367919921875,
+ 0.008318320274353027,
+ 0.008388559341430664,
+ 0.008359599113464355,
+ 0.00833799934387207,
+ 0.008388399124145508,
+ 0.008398798942565918,
+ 0.008439118385314942,
+ 0.00837255859375,
+ 0.008376399040222167,
+ 0.008401198387145996,
+ 0.00838279914855957,
+ 0.008370959281921386,
+ 0.008367918968200683,
+ 0.008330479621887207,
+ 0.008332879066467284,
+ 0.008313999176025391,
+ 0.008383599281311035,
+ 0.008435117721557616,
+ 0.007577207088470459,
+ 0.007488088130950928,
+ 0.007502487182617187,
+ 0.007538486957550048,
+ 0.0075029668807983394,
+ 0.0074900069236755375,
+ 0.007536887168884277,
+ 0.008346959114074707,
+ 0.008377999305725097,
+ 0.008433037757873536,
+ 0.008077681541442871,
+ 0.008420557975769042,
+ 0.008397198677062989,
+ 0.007864563941955567,
+ 0.007865843772888183,
+ 0.00785784387588501,
+ 0.007858163833618164,
+ 0.007846485137939454,
+ 0.007907444000244141,
+ 0.00800312328338623,
+ 0.007957043170928955,
+ 0.007984403133392335,
+ 0.007501368045806885,
+ 0.007531607151031494,
+ 0.007507927894592285,
+ 0.007590806007385254,
+ 0.007566806793212891,
+ 0.007517685890197754,
+ 0.007533367156982422,
+ 0.007497046947479248,
+ 0.007529207229614258,
+ 0.007488728046417236,
+ 0.007495128154754639,
+ 0.007491288185119629,
+ 0.007443607807159424
 ]
 },
 "throughput": {
 "unit": "samples/s",
- "value": 137.70271840032245
+ "value": 125.91599724129662
 },
 "energy": null,
 "efficiency": null