IlyasMoutawwakil (HF staff) committed ab81a63 · verified · 1 Parent(s): 087b37d

Upload cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub

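The commit message above indicates the result file was pushed programmatically with huggingface_hub. As a rough, hypothetical sketch of that kind of upload (the repo_id and local file path below are placeholders, not values taken from this commit; path_in_repo mirrors the committed file path), it would look roughly like:

```python
# Hypothetical sketch only: repo_id and the local path are placeholders,
# not taken from this commit; path_in_repo mirrors the committed file path.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="benchmark.json",  # local benchmark result produced by the run
    path_in_repo="cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json",
    repo_id="<namespace>/<benchmark-dataset>",  # placeholder target repo
    repo_type="dataset",
    commit_message="Upload cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
)
```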
cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -83,7 +83,7 @@
  "gpu_count": 1,
  "gpu_vram_mb": 68702699520,
  "optimum_benchmark_version": "0.2.1",
- "optimum_benchmark_commit": "56d026bf244c6516d8cb780280ce7cc6505f270e",
  "transformers_version": "4.40.2",
  "transformers_commit": null,
  "accelerate_version": "0.30.1",
@@ -102,161 +102,158 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 1003.7248,
- "max_global_vram": 1878.839296,
- "max_process_vram": 221212.643328,
  "max_reserved": 555.74528,
  "max_allocated": 499.507712
  },
  "latency": {
  "unit": "s",
- "count": 133,
- "total": 1.000747857570648,
- "mean": 0.007524419981734198,
- "stdev": 0.00032205131044108013,
- "p50": 0.007361132144927979,
- "p90": 0.008138637351989747,
- "p95": 0.008191469383239745,
- "p99": 0.008289528846740724,
  "values": [
- 0.008795374870300294,
- 0.007819851875305176,
- 0.007956653118133546,
- 0.007633933067321777,
- 0.008032492637634277,
- 0.008145453453063965,
- 0.007512012004852295,
- 0.007391372203826904,
- 0.007641293048858642,
- 0.007465291976928711,
- 0.007826892852783204,
- 0.00750897216796875,
- 0.007261452198028565,
- 0.007279531955718994,
- 0.007527371883392334,
- 0.007291691780090332,
- 0.007316492080688477,
- 0.007361132144927979,
- 0.007257612228393555,
- 0.0073411321640014645,
- 0.007301290988922119,
- 0.007272972106933594,
- 0.007276812076568604,
- 0.007312972068786621,
- 0.007325131893157959,
- 0.007274251937866211,
- 0.007308492183685303,
- 0.0073020920753479,
- 0.007306571960449219,
- 0.007322411060333252,
- 0.007290410995483399,
- 0.00727105188369751,
- 0.00727105188369751,
- 0.007276172161102295,
- 0.007526731967926025,
- 0.007348011970520019,
- 0.007299211978912354,
- 0.007254891872406006,
- 0.007291051864624023,
- 0.007764331817626953,
- 0.007203210830688476,
- 0.007402252197265625,
- 0.007277931213378906,
- 0.00727489185333252,
- 0.007943532943725585,
- 0.007517611980438233,
- 0.007396972179412842,
- 0.007296971797943115,
- 0.00752897310256958,
- 0.007442252159118652,
- 0.007301452159881591,
- 0.007285770893096924,
- 0.007521771907806397,
- 0.0074763321876525875,
- 0.007410892009735107,
- 0.007824812889099121,
- 0.008207693099975585,
- 0.008158892631530761,
- 0.008195533752441406,
- 0.008012972831726074,
- 0.007311212062835693,
- 0.007485611915588379,
- 0.007276172161102295,
- 0.007259212017059326,
- 0.00725969123840332,
- 0.007264330863952637,
- 0.007267372131347656,
- 0.007370731830596924,
- 0.007562091827392578,
- 0.0073008108139038085,
- 0.007344972133636474,
- 0.007277132034301758,
- 0.007301931858062744,
- 0.007290731906890869,
- 0.007879212856292724,
- 0.007493931770324707,
- 0.007472492218017578,
- 0.007272012233734131,
- 0.007340012073516845,
- 0.007337451934814453,
- 0.007316812038421631,
- 0.007281132221221924,
- 0.007283852100372314,
- 0.007278410911560058,
- 0.0072352118492126465,
- 0.007253131866455078,
- 0.007312012195587158,
- 0.007272332191467285,
- 0.007286411762237549,
- 0.007316971778869629,
- 0.00730433177947998,
- 0.007285611152648926,
- 0.007248012065887452,
- 0.007269931793212891,
- 0.0073435320854187015,
- 0.007314092159271241,
- 0.007305292129516602,
- 0.00769185209274292,
- 0.00821489429473877,
- 0.008231212615966797,
- 0.008181452751159668,
- 0.008189932823181152,
- 0.007807532787322998,
- 0.007860012054443359,
- 0.007583211898803711,
- 0.007763052940368652,
- 0.007539212226867676,
- 0.007520172119140625,
- 0.0075472121238708495,
- 0.007539853096008301,
- 0.007304172039031983,
- 0.0073008108139038085,
- 0.007641932010650635,
- 0.007453291893005371,
- 0.00811137294769287,
- 0.008193774223327637,
- 0.0074120121002197266,
- 0.007285451889038086,
- 0.007560972213745117,
- 0.007452651977539062,
- 0.007313611984252929,
- 0.007872172832489014,
- 0.008171213150024415,
- 0.007711692810058594,
- 0.0073264122009277345,
- 0.007470411777496338,
- 0.0074260921478271485,
- 0.007733612060546875,
- 0.007719371795654297,
- 0.008018412590026856,
- 0.008189772605895996,
- 0.008175212860107422,
- 0.00831697177886963
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 132.90060927321124
  },
  "energy": null,
  "efficiency": null
 
@@ -83,7 +83,7 @@
  "gpu_count": 1,
  "gpu_vram_mb": 68702699520,
  "optimum_benchmark_version": "0.2.1",
+ "optimum_benchmark_commit": "c1d0b062e90b79e7705510c58cea731c0d90da8a",
  "transformers_version": "4.40.2",
  "transformers_commit": null,
  "accelerate_version": "0.30.1",
 
@@ -102,161 +102,158 @@
  "forward": {
  "memory": {
  "unit": "MB",
+ "max_ram": 1004.146688,
+ "max_global_vram": 1878.827008,
+ "max_process_vram": 207526.064128,
  "max_reserved": 555.74528,
  "max_allocated": 499.507712
  },
  "latency": {
  "unit": "s",
+ "count": 130,
+ "total": 0.996648693561554,
+ "mean": 0.007666528412011954,
+ "stdev": 0.0006570371053670661,
+ "p50": 0.007380008935928345,
+ "p90": 0.008584106254577636,
+ "p95": 0.00937273178100586,
+ "p99": 0.009651641445159911,
  "values": [
+ 0.008584010124206542,
+ 0.007609929084777832,
+ 0.007298569202423096,
+ 0.007431368827819824,
+ 0.007248007774353028,
+ 0.007257769107818604,
+ 0.007265768051147461,
+ 0.007198728084564209,
+ 0.0071828880310058595,
+ 0.007233287811279297,
+ 0.007208488941192627,
+ 0.007200808048248291,
+ 0.007182088851928711,
+ 0.007168327808380127,
+ 0.007174889087677002,
+ 0.007222248077392578,
+ 0.007225608825683594,
+ 0.007797769069671631,
+ 0.007509288787841797,
+ 0.007375688076019287,
+ 0.007370408058166504,
+ 0.007198248863220215,
+ 0.007172327995300293,
+ 0.0072332887649536135,
+ 0.007297287940979004,
+ 0.007230729103088379,
+ 0.007208807945251465,
+ 0.0071945681571960445,
+ 0.007224809169769287,
+ 0.007189288139343262,
+ 0.007177608966827393,
+ 0.0071473679542541505,
+ 0.0071657681465148925,
+ 0.007310729026794433,
+ 0.007299048900604248,
+ 0.007349448204040527,
+ 0.007318249225616455,
+ 0.007311368942260742,
+ 0.007318408012390137,
+ 0.007300969123840332,
+ 0.007255049228668213,
+ 0.0072849678993225095,
+ 0.0073276891708374025,
+ 0.007338249206542969,
+ 0.007324488162994385,
+ 0.0073795289993286136,
+ 0.0073942489624023435,
+ 0.00735456895828247,
+ 0.00735152816772461,
+ 0.007342729091644287,
+ 0.007344809055328369,
+ 0.007295207977294922,
+ 0.007360329151153564,
+ 0.0073404889106750484,
+ 0.007347367763519287,
+ 0.007323369026184082,
+ 0.007297449111938477,
+ 0.007296487808227539,
+ 0.007297769069671631,
+ 0.007321448802947998,
+ 0.007383687973022461,
+ 0.0073184089660644535,
+ 0.007423849105834961,
+ 0.007528968811035156,
+ 0.007347688198089599,
+ 0.007397129058837891,
+ 0.007389449119567871,
+ 0.007383529186248779,
+ 0.007346568107604981,
+ 0.007577448844909668,
+ 0.0074064087867736815,
+ 0.007401128768920898,
+ 0.007433769226074219,
+ 0.0074249677658081055,
+ 0.007625769138336182,
+ 0.007386088848114014,
+ 0.007358728885650635,
+ 0.007371849060058594,
+ 0.007420807838439941,
+ 0.007406249046325684,
+ 0.007399368762969971,
+ 0.0073972887992858885,
+ 0.007411528110504151,
+ 0.0073992090225219725,
+ 0.007380488872528076,
+ 0.007364169120788574,
+ 0.0073889679908752445,
+ 0.007372488975524902,
+ 0.0074176092147827145,
+ 0.0073916888236999516,
+ 0.007387207984924316,
+ 0.007364008903503418,
+ 0.007317769050598145,
+ 0.007338088035583496,
+ 0.007389769077301025,
+ 0.007371849060058594,
+ 0.007417448997497559,
+ 0.007380807876586914,
+ 0.007571528911590577,
+ 0.007522889137268066,
+ 0.007657128810882569,
+ 0.00809568977355957,
+ 0.00767936897277832,
+ 0.008192009925842285,
+ 0.008395210266113281,
+ 0.008528969764709472,
+ 0.007949288845062255,
+ 0.008016969680786133,
+ 0.009199371337890625,
+ 0.009529451370239257,
+ 0.009662731170654297,
+ 0.00962449073791504,
+ 0.009605450630187988,
+ 0.009857932090759277,
+ 0.009605610847473144,
+ 0.009514572143554688,
+ 0.008963210105895996,
+ 0.008778890609741212,
+ 0.00881408977508545,
+ 0.008726090431213378,
+ 0.008584971427917481,
+ 0.008499210357666015,
+ 0.008504329681396484,
+ 0.008484649658203124,
+ 0.008530409812927246,
+ 0.008493929862976075,
+ 0.008480970382690429,
+ 0.00844929027557373,
+ 0.008511369705200195,
+ 0.008424328804016114
  ]
  },
  "throughput": {
  "unit": "samples/s",
+ "value": 130.4371348097002
  },
  "energy": null,
  "efficiency": null