IlyasMoutawwakil (HF staff) committed
Commit 0f83bc7 (verified) · 1 Parent(s): c624b77

Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json with huggingface_hub
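For anyone who wants to sanity-check the summary statistics in this report against the raw samples, here is a minimal sketch that reloads the uploaded benchmark.json and recomputes the latency count, total, mean and the throughput (count / total). It assumes a local checkout containing the file at the path from the commit message; find_forward is an illustrative helper defined here only because the diff starts at line 102 and does not show how deeply the "forward" section is nested.

```python
import json

# Path taken from the commit message; assumes a local checkout of this repo.
PATH = "cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json"


def find_forward(node):
    """Locate the 'forward' section shown in this diff.

    The nesting above the 'forward' key is not visible in the diff, so we
    search for it instead of assuming the top-level layout of the file.
    """
    if isinstance(node, dict):
        candidate = node.get("forward")
        if isinstance(candidate, dict) and "latency" in candidate:
            return candidate
        children = node.values()
    elif isinstance(node, list):
        children = node
    else:
        return None
    for child in children:
        found = find_forward(child)
        if found is not None:
            return found
    return None


with open(PATH) as f:
    forward = find_forward(json.load(f))

latency = forward["latency"]      # unit: seconds
values = latency["values"]

count = len(values)
total = sum(values)

print(f"count     : {count} (reported {latency['count']})")
print(f"total  [s]: {total:.9f} (reported {latency['total']:.9f})")
print(f"mean   [s]: {total / count:.9f} (reported {latency['mean']:.9f})")

# The reported throughput (samples/s) matches count / total latency.
print(f"throughput: {count / total:.4f} samples/s "
      f"(reported {forward['throughput']['value']:.4f})")
```

Against the updated file this should print a count of 148, a total close to 0.99897 s and a throughput close to 148.15 samples/s, in line with the reported values in the diff below.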
cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark.json CHANGED
@@ -102,7 +102,7 @@
102
  "forward": {
103
  "memory": {
104
  "unit": "MB",
105
- "max_ram": 902.189056,
106
  "max_global_vram": 1195.900928,
107
  "max_process_vram": 0.0,
108
  "max_reserved": 555.74528,
@@ -110,176 +110,179 @@
110
  },
111
  "latency": {
112
  "unit": "s",
113
- "count": 145,
114
- "total": 0.997332896232605,
115
- "mean": 0.006878157905052447,
116
- "stdev": 0.0004279952769912334,
117
- "p50": 0.006903808116912841,
118
- "p90": 0.007034867382049561,
119
- "p95": 0.007138508796691894,
120
- "p99": 0.009311232376098634,
121
  "values": [
122
- 0.009842687606811524,
123
- 0.009390080451965332,
124
- 0.009210880279541016,
125
- 0.007515135765075683,
126
- 0.007172095775604248,
127
- 0.0071485438346862796,
128
- 0.007143424034118652,
129
- 0.007044095993041993,
130
- 0.006956031799316406,
131
- 0.007001088142395019,
132
- 0.00692739200592041,
133
- 0.006944767951965332,
134
- 0.006966271877288818,
135
- 0.006912000179290771,
136
- 0.006834176063537598,
137
- 0.006889472007751465,
138
- 0.00693452787399292,
139
- 0.007053311824798584,
140
- 0.0069703998565673825,
141
- 0.006966271877288818,
142
- 0.006969344139099121,
143
- 0.006930431842803955,
144
- 0.00689356803894043,
145
- 0.006939648151397705,
146
- 0.007001088142395019,
147
- 0.007066624164581299,
148
- 0.006958079814910889,
149
- 0.007018527984619141,
150
- 0.006965248107910156,
151
- 0.00694374418258667,
152
- 0.006944767951965332,
153
- 0.006985727787017822,
154
- 0.007044095993041993,
155
- 0.00698367977142334,
156
- 0.0068689918518066405,
157
- 0.006922239780426025,
158
- 0.006915008068084717,
159
- 0.006953983783721924,
160
- 0.006903808116912841,
161
- 0.006873087882995605,
162
- 0.006927360057830811,
163
- 0.007018496036529541,
164
- 0.006973440170288086,
165
- 0.007027711868286133,
166
- 0.007012351989746094,
167
- 0.0069959678649902345,
168
- 0.006933504104614257,
169
- 0.006912000179290771,
170
- 0.006964223861694336,
171
- 0.007034848213195801,
172
- 0.006931456089019775,
173
- 0.006903808116912841,
174
- 0.007020544052124023,
175
- 0.006957056045532227,
176
- 0.006903808116912841,
177
- 0.006910975933074951,
178
- 0.006960127830505371,
179
- 0.006922239780426025,
180
- 0.006978559970855713,
181
  0.006957056045532227,
182
- 0.006881279945373535,
183
- 0.006924320220947266,
184
- 0.006879231929779053,
185
- 0.006906879901885986,
186
  0.006924287796020508,
187
- 0.006953983783721924,
188
- 0.007007232189178467,
189
  0.006966271877288818,
190
- 0.007051263809204102,
191
  0.0070348801612854,
192
- 0.0071188478469848635,
193
- 0.007009280204772949,
194
- 0.0069816322326660156,
195
- 0.006965248107910156,
196
- 0.006932447910308838,
197
- 0.0069550080299377445,
198
- 0.006965248107910156,
199
- 0.006899712085723877,
200
- 0.006852608203887939,
201
- 0.006751264095306396,
202
- 0.006899712085723877,
203
- 0.006998015880584717,
204
- 0.006756351947784424,
205
- 0.006624256134033203,
206
- 0.0065883522033691405,
207
- 0.006622208118438721,
208
- 0.0066375679969787596,
209
- 0.006804448127746582,
210
- 0.006807551860809326,
211
- 0.006807551860809326,
212
- 0.006870016098022461,
213
- 0.006852608203887939,
214
- 0.007011328220367432,
215
- 0.006989791870117188,
216
- 0.006760447978973389,
217
- 0.00675328016281128,
218
- 0.006735936164855957,
219
- 0.007278592109680176,
220
- 0.006605823993682861,
221
- 0.006544415950775147,
222
- 0.006632448196411133,
223
- 0.0066078720092773435,
224
- 0.006583295822143555,
225
- 0.0066007041931152345,
226
- 0.006569983959197998,
227
- 0.00658739185333252,
228
  0.006627327919006347,
229
- 0.006567967891693115,
230
- 0.0066109437942504885,
231
- 0.006557695865631104,
232
- 0.006441984176635742,
233
- 0.006512639999389648,
234
- 0.006657023906707763,
235
  0.006606847763061524,
236
- 0.006649856090545654,
237
- 0.0066447358131408694,
238
- 0.006533120155334473,
239
- 0.00654534387588501,
240
- 0.0065781760215759275,
241
- 0.006538176059722901,
242
- 0.006564864158630371,
243
- 0.006591487884521485,
244
- 0.006591487884521485,
245
- 0.00658022403717041,
246
- 0.00657919979095459,
247
- 0.006602752208709717,
248
- 0.006573056221008301,
249
- 0.0065474557876586915,
250
- 0.006551551818847656,
251
- 0.006584320068359375,
252
- 0.006550528049468994,
253
- 0.006598656177520752,
254
  0.0066406397819519045,
255
- 0.006593535900115967,
256
- 0.006592512130737305,
257
- 0.006598688125610351,
258
- 0.006555647850036621,
259
- 0.006633471965789795,
260
- 0.006592512130737305,
261
- 0.006562816143035889,
262
- 0.006586368083953857,
263
- 0.0066007041931152345,
264
- 0.0065771517753601075,
265
- 0.0065924801826477055,
266
- 0.0065781760215759275
267
  ]
268
  },
269
  "throughput": {
270
  "unit": "samples/s",
271
- "value": 145.3877642537744
272
  },
273
  "energy": {
274
  "unit": "kWh",
275
- "cpu": 7.669463059380917e-08,
276
- "ram": 4.1917744884783806e-08,
277
- "gpu": 1.417079986709657e-07,
278
- "total": 2.6032037414955867e-07
279
  },
280
  "efficiency": {
281
  "unit": "samples/kWh",
282
- "value": 3841420.4161579846
283
  }
284
  }
285
  }
 
102
  "forward": {
103
  "memory": {
104
  "unit": "MB",
105
+ "max_ram": 902.004736,
106
  "max_global_vram": 1195.900928,
107
  "max_process_vram": 0.0,
108
  "max_reserved": 555.74528,
 
110
  },
111
  "latency": {
112
  "unit": "s",
113
+ "count": 148,
114
+ "total": 0.9989743700027465,
115
+ "mean": 0.0067498268243428835,
116
+ "stdev": 0.00013576317339119158,
117
+ "p50": 0.006746576070785522,
118
+ "p90": 0.006892032051086426,
119
+ "p95": 0.006954188823699951,
120
+ "p99": 0.00703199245929718,
121
  "values": [
122
+ 0.007723008155822754,
123
+ 0.006948863983154297,
124
+ 0.006976511955261231,
125
  0.006957056045532227,
126
+ 0.007028736114501953,
127
+ 0.007012351989746094,
128
+ 0.006927360057830811,
129
+ 0.006938591957092285,
130
+ 0.006930431842803955,
131
+ 0.00687820816040039,
132
+ 0.006805568218231201,
133
+ 0.006783999919891357,
134
+ 0.006822912216186523,
135
+ 0.006755328178405762,
136
+ 0.0067010560035705566,
137
+ 0.006761472225189209,
138
+ 0.00674508810043335,
139
+ 0.006732800006866455,
140
+ 0.006804480075836182,
141
+ 0.006854656219482422,
142
+ 0.006767615795135498,
143
+ 0.006834176063537598,
144
+ 0.006776832103729248,
145
+ 0.006774784088134766,
146
+ 0.00673689603805542,
147
+ 0.0068321280479431154,
148
+ 0.006821887969970703,
149
+ 0.006763519763946534,
150
+ 0.00684441614151001,
151
+ 0.00682700777053833,
152
+ 0.006749184131622315,
153
+ 0.006756351947784424,
154
+ 0.006795263767242431,
155
+ 0.006816768169403077,
156
+ 0.006752255916595459,
157
+ 0.00672051191329956,
158
+ 0.006703104019165039,
159
+ 0.006668288230895996,
160
+ 0.006752255916595459,
161
+ 0.006767615795135498,
162
+ 0.006711296081542969,
163
+ 0.006806528091430664,
164
+ 0.0067686400413513184,
165
+ 0.006825984001159668,
166
+ 0.006799359798431396,
167
+ 0.006848512172698974,
168
+ 0.006822912216186523,
169
+ 0.006806528091430664,
170
+ 0.006783999919891357,
171
+ 0.006717440128326416,
172
+ 0.006729728221893311,
173
+ 0.006842368125915528,
174
+ 0.006821887969970703,
175
+ 0.006791168212890625,
176
+ 0.006796288013458252,
177
+ 0.006783999919891357,
178
+ 0.0067480640411376955,
179
+ 0.006765535831451416,
180
+ 0.00673689603805542,
181
+ 0.0067573761940002445,
182
+ 0.006859807968139648,
183
+ 0.006803455829620361,
184
+ 0.006823935985565186,
185
+ 0.00675328016281128,
186
+ 0.006738944053649902,
187
+ 0.00679423999786377,
188
+ 0.006756351947784424,
189
+ 0.00674508810043335,
190
+ 0.006759424209594727,
191
+ 0.006783999919891357,
192
+ 0.006841343879699707,
193
+ 0.006850560188293457,
194
+ 0.006842336177825928,
195
+ 0.0069283838272094726,
196
+ 0.00679423999786377,
197
+ 0.006789120197296142,
198
+ 0.006817791938781738,
199
+ 0.0067645440101623535,
200
+ 0.006724607944488525,
201
+ 0.0067758078575134275,
202
+ 0.006743040084838867,
203
+ 0.006648831844329834,
204
+ 0.006554624080657959,
205
+ 0.0065812478065490725,
206
+ 0.0065710082054138185,
207
+ 0.0065443840026855465,
208
+ 0.006466559886932373,
209
+ 0.006418432235717773,
210
+ 0.0064737281799316405,
211
+ 0.006804480075836182,
212
+ 0.00684441614151001,
213
  0.006924287796020508,
214
+ 0.006937600135803222,
215
  0.006966271877288818,
216
+ 0.0069928960800170895,
217
  0.0070348801612854,
218
+ 0.006759424209594727,
219
+ 0.006656000137329102,
220
+ 0.006666240215301514,
221
+ 0.0067010560035705566,
222
+ 0.006650847911834717,
223
+ 0.006694911956787109,
224
+ 0.006707200050354004,
225
+ 0.006673408031463623,
226
+ 0.006680575847625733,
227
+ 0.006698048114776612,
228
+ 0.006671296119689941,
229
+ 0.00667955207824707,
230
+ 0.006706175804138184,
231
+ 0.006654975891113281,
232
+ 0.006643712043762207,
233
+ 0.006672383785247803,
234
+ 0.00674508810043335,
235
+ 0.006635519981384277,
236
+ 0.006660096168518067,
237
+ 0.006686719894409179,
238
+ 0.006642687797546387,
239
+ 0.00667852783203125,
240
+ 0.006636544227600098,
241
+ 0.006621183872222901,
242
+ 0.006677504062652588,
243
+ 0.006666240215301514,
244
  0.006627327919006347,
245
+ 0.006670335769653321,
246
+ 0.006662144184112549,
247
  0.006606847763061524,
248
+ 0.006639616012573242,
249
+ 0.006656032085418701,
250
+ 0.006608895778656006,
251
  0.0066406397819519045,
252
+ 0.006638591766357422,
253
+ 0.006635519981384277,
254
+ 0.006677504062652588,
255
+ 0.00667852783203125,
256
+ 0.0066375679969787596,
257
+ 0.006648831844329834,
258
+ 0.006646783828735352,
259
+ 0.0066979842185974124,
260
+ 0.0066304001808166506,
261
+ 0.006607840061187744,
262
+ 0.006593567848205566,
263
+ 0.006647808074951172,
264
+ 0.006652927875518798,
265
+ 0.006612991809844971,
266
+ 0.006663167953491211,
267
+ 0.0066713600158691405,
268
+ 0.00662937593460083,
269
+ 0.006658048152923584
270
  ]
271
  },
272
  "throughput": {
273
  "unit": "samples/s",
274
+ "value": 148.1519490831312
275
  },
276
  "energy": {
277
  "unit": "kWh",
278
+ "cpu": 7.775924455972366e-08,
279
+ "ram": 4.2507321589201794e-08,
280
+ "gpu": 1.4109488411764312e-07,
281
+ "total": 2.6136145026656855e-07
282
  },
283
  "efficiency": {
284
  "unit": "samples/kWh",
285
+ "value": 3826118.9589362815
286
  }
287
  }
288
  }
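As a quick cross-check of the derived figures in the updated report: the cpu, ram and gpu energy entries sum to the reported total, and the efficiency value is the reciprocal of that total, which is consistent with the energy figures being normalised per sample rather than per run. A self-contained check, with the numbers copied verbatim from the diff above:

```python
# Energy figures from the updated report (unit: kWh), copied from the diff.
cpu = 7.775924455972366e-08
ram = 4.2507321589201794e-08
gpu = 1.4109488411764312e-07
reported_total = 2.6136145026656855e-07
reported_efficiency = 3826118.9589362815   # samples/kWh

total = cpu + ram + gpu
print(total, reported_total)              # components sum to the reported total
print(1.0 / total, reported_efficiency)   # efficiency equals 1 / total energy
```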