IlyasMoutawwakil committed
Commit 2f8a41b · verified · 1 parent: b4aa6a0

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub

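The commit message states that the file was pushed with the `huggingface_hub` client. For context, a minimal upload sketch is shown below; the `repo_id`, `repo_type`, and token handling are illustrative assumptions, and only the path in the repository comes from the commit message.

```python
# Minimal sketch of how a benchmark result like this one could be uploaded
# with huggingface_hub. repo_id and repo_type are hypothetical placeholders;
# only path_in_repo matches the file named in the commit message.
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from the local cache or the HF_TOKEN env var
api.upload_file(
    path_or_fileobj="benchmark.json",  # local file produced by the benchmark run
    path_in_repo=(
        "cuda_inference_transformers_token-classification_"
        "microsoft/deberta-v3-base/benchmark.json"
    ),
    repo_id="username/benchmark-results",  # hypothetical repository
    repo_type="dataset",                   # assumption; not stated in the commit
    commit_message=(
        "Upload cuda_inference_transformers_token-classification_"
        "microsoft/deberta-v3-base/benchmark.json with huggingface_hub"
    ),
)
```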
cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
@@ -82,7 +82,7 @@
     ],
     "gpu_count": 1,
     "gpu_vram_mb": 24146608128,
-    "optimum_benchmark_version": "0.2.0",
+    "optimum_benchmark_version": "0.2.1",
     "optimum_benchmark_commit": null,
     "transformers_version": "4.40.2",
     "transformers_commit": null,
@@ -102,7 +102,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 975.536128,
+            "max_ram": 975.491072,
             "max_global_vram": 1434.976256,
             "max_process_vram": 0.0,
             "max_reserved": 794.820608,
@@ -110,101 +110,102 @@
         },
         "latency": {
             "unit": "s",
-            "count": 70,
-            "total": 1.0018957738876342,
-            "mean": 0.014312796769823345,
-            "stdev": 0.000471044854333861,
-            "p50": 0.014249471664428711,
-            "p90": 0.014793625259399413,
-            "p95": 0.01523875856399536,
-            "p99": 0.01614117872238159,
+            "count": 71,
+            "total": 1.0056614370346066,
+            "mean": 0.014164245592036717,
+            "stdev": 0.00044171859682510525,
+            "p50": 0.014051360130310058,
+            "p90": 0.014436351776123046,
+            "p95": 0.01508351993560791,
+            "p99": 0.015944806003570556,
             "values": [
-                0.015731712341308594,
-                0.01640755271911621,
-                0.016021503448486327,
-                0.015520768165588379,
-                0.01480191993713379,
-                0.014835712432861328,
-                0.014611424446105957,
-                0.014012415885925293,
-                0.014156800270080566,
-                0.014161919593811035,
-                0.014011360168457032,
-                0.014106623649597168,
-                0.014136256217956543,
-                0.01419059181213379,
-                0.013983743667602539,
-                0.014202879905700684,
-                0.014042112350463867,
-                0.014048255920410157,
-                0.013971455574035644,
-                0.014018560409545898,
-                0.01414252758026123,
-                0.01437996768951416,
+                0.01538764762878418,
+                0.01617919921875,
+                0.01579529571533203,
+                0.015844351768493654,
+                0.014779392242431641,
+                0.01420083236694336,
+                0.014054400444030762,
+                0.014000127792358399,
+                0.013830143928527832,
+                0.013831168174743653,
+                0.014029824256896972,
+                0.013916159629821777,
+                0.013868032455444336,
+                0.013856767654418945,
+                0.01397043228149414,
+                0.014118911743164063,
+                0.0141659517288208,
+                0.014126079559326172,
+                0.01397862434387207,
+                0.014105600357055664,
+                0.014051360130310058,
                 0.014134271621704102,
-                0.014041024208068848,
-                0.014183423995971679,
-                0.01409126377105713,
-                0.013991935729980469,
-                0.013936639785766602,
-                0.014241791725158692,
-                0.013945856094360352,
-                0.013956095695495606,
-                0.013971455574035644,
-                0.013947903633117676,
+                0.014071807861328126,
+                0.014046208381652832,
+                0.014112768173217773,
+                0.014109696388244629,
+                0.014051327705383301,
+                0.014015487670898438,
+                0.014034943580627441,
+                0.01425100803375244,
+                0.01390182399749756,
+                0.01397555160522461,
+                0.013876223564147949,
+                0.01386188793182373,
+                0.01386086368560791,
+                0.01384447956085205,
+                0.013927424430847168,
+                0.013837311744689941,
+                0.01386393642425537,
+                0.013969408035278321,
+                0.014004223823547364,
+                0.013921279907226563,
+                0.013874176025390626,
+                0.013881343841552735,
+                0.014483551979064941,
+                0.0140697603225708,
+                0.014072832107543945,
+                0.014585856437683106,
                 0.014299136161804199,
-                0.01436569595336914,
-                0.014169088363647461,
-                0.014048255920410157,
-                0.014057472229003906,
-                0.013931520462036133,
+                0.014231552124023437,
+                0.014375935554504395,
+                0.014306303977966308,
+                0.014115839958190919,
+                0.014170111656188965,
+                0.014113823890686035,
                 0.01386291217803955,
-                0.013939711570739746,
-                0.01371343994140625,
-                0.01421820831298828,
-                0.014449664115905762,
-                0.014491647720336913,
-                0.014784511566162109,
-                0.01489408016204834,
-                0.013894656181335448,
-                0.013721599578857421,
-                0.014261247634887696,
-                0.014329855918884277,
-                0.014292991638183594,
-                0.014353407859802245,
-                0.014359552383422852,
-                0.014307328224182129,
-                0.01429100799560547,
-                0.014792703628540039,
-                0.014342144012451171,
-                0.014360544204711913,
-                0.014297087669372559,
-                0.014299136161804199,
-                0.014363648414611817,
-                0.014320639610290528,
-                0.014285823822021485,
-                0.014332927703857423,
-                0.014326784133911133,
-                0.014334976196289062,
-                0.014330880165100097,
-                0.01425715160369873,
-                0.014276608467102051
+                0.014436351776123046,
+                0.014094335556030273,
+                0.014057536125183106,
+                0.01407590389251709,
+                0.014056447982788087,
+                0.014050304412841797,
+                0.014042112350463867,
+                0.014034943580627441,
+                0.014164992332458496,
+                0.014210047721862793,
+                0.014139391899108887,
+                0.014013440132141113,
+                0.014020607948303223,
+                0.014040063858032227,
+                0.014019583702087402
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 69.86754692893906
+            "value": 70.60030084215782
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 1.6576348585479056e-07,
-            "ram": 9.058178719290784e-08,
-            "gpu": 3.3549023752777475e-07,
-            "total": 5.918355105754732e-07
+            "cpu": 1.6661593805492662e-07,
+            "ram": 9.068229740023525e-08,
+            "gpu": 3.232911536944357e-07,
+            "total": 5.805893891495975e-07
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value": 1689658.6671990107
+            "value": 1722387.661036525
         }
     }
 }
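The updated aggregates in this diff are internally consistent: in the new report, `mean` equals `total / count` (1.0056614370346066 / 71 ≈ 0.0141642 s), `throughput.value` equals `count / total` (≈ 70.60 samples/s), and `efficiency.value` matches 1 / `energy.total` (1 / 5.805893891495975e-07 ≈ 1,722,388 samples/kWh), which suggests the energy figures are reported per sample. A small sketch that fetches the file and recomputes these numbers is given below; `repo_id` and `repo_type` are assumptions, and the nesting of the "forward" section inside the JSON is guessed from the diff rather than known.

```python
# Sketch: download the uploaded benchmark.json and recompute the aggregates
# shown in the "forward" section of this diff. repo_id/repo_type are
# hypothetical; only the filename comes from the commit above.
import json

from huggingface_hub import hf_hub_download

path = hf_hub_download(
    repo_id="username/benchmark-results",  # hypothetical repository
    repo_type="dataset",                   # assumption
    filename=(
        "cuda_inference_transformers_token-classification_"
        "microsoft/deberta-v3-base/benchmark.json"
    ),
)

with open(path) as f:
    data = json.load(f)

# The exact nesting is not visible in the diff, so look for "forward" either
# at the top level or under a "report" key.
forward = data.get("forward") or data.get("report", {}).get("forward")

latencies = forward["latency"]["values"]     # 71 per-iteration latencies, in seconds
total = sum(latencies)                       # ~1.0057 s, matches "total"
mean = total / len(latencies)                # ~0.014164 s, matches "mean"
throughput = len(latencies) / total          # ~70.60 samples/s, matches "throughput.value"
efficiency = 1 / forward["energy"]["total"]  # ~1.72e6 samples/kWh, matches "efficiency.value"

print(f"mean={mean:.6f} s, throughput={throughput:.2f} samples/s, "
      f"efficiency={efficiency:.0f} samples/kWh")
```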