IlyasMoutawwakil HF staff committed on
Commit
c155680
·
verified ·
1 Parent(s): 2c993db

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub

Browse files
cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
@@ -86,7 +86,7 @@
86
  "gpu_count": 1,
87
  "gpu_vram_mb": 68702699520,
88
  "optimum_benchmark_version": "0.3.1",
89
- "optimum_benchmark_commit": "bc46022225a5818a4648ee2abeffdd38c4a9b291",
90
  "transformers_version": "4.42.4",
91
  "transformers_commit": null,
92
  "accelerate_version": "0.32.1",
@@ -105,94 +105,103 @@
105
  "forward": {
106
  "memory": {
107
  "unit": "MB",
108
- "max_ram": 1034.162176,
109
  "max_global_vram": 1122.840576,
110
- "max_process_vram": 314069.344256,
111
  "max_reserved": 773.849088,
112
  "max_allocated": 745.087488
113
  },
114
  "latency": {
115
  "unit": "s",
116
- "count": 66,
117
- "total": 1.0049901885986323,
118
- "mean": 0.015227124069676254,
119
- "stdev": 0.00028688669038481727,
120
- "p50": 0.015142248630523682,
121
- "p90": 0.015520168781280517,
122
- "p95": 0.015675689220428467,
123
- "p99": 0.01610707383155822,
124
  "values": [
125
- 0.016750730514526368,
126
- 0.01570496940612793,
127
- 0.015760489463806152,
128
- 0.015752328872680663,
129
- 0.015495208740234375,
130
- 0.015422088623046875,
131
- 0.015333449363708496,
132
- 0.015303849220275879,
133
- 0.015393288612365722,
134
- 0.015256169319152833,
135
- 0.015220969200134278,
136
- 0.015314409255981445,
137
- 0.015568009376525879,
138
- 0.015467369079589843,
139
- 0.01543760871887207,
140
- 0.015291528701782226,
141
- 0.015587848663330078,
142
- 0.015298249244689942,
143
- 0.01545728874206543,
144
- 0.01514400863647461,
145
- 0.015132649421691895,
146
- 0.014979208946228027,
147
- 0.014968969345092774,
148
- 0.014908328056335449,
149
- 0.014922409057617188,
150
- 0.014978089332580567,
151
- 0.015170088768005371,
152
- 0.015035208702087402,
153
- 0.015242889404296875,
154
- 0.015140488624572753,
155
- 0.015026087760925294,
156
- 0.015108649253845214,
157
- 0.01520512866973877,
158
- 0.015127208709716797,
159
- 0.015344489097595214,
160
- 0.015029129028320312,
161
- 0.015231208801269531,
162
- 0.015074889183044433,
163
- 0.015302409172058105,
164
- 0.015072328567504883,
165
- 0.01494544792175293,
166
- 0.01539712905883789,
167
- 0.01504512882232666,
168
- 0.01504144859313965,
169
- 0.015317608833312988,
170
- 0.015099529266357423,
171
- 0.015066728591918946,
172
- 0.015396489143371581,
173
- 0.015105769157409668,
174
- 0.01510256862640381,
175
- 0.015340329170227051,
176
- 0.015072009086608888,
177
- 0.015044487953186036,
178
- 0.015263528823852538,
179
- 0.014875048637390137,
180
- 0.015004009246826171,
181
- 0.015043848991394044,
182
- 0.015011689186096192,
183
- 0.015431689262390138,
184
- 0.014999688148498535,
185
- 0.014897448539733887,
186
- 0.015030888557434082,
187
- 0.014976009368896485,
188
- 0.01554512882232666,
189
- 0.014996808052062988,
190
- 0.014980009078979492
 
 
 
 
 
 
 
 
 
191
  ]
192
  },
193
  "throughput": {
194
  "unit": "samples/s",
195
- "value": 65.67228292251389
196
  },
197
  "energy": null,
198
  "efficiency": null
 
86
  "gpu_count": 1,
87
  "gpu_vram_mb": 68702699520,
88
  "optimum_benchmark_version": "0.3.1",
89
+ "optimum_benchmark_commit": "2a33a472f309c43b5bd16946ef9cec843d02f70a",
90
  "transformers_version": "4.42.4",
91
  "transformers_commit": null,
92
  "accelerate_version": "0.32.1",
 
105
  "forward": {
106
  "memory": {
107
  "unit": "MB",
108
+ "max_ram": 1033.940992,
109
  "max_global_vram": 1122.840576,
110
+ "max_process_vram": 296348.827648,
111
  "max_reserved": 773.849088,
112
  "max_allocated": 745.087488
113
  },
114
  "latency": {
115
  "unit": "s",
116
+ "count": 75,
117
+ "total": 1.0000416383743285,
118
+ "mean": 0.013333888511657713,
119
+ "stdev": 0.0006409323004067068,
120
+ "p50": 0.013276939392089843,
121
+ "p90": 0.014167882156372071,
122
+ "p95": 0.014318889141082763,
123
+ "p99": 0.015252690143585209,
124
  "values": [
125
+ 0.015066697120666503,
126
+ 0.013205578804016113,
127
+ 0.013077739715576173,
128
+ 0.013483658790588379,
129
+ 0.013448458671569824,
130
+ 0.01354413890838623,
131
+ 0.013402379035949707,
132
+ 0.013726058959960938,
133
+ 0.013544459342956543,
134
+ 0.01328190040588379,
135
+ 0.013032938957214356,
136
+ 0.013230540275573731,
137
+ 0.012819660186767579,
138
+ 0.012962379455566407,
139
+ 0.012669100761413574,
140
+ 0.015782054901123048,
141
+ 0.013690698623657226,
142
+ 0.013024458885192871,
143
+ 0.013301739692687988,
144
+ 0.013247658729553222,
145
+ 0.012984939575195312,
146
+ 0.013279499053955078,
147
+ 0.012980460166931152,
148
+ 0.013011819839477539,
149
+ 0.013225899696350098,
150
+ 0.013236139297485352,
151
+ 0.013734699249267578,
152
+ 0.0132804594039917,
153
+ 0.013286378860473633,
154
+ 0.013322059631347655,
155
+ 0.013678857803344727,
156
+ 0.013383499145507812,
157
+ 0.013276939392089843,
158
+ 0.013144619941711426,
159
+ 0.013619178771972657,
160
+ 0.013296778678894044,
161
+ 0.013078060150146485,
162
+ 0.013330538749694825,
163
+ 0.013388938903808593,
164
+ 0.013258378982543945,
165
+ 0.014273737907409668,
166
+ 0.013426698684692383,
167
+ 0.013242058753967285,
168
+ 0.013071499824523925,
169
+ 0.01324989891052246,
170
+ 0.013278539657592774,
171
+ 0.012660619735717773,
172
+ 0.012463339805603027,
173
+ 0.012441261291503906,
174
+ 0.012438700675964356,
175
+ 0.012447179794311524,
176
+ 0.01269517993927002,
177
+ 0.012450540542602539,
178
+ 0.012425420761108398,
179
+ 0.01237197971343994,
180
+ 0.013210060119628907,
181
+ 0.013784777641296387,
182
+ 0.012528301239013672,
183
+ 0.01251550006866455,
184
+ 0.012435980796813965,
185
+ 0.012521739959716796,
186
+ 0.01285614013671875,
187
+ 0.013015979766845704,
188
+ 0.01289437961578369,
189
+ 0.013628619194030762,
190
+ 0.014279336929321288,
191
+ 0.014152618408203125,
192
+ 0.014171977996826172,
193
+ 0.014411177635192871,
194
+ 0.01415949821472168,
195
+ 0.014153898239135742,
196
+ 0.014269576072692872,
197
+ 0.01446509838104248,
198
+ 0.014161738395690918,
199
+ 0.014148138046264648
200
  ]
201
  },
202
  "throughput": {
203
  "unit": "samples/s",
204
+ "value": 74.99687725195152
205
  },
206
  "energy": null,
207
  "efficiency": null