IlyasMoutawwakil HF staff committed on
Commit
4e34b27
·
verified ·
1 Parent(s): 29795c2

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub

Browse files
cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
@@ -85,7 +85,7 @@
85
  "gpu_count": 1,
86
  "gpu_vram_mb": 68702699520,
87
  "optimum_benchmark_version": "0.2.1",
88
- "optimum_benchmark_commit": "cc9b50765eac8085269af1a207e20c6a9b260b67",
89
  "transformers_version": "4.42.3",
90
  "transformers_commit": null,
91
  "accelerate_version": "0.31.0",
@@ -104,90 +104,91 @@
104
  "forward": {
105
  "memory": {
106
  "unit": "MB",
107
- "max_ram": 1029.44768,
108
- "max_global_vram": 1122.865152,
109
- "max_process_vram": 257956.810752,
110
  "max_reserved": 773.849088,
111
  "max_allocated": 745.087488
112
  },
113
  "latency": {
114
  "unit": "s",
115
- "count": 62,
116
- "total": 0.9976252880096436,
117
- "mean": 0.016090730451768443,
118
- "stdev": 0.0005082747557232666,
119
- "p50": 0.01600331926345825,
120
- "p90": 0.016677766418457033,
121
- "p95": 0.016838492298126218,
122
- "p99": 0.01747328422546387,
123
  "values": [
124
- 0.01569771957397461,
125
- 0.01591659927368164,
126
- 0.015865718841552735,
127
- 0.01588763999938965,
128
- 0.015690679550170897,
129
- 0.015543000221252441,
130
- 0.015418681144714355,
131
- 0.015583319664001465,
132
- 0.015596920013427735,
133
- 0.015462680816650391,
134
- 0.01548523998260498,
135
- 0.01573468017578125,
136
- 0.01557804012298584,
137
- 0.015496280670166016,
138
- 0.015506199836730957,
139
- 0.015442839622497559,
140
- 0.015400120735168457,
141
- 0.01579948043823242,
142
- 0.015955799102783202,
143
- 0.01588059902191162,
144
- 0.015997078895568846,
145
- 0.015888118743896486,
146
- 0.016174999237060547,
147
- 0.015833559989929198,
148
- 0.015788759231567383,
149
- 0.015918998718261718,
150
- 0.016162839889526368,
151
- 0.015953558921813965,
152
- 0.01601723861694336,
153
- 0.01834475135803223,
154
- 0.016465078353881835,
155
- 0.01607819938659668,
156
- 0.015844759941101075,
157
- 0.015704758644104005,
158
- 0.015678520202636718,
159
- 0.016009559631347658,
160
- 0.015711159706115724,
161
- 0.015575640678405762,
162
- 0.01645579719543457,
163
- 0.01653163719177246,
164
- 0.016690837860107423,
165
- 0.016479637145996092,
166
- 0.016731637954711914,
167
- 0.01649755859375,
168
- 0.016916116714477538,
169
- 0.01685659599304199,
170
- 0.01662955856323242,
171
- 0.016678678512573243,
172
- 0.0168441162109375,
173
- 0.016600278854370116,
174
- 0.0165820369720459,
175
- 0.01650267791748047,
176
- 0.016447797775268554,
177
- 0.01666955757141113,
178
- 0.016221399307250976,
179
- 0.016160757064819335,
180
- 0.01619611930847168,
181
- 0.016180438995361326,
182
- 0.01638427734375,
183
- 0.016107477188110352,
184
- 0.0160639591217041,
185
- 0.01610651969909668
 
186
  ]
187
  },
188
  "throughput": {
189
  "unit": "samples/s",
190
- "value": 62.14758260959468
191
  },
192
  "energy": null,
193
  "efficiency": null
 
85
  "gpu_count": 1,
86
  "gpu_vram_mb": 68702699520,
87
  "optimum_benchmark_version": "0.2.1",
88
+ "optimum_benchmark_commit": "43734676b09236e2ae4d7c19f90e8e23d1f2f201",
89
  "transformers_version": "4.42.3",
90
  "transformers_commit": null,
91
  "accelerate_version": "0.31.0",
 
104
  "forward": {
105
  "memory": {
106
  "unit": "MB",
107
+ "max_ram": 1029.459968,
108
+ "max_global_vram": 1122.885632,
109
+ "max_process_vram": 251069.059072,
110
  "max_reserved": 773.849088,
111
  "max_allocated": 745.087488
112
  },
113
  "latency": {
114
  "unit": "s",
115
+ "count": 63,
116
+ "total": 1.0025160417556764,
117
+ "mean": 0.015912953043740897,
118
+ "stdev": 0.0010932306697772273,
119
+ "p50": 0.015534128189086914,
120
+ "p90": 0.017585762786865234,
121
+ "p95": 0.017694481468200685,
122
+ "p99": 0.0180983508682251,
123
  "values": [
124
+ 0.014831733703613282,
125
+ 0.015563729286193848,
126
+ 0.01578708839416504,
127
+ 0.015681167602539063,
128
+ 0.015457009315490723,
129
+ 0.015333970069885254,
130
+ 0.015452849388122558,
131
+ 0.015350609779357911,
132
+ 0.016276363372802735,
133
+ 0.016875240325927736,
134
+ 0.017390275955200197,
135
+ 0.01722947692871094,
136
+ 0.017317796707153322,
137
+ 0.01686164093017578,
138
+ 0.017059078216552736,
139
+ 0.017020519256591798,
140
+ 0.017574756622314454,
141
+ 0.017700674057006836,
142
+ 0.01758851432800293,
143
+ 0.01743107604980469,
144
+ 0.017507875442504883,
145
+ 0.01759155464172363,
146
+ 0.01765603446960449,
147
+ 0.017429155349731447,
148
+ 0.01783843231201172,
149
+ 0.017698753356933595,
150
+ 0.016020206451416014,
151
+ 0.01852242851257324,
152
+ 0.015258770942687988,
153
+ 0.015051093101501465,
154
+ 0.01456341552734375,
155
+ 0.014558135986328125,
156
+ 0.014932852745056152,
157
+ 0.014809814453125,
158
+ 0.014706134796142578,
159
+ 0.015048373222351074,
160
+ 0.014690935134887695,
161
+ 0.014668535232543946,
162
+ 0.014894933700561523,
163
+ 0.014695574760437011,
164
+ 0.01465637493133545,
165
+ 0.01497557258605957,
166
+ 0.014666134834289551,
167
+ 0.014717655181884766,
168
+ 0.015068853378295898,
169
+ 0.014717974662780763,
170
+ 0.016144363403320313,
171
+ 0.01673748016357422,
172
+ 0.016272363662719728,
173
+ 0.01557972812652588,
174
+ 0.015890767097473144,
175
+ 0.015024212837219239,
176
+ 0.01536596965789795,
177
+ 0.015742127418518066,
178
+ 0.015599568367004395,
179
+ 0.015377809524536133,
180
+ 0.015506929397583007,
181
+ 0.015337809562683106,
182
+ 0.015443569183349609,
183
+ 0.015534128189086914,
184
+ 0.015490288734436036,
185
+ 0.015886606216430663,
186
+ 0.014853174209594727
187
  ]
188
  },
189
  "throughput": {
190
  "unit": "samples/s",
191
+ "value": 62.841887187829904
192
  },
193
  "energy": null,
194
  "efficiency": null