IlyasMoutawwakil (HF staff) committed · verified
Commit 1823681 · 1 Parent(s): 78cd4ea

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub

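For context, an upload like this is typically produced with the huggingface_hub Python client. The sketch below is illustrative only: the target repo_id is a placeholder (the destination repository is not shown on this page), and storing results in a dataset repo is an assumption.

from huggingface_hub import HfApi

api = HfApi()

# Upload the benchmark report to the Hub with the same commit message as above.
# repo_id is a placeholder; repo_type="dataset" is an assumption.
api.upload_file(
    path_or_fileobj="cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json",
    path_in_repo="cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json",
    repo_id="<user-or-org>/<benchmark-results-repo>",
    repo_type="dataset",
    commit_message="Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json with huggingface_hub",
)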
cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark.json CHANGED
@@ -85,7 +85,7 @@
  "gpu_count": 1,
  "gpu_vram_mb": 68702699520,
  "optimum_benchmark_version": "0.2.1",
- "optimum_benchmark_commit": "dc29eecfd7850e87154a5a92442eb64d237d0070",
+ "optimum_benchmark_commit": "1981150adc21fc49153cb832dbc6f0ecca02bc2a",
  "transformers_version": "4.41.0",
  "transformers_commit": null,
  "accelerate_version": "0.30.1",
@@ -104,99 +104,99 @@
  "forward": {
  "memory": {
  "unit": "MB",
- "max_ram": 1027.01056,
- "max_global_vram": 2103.230464,
- "max_process_vram": 271741.923328,
+ "max_ram": 1027.723264,
+ "max_global_vram": 2103.222272,
+ "max_process_vram": 258937.07776,
  "max_reserved": 773.849088,
  "max_allocated": 745.087488
  },
  "latency": {
  "unit": "s",
  "count": 71,
- "total": 1.0067317752838136,
- "mean": 0.014179320778645263,
- "stdev": 0.00018300380326025583,
- "p50": 0.014146254539489747,
- "p90": 0.014352814674377441,
- "p95": 0.014407295227050781,
- "p99": 0.014751742839813231,
+ "total": 1.0006878757476811,
+ "mean": 0.014094195433065925,
+ "stdev": 0.00042216359736077654,
+ "p50": 0.014068644523620605,
+ "p90": 0.014357125282287598,
+ "p95": 0.014371204853057862,
+ "p99": 0.015403797626495354,
  "values": [
- 0.013863215446472168,
- 0.014178894996643066,
- 0.014320175170898437,
- 0.014320175170898437,
- 0.014437935829162597,
- 0.014319535255432128,
- 0.014146254539489747,
- 0.014176975250244141,
- 0.014077774047851562,
- 0.014012174606323242,
- 0.013957614898681641,
- 0.014132655143737793,
- 0.01428465461730957,
- 0.014328495025634765,
- 0.014228014945983887,
- 0.01428193473815918,
- 0.01442641544342041,
- 0.014194093704223632,
- 0.014306095123291016,
- 0.014118734359741212,
- 0.014111373901367187,
- 0.01412353515625,
- 0.01421473503112793,
- 0.014025935173034668,
- 0.014118735313415526,
- 0.014155055046081543,
- 0.014276974678039551,
- 0.014094414710998536,
- 0.014249774932861328,
- 0.014262894630432128,
- 0.01422017478942871,
- 0.014105295181274414,
- 0.014207374572753907,
- 0.014093295097351074,
- 0.01401489543914795,
- 0.01433089542388916,
- 0.014377935409545899,
- 0.014312655448913573,
- 0.014388175010681152,
- 0.01514161491394043,
- 0.014584654808044434,
- 0.014220335006713868,
- 0.013984015464782715,
- 0.01401185417175293,
- 0.014163213729858399,
- 0.013985774993896484,
- 0.01396097469329834,
- 0.014176494598388672,
- 0.014028815269470215,
- 0.013972333908081055,
- 0.014237133979797364,
- 0.013994414329528809,
- 0.013989615440368652,
- 0.013967855453491211,
- 0.01397297477722168,
- 0.014266735076904296,
- 0.014065613746643066,
- 0.01402657413482666,
- 0.014053774833679198,
- 0.014057615280151366,
- 0.014346895217895507,
- 0.014117774963378906,
- 0.014079375267028809,
- 0.01429233455657959,
- 0.014135054588317871,
- 0.014352814674377441,
- 0.01409777545928955,
- 0.014105134963989258,
- 0.014378415107727052,
- 0.014106575012207032,
- 0.014061293601989745
+ 0.01427584457397461,
+ 0.013833285331726074,
+ 0.01394608497619629,
+ 0.01395744514465332,
+ 0.013907364845275879,
+ 0.01390512466430664,
+ 0.013843045234680176,
+ 0.013790884971618652,
+ 0.013685444831848144,
+ 0.013641124725341796,
+ 0.013612165451049804,
+ 0.013642245292663575,
+ 0.013675363540649414,
+ 0.013719044685363769,
+ 0.013625764846801757,
+ 0.01363984489440918,
+ 0.013572484970092773,
+ 0.01410880470275879,
+ 0.014357125282287598,
+ 0.014143364906311035,
+ 0.017177766799926757,
+ 0.014643525123596192,
+ 0.014084485054016114,
+ 0.014051685333251953,
+ 0.014180166244506836,
+ 0.014014084815979004,
+ 0.014068644523620605,
+ 0.014202565193176269,
+ 0.014010085105895995,
+ 0.014104004859924316,
+ 0.01404976463317871,
+ 0.014049284934997559,
+ 0.014024325370788574,
+ 0.014073124885559082,
+ 0.014280804634094238,
+ 0.01406832504272461,
+ 0.014107685089111328,
+ 0.01408304500579834,
+ 0.014287364959716798,
+ 0.014061285018920898,
+ 0.014041765213012695,
+ 0.013979044914245606,
+ 0.01404864501953125,
+ 0.01404256534576416,
+ 0.014362244606018067,
+ 0.01404768466949463,
+ 0.014043205261230469,
+ 0.014115525245666503,
+ 0.014072645187377929,
+ 0.014357284545898437,
+ 0.014060005187988282,
+ 0.01405568504333496,
+ 0.014053765296936036,
+ 0.014052804946899413,
+ 0.014360806465148927,
+ 0.014097125053405761,
+ 0.014080485343933106,
+ 0.014071684837341308,
+ 0.014059684753417968,
+ 0.014391044616699218,
+ 0.014117924690246583,
+ 0.0141027250289917,
+ 0.014149444580078125,
+ 0.014258245468139648,
+ 0.014101605415344238,
+ 0.014132004737854004,
+ 0.014193764686584473,
+ 0.014082884788513184,
+ 0.014380165100097656,
+ 0.014151365280151366,
+ 0.014120163917541504
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 70.52523993293444
+ "value": 70.95119439410732
  },
  "energy": null,
  "efficiency": null