Upload cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark_report.json with huggingface_hub
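The report was committed with the huggingface_hub client. A minimal sketch of such an upload is shown below; the repo_id, repo_type, and token are placeholders for illustration, not values taken from this commit page:

```python
from huggingface_hub import HfApi

# Hypothetical upload sketch: repo_id, repo_type, and token are placeholders,
# not values recorded in this commit.
api = HfApi(token="hf_xxx")
api.upload_file(
    path_or_fileobj="benchmark_report.json",
    path_in_repo="cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark_report.json",
    repo_id="optimum-benchmark/results",  # placeholder target repo
    repo_type="dataset",
    commit_message="Upload cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark_report.json with huggingface_hub",
)
```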
cuda_inference_transformers_image-classification_google/vit-base-patch16-224/benchmark_report.json CHANGED
@@ -2,187 +2,188 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 1063.
+            "max_ram": 1063.350272,
             "max_global_vram": 778.448896,
-            "max_process_vram":
+            "max_process_vram": 172200.849408,
             "max_reserved": 406.847488,
             "max_allocated": 355.912704
         },
         "latency": {
             "unit": "s",
-            "count":
-            "total": 0.
-            "mean": 0.
-            "stdev": 0.
-            "p50": 0.
-            "p90": 0.
-            "p95": 0.
-            "p99": 0.
+            "count": 160,
+            "total": 0.995341510295868,
+            "mean": 0.0062208844393491745,
+            "stdev": 0.00031795433201691175,
+            "p50": 0.006127607822418213,
+            "p90": 0.006464600706100464,
+            "p95": 0.006590344834327697,
+            "p99": 0.006915193562507629,
             "values": [
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
+                0.006601287841796875,
+                0.006228649139404297,
+                0.006265287876129151,
+                0.006227688789367676,
+                0.006357129096984863,
+                0.006467048168182373,
+                0.006464328765869141,
+                0.006441129207611084,
+                0.006437289237976074,
+                0.006388328075408936,
+                0.006423048973083496,
+                0.006379368782043457,
+                0.00657184886932373,
+                0.006338088035583496,
+                0.006248009204864502,
+                0.006167208194732666,
+                0.006090249061584473,
+                0.006121928215026855,
+                0.006125288009643555,
+                0.00613456916809082,
+                0.006143208026885986,
+                0.0061088080406188966,
+                0.006146568775177002,
+                0.006094727993011475,
+                0.006133448123931884,
+                0.006141929149627685,
+                0.006165927886962891,
+                0.006134888172149658,
+                0.006143208980560303,
+                0.00613808822631836,
+                0.006147848129272461,
+                0.006164809226989746,
+                0.006154248237609863,
+                0.006174407958984375,
+                0.006136487960815429,
+                0.006122407913208008,
+                0.006097608089447022,
+                0.0061195287704467775,
+                0.006072648048400879,
+                0.00609392786026001,
+                0.006030407905578614,
+                0.006129767894744873,
+                0.006100008010864258,
+                0.006108489036560059,
+                0.006088808059692383,
+                0.006115528106689453,
+                0.006120968818664551,
+                0.00614688777923584,
+                0.006154407978057861,
+                0.006163848876953125,
+                0.009538573265075683,
+                0.006232647895812988,
+                0.006095848083496094,
+                0.00605712890625,
+                0.006078407764434814,
+                0.00609056806564331,
+                0.006096968173980713,
+                0.006069768905639648,
+                0.006084807872772217,
+                0.006115367889404297,
+                0.0060934491157531735,
+                0.006020328044891357,
+                0.006292007923126221,
+                0.006091848850250244,
+                0.006109288215637207,
+                0.0061051278114318845,
+                0.006105768203735352,
+                0.006081768035888672,
+                0.006118728160858154,
+                0.0061265678405761715,
+                0.006150729179382324,
+                0.006153287887573242,
+                0.006150407791137695,
+                0.006129608154296875,
+                0.006157127857208252,
+                0.006128647804260254,
+                0.006135367870330811,
+                0.0061356878280639645,
+                0.006133129119873047,
+                0.006136648178100586,
+                0.006126088142395019,
+                0.006166248798370361,
+                0.00618128776550293,
+                0.006201128005981446,
+                0.006359689235687256,
+                0.00615552806854248,
+                0.006121288776397705,
+                0.006174888134002686,
+                0.006133607864379883,
+                0.006088168144226074,
+                0.0060832080841064454,
+                0.006099847793579102,
+                0.006082249164581299,
+                0.0060836877822875975,
+                0.0060910477638244625,
+                0.00609168815612793,
+                0.006067208766937256,
+                0.0061136078834533696,
+                0.006072807788848877,
+                0.006077288150787353,
+                0.006081128120422363,
+                0.006051047801971436,
+                0.006080167770385742,
+                0.0060550479888916015,
+                0.006016327857971192,
+                0.00609392786026001,
+                0.006111688137054443,
+                0.0061473679542541505,
+                0.006085768222808838,
+                0.006070248126983643,
+                0.006066568851470947,
+                0.006077127933502197,
+                0.006070727825164795,
+                0.0061419281959533695,
+                0.006082728862762451,
+                0.006087048053741455,
+                0.0060574479103088375,
+                0.006104329109191895,
+                0.006120327949523926,
+                0.006952009201049805,
+                0.006889609813690185,
+                0.006628008842468261,
+                0.0065574488639831546,
+                0.006436327934265137,
+                0.006556808948516846,
+                0.006516969203948975,
+                0.006437129020690918,
+                0.006589768886566162,
+                0.006408968925476074,
+                0.006371367931365967,
+                0.006535048961639404,
+                0.006690568923950195,
+                0.006859368801116943,
+                0.006642088890075683,
+                0.006441127777099609,
+                0.0063489689826965335,
+                0.006453128814697265,
+                0.006304327964782715,
+                0.006468328952789306,
+                0.006452808856964112,
+                0.006334727764129639,
+                0.006318569183349609,
+                0.006153768062591553,
+                0.006125129222869873,
+                0.006107059955596924,
+                0.006072487831115723,
+                0.006082727909088135,
+                0.006077928066253662,
+                0.0060929679870605465,
+                0.006089929103851318,
+                0.006098087787628174,
+                0.00606816816329956,
+                0.006081287860870361,
+                0.006076807975769043,
+                0.006084968090057373,
+                0.006098408222198487,
+                0.00607968807220459,
+                0.006082888126373291,
+                0.006124328136444092,
+                0.006092648029327393
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value":
+            "value": 160.74884684799247
         },
         "energy": null,
         "efficiency": null
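The updated summary fields are mutually consistent: the mean is the total latency divided by the count, and the reported throughput equals count divided by total (one sample per forward call). A quick sanity check, assuming the file is saved locally as benchmark_report.json:

```python
import json

with open("benchmark_report.json") as f:
    forward = json.load(f)["forward"]

lat = forward["latency"]
# total / count reproduces the reported mean latency (~0.0062208844 s)
print(lat["total"] / lat["count"])
# count / total reproduces the reported throughput (~160.7488 samples/s)
print(lat["count"] / lat["total"])
```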