Upload cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json with huggingface_hub
cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json
CHANGED
@@ -2,7 +2,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 908.
+            "max_ram": 908.41088,
             "max_global_vram": 1195.900928,
             "max_process_vram": 0.0,
             "max_reserved": 555.74528,
@@ -10,162 +10,161 @@
         },
         "latency": {
             "unit": "s",
-            "count":
-            "total":
-            "mean": 0.
-            "stdev": 0.
-            "p50": 0.
-            "p90": 0.
-            "p95": 0.
-            "p99": 0.
+            "count": 130,
+            "total": 1.0025133738517762,
+            "mean": 0.007711641337321354,
+            "stdev": 0.000483235348233153,
+            "p50": 0.007514623880386352,
+            "p90": 0.007943651390075683,
+            "p95": 0.008097228574752808,
+            "p99": 0.010142044544219964,
             "values": [
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.007780352115631104,
-                0.00780083179473877,
-                0.007741439819335938,
-                0.00783564805984497,
-                0.007792640209197998,
-                0.007805920124053955,
-                0.007889920234680176,
-                0.007837696075439453,
-                0.007794688224792481,
-                0.007819263935089112,
-                0.00781820821762085,
-                0.007850016117095947,
-                0.007847936153411865,
-                0.007838719844818116,
-                0.007774208068847656,
-                0.00779366397857666,
-                0.007829440116882325,
-                0.007771135807037354,
-                0.0077199358940124516,
-                0.007724031925201416,
-                0.007731200218200684,
-                0.007751679897308349,
-                0.007746560096740723,
-                0.007814144134521485,
-                0.007870463848114014,
-                0.007841792106628418,
-                0.007774208068847656,
-                0.007721983909606934,
-                0.007797760009765625,
-                0.007839680194854736,
-                0.007805920124053955,
-                0.007841792106628418,
-                0.007724031925201416,
-                0.007745535850524903,
-                0.007846911907196046,
-                0.007717919826507568,
-                0.007724031925201416,
-                0.007781311988830566,
-                0.007764959812164306,
-                0.007795711994171142,
-                0.007792640209197998,
-                0.007920639991760254,
-                0.007898111820220948,
-                0.007871424198150635,
-                0.007773183822631836,
-                0.007808000087738037,
-                0.007878655910491944,
-                0.007802879810333252,
-                0.007832575798034667,
-                0.007748608112335205,
-                0.007749631881713868,
-                0.007781375885009765,
-                0.007979008197784423,
-                0.008287232398986816,
-                0.007731135845184326,
-                0.007562240123748779,
-                0.007559167861938477,
-                0.00774348783493042,
-                0.007623680114746094,
-                0.007641088008880615,
-                0.007885824203491211,
-                0.007749631881713868,
-                0.007557119846343994,
-                0.007473152160644531,
+                0.010848256111145019,
+                0.010425344467163086,
+                0.009448448181152343,
+                0.007865344047546387,
+                0.007726079940795898,
+                0.007640063762664795,
+                0.007522304058074952,
                 0.0074997758865356446,
-                0.
-                0.
-                0.007472127914428711,
-                0.007508992195129394,
-                0.007409664154052734,
-                0.007395328044891358,
-                0.0074065918922424315,
-                0.007378943920135498,
-                0.007361536026000977,
-                0.00743833589553833,
-                0.007409664154052734,
-                0.007455743789672851,
-                0.008286208152770995,
-                0.007442431926727295,
-                0.007395328044891358,
-                0.007370751857757568,
-                0.007372799873352051,
-                0.00734822416305542,
-                0.007379968166351319,
-                0.007417856216430664,
-                0.0073994240760803225,
-                0.007384064197540283,
-                0.00742195177078247,
-                0.007349247932434082,
-                0.007336959838867187,
-                0.007373824119567871,
-                0.007372799873352051,
-                0.007403552055358887,
-                0.0073697280883789065,
-                0.007384064197540283,
-                0.007357439994812012,
-                0.007344128131866455,
-                0.007346176147460938,
-                0.007332863807678222,
-                0.007355391979217529,
-                0.007334943771362305,
-                0.0073820161819458,
-                0.0073431038856506346,
-                0.0073686718940734865,
+                0.007475200176239013,
+                0.007478271961212158,
                 0.007393280029296875,
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
+                0.007392255783081054,
+                0.007445504188537597,
+                0.007693312168121338,
+                0.007862271785736084,
+                0.007904255867004394,
+                0.007863296031951903,
+                0.007943136215209961,
+                0.007932928085327149,
+                0.007939072132110595,
+                0.00793497610092163,
+                0.007898111820220948,
+                0.008058879852294922,
+                0.008004608154296875,
+                0.009362431526184082,
+                0.008342528343200683,
+                0.007864319801330566,
+                0.007860223770141601,
+                0.00788479995727539,
+                0.007854080200195313,
+                0.00790835189819336,
+                0.007962624073028564,
+                0.007865344047546387,
+                0.00810598373413086,
+                0.007858176231384278,
+                0.007882751941680909,
+                0.007841792106628418,
+                0.007877632141113282,
+                0.008086527824401855,
+                0.007763967990875244,
+                0.00784281587600708,
+                0.007904255867004394,
+                0.007821375846862794,
+                0.007776256084442139,
+                0.007836671829223632,
+                0.007836671829223632,
+                0.007782400131225586,
+                0.007875584125518798,
+                0.007958528041839599,
+                0.007869440078735352,
+                0.00790015983581543,
+                0.007854080200195313,
+                0.007894015789031983,
+                0.007948287963867188,
+                0.00829030418395996,
+                0.007897088050842285,
+                0.007673855781555176,
+                0.00763804817199707,
+                0.007778304100036621,
+                0.007680064201354981,
+                0.007514111995697022,
+                0.007483391761779785,
+                0.007577631950378418,
+                0.007679999828338623,
+                0.007829567909240723,
+                0.007788544178009033,
+                0.007771135807037354,
+                0.007862271785736084,
+                0.007885824203491211,
+                0.007870463848114014,
+                0.007481279850006103,
+                0.007476223945617676,
+                0.007515135765075683,
+                0.007478208065032959,
+                0.0074486079216003415,
                 0.007455743789672851,
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
+                0.00745366382598877,
+                0.007468031883239746,
+                0.007460864067077637,
+                0.007450623989105225,
+                0.007505919933319092,
+                0.007448575973510742,
+                0.007400415897369385,
+                0.00744755220413208,
+                0.0074106879234313965,
+                0.00738918399810791,
+                0.007456768035888672,
+                0.007423999786376953,
+                0.007419904232025146,
+                0.00740556812286377,
+                0.007423999786376953,
+                0.007391200065612793,
+                0.0074700798988342285,
+                0.007482367992401123,
+                0.007689216136932373,
+                0.007468031883239746,
+                0.007469056129455566,
+                0.007354368209838867,
+                0.007444479942321777,
+                0.0074332160949707035,
+                0.007522304058074952,
+                0.0074700798988342285,
+                0.007465983867645264,
+                0.007418879985809326,
+                0.007463935852050781,
+                0.007422944068908691,
                 0.007402495861053467,
-                0.
-                0.
-                0.
-                0.
-                0.
+                0.007480319976806641,
+                0.00744755220413208,
+                0.00738099193572998,
+                0.007451648235321045,
+                0.0074106879234313965,
+                0.007352320194244385,
+                0.007434239864349365,
+                0.007486464023590088,
+                0.007392255783081054,
+                0.007459839820861816,
+                0.0074629120826721195,
+                0.007394303798675537,
+                0.007400447845458984,
+                0.0074403839111328125,
+                0.007396351814270019,
+                0.007459839820861816,
+                0.00745472002029419,
+                0.007406527996063232,
+                0.007452672004699707,
+                0.007434239864349365,
+                0.0073820161819458,
+                0.00743939208984375,
+                0.007465983867645264
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value":
+            "value": 129.67408055667576
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 8.
-            "ram": 4.
-            "gpu": 1.
-            "total": 2.
+            "cpu": 8.69185022484067e-08,
+            "ram": 4.750480571156728e-08,
+            "gpu": 1.61835084861313e-07,
+            "total": 2.96258392821287e-07
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value":
+            "value": 3375431.799507647
         }
     }
 }
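The updated report is internally consistent: the new throughput value (129.67408055667576 samples/s) equals the latency section's count divided by its total (130 / 1.0025133738517762), and the reported mean equals total / count. Below is a minimal sketch that re-derives these figures from the uploaded JSON; the local path and the one-sample-per-forward-pass reading are assumptions on my part, not something stated on this page.

import json

# Hypothetical local copy of the file shown in this commit.
path = "cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json"

with open(path) as f:
    forward = json.load(f)["forward"]

latency = forward["latency"]
count, total = latency["count"], latency["total"]  # 130 forward passes over ~1.0025 s

derived_mean = total / count        # ~0.0077116 s, matches the reported "mean"
derived_throughput = count / total  # ~129.674 samples/s, matches "throughput"/"value"
                                    # (assuming one sample per recorded forward pass)

print(f"mean:       reported={latency['mean']}  derived={derived_mean}")
print(f"throughput: reported={forward['throughput']['value']}  derived={derived_throughput}")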
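The commit title states the file was uploaded with huggingface_hub. For reference, here is a sketch of how such an upload could be done with huggingface_hub's HfApi.upload_file; the repo_id and repo_type below are placeholders and assumptions, since the destination repository is not identified on this page.

from huggingface_hub import HfApi

api = HfApi()  # authenticates via a saved token (huggingface-cli login) or the HF_TOKEN env var

api.upload_file(
    path_or_fileobj="benchmark_report.json",  # local file produced by the benchmark run
    path_in_repo="cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json",
    repo_id="<user-or-org>/<benchmark-results-repo>",  # placeholder: the actual repo is not shown here
    repo_type="dataset",  # assumption: benchmark results are commonly pushed to a dataset repo
    commit_message="Upload cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json with huggingface_hub",
)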