Upload cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json with huggingface_hub
cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json
CHANGED
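The commit message above matches the pattern huggingface_hub generates for programmatic uploads. As a minimal sketch of how a report like this can be pushed (the repo_id, repo_type, and local file name below are assumptions, not taken from this page):

```python
# Sketch only: repo_id, repo_type, and the local file name are assumptions.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="benchmark_report.json",  # local report produced by the benchmark run
    path_in_repo="cuda_inference_transformers_multiple-choice_FacebookAI/roberta-base/benchmark_report.json",
    repo_id="my-org/benchmark-results",  # hypothetical target repo
    repo_type="dataset",                 # assumption: results are stored in a dataset repo
)
```

When no commit_message is passed, huggingface_hub defaults to the "Upload <path_in_repo> with huggingface_hub" message seen in the title above.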
@@ -2,7 +2,7 @@
     "forward": {
         "memory": {
             "unit": "MB",
-            "max_ram": 910.
+            "max_ram": 910.036992,
             "max_global_vram": 1195.900928,
             "max_process_vram": 0.0,
             "max_reserved": 555.74528,
@@ -10,164 +10,169 @@
         },
         "latency": {
             "unit": "s",
-            "count":
-            "total":
-            "mean": 0.
-            "stdev": 0.
-            "p50": 0.
-            "p90": 0.
-            "p95": 0.
-            "p99": 0.
+            "count": 138,
+            "total": 0.9998775987625124,
+            "mean": 0.0072454898461051616,
+            "stdev": 0.00023112556359064607,
+            "p50": 0.0072386240959167476,
+            "p90": 0.007369216012954712,
+            "p95": 0.007695462274551392,
+            "p99": 0.008186562156677245,
             "values": [
-                0.
-                0.007953407764434815,
-                0.007986176013946533,
-                0.007933951854705811,
-                0.007885824203491211,
-                0.007869440078735352,
-                0.007864319801330566,
-                0.007753727912902832,
-                0.007677951812744141,
-                0.007664639949798584,
-                0.007675903797149658,
-                0.007665664196014404,
-                0.007734272003173828,
-                0.007607295989990235,
-                0.007686143875122071,
-                0.007647232055664062,
-                0.007708672046661377,
-                0.007740416049957275,
-                0.007675903797149658,
-                0.007682047843933106,
-                0.007642111778259277,
-                0.007663584232330322,
-                0.0077209601402282715,
-                0.007658495903015137,
-                0.007673855781555176,
-                0.007703551769256592,
-                0.0076902399063110355,
-                0.007614463806152344,
-                0.007641088008880615,
-                0.0076605439186096195,
-                0.007658495903015137,
-                0.007864319801330566,
-                0.007570432186126709,
-                0.007618559837341309,
-                0.007615488052368164,
-                0.007711711883544922,
-                0.00765235185623169,
-                0.0076605439186096195,
-                0.007903232097625732,
-                0.007733248233795166,
-                0.007684095859527588,
-                0.007681024074554443,
-                0.007625728130340576,
-                0.007588863849639893,
-                0.007633920192718506,
-                0.007636991977691651,
-                0.007631872177124023,
-                0.00764521598815918,
-                0.007634943962097168,
-                0.007549952030181885,
-                0.007732223987579345,
+                0.00808140754699707,
                 0.007606272220611572,
-                0.
-                0.007710720062255859,
-                0.007569407939910889,
-                0.007554048061370849,
-                0.007581727981567383,
-                0.00758784008026123,
-                0.00758784008026123,
-                0.007569407939910889,
+                0.007759871959686279,
                 0.007662591934204102,
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
+                0.007656447887420655,
+                0.007911424160003662,
+                0.007995391845703125,
+                0.0076267518997192385,
+                0.007865344047546387,
+                0.00830668830871582,
+                0.008248319625854492,
+                0.007328767776489258,
+                0.0071198720932006835,
+                0.007127039909362793,
+                0.007132160186767578,
+                0.007088128089904785,
+                0.0070594558715820314,
+                0.007116799831390381,
+                0.007094272136688232,
+                0.007156735897064209,
+                0.007090176105499267,
+                0.007080959796905518,
+                0.0072724480628967286,
+                0.007104512214660645,
+                0.007111680030822754,
+                0.007051263809204102,
+                0.007073791980743408,
+                0.007118783950805664,
+                0.007095295906066895,
+                0.0070594558715820314,
+                0.007072768211364746,
+                0.0070830078125,
+                0.007096320152282715,
+                0.007079936027526855,
+                0.0070553598403930665,
+                0.007071743965148926,
+                0.00707583999633789,
+                0.0070891518592834475,
+                0.007074816226959229,
+                0.007050240039825439,
+                0.007070720195770264,
+                0.0070860800743103025,
+                0.007071743965148926,
+                0.007051263809204102,
+                0.007073791980743408,
+                0.007096320152282715,
+                0.007073791980743408,
+                0.007039040088653564,
+                0.00708403205871582,
+                0.007080959796905518,
+                0.0070830078125,
+                0.007067647933959961,
+                0.007019519805908203,
+                0.007070720195770264,
+                0.007088128089904785,
+                0.00707583999633789,
+                0.007041024208068848,
+                0.007051263809204102,
+                0.007065599918365479,
+                0.0070829758644104,
+                0.007071712017059326,
+                0.007048192024230957,
+                0.007076863765716553,
+                0.007069695949554444,
+                0.007060480117797851,
+                0.0070594558715820314,
+                0.007684095859527588,
+                0.007074816226959229,
+                0.00708403205871582,
+                0.007094272136688232,
+                0.007054336071014404,
+                0.0070891518592834475,
+                0.00710041618347168,
+                0.007101439952850342,
+                0.007066624164581299,
                 0.007307263851165771,
-                0.
-                0.
-                0.
-                0.
-                0.007576576232910156,
-                0.007621632099151611,
-                0.007729152202606201,
-                0.008162303924560547,
-                0.007624703884124756,
-                0.007373824119567871,
-                0.007327775955200195,
-                0.007356416225433349,
-                0.007383039951324463,
-                0.007322624206542969,
-                0.007558144092559814,
-                0.0073062400817871095,
-                0.0072427520751953125,
-                0.007217152118682861,
-                0.007204864025115967,
-                0.007190591812133789,
-                0.007211008071899414,
-                0.007222271919250488,
-                0.007208960056304932,
-                0.007256063938140869,
-                0.007208960056304932,
-                0.0072540159225463864,
-                0.0072499198913574215,
-                0.007236671924591064,
-                0.007250944137573242,
-                0.00719974422454834,
+                0.007304192066192627,
+                0.007304192066192627,
+                0.0072979841232299805,
+                0.007301119804382325,
                 0.007251967906951904,
-                0.
-                0.
-                0.007280640125274658,
-                0.0072427520751953125,
-                0.007180287837982178,
-                0.007227392196655274,
-                0.0072202239036560055,
-                0.007176191806793213,
-                0.007268352031707764,
-                0.007269375801086426,
-                0.007237631797790528,
-                0.007237631797790528,
-                0.0072499198913574215,
-                0.00728879976272583,
-                0.007267327785491944,
+                0.0072837119102478025,
+                0.007274496078491211,
                 0.0073359360694885255,
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
-                0.
+                0.007282688140869141,
+                0.007277567863464355,
+                0.007270400047302246,
+                0.007263232231140137,
+                0.007277632236480713,
+                0.007238592147827148,
+                0.00722431993484497,
+                0.0071823358535766605,
+                0.007142399787902832,
+                0.007177216053009033,
+                0.007473184108734131,
+                0.007361536026000977,
+                0.0073359360694885255,
+                0.0073359360694885255,
+                0.007327744007110596,
+                0.007387135982513428,
+                0.007361536026000977,
+                0.007345151901245117,
+                0.00733081579208374,
+                0.007355391979217529,
+                0.007290880203247071,
+                0.007326784133911132,
+                0.00733081579208374,
+                0.007325695991516113,
+                0.0073359360694885255,
+                0.007279615879058838,
+                0.0072837119102478025,
+                0.0072765440940856935,
+                0.0072837119102478025,
+                0.007336959838867187,
+                0.007296000003814697,
+                0.007279615879058838,
+                0.007265279769897461,
+                0.007302144050598145,
+                0.007354368209838867,
+                0.007282688140869141,
+                0.007267327785491944,
+                0.007263264179229736,
+                0.00724070405960083,
+                0.007358463764190673,
+                0.007266304016113281,
+                0.007279615879058838,
+                0.007323647975921631,
+                0.007252992153167725,
+                0.007251967906951904,
+                0.007284671783447266,
+                0.007290847778320312,
+                0.007296000003814697,
+                0.007278592109680176,
+                0.007282688140869141,
+                0.007238656044006348,
+                0.00722431993484497,
+                0.007065599918365479,
+                0.007089216232299805
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value":
+            "value": 138.01689343855108
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 8.
-            "ram": 4.
-            "gpu": 1.
-            "total": 2.
+            "cpu": 8.642965473011077e-08,
+            "ram": 4.724945486413806e-08,
+            "gpu": 1.596027229781017e-07,
+            "total": 2.932818325723505e-07
         },
         "efficiency": {
             "unit": "samples/kWh",
-            "value":
+            "value": 3409689.5509315506
         }
     }
 }
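The updated values are internally consistent: the reported throughput matches count / total latency (138 / 0.9998775987625124 ≈ 138.01689 samples/s), and the reported efficiency matches one sample per the total energy figure (1 / 2.932818325723505e-07 ≈ 3409689.55 samples/kWh), which suggests the energy total is normalized per sample. A minimal check, assuming the file sits at the path shown in this commit:

```python
import json

# Path as shown in this commit; adjust if the repo is cloned elsewhere.
path = (
    "cuda_inference_transformers_multiple-choice_FacebookAI/"
    "roberta-base/benchmark_report.json"
)
with open(path) as f:
    forward = json.load(f)["forward"]

latency = forward["latency"]

# Throughput ~ count / total latency.
print(latency["count"] / latency["total"], forward["throughput"]["value"])

# Efficiency ~ 1 / total energy (assuming the energy total is per sample).
print(1.0 / forward["energy"]["total"], forward["efficiency"]["value"])
```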