Upload cuda_inference_transformers_fill-mask_google-bert/bert-base-uncased/benchmark_report.json with huggingface_hub
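The commit title indicates the report was pushed with huggingface_hub, but the exact call is not part of this view. Below is a minimal sketch of how such an upload could look, assuming huggingface_hub's upload_file API; the repo_id is a placeholder and repo_type is an assumption.

from huggingface_hub import HfApi

api = HfApi()

# Placeholder repo id; the actual target repository is not shown in this commit view.
repo_id = "<username-or-org>/<benchmark-results-repo>"

path = (
    "cuda_inference_transformers_fill-mask_google-bert/"
    "bert-base-uncased/benchmark_report.json"
)

# upload_file pushes a single local file to the Hub and creates a commit.
api.upload_file(
    path_or_fileobj=path,   # local file to upload
    path_in_repo=path,      # same relative path inside the repo
    repo_id=repo_id,
    repo_type="dataset",    # assumption: benchmark results are commonly stored in dataset repos
    commit_message=(
        "Upload cuda_inference_transformers_fill-mask_google-bert/"
        "bert-base-uncased/benchmark_report.json with huggingface_hub"
    ),
)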
cuda_inference_transformers_fill-mask_google-bert/bert-base-uncased/benchmark_report.json
ADDED
@@ -0,0 +1,157 @@
+{
+    "forward": {
+        "memory": {
+            "unit": "MB",
+            "max_ram": 1006.399488,
+            "max_global_vram": 930.545664,
+            "max_process_vram": 227376.492544,
+            "max_reserved": 589.299712,
+            "max_allocated": 439.700992
+        },
+        "latency": {
+            "unit": "s",
+            "count": 126,
+            "total": 1.0067562546730044,
+            "mean": 0.007990129005341304,
+            "stdev": 0.0001475636043259048,
+            "p50": 0.007993236064910888,
+            "p90": 0.00807611560821533,
+            "p95": 0.008149195432662962,
+            "p99": 0.008421234130859375,
+            "values": [
+                0.009136269569396973,
+                0.007929397106170655,
+                0.007902836799621582,
+                0.007826356887817383,
+                0.007975155830383301,
+                0.007975797176361083,
+                0.0079653959274292,
+                0.007815637111663819,
+                0.00803947639465332,
+                0.008018836975097657,
+                0.007995475769042968,
+                0.008049715995788574,
+                0.007997235774993897,
+                0.00797819709777832,
+                0.007953236103057862,
+                0.007969875812530518,
+                0.00802395725250244,
+                0.007975475788116455,
+                0.007911477088928223,
+                0.007978355884552002,
+                0.00805595588684082,
+                0.007992435932159423,
+                0.007895156860351563,
+                0.008057395935058593,
+                0.008072595596313476,
+                0.008006996154785156,
+                0.007883636951446534,
+                0.00791275691986084,
+                0.008007636070251465,
+                0.008050036430358887,
+                0.007862196922302247,
+                0.007998035907745362,
+                0.008011957168579102,
+                0.008009556770324708,
+                0.007933395862579346,
+                0.007930196762084962,
+                0.00801451587677002,
+                0.007974677085876465,
+                0.008019796371459961,
+                0.008011956214904786,
+                0.00788379716873169,
+                0.007807796955108643,
+                0.007888916969299317,
+                0.008084916114807128,
+                0.007998355865478516,
+                0.008009556770324708,
+                0.007965876102447509,
+                0.007979795932769776,
+                0.007991157054901122,
+                0.0079653959274292,
+                0.008072436332702637,
+                0.00803515625,
+                0.00794011688232422,
+                0.008046996116638184,
+                0.008011635780334472,
+                0.00799483585357666,
+                0.008027316093444824,
+                0.007830516815185547,
+                0.008028757095336915,
+                0.00792363691329956,
+                0.007687158107757568,
+                0.008071155548095703,
+                0.008196114540100097,
+                0.007956757068634033,
+                0.007848917007446289,
+                0.007994036197662353,
+                0.007949875831604005,
+                0.007903797149658203,
+                0.0077553181648254395,
+                0.008195955276489258,
+                0.007794517040252686,
+                0.0077533979415893555,
+                0.007713878154754638,
+                0.007626358032226562,
+                0.008366514205932617,
+                0.008038835525512695,
+                0.007960116863250733,
+                0.00789147710800171,
+                0.007856916904449463,
+                0.007874357223510741,
+                0.00785051679611206,
+                0.007886837005615235,
+                0.007900596141815186,
+                0.008439474105834961,
+                0.008166675567626952,
+                0.008089394569396972,
+                0.008057875633239746,
+                0.008049555778503418,
+                0.008079635620117187,
+                0.007950837135314942,
+                0.008145235061645507,
+                0.008009876251220703,
+                0.008009076118469238,
+                0.008042515754699707,
+                0.008067476272583007,
+                0.008002997398376465,
+                0.00808443546295166,
+                0.008150515556335449,
+                0.008102995872497559,
+                0.00799867582321167,
+                0.008029075622558593,
+                0.0080482759475708,
+                0.008009555816650391,
+                0.007945237159729003,
+                0.007945556163787842,
+                0.008004756927490234,
+                0.008000116348266601,
+                0.007972115993499757,
+                0.007965396881103516,
+                0.007973236083984375,
+                0.008001715660095214,
+                0.00798923683166504,
+                0.007977235794067382,
+                0.007981236934661864,
+                0.007930676937103271,
+                0.007975636959075928,
+                0.008010835647583007,
+                0.007965236186981202,
+                0.007956916809082032,
+                0.007998995780944824,
+                0.008022516250610352,
+                0.007999475955963134,
+                0.007955156803131103,
+                0.008010355949401856,
+                0.008001395225524902,
+                0.00795963716506958
+            ]
+        },
+        "throughput": {
+            "unit": "samples/s",
+            "value": 125.15442483237908
+        },
+        "energy": null,
+        "efficiency": null
+    }
+}
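For reference, the aggregate fields in this report are consistent with the raw values list: mean = total / count (1.0067562546730044 s / 126 ≈ 0.0079901 s) and throughput = count / total (126 / 1.0067562546730044 s ≈ 125.154 samples/s). A small sketch that re-derives these numbers from the report, assuming the file has been downloaded to the same relative path:

import json

with open(
    "cuda_inference_transformers_fill-mask_google-bert/"
    "bert-base-uncased/benchmark_report.json"
) as f:
    report = json.load(f)

lat = report["forward"]["latency"]
values = lat["values"]

# Re-derive the aggregate statistics from the raw per-iteration latencies.
count = len(values)   # 126
total = sum(values)   # ~1.00676 s (matches "total" up to float rounding)
print(count, total)
print(total / count, lat["mean"])                       # both ~0.0079901 s
print(count / total,
      report["forward"]["throughput"]["value"])         # both ~125.154 samples/s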