IlyasMoutawwakil (HF staff) committed
Commit dcd8604 · verified · 1 parent: 7519afb

Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark_report.json with huggingface_hub
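
The commit message indicates the file was pushed programmatically with huggingface_hub. As a minimal sketch (not the author's actual script), an upload like this is typically done with HfApi.upload_file; the repo_id and repo_type below are illustrative assumptions, not values taken from this commit:

from huggingface_hub import HfApi

api = HfApi()  # authenticates via `huggingface-cli login` or the HF_TOKEN environment variable

# Illustrative values: the actual repo_id/repo_type of this commit are not shown on this page.
api.upload_file(
    path_or_fileobj="benchmark_report.json",
    path_in_repo="cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark_report.json",
    repo_id="<namespace>/<repo>",
    repo_type="dataset",
    commit_message="Upload cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark_report.json with huggingface_hub",
)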

cuda_inference_transformers_text-classification_FacebookAI/roberta-base/benchmark_report.json ADDED
@@ -0,0 +1,161 @@
{
    "forward": {
        "memory": {
            "unit": "MB",
            "max_ram": 1011.310592,
            "max_global_vram": 897.14688,
            "max_process_vram": 222345.35936,
            "max_reserved": 555.74528,
            "max_allocated": 499.443712
        },
        "latency": {
            "unit": "s",
            "count": 130,
            "total": 0.9979781708717345,
            "mean": 0.007676755160551805,
            "stdev": 0.000754682042599847,
            "p50": 0.007872199058532713,
            "p90": 0.008240805435180664,
            "p95": 0.008409059715270997,
            "p99": 0.009847812995910634,
            "values": [
                0.0076050801277160645,
                0.007288122177124023,
                0.006812283992767334,
                0.00695388412475586,
                0.006803964138031006,
                0.006822525024414062,
                0.006824443817138672,
                0.006835003852844238,
                0.006824924945831299,
                0.0068326048851013186,
                0.006793724060058594,
                0.006803005218505859,
                0.006790524005889893,
                0.006758685111999512,
                0.006776124000549316,
                0.006732765197753906,
                0.00675884485244751,
                0.006758685111999512,
                0.00674684476852417,
                0.006764285087585449,
                0.006947644233703613,
                0.006739964962005615,
                0.006752924919128418,
                0.006735324859619141,
                0.006754205226898193,
                0.006742684841156006,
                0.007091322898864746,
                0.006758204936981201,
                0.006769403934478759,
                0.00677084493637085,
                0.006783005237579346,
                0.006783804893493652,
                0.006771164894104004,
                0.006745405197143554,
                0.006737724781036377,
                0.006750685214996338,
                0.006765404224395752,
                0.010254507064819335,
                0.007207481861114502,
                0.006951643943786621,
                0.00691404390335083,
                0.006889883995056152,
                0.006911643981933594,
                0.006952603816986084,
                0.006876605033874512,
                0.0069247641563415525,
                0.007800280094146728,
                0.007893558979034423,
                0.007863318920135498,
                0.008093077659606934,
                0.007886038780212403,
                0.008046519279479981,
                0.007744919776916504,
                0.007647960186004638,
                0.0077255601882934575,
                0.007729720115661621,
                0.007751319885253906,
                0.00769644021987915,
                0.008149718284606934,
                0.008291956901550292,
                0.007982839107513427,
                0.008048598289489745,
                0.007958518028259278,
                0.008013717651367188,
                0.008058518409729003,
                0.008768914222717285,
                0.00808123779296875,
                0.007971477985382081,
                0.007872919082641601,
                0.007870199203491211,
                0.007863638877868652,
                0.007853719234466552,
                0.008034997940063477,
                0.007893398761749267,
                0.007848120212554932,
                0.012225056648254394,
                0.00818811798095703,
                0.00777851915359497,
                0.00807083797454834,
                0.007857879161834717,
                0.007778678894042969,
                0.007803799152374268,
                0.007830039978027344,
                0.008294516563415528,
                0.007969557762145996,
                0.00797403907775879,
                0.008289396286010742,
                0.008143478393554688,
                0.008129076957702637,
                0.008172278404235839,
                0.008037558555603027,
                0.008014999389648437,
                0.007998357772827149,
                0.007988277912139893,
                0.008038357734680176,
                0.007935638904571534,
                0.008563155174255372,
                0.008175156593322753,
                0.008050357818603516,
                0.008027478218078614,
                0.008009398460388183,
                0.008087318420410156,
                0.00848443603515625,
                0.008133398056030274,
                0.008046358108520509,
                0.00795947790145874,
                0.008472275733947755,
                0.008257077217102051,
                0.008238997459411621,
                0.008169878005981445,
                0.008062357902526856,
                0.007983798027038575,
                0.008020598411560059,
                0.007871479034423827,
                0.007902039051055907,
                0.008098998069763184,
                0.007902519226074219,
                0.00782283878326416,
                0.007940279006958009,
                0.007884278774261474,
                0.007818359851837159,
                0.008031159400939942,
                0.008852113723754882,
                0.008193717002868653,
                0.008229717254638672,
                0.0081285982131958,
                0.008331795692443848,
                0.00827403736114502,
                0.008198837280273437,
                0.008226996421813964
            ]
        },
        "throughput": {
            "unit": "samples/s",
            "value": 130.26337027637078
        },
        "energy": null,
        "efficiency": null
    }
}
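
The headline throughput follows directly from the latency block: in this report the value matches count / total, i.e. 130 measured forward passes over roughly 0.998 s gives about 130.26 samples/s. A minimal sketch for loading the report and recomputing it (the local filename is assumed):

import json

# Load the report and recompute the headline numbers from the raw latencies.
with open("benchmark_report.json") as f:
    report = json.load(f)

latency = report["forward"]["latency"]
throughput = report["forward"]["throughput"]

print(len(latency["values"]))               # 130 forward-pass latencies
print(latency["count"] / latency["total"])  # ~130.263 samples/s, derived from the latency series
print(throughput["value"])                  # 130.26337027637078, as stored in the report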