louiecerv committed
Commit 9994dda · Parent: 9efd4d1

sync with remote

Files changed (3)
  1. admission_data.csv +401 -0
  2. app.py +199 -0
  3. requirements.txt +6 -0
admission_data.csv ADDED
@@ -0,0 +1,401 @@
+ Serial No.,GRE Score,TOEFL Score,University Rating,SOP,LOR ,CGPA,Research,Chance of Admit
+ 1,337,118,4,4.5,4.5,9.65,1,0.92
+ 2,324,107,4,4,4.5,8.87,1,0.76
+ 3,316,104,3,3,3.5,8,1,0.72
+ 4,322,110,3,3.5,2.5,8.67,1,0.8
+ 5,314,103,2,2,3,8.21,0,0.65
+ 6,330,115,5,4.5,3,9.34,1,0.9
+ 7,321,109,3,3,4,8.2,1,0.75
+ 8,308,101,2,3,4,7.9,0,0.68
+ 9,302,102,1,2,1.5,8,0,0.5
+ 10,323,108,3,3.5,3,8.6,0,0.45
+ 11,325,106,3,3.5,4,8.4,1,0.52
+ 12,327,111,4,4,4.5,9,1,0.84
+ 13,328,112,4,4,4.5,9.1,1,0.78
+ 14,307,109,3,4,3,8,1,0.62
+ 15,311,104,3,3.5,2,8.2,1,0.61
+ 16,314,105,3,3.5,2.5,8.3,0,0.54
+ 17,317,107,3,4,3,8.7,0,0.66
+ 18,319,106,3,4,3,8,1,0.65
+ 19,318,110,3,4,3,8.8,0,0.63
+ 20,303,102,3,3.5,3,8.5,0,0.62
+ 21,312,107,3,3,2,7.9,1,0.64
+ 22,325,114,4,3,2,8.4,0,0.7
+ 23,328,116,5,5,5,9.5,1,0.94
+ 24,334,119,5,5,4.5,9.7,1,0.95
+ 25,336,119,5,4,3.5,9.8,1,0.97
+ 26,340,120,5,4.5,4.5,9.6,1,0.94
+ 27,322,109,5,4.5,3.5,8.8,0,0.76
+ 28,298,98,2,1.5,2.5,7.5,1,0.44
+ 29,295,93,1,2,2,7.2,0,0.46
+ 30,310,99,2,1.5,2,7.3,0,0.54
+ 31,300,97,2,3,3,8.1,1,0.65
+ 32,327,103,3,4,4,8.3,1,0.74
+ 33,338,118,4,3,4.5,9.4,1,0.91
+ 34,340,114,5,4,4,9.6,1,0.9
+ 35,331,112,5,4,5,9.8,1,0.94
+ 36,320,110,5,5,5,9.2,1,0.88
+ 37,299,106,2,4,4,8.4,0,0.64
+ 38,300,105,1,1,2,7.8,0,0.58
+ 39,304,105,1,3,1.5,7.5,0,0.52
+ 40,307,108,2,4,3.5,7.7,0,0.48
+ 41,308,110,3,3.5,3,8,1,0.46
+ 42,316,105,2,2.5,2.5,8.2,1,0.49
+ 43,313,107,2,2.5,2,8.5,1,0.53
+ 44,332,117,4,4.5,4,9.1,0,0.87
+ 45,326,113,5,4.5,4,9.4,1,0.91
+ 46,322,110,5,5,4,9.1,1,0.88
+ 47,329,114,5,4,5,9.3,1,0.86
+ 48,339,119,5,4.5,4,9.7,0,0.89
+ 49,321,110,3,3.5,5,8.85,1,0.82
+ 50,327,111,4,3,4,8.4,1,0.78
+ 51,313,98,3,2.5,4.5,8.3,1,0.76
+ 52,312,100,2,1.5,3.5,7.9,1,0.56
+ 53,334,116,4,4,3,8,1,0.78
+ 54,324,112,4,4,2.5,8.1,1,0.72
+ 55,322,110,3,3,3.5,8,0,0.7
+ 56,320,103,3,3,3,7.7,0,0.64
+ 57,316,102,3,2,3,7.4,0,0.64
+ 58,298,99,2,4,2,7.6,0,0.46
+ 59,300,99,1,3,2,6.8,1,0.36
+ 60,311,104,2,2,2,8.3,0,0.42
+ 61,309,100,2,3,3,8.1,0,0.48
+ 62,307,101,3,4,3,8.2,0,0.47
+ 63,304,105,2,3,3,8.2,1,0.54
+ 64,315,107,2,4,3,8.5,1,0.56
+ 65,325,111,3,3,3.5,8.7,0,0.52
+ 66,325,112,4,3.5,3.5,8.92,0,0.55
+ 67,327,114,3,3,3,9.02,0,0.61
+ 68,316,107,2,3.5,3.5,8.64,1,0.57
+ 69,318,109,3,3.5,4,9.22,1,0.68
+ 70,328,115,4,4.5,4,9.16,1,0.78
+ 71,332,118,5,5,5,9.64,1,0.94
+ 72,336,112,5,5,5,9.76,1,0.96
+ 73,321,111,5,5,5,9.45,1,0.93
+ 74,314,108,4,4.5,4,9.04,1,0.84
+ 75,314,106,3,3,5,8.9,0,0.74
+ 76,329,114,2,2,4,8.56,1,0.72
+ 77,327,112,3,3,3,8.72,1,0.74
+ 78,301,99,2,3,2,8.22,0,0.64
+ 79,296,95,2,3,2,7.54,1,0.44
+ 80,294,93,1,1.5,2,7.36,0,0.46
+ 81,312,105,3,2,3,8.02,1,0.5
+ 82,340,120,4,5,5,9.5,1,0.96
+ 83,320,110,5,5,4.5,9.22,1,0.92
+ 84,322,115,5,4,4.5,9.36,1,0.92
+ 85,340,115,5,4.5,4.5,9.45,1,0.94
+ 86,319,103,4,4.5,3.5,8.66,0,0.76
+ 87,315,106,3,4.5,3.5,8.42,0,0.72
+ 88,317,107,2,3.5,3,8.28,0,0.66
+ 89,314,108,3,4.5,3.5,8.14,0,0.64
+ 90,316,109,4,4.5,3.5,8.76,1,0.74
+ 91,318,106,2,4,4,7.92,1,0.64
+ 92,299,97,3,5,3.5,7.66,0,0.38
+ 93,298,98,2,4,3,8.03,0,0.34
+ 94,301,97,2,3,3,7.88,1,0.44
+ 95,303,99,3,2,2.5,7.66,0,0.36
+ 96,304,100,4,1.5,2.5,7.84,0,0.42
+ 97,306,100,2,3,3,8,0,0.48
+ 98,331,120,3,4,4,8.96,1,0.86
+ 99,332,119,4,5,4.5,9.24,1,0.9
+ 100,323,113,3,4,4,8.88,1,0.79
+ 101,322,107,3,3.5,3.5,8.46,1,0.71
+ 102,312,105,2,2.5,3,8.12,0,0.64
+ 103,314,106,2,4,3.5,8.25,0,0.62
+ 104,317,104,2,4.5,4,8.47,0,0.57
+ 105,326,112,3,3.5,3,9.05,1,0.74
+ 106,316,110,3,4,4.5,8.78,1,0.69
+ 107,329,111,4,4.5,4.5,9.18,1,0.87
+ 108,338,117,4,3.5,4.5,9.46,1,0.91
+ 109,331,116,5,5,5,9.38,1,0.93
+ 110,304,103,5,5,4,8.64,0,0.68
+ 111,305,108,5,3,3,8.48,0,0.61
+ 112,321,109,4,4,4,8.68,1,0.69
+ 113,301,107,3,3.5,3.5,8.34,1,0.62
+ 114,320,110,2,4,3.5,8.56,0,0.72
+ 115,311,105,3,3.5,3,8.45,1,0.59
+ 116,310,106,4,4.5,4.5,9.04,1,0.66
+ 117,299,102,3,4,3.5,8.62,0,0.56
+ 118,290,104,4,2,2.5,7.46,0,0.45
+ 119,296,99,2,3,3.5,7.28,0,0.47
+ 120,327,104,5,3,3.5,8.84,1,0.71
+ 121,335,117,5,5,5,9.56,1,0.94
+ 122,334,119,5,4.5,4.5,9.48,1,0.94
+ 123,310,106,4,1.5,2.5,8.36,0,0.57
+ 124,308,108,3,3.5,3.5,8.22,0,0.61
+ 125,301,106,4,2.5,3,8.47,0,0.57
+ 126,300,100,3,2,3,8.66,1,0.64
+ 127,323,113,3,4,3,9.32,1,0.85
+ 128,319,112,3,2.5,2,8.71,1,0.78
+ 129,326,112,3,3.5,3,9.1,1,0.84
+ 130,333,118,5,5,5,9.35,1,0.92
+ 131,339,114,5,4,4.5,9.76,1,0.96
+ 132,303,105,5,5,4.5,8.65,0,0.77
+ 133,309,105,5,3.5,3.5,8.56,0,0.71
+ 134,323,112,5,4,4.5,8.78,0,0.79
+ 135,333,113,5,4,4,9.28,1,0.89
+ 136,314,109,4,3.5,4,8.77,1,0.82
+ 137,312,103,3,5,4,8.45,0,0.76
+ 138,316,100,2,1.5,3,8.16,1,0.71
+ 139,326,116,2,4.5,3,9.08,1,0.8
+ 140,318,109,1,3.5,3.5,9.12,0,0.78
+ 141,329,110,2,4,3,9.15,1,0.84
+ 142,332,118,2,4.5,3.5,9.36,1,0.9
+ 143,331,115,5,4,3.5,9.44,1,0.92
+ 144,340,120,4,4.5,4,9.92,1,0.97
+ 145,325,112,2,3,3.5,8.96,1,0.8
+ 146,320,113,2,2,2.5,8.64,1,0.81
+ 147,315,105,3,2,2.5,8.48,0,0.75
+ 148,326,114,3,3,3,9.11,1,0.83
+ 149,339,116,4,4,3.5,9.8,1,0.96
+ 150,311,106,2,3.5,3,8.26,1,0.79
+ 151,334,114,4,4,4,9.43,1,0.93
+ 152,332,116,5,5,5,9.28,1,0.94
+ 153,321,112,5,5,5,9.06,1,0.86
+ 154,324,105,3,3,4,8.75,0,0.79
+ 155,326,108,3,3,3.5,8.89,0,0.8
+ 156,312,109,3,3,3,8.69,0,0.77
+ 157,315,105,3,2,2.5,8.34,0,0.7
+ 158,309,104,2,2,2.5,8.26,0,0.65
+ 159,306,106,2,2,2.5,8.14,0,0.61
+ 160,297,100,1,1.5,2,7.9,0,0.52
+ 161,315,103,1,1.5,2,7.86,0,0.57
+ 162,298,99,1,1.5,3,7.46,0,0.53
+ 163,318,109,3,3,3,8.5,0,0.67
+ 164,317,105,3,3.5,3,8.56,0,0.68
+ 165,329,111,4,4.5,4,9.01,1,0.81
+ 166,322,110,5,4.5,4,8.97,0,0.78
+ 167,302,102,3,3.5,5,8.33,0,0.65
+ 168,313,102,3,2,3,8.27,0,0.64
+ 169,293,97,2,2,4,7.8,1,0.64
+ 170,311,99,2,2.5,3,7.98,0,0.65
+ 171,312,101,2,2.5,3.5,8.04,1,0.68
+ 172,334,117,5,4,4.5,9.07,1,0.89
+ 173,322,110,4,4,5,9.13,1,0.86
+ 174,323,113,4,4,4.5,9.23,1,0.89
+ 175,321,111,4,4,4,8.97,1,0.87
+ 176,320,111,4,4.5,3.5,8.87,1,0.85
+ 177,329,119,4,4.5,4.5,9.16,1,0.9
+ 178,319,110,3,3.5,3.5,9.04,0,0.82
+ 179,309,108,3,2.5,3,8.12,0,0.72
+ 180,307,102,3,3,3,8.27,0,0.73
+ 181,300,104,3,3.5,3,8.16,0,0.71
+ 182,305,107,2,2.5,2.5,8.42,0,0.71
+ 183,299,100,2,3,3.5,7.88,0,0.68
+ 184,314,110,3,4,4,8.8,0,0.75
+ 185,316,106,2,2.5,4,8.32,0,0.72
+ 186,327,113,4,4.5,4.5,9.11,1,0.89
+ 187,317,107,3,3.5,3,8.68,1,0.84
+ 188,335,118,5,4.5,3.5,9.44,1,0.93
+ 189,331,115,5,4.5,3.5,9.36,1,0.93
+ 190,324,112,5,5,5,9.08,1,0.88
+ 191,324,111,5,4.5,4,9.16,1,0.9
+ 192,323,110,5,4,5,8.98,1,0.87
+ 193,322,114,5,4.5,4,8.94,1,0.86
+ 194,336,118,5,4.5,5,9.53,1,0.94
+ 195,316,109,3,3.5,3,8.76,0,0.77
+ 196,307,107,2,3,3.5,8.52,1,0.78
+ 197,306,105,2,3,2.5,8.26,0,0.73
+ 198,310,106,2,3.5,2.5,8.33,0,0.73
+ 199,311,104,3,4.5,4.5,8.43,0,0.7
+ 200,313,107,3,4,4.5,8.69,0,0.72
+ 201,317,103,3,2.5,3,8.54,1,0.73
+ 202,315,110,2,3.5,3,8.46,1,0.72
+ 203,340,120,5,4.5,4.5,9.91,1,0.97
+ 204,334,120,5,4,5,9.87,1,0.97
+ 205,298,105,3,3.5,4,8.54,0,0.69
+ 206,295,99,2,2.5,3,7.65,0,0.57
+ 207,315,99,2,3.5,3,7.89,0,0.63
+ 208,310,102,3,3.5,4,8.02,1,0.66
+ 209,305,106,2,3,3,8.16,0,0.64
+ 210,301,104,3,3.5,4,8.12,1,0.68
+ 211,325,108,4,4.5,4,9.06,1,0.79
+ 212,328,110,4,5,4,9.14,1,0.82
+ 213,338,120,4,5,5,9.66,1,0.95
+ 214,333,119,5,5,4.5,9.78,1,0.96
+ 215,331,117,4,4.5,5,9.42,1,0.94
+ 216,330,116,5,5,4.5,9.36,1,0.93
+ 217,322,112,4,4.5,4.5,9.26,1,0.91
+ 218,321,109,4,4,4,9.13,1,0.85
+ 219,324,110,4,3,3.5,8.97,1,0.84
+ 220,312,104,3,3.5,3.5,8.42,0,0.74
+ 221,313,103,3,4,4,8.75,0,0.76
+ 222,316,110,3,3.5,4,8.56,0,0.75
+ 223,324,113,4,4.5,4,8.79,0,0.76
+ 224,308,109,2,3,4,8.45,0,0.71
+ 225,305,105,2,3,2,8.23,0,0.67
+ 226,296,99,2,2.5,2.5,8.03,0,0.61
+ 227,306,110,2,3.5,4,8.45,0,0.63
+ 228,312,110,2,3.5,3,8.53,0,0.64
+ 229,318,112,3,4,3.5,8.67,0,0.71
+ 230,324,111,4,3,3,9.01,1,0.82
+ 231,313,104,3,4,4.5,8.65,0,0.73
+ 232,319,106,3,3.5,2.5,8.33,1,0.74
+ 233,312,107,2,2.5,3.5,8.27,0,0.69
+ 234,304,100,2,2.5,3.5,8.07,0,0.64
+ 235,330,113,5,5,4,9.31,1,0.91
+ 236,326,111,5,4.5,4,9.23,1,0.88
+ 237,325,112,4,4,4.5,9.17,1,0.85
+ 238,329,114,5,4.5,5,9.19,1,0.86
+ 239,310,104,3,2,3.5,8.37,0,0.7
+ 240,299,100,1,1.5,2,7.89,0,0.59
+ 241,296,101,1,2.5,3,7.68,0,0.6
+ 242,317,103,2,2.5,2,8.15,0,0.65
+ 243,324,115,3,3.5,3,8.76,1,0.7
+ 244,325,114,3,3.5,3,9.04,1,0.76
+ 245,314,107,2,2.5,4,8.56,0,0.63
+ 246,328,110,4,4,2.5,9.02,1,0.81
+ 247,316,105,3,3,3.5,8.73,0,0.72
+ 248,311,104,2,2.5,3.5,8.48,0,0.71
+ 249,324,110,3,3.5,4,8.87,1,0.8
+ 250,321,111,3,3.5,4,8.83,1,0.77
+ 251,320,104,3,3,2.5,8.57,1,0.74
+ 252,316,99,2,2.5,3,9,0,0.7
+ 253,318,100,2,2.5,3.5,8.54,1,0.71
+ 254,335,115,4,4.5,4.5,9.68,1,0.93
+ 255,321,114,4,4,5,9.12,0,0.85
+ 256,307,110,4,4,4.5,8.37,0,0.79
+ 257,309,99,3,4,4,8.56,0,0.76
+ 258,324,100,3,4,5,8.64,1,0.78
+ 259,326,102,4,5,5,8.76,1,0.77
+ 260,331,119,4,5,4.5,9.34,1,0.9
+ 261,327,108,5,5,3.5,9.13,1,0.87
+ 262,312,104,3,3.5,4,8.09,0,0.71
+ 263,308,103,2,2.5,4,8.36,1,0.7
+ 264,324,111,3,2.5,1.5,8.79,1,0.7
+ 265,325,110,2,3,2.5,8.76,1,0.75
+ 266,313,102,3,2.5,2.5,8.68,0,0.71
+ 267,312,105,2,2,2.5,8.45,0,0.72
+ 268,314,107,3,3,3.5,8.17,1,0.73
+ 269,327,113,4,4.5,5,9.14,0,0.83
+ 270,308,108,4,4.5,5,8.34,0,0.77
+ 271,306,105,2,2.5,3,8.22,1,0.72
+ 272,299,96,2,1.5,2,7.86,0,0.54
+ 273,294,95,1,1.5,1.5,7.64,0,0.49
+ 274,312,99,1,1,1.5,8.01,1,0.52
+ 275,315,100,1,2,2.5,7.95,0,0.58
+ 276,322,110,3,3.5,3,8.96,1,0.78
+ 277,329,113,5,5,4.5,9.45,1,0.89
+ 278,320,101,2,2.5,3,8.62,0,0.7
+ 279,308,103,2,3,3.5,8.49,0,0.66
+ 280,304,102,2,3,4,8.73,0,0.67
+ 281,311,102,3,4.5,4,8.64,1,0.68
+ 282,317,110,3,4,4.5,9.11,1,0.8
+ 283,312,106,3,4,3.5,8.79,1,0.81
+ 284,321,111,3,2.5,3,8.9,1,0.8
+ 285,340,112,4,5,4.5,9.66,1,0.94
+ 286,331,116,5,4,4,9.26,1,0.93
+ 287,336,118,5,4.5,4,9.19,1,0.92
+ 288,324,114,5,5,4.5,9.08,1,0.89
+ 289,314,104,4,5,5,9.02,0,0.82
+ 290,313,109,3,4,3.5,9,0,0.79
+ 291,307,105,2,2.5,3,7.65,0,0.58
+ 292,300,102,2,1.5,2,7.87,0,0.56
+ 293,302,99,2,1,2,7.97,0,0.56
+ 294,312,98,1,3.5,3,8.18,1,0.64
+ 295,316,101,2,2.5,2,8.32,1,0.61
+ 296,317,100,2,3,2.5,8.57,0,0.68
+ 297,310,107,3,3.5,3.5,8.67,0,0.76
+ 298,320,120,3,4,4.5,9.11,0,0.86
+ 299,330,114,3,4.5,4.5,9.24,1,0.9
+ 300,305,112,3,3,3.5,8.65,0,0.71
+ 301,309,106,2,2.5,2.5,8,0,0.62
+ 302,319,108,2,2.5,3,8.76,0,0.66
+ 303,322,105,2,3,3,8.45,1,0.65
+ 304,323,107,3,3.5,3.5,8.55,1,0.73
+ 305,313,106,2,2.5,2,8.43,0,0.62
+ 306,321,109,3,3.5,3.5,8.8,1,0.74
+ 307,323,110,3,4,3.5,9.1,1,0.79
+ 308,325,112,4,4,4,9,1,0.8
+ 309,312,108,3,3.5,3,8.53,0,0.69
+ 310,308,110,4,3.5,3,8.6,0,0.7
+ 311,320,104,3,3,3.5,8.74,1,0.76
+ 312,328,108,4,4.5,4,9.18,1,0.84
+ 313,311,107,4,4.5,4.5,9,1,0.78
+ 314,301,100,3,3.5,3,8.04,0,0.67
+ 315,305,105,2,3,4,8.13,0,0.66
+ 316,308,104,2,2.5,3,8.07,0,0.65
+ 317,298,101,2,1.5,2,7.86,0,0.54
+ 318,300,99,1,1,2.5,8.01,0,0.58
+ 319,324,111,3,2.5,2,8.8,1,0.79
+ 320,327,113,4,3.5,3,8.69,1,0.8
+ 321,317,106,3,4,3.5,8.5,1,0.75
+ 322,323,104,3,4,4,8.44,1,0.73
+ 323,314,107,2,2.5,4,8.27,0,0.72
+ 324,305,102,2,2,2.5,8.18,0,0.62
+ 325,315,104,3,3,2.5,8.33,0,0.67
+ 326,326,116,3,3.5,4,9.14,1,0.81
+ 327,299,100,3,2,2,8.02,0,0.63
+ 328,295,101,2,2.5,2,7.86,0,0.69
+ 329,324,112,4,4,3.5,8.77,1,0.8
+ 330,297,96,2,2.5,1.5,7.89,0,0.43
+ 331,327,113,3,3.5,3,8.66,1,0.8
+ 332,311,105,2,3,2,8.12,1,0.73
+ 333,308,106,3,3.5,2.5,8.21,1,0.75
+ 334,319,108,3,3,3.5,8.54,1,0.71
+ 335,312,107,4,4.5,4,8.65,1,0.73
+ 336,325,111,4,4,4.5,9.11,1,0.83
+ 337,319,110,3,3,2.5,8.79,0,0.72
+ 338,332,118,5,5,5,9.47,1,0.94
+ 339,323,108,5,4,4,8.74,1,0.81
+ 340,324,107,5,3.5,4,8.66,1,0.81
+ 341,312,107,3,3,3,8.46,1,0.75
+ 342,326,110,3,3.5,3.5,8.76,1,0.79
+ 343,308,106,3,3,3,8.24,0,0.58
+ 344,305,103,2,2.5,3.5,8.13,0,0.59
+ 345,295,96,2,1.5,2,7.34,0,0.47
+ 346,316,98,1,1.5,2,7.43,0,0.49
+ 347,304,97,2,1.5,2,7.64,0,0.47
+ 348,299,94,1,1,1,7.34,0,0.42
+ 349,302,99,1,2,2,7.25,0,0.57
+ 350,313,101,3,2.5,3,8.04,0,0.62
+ 351,318,107,3,3,3.5,8.27,1,0.74
+ 352,325,110,4,3.5,4,8.67,1,0.73
+ 353,303,100,2,3,3.5,8.06,1,0.64
+ 354,300,102,3,3.5,2.5,8.17,0,0.63
+ 355,297,98,2,2.5,3,7.67,0,0.59
+ 356,317,106,2,2,3.5,8.12,0,0.73
+ 357,327,109,3,3.5,4,8.77,1,0.79
+ 358,301,104,2,3.5,3.5,7.89,1,0.68
+ 359,314,105,2,2.5,2,7.64,0,0.7
+ 360,321,107,2,2,1.5,8.44,0,0.81
+ 361,322,110,3,4,5,8.64,1,0.85
+ 362,334,116,4,4,3.5,9.54,1,0.93
+ 363,338,115,5,4.5,5,9.23,1,0.91
+ 364,306,103,2,2.5,3,8.36,0,0.69
+ 365,313,102,3,3.5,4,8.9,1,0.77
+ 366,330,114,4,4.5,3,9.17,1,0.86
+ 367,320,104,3,3.5,4.5,8.34,1,0.74
+ 368,311,98,1,1,2.5,7.46,0,0.57
+ 369,298,92,1,2,2,7.88,0,0.51
+ 370,301,98,1,2,3,8.03,1,0.67
+ 371,310,103,2,2.5,2.5,8.24,0,0.72
+ 372,324,110,3,3.5,3,9.22,1,0.89
+ 373,336,119,4,4.5,4,9.62,1,0.95
+ 374,321,109,3,3,3,8.54,1,0.79
+ 375,315,105,2,2,2.5,7.65,0,0.39
+ 376,304,101,2,2,2.5,7.66,0,0.38
+ 377,297,96,2,2.5,2,7.43,0,0.34
+ 378,290,100,1,1.5,2,7.56,0,0.47
+ 379,303,98,1,2,2.5,7.65,0,0.56
+ 380,311,99,1,2.5,3,8.43,1,0.71
+ 381,322,104,3,3.5,4,8.84,1,0.78
+ 382,319,105,3,3,3.5,8.67,1,0.73
+ 383,324,110,4,4.5,4,9.15,1,0.82
+ 384,300,100,3,3,3.5,8.26,0,0.62
+ 385,340,113,4,5,5,9.74,1,0.96
+ 386,335,117,5,5,5,9.82,1,0.96
+ 387,302,101,2,2.5,3.5,7.96,0,0.46
+ 388,307,105,2,2,3.5,8.1,0,0.53
+ 389,296,97,2,1.5,2,7.8,0,0.49
+ 390,320,108,3,3.5,4,8.44,1,0.76
+ 391,314,102,2,2,2.5,8.24,0,0.64
+ 392,318,106,3,2,3,8.65,0,0.71
+ 393,326,112,4,4,3.5,9.12,1,0.84
+ 394,317,104,2,3,3,8.76,0,0.77
+ 395,329,111,4,4.5,4,9.23,1,0.89
+ 396,324,110,3,3.5,3.5,9.04,1,0.82
+ 397,325,107,3,3,3.5,9.11,1,0.84
+ 398,330,116,4,5,4.5,9.45,1,0.91
+ 399,312,103,3,3.5,4,8.78,0,0.67
+ 400,333,117,4,5,4,9.66,1,0.95
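
As a quick sanity check on this file — an editorial sketch, not part of the commit — the CSV should parse cleanly with pandas, assuming it is saved as admission_data.csv in the working directory:

```python
import pandas as pd

# Load the committed dataset and confirm its basic shape and schema.
df = pd.read_csv("admission_data.csv")
print(df.shape)           # expected: (400, 9)
print(list(df.columns))   # watch for trailing spaces in names, e.g. 'LOR '
print(df.iloc[:, -1].describe())  # summary of the target column, 'Chance of Admit'
```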
app.py ADDED
@@ -0,0 +1,199 @@
+ import streamlit as st
+ import pandas as pd
+ import numpy as np
+ from sklearn.preprocessing import StandardScaler
+ from sklearn.linear_model import LinearRegression
+ from sklearn.svm import SVR
+ from sklearn.tree import DecisionTreeRegressor
+ from sklearn.ensemble import RandomForestRegressor
+ from sklearn.neighbors import KNeighborsRegressor
+ from sklearn.neural_network import MLPRegressor
+ from sklearn.model_selection import train_test_split
+ from sklearn.metrics import mean_squared_error, r2_score
+ import matplotlib.pyplot as plt
+ import seaborn as sns
+
+ # Load the dataset
+ data = pd.read_csv('admission_data.csv')
+
+ # Drop the Serial No. column
+ data = data.drop(columns=['Serial No.'])
+
+ # Pre-processing tasks
+ st.title("University Graduate Admission Regression Analysis")
+
+ about_text = """
+ # About this App
+
+ This Streamlit app analyzes admission data and performs regression analysis to predict the chance of admission from various factors. The dataset contains applicants' GRE scores, TOEFL scores, university ratings, SOP (Statement of Purpose) scores, LOR (Letter of Recommendation) scores, CGPA (Cumulative Grade Point Average), research experience, and the chance of admission.
+
+ ## Dataset Description
+
+ The dataset consists of the following columns:
+ - **GRE Score**: Graduate Record Examination score (out of 340)
+ - **TOEFL Score**: Test of English as a Foreign Language score (out of 120)
+ - **University Rating**: Rating of the university (out of 5)
+ - **SOP**: Strength of the Statement of Purpose (out of 5)
+ - **LOR**: Strength of the Letter of Recommendation (out of 5)
+ - **CGPA**: Cumulative Grade Point Average (out of 10)
+ - **Research**: Research experience (0 or 1)
+ - **Chance of Admit**: Probability of admission (0 to 1)
+
+ ## Regression Analysis
+
+ The app performs regression analysis using several models to predict the chance of admission from the input features. The models include:
+ - **Linear Regression**: A linear approach to modeling the relationship between a dependent variable and one or more independent variables.
+ - **Support Vector Machine (SVM)**: A supervised machine learning algorithm used for both classification and regression tasks.
+ - **Decision Tree**: A non-parametric supervised learning method used for classification and regression, which splits the data into subsets based on the values of the input features.
+ - **Random Forest**: An ensemble learning method that constructs multiple decision trees during training and outputs the mean prediction of the individual trees.
+ - **K-Nearest Neighbors (KNN)**: A non-parametric method used for classification and regression, predicting the value from the k-nearest training examples in the feature space.
+ - **MLP Regressor**: A multi-layer perceptron regressor that optimizes the squared loss using stochastic gradient descent.
+
+ Each model's performance is evaluated using Mean Squared Error (MSE) and R-squared (R²). The app also provides visualizations comparing actual vs. predicted values for each model.
+
+ Explore the tabs to see the performance of each model and understand how different factors influence the chance of admission.
+ """
+
+ st.expander("About this App").markdown(about_text)
+
+ # Display the dataset
+ st.subheader("Dataset")
+ st.write(data)
+
+ # Check for null values
+ st.subheader("Null Values")
+ st.write(data.isnull().sum())
+
+ # Handle null values (if any)
+ data = data.dropna()
+
+ # Ensure consistent data types
+ data = data.astype({
+     'GRE Score': 'int64',
+     'TOEFL Score': 'int64',
+     'University Rating': 'int64',
+     'SOP': 'float64',
+     'LOR ': 'float64',
+     'CGPA': 'float64',
+     'Research': 'int64',
+     'Chance of Admit ': 'float64'
+ })
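+ # (The trailing spaces in 'LOR ' and 'Chance of Admit ' match the column names as they appear in the CSV header.)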
+
+ # Scaling the data
+ scaler = StandardScaler()
+
+ # Save column names before scaling
+ data_df = data.drop(columns=['Chance of Admit '])  # Save as DataFrame before scaling
+ feature_names = data_df.columns  # Store feature names separately
+
+ # Scale the data (returns a NumPy array)
+ scaled_data = scaler.fit_transform(data.drop(columns=['Chance of Admit ']))
+
+ # Convert back to DataFrame after scaling
+ scaled_data = pd.DataFrame(scaled_data, columns=feature_names)
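+ # (The scaler fitted here is reused at the end of the script to transform user input before prediction.)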
+
+ # Feature importance using Random Forest
+ rf = RandomForestRegressor()
+ rf.fit(scaled_data, data['Chance of Admit '])
+ importances = rf.feature_importances_
+
+ # Plot feature importances
+ st.subheader("Feature Importances")
+ fig, ax = plt.subplots()
+ sns.barplot(x=importances, y=scaled_data.columns, ax=ax)
+ st.pyplot(fig)
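+ # (This Random Forest is fitted only to rank feature importances; the models compared below are trained separately.)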
+
+ # Split the data into training and testing sets
+ X_train, X_test, y_train, y_test = train_test_split(scaled_data, data['Chance of Admit '], test_size=0.2, random_state=42)
+
+ # Define models
+ models = {
+     "Linear Regression": LinearRegression(),
+     "Support Vector Machine": SVR(),
+     "Decision Tree": DecisionTreeRegressor(),
+     "Random Forest": RandomForestRegressor(),
+     "K-Nearest Neighbors": KNeighborsRegressor(),
+     "MLP Regressor": MLPRegressor(max_iter=500)
+ }
+
+ # Model descriptions
+ model_descriptions = {
+     "Linear Regression": "Linear Regression is a linear approach to modeling the relationship between a dependent variable and one or more independent variables.",
+     "Support Vector Machine": "Support Vector Machine (SVM) is a supervised machine learning algorithm that can be used for both classification and regression tasks.",
+     "Decision Tree": "Decision Tree is a non-parametric supervised learning method used for classification and regression. It splits the data into subsets based on the values of the input features.",
+     "Random Forest": "Random Forest is an ensemble learning method that operates by constructing multiple decision trees during training and outputting the mean prediction of the individual trees.",
+     "K-Nearest Neighbors": "K-Nearest Neighbors (KNN) is a non-parametric method used for classification and regression. It predicts the value from the k-nearest training examples in the feature space.",
+     "MLP Regressor": "MLP Regressor is a multi-layer perceptron regressor that optimizes the squared loss using stochastic gradient descent."
+ }
+
+ # Create tabs for each model
+ st.subheader("Model Performance and Descriptions")
+ tabs = st.tabs(list(models.keys()))
+
+ for tab, (model_name, model) in zip(tabs, models.items()):
+     with tab:
+         st.subheader(model_name)
+
+         # Train the model
+         model.fit(X_train, y_train)
+
+         # Make predictions
+         y_pred = model.predict(X_test)
+
+         # Calculate performance metrics
+         mse = mean_squared_error(y_test, y_pred)
+         r2 = r2_score(y_test, y_pred)
+
+         # Display performance metrics
+         st.write(f"**Mean Squared Error:** {mse}")
+         st.write(f"**R-squared:** {r2}")
+
+         # Plot actual vs predicted values
+         fig, ax = plt.subplots()
+         ax.scatter(y_test, y_pred)
+         ax.plot([y_test.min(), y_test.max()], [y_test.min(), y_test.max()], 'k--', lw=4)
+         ax.set_xlabel('Actual')
+         ax.set_ylabel('Predicted')
+         st.pyplot(fig)
+
+         # Display model description
+         st.write(f"**Description:** {model_descriptions[model_name]}")
+
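+ # Note: every model was already fitted inside the tab loop above, so the selection below reuses those fitted models.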
+ # Prediction section
+ st.subheader("Predict Chance of Admission")
+
+ # Model selection box
+ selected_model_name = st.selectbox("Select Model", list(models.keys()))
+ selected_model = models[selected_model_name]
+
+ # Input elements for all column variables
+ gre_score = st.number_input("GRE Score", min_value=0, max_value=340, value=320)
+ toefl_score = st.number_input("TOEFL Score", min_value=0, max_value=120, value=110)
+ university_rating = st.number_input("University Rating", min_value=1, max_value=5, value=3)
+ sop = st.number_input("SOP", min_value=0.0, max_value=5.0, value=4.0)
+ lor = st.number_input("LOR", min_value=0.0, max_value=5.0, value=4.0)
+ cgpa = st.number_input("CGPA", min_value=0.0, max_value=10.0, value=8.5)
+ research = st.selectbox("Research", [0, 1])
+
+ # Button to predict chance of admission
+ if st.button("Predict"):
+     # Create a DataFrame for the input values
+     input_data = pd.DataFrame({
+         'GRE Score': [gre_score],
+         'TOEFL Score': [toefl_score],
+         'University Rating': [university_rating],
+         'SOP': [sop],
+         'LOR ': [lor],
+         'CGPA': [cgpa],
+         'Research': [research]
+     })
+
+     # Scale the input data
+     scaled_input_data = scaler.transform(input_data)
+     scaled_input_data = pd.DataFrame(scaled_input_data, columns=input_data.columns)  # Convert back to DataFrame
+
+     # Predict the chance of admission
+     prediction = selected_model.predict(scaled_input_data)
+
+     # Display the prediction
+     st.write(f"Predicted Chance of Admission: {prediction[0]:.2f}")
requirements.txt ADDED
@@ -0,0 +1,6 @@
+ streamlit
+ pandas
+ numpy
+ scikit-learn
+ matplotlib
+ seaborn
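
With these six dependencies installed (`pip install -r requirements.txt`), the app should start locally with `streamlit run app.py`.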