Ayush Shrivastava committed
Commit ceafa16
Parent: 2b27f6f

Add application file

Files changed (1)
  1. app.py +133 -0
app.py ADDED
@@ -0,0 +1,133 @@
+
+# Import libraries.
+from sklearn.model_selection import train_test_split
+from sklearn.datasets import make_regression
+from keras.optimizers import SGD, Adam
+from keras.models import Sequential
+import matplotlib.pyplot as plt
+from keras.layers import Dense
+import streamlit as st
+
+
+def model_MLP(X_train, y_train, X_test, layers, nodes, activation, solver, rate, iter):
+    """Creates an MLP regression model and returns its predictions on the test set."""
+
+    # Define the model.
+    model = Sequential()
+
+    # Add the first hidden layer.
+    model.add(Dense(nodes, activation=activation, kernel_initializer='he_uniform', input_dim=X_train.shape[1]))
+
+    # Add the remaining hidden layers.
+    for i in range(layers - 1):
+        model.add(Dense(nodes, activation=activation, kernel_initializer='he_uniform'))
+
+    # Add the output layer.
+    model.add(Dense(1, activation='linear'))
+
+    # Choose the optimizer.
+    if solver == 'adam':
+        opt = Adam(learning_rate=rate)
+    else:
+        opt = SGD(learning_rate=rate)
+
+    # Compile the model.
+    model.compile(optimizer=opt, loss='mean_squared_error', metrics=['mean_squared_error'])
+
+    # Fit the model.
+    model.fit(X_train, y_train, epochs=iter, verbose=0)
+
+    # Predict on the test data.
+    y_hat = model.predict(X_test)
+
+    # Return the predictions.
+    return y_hat
+
+
+if __name__ == '__main__':
+
+    # Add a title to the app.
+    st.title("Visualize MLPs")
+
+    # Add a subtitle to the app.
+    st.subheader('MLP Parameters')
+
+    # Add two columns to display the sliders for the parameters.
+    left_column, right_column = st.columns(2)
+
+    with left_column:
+
+        # Slider for max iterations (training epochs).
+        iter = st.slider('Max Iteration', min_value=100, max_value=1000, value=500, step=10)
+        # Slider for nodes per layer.
+        nodes = st.slider('Nodes', min_value=1, max_value=10, value=5, step=1)
+        # Slider for number of hidden layers.
+        layers = st.slider('Hidden Layers', min_value=1, max_value=10, value=3, step=1)
+        # Selectbox for the activation function.
+        activation = st.selectbox('Activation', ('linear', 'relu', 'sigmoid', 'tanh'), index=1)
+
+    with right_column:
+
+        # Slider for the amount of noise.
+        noise = st.slider('Noise', min_value=0, max_value=100, value=50, step=10)
+        # Slider for the test-train split.
+        split = st.slider('Test-Train Split', min_value=0.1, max_value=0.9, value=0.3, step=0.1)
+        # Selectbox for the solver/optimizer.
+        solver = st.selectbox('Solver', ('adam', 'sgd'), index=0)
+        # Selectbox for the learning rate.
+        rate = float(st.selectbox('Learning Rate', ('0.001', '0.003', '0.01', '0.03', '0.1', '0.3', '1.0'), index=3))
+
+    # Generate regression data.
+    X, y = make_regression(n_samples=500, n_features=1, noise=noise, random_state=42, bias=3)
+
+    # Split the data into training and test sets.
+    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=split, random_state=42)
+
+    # Plot the data inside a container.
+    with st.container():
+
+        # Add a subheader to the container.
+        st.subheader('Predictions')
+
+        # Add two columns to display the graphs.
+        left_graph, right_graph = st.columns(2)
+
+        with left_graph:
+
+            # Plot the training data.
+            st.write('Training Data set')
+
+            fig1, ax1 = plt.subplots(1)
+            ax1.scatter(X_train, y_train, label='train', color='blue', alpha=0.6, edgecolors='black')
+
+            # Set the labels and title of the graph.
+            ax1.set_xlabel('X')
+            ax1.set_ylabel('y')
+            ax1.set_title('Training Data set')
+            ax1.legend()
+
+            # Write the graph to the app.
+            st.pyplot(fig1)
+
+        with right_graph:
+
+            # Plot the test data.
+            st.write('Test Data set')
+
+            # Predict on the test data.
+            y_hat = model_MLP(X_train, y_train, X_test, layers, nodes, activation, solver, rate, iter)
+
+            fig2, ax2 = plt.subplots(1)
+            ax2.scatter(X_test, y_test, label='test', color='blue', alpha=0.4)
+            ax2.scatter(X_test, y_hat, label='prediction', color='red', alpha=0.6, edgecolors='black')
+
+            # Set the labels and title of the graph.
+            ax2.set_xlabel('X')
+            ax2.set_ylabel('y')
+            ax2.set_title('Test Data set')
+            ax2.legend()
+
+            # Write the graph to the app.
+            st.pyplot(fig2)