huabdul commited on
Commit
c07df4a
·
1 Parent(s): 40cf08a

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +132 -0
app.py ADDED
@@ -0,0 +1,132 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import matplotlib.pyplot as plt
3
+ from matplotlib.colors import ListedColormap
4
+ from itertools import combinations
5
+
6
+ plt.rcParams['figure.dpi'] = 100
7
+
8
+ from sklearn.datasets import load_iris
9
+
10
+ from sklearn.tree import DecisionTreeClassifier
11
+ from sklearn.neighbors import KNeighborsClassifier
12
+ from sklearn.svm import SVC
13
+ from sklearn.ensemble import VotingClassifier
14
+
15
+ import gradio as gr
16
+
17
#==================================================
# Per-class colors (red / yellow / blue), used both for the filled
# decision-surface contours (via CMAP) and for the scatter points.
C1, C2, C3 = '#ff0000', '#ffff00', '#0000ff'
CMAP = ListedColormap([C1, C2, C3])
GRANULARITY = 0.05  # step size of the prediction mesh in get_decision_surface
SEED = 1  # fixed RNG seed so the sample shuffle in create_plot is reproducible

# Display names for the four Iris features and the three classes.
# FEATURE_NAMES order must match the column order of iris.data, since
# create_plot maps names to columns via FEATURE_NAMES.index(...).
FEATURE_NAMES = ["Sepal Length", "Sepal Width", "Petal Length", "Petal Width"]
TARGET_NAMES = ["Setosa", "Versicolour", "Virginica"]
# Subplot titles, in the same order the classifiers are plotted.
MODEL_NAMES = ['DecisionTreeClassifier', 'KNeighborsClassifier', 'SupportVectorClassifier', 'VotingClassifier']

# Load the Iris dataset once at import time; reused by every callback.
iris = load_iris()
#==================================================
29
def get_decision_surface(X, y, model, granularity=None):
    """Evaluate *model* over a regular 2-D mesh covering the data.

    Parameters
    ----------
    X : ndarray of shape (n_samples, 2)
        Two-feature input data; the mesh spans each feature's range,
        padded by 1 on both sides.
    y : array-like
        Unused; kept so existing positional callers keep working.
    model : fitted estimator
        Must expose ``predict`` accepting an (n_points, 2) array.
    granularity : float, optional
        Mesh step size.  Defaults to the module-level ``GRANULARITY``
        (generalized from the previously hard-coded constant).

    Returns
    -------
    xx, yy, Z : ndarray
        Meshgrid coordinates and the predicted class at each mesh node,
        all sharing the same 2-D shape.
    """
    if granularity is None:
        granularity = GRANULARITY

    x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
    y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
    xx, yy = np.meshgrid(
        np.arange(x_min, x_max, granularity),
        np.arange(y_min, y_max, granularity),
    )

    # Predict once on every mesh node, then restore the grid shape.
    Z = model.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)

    return xx, yy, Z
40
+
41
def create_plot(feature_string, max_depth, n_neighbors, gamma, weight1, weight2, weight3):
    """Fit three base classifiers plus a soft VotingClassifier on two Iris
    features and return a 2x2 matplotlib figure of their decision surfaces.

    Parameters
    ----------
    feature_string : str
        Two feature names separated by a comma, e.g.
        ``"Sepal Length, Sepal Width"``; each must appear in FEATURE_NAMES.
    max_depth : int
        ``max_depth`` for the DecisionTreeClassifier.
    n_neighbors : int
        ``n_neighbors`` for the KNeighborsClassifier.
    gamma : float
        RBF ``gamma`` for the SVC.
    weight1, weight2, weight3 : float
        Soft-voting weights for the tree, KNN and SVC respectively.

    Returns
    -------
    matplotlib.figure.Figure
        Figure with one decision-surface subplot per classifier.
    """
    np.random.seed(SEED)

    # "Sepal Length, Sepal Width" -> column indices into iris.data.
    names = [part.strip() for part in feature_string.split(',')]
    col_x = FEATURE_NAMES.index(names[0])
    col_y = FEATURE_NAMES.index(names[1])

    X = iris.data[:, [col_x, col_y]]
    y = iris.target

    # Shuffle the samples (reproducible thanks to the fixed SEED).
    order = np.random.permutation(X.shape[0])
    X, y = X[order], y[order]

    tree = DecisionTreeClassifier(max_depth=max_depth)
    knn = KNeighborsClassifier(n_neighbors=n_neighbors, n_jobs=-1)
    svc = SVC(gamma=gamma, kernel="rbf", probability=True)
    voter = VotingClassifier(
        estimators=[("dt", tree), ("knn", knn), ("svc", svc)],
        voting="soft",
        weights=[weight1, weight2, weight3],
    )

    models = [tree, knn, svc, voter]
    for model in models:
        model.fit(X, y)

    fig = plt.figure(figsize=(12, 12))
    class_colors = [C1, C2, C3]

    for i, model in enumerate(models):
        xx, yy, Z = get_decision_surface(X, y, model)

        ax = fig.add_subplot(2, 2, i + 1)
        ax.contourf(xx, yy, Z, cmap=CMAP, alpha=0.65)

        # Overlay the training points, one scatter call per class so each
        # class gets its own legend entry.
        for j, label in enumerate(TARGET_NAMES):
            mask = y == j
            ax.scatter(X[mask, 0], X[mask, 1],
                       c=[class_colors[j]] * int(mask.sum()),
                       edgecolor='k', s=40, label=label)

        ax.set_xlabel(names[0])
        ax.set_ylabel(names[1])
        ax.legend()
        ax.set_title(f'{MODEL_NAMES[i]}')

    return fig
89
+
90
# Description rendered as Markdown at the top of the app page.
info = '''
# Voting Classifier Decision Surface

This app plots the decision surface of four classifiers on two selected features of the Iris dataset:
- DecisionTreeClassifier.
- KNeighborsClassifier.
- SupportVectorClassifier.
- A VotingClassifier from all of the above.

Use the controls below to tune the parameters of the classifiers and the weights of each of them in the soft voting classifier and click submit. The more weight you assign to a classifier, the more importance will be assigned to its predictions compared to the other classifiers in the vote.
'''

with gr.Blocks() as demo:
    gr.Markdown(info)

    # Every unordered pair of the four features, rendered as
    # "Feature A, Feature B" — the format create_plot parses back.
    pairs = [f'{a}, {b}' for a, b in combinations(FEATURE_NAMES, 2)]

    dd = gr.Dropdown(pairs, value=pairs[0], interactive=True, label="Input features")

    with gr.Row():
        with gr.Column():
            # Per-model hyper-parameters.
            s_depth = gr.Slider(1, 50, value=4, step=1, label='max_depth (for DecisionTreeClassifier)')
            s_neighbors = gr.Slider(1, 20, value=7, step=1, label='n_neighbors (for KNeighborsClassifier)')
            s_gamma = gr.Slider(0, 10, value=0.1, step=0.1, label='gamma (for SVC)')

        with gr.Column():
            # Soft-voting weights, in the same order as the estimators.
            s_w1 = gr.Slider(0, 10, value=2, step=0.1, label='DecisionTreeClassifier weight')
            s_w2 = gr.Slider(0, 10, value=1, step=0.1, label='KNeighborsClassifier weight')
            s_w3 = gr.Slider(0, 10, value=2, step=0.1, label='SVC weight')

    btn = gr.Button(value='Submit')

    plot = gr.Plot(label='Decision Surfaces')

    controls = [dd, s_depth, s_neighbors, s_gamma, s_w1, s_w2, s_w3]
    # Redraw on every submit, and draw once with the defaults on page load.
    btn.click(create_plot, inputs=controls, outputs=[plot])

    demo.load(create_plot, inputs=controls, outputs=[plot])


demo.launch()
#==================================================