Ayushs799 committed
Commit 8c092ed
Parent: 1f2dcf6

final touches

Files changed (2)
  1. Introduction.md +0 -1
  2. app.py +25 -13
Introduction.md CHANGED
@@ -7,7 +7,6 @@ For example
 
 Conformal prediction is a technique for quantifying such uncertainties for AI systems. In particular, given an input, conformal prediction estimates a prediction interval in regression problems and a set of classes in classification problems. Both the prediction interval and sets are guaranteed to cover the true value with high probability.
 
- ---
 
 # Theory
 
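The paragraph in the diff above states the coverage guarantee informally. For the classification case, a minimal sketch of one common way to obtain such a guarantee (split conformal prediction on softmax scores) is given below. It is not part of this commit; the names `cal_probs`, `cal_labels`, and `alpha` are illustrative placeholders for a held-out calibration set and the target error rate.

```python
import numpy as np

def conformal_threshold(cal_probs, cal_labels, alpha=0.1):
    """Calibrate a softmax threshold so that the prediction set
    {class y : softmax_y >= threshold} covers the true label with
    probability roughly 1 - alpha (split conformal prediction)."""
    n = len(cal_labels)
    # Nonconformity score: 1 minus the softmax score of the true class.
    scores = 1.0 - cal_probs[np.arange(n), cal_labels]
    # Finite-sample corrected quantile of the calibration scores.
    level = min(np.ceil((n + 1) * (1 - alpha)) / n, 1.0)
    q = np.quantile(scores, level, method="higher")
    return 1.0 - q  # threshold on the softmax scale

def prediction_set(probs, threshold):
    """Indices of all classes whose softmax score clears the threshold."""
    return np.where(probs >= threshold)[0]
```

A threshold calibrated this way plays the role of the `sigma` value used in the app.py changes below.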
 
app.py CHANGED
@@ -57,10 +57,11 @@ if __name__ == '__main__':
     st.markdown(css, unsafe_allow_html=True)
 
     with intro_tab:
-
+        st.write("", "", "")
         f = open("Introduction.md",'r')
        st.markdown(f.read())
 
+        st.write("---")
 
     with reg_tab:
         with st.container():
@@ -110,13 +111,13 @@ if __name__ == '__main__':
             ax1.patch.set_alpha(0.0)
             st.pyplot(fig1)
 
-
+        st.write("---")
 
     with class_tab:
-
-        st.write("For classification we are using the Fashion-MNIST dataset. Fashion-MNIST is a dataset of Zalando's article images, intended as a direct drop-in replacement for MNIST when benchmarking machine learning algorithms. Each example is assigned to one of the following labels: 0 T-shirt/top, 1 Trouser, 2 Pullover, 3 Dress, 4 Coat, 5 Sandal, 6 Shirt, 7 Sneaker, 8 Bag, 9 Ankle boot.")
 
-        st.write("---")
+        st.write("", "", "")
+
+        st.write("For classification we are using the Fashion-MNIST dataset. Fashion-MNIST is a dataset of Zalando's article images, intended as a direct drop-in replacement for MNIST when benchmarking machine learning algorithms. Each example is assigned to one of the following labels: 0 T-shirt/top, 1 Trouser, 2 Pullover, 3 Dress, 4 Coat, 5 Sandal, 6 Shirt, 7 Sneaker, 8 Bag, 9 Ankle boot.")
 
         st.write("Let's assume you have a trained classification model, but you can't just rely on its softmax output. This is where conformal prediction comes into play. We can use the alpha value to pick a threshold: a label is included in the prediction set only when its softmax score exceeds this threshold.")
 
@@ -133,7 +134,7 @@ if __name__ == '__main__':
 
         with st.container():
 
-            c1,col1, col2, col3,c2 = st.columns([0.2,0.3,0.3,0.3,0.2])
+            c1,col1, col2, col3,c2 = st.columns([0.3,0.3,0.3,0.3,0.3])
 
             with col1:
                 fig1, ax1 = plt.figure(), plt.gca()
@@ -146,8 +147,11 @@ if __name__ == '__main__':
                 ax1.set_yticks([])
                 st.pyplot(fig1)
 
+                out_pred = pred[0]>sigma
+                c = ['C2' if pred==1 else 'C0' for pred in out_pred]
+
                 fig1, ax1 = plt.figure(), plt.gca()
-                ax1.bar(range(10),pred[0])
+                ax1.bar(range(10),pred[0],color=c)
                 ax1.axhline(y=sigma,linestyle='dashed',c='r')
                 ax1.set_xlabel("Class Labels")
                 ax1.set_ylabel("SoftMax Probabilities")
@@ -155,7 +159,8 @@ if __name__ == '__main__':
                 ax1.set_xticks([i for i in range(10)])
                 st.pyplot(fig1)
 
-                out_labels = labels[pred[0]>sigma]
+                out_labels = labels[out_pred]
+
                 if len(out_labels)==0:
                     out_labels = ["None"]
                 out_labels = ",".join(out_labels)
@@ -174,8 +179,11 @@ if __name__ == '__main__':
                 ax1.set_yticks([])
                 st.pyplot(fig1)
 
+                out_pred = pred[1]>sigma
+                c = ['C2' if pred==1 else 'C0' for pred in out_pred]
+
                 fig1, ax1 = plt.figure(), plt.gca()
-                ax1.bar(range(10),pred[1])
+                ax1.bar(range(10),pred[1],color = c)
                 ax1.axhline(y=sigma,linestyle='dashed',c='r')
                 ax1.set_xlabel("Class Labels")
                 ax1.set_ylabel("SoftMax Probabilities")
@@ -183,7 +191,7 @@ if __name__ == '__main__':
                 ax1.set_xticks([i for i in range(10)])
                 st.pyplot(fig1)
 
-                out_labels = labels[pred[1]>sigma]
+                out_labels = labels[out_pred]
                 if len(out_labels)==0:
                     out_labels = ["None"]
                 out_labels = ",".join(out_labels)
@@ -201,8 +209,11 @@ if __name__ == '__main__':
                 ax1.set_yticks([])
                 st.pyplot(fig1)
 
+                out_pred = pred[2]>sigma
+                c = ['C2' if pred==1 else 'C0' for pred in out_pred]
+
                 fig1, ax1 = plt.figure(), plt.gca()
-                ax1.bar(range(10),pred[2])
+                ax1.bar(range(10),pred[2],color=c)
                 ax1.axhline(y=sigma,linestyle='dashed',c='r')
                 ax1.set_xlabel("Class Labels")
                 ax1.set_ylabel("SoftMax Probabilities")
@@ -210,13 +221,14 @@ if __name__ == '__main__':
                 ax1.set_xticks([i for i in range(10)])
                 st.pyplot(fig1)
 
-                out_labels = labels[pred[2]>sigma]
+                out_labels = labels[out_pred]
                 if len(out_labels)==0:
                     out_labels = ["None"]
                 out_labels = ",".join(out_labels)
                 st.write("Output Labels : "+out_labels)
                 st.write("True Label : Bag")
-
+
+        st.write("---")
 
 
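The three classification columns in the diff above repeat the same pattern: compute the boolean mask `pred[i] > sigma`, color the bars of the selected classes, and index `labels` with the mask to display the prediction set. A sketch of that pattern as a single helper is shown below for clarity only; it is not part of the commit, `show_prediction_set` is a hypothetical name, and it assumes `pred[i]` is a length-10 softmax vector, `labels` a NumPy array of the 10 class names, and `sigma` the threshold derived from alpha.

```python
import numpy as np
import matplotlib.pyplot as plt
import streamlit as st

def show_prediction_set(probs, labels, sigma):
    """Plot one image's softmax scores and report its conformal prediction set."""
    in_set = probs > sigma                                # boolean mask: classes kept in the set
    colors = ['C2' if keep else 'C0' for keep in in_set]  # highlight kept classes

    fig, ax = plt.subplots()
    ax.bar(range(10), probs, color=colors)
    ax.axhline(y=sigma, linestyle='dashed', c='r')        # the calibrated threshold
    ax.set_xlabel("Class Labels")
    ax.set_ylabel("SoftMax Probabilities")
    ax.set_xticks(range(10))
    st.pyplot(fig)

    out_labels = labels[in_set]                           # boolean indexing needs a NumPy array
    st.write("Output Labels : " + (",".join(out_labels) if len(out_labels) else "None"))
```

Note that the comprehension added by the commit (`'C2' if pred==1 else 'C0' for pred in out_pred`) reuses the name `pred` inside the comprehension; iterating over the boolean mask directly, as above, avoids that shadowing.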