"""Digit recognition.ipynb |
|
|
|
Automatically generated by Colaboratory. |
|
|
|
Original file is located at |
|
https://colab.research.google.com/drive/1ntlTNRmG8jUmse_tq4zYkkmH6AZlXwlH |
|
""" |
|
|
|
import numpy as np
import matplotlib.pyplot as plt

from tensorflow import keras
from tensorflow.keras import backend as K
from tensorflow.keras.datasets import mnist
from tensorflow.keras.layers import Dense, Flatten, Reshape, Lambda

# Load MNIST and scale pixel values from [0, 255] to [0, 1].
(x_train, y_train), (x_test, y_test) = mnist.load_data()

x_train = x_train / 255
x_test = x_test / 255

x_train.shape  # (60000, 28, 28)

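# Optional sanity check (not in the original notebook): display a few training
# digits to confirm the data loaded and scaled as expected.
fig, axes = plt.subplots(1, 5, figsize=(10, 2))
for i, ax in enumerate(axes):
    ax.imshow(x_train[i], cmap='gray')
    ax.set_title(int(y_train[i]))
    ax.axis('off')
plt.show()
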
# Hyperparameters: a 2-D latent space keeps the learned encoding easy to visualize.
batch_size = 32
latent_dim = 2

# Encoder: flatten the 28x28 image and map it to the mean and log-variance
# of a diagonal Gaussian over the latent space.
inputs = keras.Input(shape=(28, 28))
x = Flatten()(inputs)
x = Dense(256, activation='relu')(x)
x = Dense(128, activation='relu')(x)
z_mean = Dense(latent_dim)(x)
z_log_var = Dense(latent_dim)(x)


def sampling(args):
    """Reparameterization trick: z = mu + sigma * epsilon, with epsilon ~ N(0, 1)."""
    z_mean, z_log_var = args
    epsilon = K.random_normal(shape=(K.shape(z_mean)[0], latent_dim), mean=0.0, stddev=1.0)
    return z_mean + K.exp(0.5 * z_log_var) * epsilon


z = Lambda(sampling)([z_mean, z_log_var])

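# For intuition only (not part of the model graph): the same reparameterization
# in plain NumPy, with made-up values for the mean and log-variance.
_mu = np.array([[0.0, 1.0]])
_log_var = np.array([[0.0, -2.0]])
_eps = np.random.normal(size=_mu.shape)
_z_example = _mu + np.exp(0.5 * _log_var) * _eps
print(_z_example)
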
# Decoder: map a latent point back to a 28x28 image.
decoder_inputs = keras.Input(shape=(latent_dim,))
x = Dense(128, activation='relu')(decoder_inputs)
x = Dense(256, activation='relu')(x)
x = Dense(784, activation='sigmoid')(x)
outputs = Reshape((28, 28))(x)

# Assemble the encoder, the decoder, and the end-to-end VAE.
encoder = keras.Model(inputs, z, name='encoder')
decoder = keras.Model(decoder_inputs, outputs, name='decoder')
vae_outputs = decoder(encoder(inputs))
vae = keras.Model(inputs, vae_outputs, name='vae')

# Reconstruction term: binary cross-entropy over the flattened pixels, scaled by
# the 28 * 28 pixels per image.
reconstruction_loss = keras.losses.binary_crossentropy(K.flatten(inputs), K.flatten(vae_outputs)) * 28 * 28

# KL term: KL(N(mu, sigma^2) || N(0, 1)) = -0.5 * sum(1 + log(sigma^2) - mu^2 - sigma^2).
kl_loss = -0.5 * K.sum(1 + z_log_var - K.square(z_mean) - K.exp(z_log_var), axis=-1)

vae_loss = K.mean(reconstruction_loss + kl_loss)

# The loss is built from tensors inside the graph, so it is attached with
# add_loss and no separate loss or targets are passed to compile/fit.
vae.add_loss(vae_loss)
vae.compile(optimizer='adam')

# Train; fit is called without targets because the loss was added via add_loss.
vae.fit(x_train, epochs=5, batch_size=batch_size, shuffle=True)

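# Because the KL term regularizes the latent codes toward N(0, 1), decoding
# points drawn from a standard normal should produce plausible digits. A small
# sketch of that (not in the original notebook); the grid size n is arbitrary.
n = 5
random_latents = np.random.normal(size=(n * n, latent_dim))
generated = decoder.predict(random_latents)
fig, axes = plt.subplots(n, n, figsize=(6, 6))
for img, ax in zip(generated, axes.ravel()):
    ax.imshow(img, cmap='gray')
    ax.axis('off')
plt.show()
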
# Project the test set into the 2-D latent space and plot it.
encoded_imgs = encoder.predict(x_test)
plt.scatter(encoded_imgs[:, 0], encoded_imgs[:, 1])
plt.show()

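# The same scatter, colored by digit label (an added visualization, not in the
# original notebook): well-separated clusters suggest the encoder has learned a
# useful latent structure.
plt.scatter(encoded_imgs[:, 0], encoded_imgs[:, 1], c=y_test, cmap='tab10', s=3)
plt.colorbar()
plt.show()
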
# Reconstruct the first test digit and display it.
plt.imshow(vae.predict(x_test[:1])[0], cmap='gray')
plt.show()

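# Side-by-side comparison of the original digit and its reconstruction
# (an added check, not in the original notebook).
original = x_test[:1]
reconstructed = vae.predict(original)
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(6, 3))
ax1.imshow(original[0], cmap='gray')
ax1.set_title('original')
ax1.axis('off')
ax2.imshow(reconstructed[0], cmap='gray')
ax2.set_title('reconstruction')
ax2.axis('off')
plt.show()
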
# For comparison with the latent scatter above: 10,000 samples from a standard
# normal distribution, the prior used by the KL term.
one = np.random.normal(size=10000)
two = np.random.normal(size=10000)

plt.scatter(one, two)
plt.show()
print(np.std(one))

plt.hist(one, bins=250)
plt.show()
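
# Rough visual check (added, not in the original notebook): overlay the first
# latent dimension of the encoded test digits on the N(0, 1) samples above.
plt.hist(one, bins=100, alpha=0.5, density=True, label='N(0, 1) samples')
plt.hist(encoded_imgs[:, 0], bins=100, alpha=0.5, density=True, label='encoded z[:, 0]')
plt.legend()
plt.show()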