Using the Colab version speeds things up on the GPU.
The `.mat` file can be obtained, e.g., from Stanford's SVHN dataset page.
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 |
"""Train a dense autoencoder on grayscale SVHN digits and plot reconstructions."""
import numpy as np
from matplotlib import pyplot as plt
import scipy.io
from keras.models import Sequential
from keras.layers.core import Dense
from keras.optimizers import Adam

# Load the SVHN training images; mat['X'] has shape (height, width, channels, n_images).
mat = scipy.io.loadmat('train_32x32.mat')
mat = mat['X']
b, h, d, n = mat.shape


def rgb2gray(rgb):
    """Convert RGB image array(s) of shape (..., 3) to grayscale.

    Uses the ITU-R BT.601 luma weights (0.299, 0.587, 0.114).
    """
    return np.dot(rgb[..., :3], [0.299, 0.587, 0.114])


# Convert all images at once: move the image axis to the front, convert to
# grayscale, then flatten each image to a row vector of length b * h.
# (Vectorized replacement for the original per-image Python loop; also drops
# the hard-coded 1024 in favor of the dimensions read from the data.)
img_gray = rgb2gray(np.transpose(mat, (3, 0, 1, 2))).reshape(n, b * h)

# Normalize pixel values to [0, 1] to match the sigmoid output layer.
X_train = img_gray / 255.
img_size = X_train.shape[1]

# Symmetric fully-connected autoencoder with a 32-unit bottleneck.
model = Sequential()
model.add(Dense(256, input_dim=img_size, activation='relu'))
model.add(Dense(128, activation='relu'))
model.add(Dense(64, activation='relu'))
model.add(Dense(32, activation='relu'))
model.add(Dense(64, activation='relu'))
model.add(Dense(128, activation='relu'))
model.add(Dense(256, activation='relu'))
model.add(Dense(img_size, activation='sigmoid'))

opt = Adam()
# Per-pixel binary cross-entropy is a standard reconstruction loss for
# inputs normalized to [0, 1] with a sigmoid output.
model.compile(loss='binary_crossentropy', optimizer=opt)

n_epochs = 100
batch_size = 512
model.fit(X_train, X_train, epochs=n_epochs, batch_size=batch_size,
          shuffle=True, validation_split=0.2)

pred = model.predict(X_train)

# Plot originals (top row) against their reconstructions (bottom row).
# Renamed from `n`, which the original reused and thereby clobbered the
# dataset size computed above.
n_display = 5
plt.figure(figsize=(15, 5))
for i in range(n_display):
    # plot original
    ax = plt.subplot(2, n_display, i + 1)
    plt.imshow(img_gray[i].reshape(32, 32), cmap='gray')
    # plot reconstruction
    ax = plt.subplot(2, n_display, i + 1 + n_display)
    plt.imshow(pred[i].reshape(32, 32), cmap='gray')
plt.show()

# Sample training output from the original run:
# Using TensorFlow backend.
# Train on 58605 samples, validate on 14652 samples
# Epoch 1/100
# 58605/58605 [==============================] - 7s 117us/step - loss: 0.6576 - val_loss: 0.6344
# Epoch 2/100
# 58605/58605 [==============================] - 6s 108us/step - loss: 0.6312 - val_loss: 0.6269
# Epoch 3/100
# 58605/58605 [==============================] - 7s 111us/step - loss: 0.6264 - val_loss: 0.6262
# Epoch 4/100
# 42496/58605 [====================>.........] - ETA: 2s - loss: 0.6260