import numpy as np
from keras.applications import VGG16, ResNet50
from keras.preprocessing.image import ImageDataGenerator
from keras import models, layers, optimizers
from keras.layers.normalization import BatchNormalization
from keras.preprocessing.image import load_img
# Frozen VGG16 convolutional base, used purely as a fixed feature extractor
# (include_top=False drops the ImageNet classifier head).
vgg_model = VGG16(weights='imagenet', include_top=False, input_shape=(224, 224, 3))

train_dir = './flower_photos/train'
valid_dir = './flower_photos/valid'

# 3 classes; assumes 500 train / 50 validation images per class — TODO confirm
# these counts match the actual directories.
n_train = 3 * 500
n_val = 3 * 50

batch_size = 16
datagen = ImageDataGenerator(rescale=1. / 255)

# Pre-allocated buffers: VGG16 conv-base output is 7x7x512 per 224x224 image;
# labels are one-hot over the 3 classes.
train_features = np.zeros((n_train, 7, 7, 512))
train_labels = np.zeros((n_train, 3))

train_generator = datagen.flow_from_directory(
    train_dir,
    target_size=(224, 224),
    batch_size=batch_size,
    class_mode='categorical',
    shuffle=True,
)
i = 0
# Run the frozen conv base over the training images and cache the features.
# The last batch of an epoch can be smaller than batch_size, and the directory
# may contain more images than n_train, so clamp each write to the space left
# in the pre-allocated buffers instead of assuming a full batch always fits.
for inputs_batch, labels_batch in train_generator:
    features_batch = vgg_model.predict(inputs_batch)
    start = i * batch_size
    end = min(start + len(features_batch), n_train)
    train_features[start:end] = features_batch[:end - start]
    train_labels[start:end] = labels_batch[:end - start]
    i += 1
    if end >= n_train:
        break  # the generator loops forever; stop once the buffers are full
# Flatten the 7x7x512 feature maps for the dense classifier head.
train_features = np.reshape(train_features, (n_train, 7 * 7 * 512))
# Same feature-caching pass for the validation set.
validation_features = np.zeros((n_val, 7, 7, 512))
validation_labels = np.zeros((n_val, 3))
validation_generator = datagen.flow_from_directory(
    valid_dir,
    target_size=(224, 224),
    batch_size=batch_size,
    class_mode='categorical',
    shuffle=False)  # keep file order so features align with generator.classes
i = 0
for inputs_batch, labels_batch in validation_generator:
    features_batch = vgg_model.predict(inputs_batch)
    start = i * batch_size
    end = min(start + len(features_batch), n_val)
    # Clamp the write: the final batch may be short, and the directory may
    # hold more images than n_val — an unclamped slice assignment would
    # raise a shape-mismatch ValueError in that case.
    validation_features[start:end] = features_batch[:end - start]
    validation_labels[start:end] = labels_batch[:end - start]
    i += 1
    if end >= n_val:
        break
# Flatten the 7x7x512 feature maps for the dense classifier head.
validation_features = np.reshape(validation_features, (n_val, 7 * 7 * 512))
from keras import models
from keras import layers
from keras import optimizers
from keras.layers.normalization import BatchNormalization
# Classifier head trained on the cached VGG16 features: one hidden layer with
# batch normalization and dropout, then a 3-way softmax.
model = models.Sequential([
    layers.Dense(512, activation='relu', input_dim=7 * 7 * 512),
    BatchNormalization(),
    layers.Dropout(0.5),
    layers.Dense(3, activation='softmax'),
])
model.compile(
    optimizer=optimizers.Adam(lr=1e-4),
    loss='categorical_crossentropy',
    metrics=['acc'],
)
# Train the head on the cached features (the conv base is never updated).
history = model.fit(train_features,
                    train_labels,
                    epochs=10,
                    batch_size=batch_size,
                    validation_data=(validation_features, validation_labels))

# shuffle=False on the validation generator keeps filenames/classes aligned
# with the cached validation_features rows.
fnames = validation_generator.filenames
ground_truth = validation_generator.classes
label2index = validation_generator.class_indices
# Getting the mapping from class index to class label
idx2label = dict((v, k) for k, v in label2index.items())

# Predict once and take the argmax of the softmax probabilities:
# Sequential.predict_classes is deprecated/removed in newer Keras, and this
# also avoids running a second forward pass over the same features.
prob = model.predict(validation_features)
predictions = np.argmax(prob, axis=1)

errors = np.where(predictions != ground_truth)[0]
print("No of errors = {}/{}".format(len(errors), n_val))
print(label2index)
print(len(train_features), len(validation_features))
import matplotlib.pylab as plt
from keras.preprocessing.image import load_img
import os

# Show the misclassified validation images with their predicted label and
# confidence. The 5x6 subplot grid holds at most 30 images, so cap the loop
# (the original indexed past the grid and crashed when len(errors) > 30).
n_show = min(len(errors), 30)
plt.figure(figsize=(20, 12))
for i in range(n_show):
    pred_class = np.argmax(prob[errors[i]])
    pred_label = idx2label[pred_class]
    original = load_img(os.path.join(valid_dir, fnames[errors[i]]))
    plt.subplot(5, 6, i + 1)
    plt.imshow(original)
    plt.axis('off')
    # os.path.dirname extracts the class subdirectory portably; splitting on
    # a literal backslash only worked for Windows-style paths.
    plt.title('Original label:{}\nPrediction:{}\nconfidence:{:.3f}'.format(
        os.path.dirname(fnames[errors[i]]),
        pred_label,
        prob[errors[i]][pred_class]), size=15)
plt.savefig('error.png')
plt.show()