import time  # used to time the training run below

import numpy as np
import tensorflow as tf
from tensorflow.keras import models, regularizers
from tensorflow.keras.applications.resnet50 import ResNet50
from tensorflow.keras.datasets import cifar10
from tensorflow.keras.layers import (Activation, Add, BatchNormalization, Conv2D,
                                     Dense, Dropout, Flatten, GlobalAveragePooling2D,
                                     MaxPooling2D, ZeroPadding2D)
from tensorflow.keras.models import Model, Sequential
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.utils import to_categorical
# import cv2
# %matplotlib inline
## load the CIFAR-10 dataset and scale pixel values to [0, 1]
(X_train, y_train), (X_test, y_test) = cifar10.load_data()
X_train = X_train.astype('float32') / 255.0
X_test = X_test.astype('float32') / 255.0
## resize images to 224x224, the input size ResNet50 was pretrained on
X_train = tf.image.resize(X_train, (224, 224))
X_test = tf.image.resize(X_test, (224, 224))
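# NOTE: resizing all 50,000 training images to 224x224 up front keeps roughly
# 30 GB of float32 data in memory. A lighter alternative (a sketch, using the
# hypothetical name train_ds and resizing per batch instead of the eager
# resize above) would be a tf.data pipeline:
# train_ds = (tf.data.Dataset.from_tensor_slices((X_train, y_train))
#             .map(lambda img, label: (tf.image.resize(img, (224, 224)), label))
#             .batch(50))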
## compute per-channel mean and std on the training set and normalize both splits
train_mean = np.mean(X_train, axis=(0, 1, 2))
train_std = np.std(X_train, axis=(0, 1, 2))
X_train = (X_train - train_mean) / train_std
X_test = (X_test - train_mean) / train_std
## one-hot encode the integer class labels
nb_classes = 10
y_train = to_categorical(y_train, nb_classes)
y_test = to_categorical(y_test, nb_classes)
## build the model: ResNet50 backbone pretrained on ImageNet with a small custom head
base_model = ResNet50(weights='imagenet', include_top=False, input_shape=(224, 224, 3))
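# include_top=False drops the ImageNet classifier, so with 224x224 inputs the
# backbone ends in a (7, 7, 2048) feature map that the head below consumes.
# Optionally, the pretrained layers can be frozen so only the new head is
# trained; this is a common choice for short fine-tuning runs and is left
# commented out here to keep the original end-to-end training behaviour:
# for layer in base_model.layers:
#     layer.trainable = False
# custom head: conv block -> dense block -> 10-way softmax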
x = Conv2D(32, (3, 3))(base_model.output)
x = BatchNormalization(axis=-1)(x)
x = Activation('relu')(x)
x = MaxPooling2D(pool_size=(2,2))(x)
x = Flatten()(x)
x = Dense(256)(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = Dropout(0.2)(x)
x = Dense(10)(x)
x = Activation('softmax')(x)
outputs = x
model = models.Model(base_model.input, outputs)
model.summary()
## compile with categorical cross-entropy and the Adam optimizer
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
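# categorical_crossentropy matches the one-hot labels produced by to_categorical
# above; with integer labels, sparse_categorical_crossentropy would be the
# equivalent choice.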
## train the model and report the elapsed time
start = time.time()
history = model.fit(X_train, y_train, batch_size=50, epochs=3, verbose=1, validation_data=(X_test, y_test))
print('training done in %.1f s' % (time.time() - start))
## evaluate on the held-out test set
score = model.evaluate(X_test, y_test)
print('Test score:', score[0])
print('Test accuracy:', score[1])
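# The ImageDataGenerator import above is unused; a minimal augmentation setup
# (a sketch with hypothetical parameters, assuming X_train/y_train are NumPy
# arrays) could look like:
# datagen = ImageDataGenerator(horizontal_flip=True, width_shift_range=0.1, height_shift_range=0.1)
# augmented = datagen.flow(X_train, y_train, batch_size=50)
# model.fit(augmented, epochs=3, validation_data=(X_test, y_test))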