@koshian2 · Created May 30, 2018 02:46
CIFAR-10
from keras.models import Sequential
from keras.layers import Dense, Conv2D, MaxPool2D, Activation, Flatten, BatchNormalization
from keras.optimizers import Adam
from keras.datasets import cifar10
from keras.utils.np_utils import to_categorical
import numpy as np
import time
import matplotlib.pyplot as plt
start_time = time.time()
# Load the CIFAR-10 data
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
# Scale pixel values to floats in [0, 1]
x_train = x_train / 255.0
x_test = x_test / 255.0
# Number of training / test samples
m_train, m_test = x_train.shape[0], x_test.shape[0]
# Convert y to one-hot vectors
y_train, y_test = to_categorical(y_train), to_categorical(y_test)
# Model
# CONV -> RELU -> MAXPOOL
model = Sequential()
model.add(Conv2D(10, (3, 3), strides=(1, 1), input_shape=x_train.shape[1:]))
model.add(Activation("relu"))
model.add(MaxPool2D((3, 3)))
# CONV -> RELU -> BN -> Flatten
model.add(Conv2D(20, (3, 3), strides=(1, 1)))
model.add(Activation("relu"))
#model.add(BatchNormalization(axis=3))
model.add(Flatten())
# Softmax
model.add(Dense(10, activation="softmax"))
# Compile
model.compile(optimizer=Adam(), loss="categorical_crossentropy", metrics=["accuracy"])
# Fit
history = model.fit(x_train, y_train, batch_size=64, epochs=30).history
# Elapsed time
print("Elapsed[s] : ", time.time() - start_time)
# Test accuracy
test_eval = model.evaluate(x_test, y_test)
print("train accuracy :", history["acc"][-1])
print("test accuracy :", test_eval[1])
# Plot the training loss
plt.plot(range(len(history["loss"])), history["loss"], marker=".")
plt.show()
#1.6GB
# With BatchNorm, epochs=30
#Elapsed[s] : 848.2327120304108
#10000/10000 [==============================] - 3s 283us/step
#train accuracy : 0.70732
#test accuracy : 0.6358
# Without BatchNorm, epochs=30
#Elapsed[s] : 723.7259016036987
#10000/10000 [==============================] - 2s 225us/step
#train accuracy : 0.72154
#test accuracy : 0.6568
#_________________________________________________________________
#Layer (type)                 Output Shape              Param #
#=================================================================
#conv2d_1 (Conv2D)            (None, 30, 30, 10)        280
#_________________________________________________________________
#activation_1 (Activation)    (None, 30, 30, 10)        0
#_________________________________________________________________
#max_pooling2d_1 (MaxPooling2 (None, 10, 10, 10)        0
#_________________________________________________________________
#conv2d_2 (Conv2D)            (None, 8, 8, 20)          1820
#_________________________________________________________________
#activation_2 (Activation)    (None, 8, 8, 20)          0
#_________________________________________________________________
#batch_normalization_1 (Batch (None, 8, 8, 20)          80
#_________________________________________________________________
#flatten_1 (Flatten)          (None, 1280)              0
#_________________________________________________________________
#dense_1 (Dense)              (None, 10)                12810
#=================================================================
#Total params: 14,990
#Trainable params: 14,950
#Non-trainable params: 40
#_________________________________________________________________
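The layer table above corresponds to the run with the BatchNormalization line uncommented. As a minimal sketch (not part of the original gist, and assuming the same model, x_train/y_train and x_test/y_test defined above), the table can be reproduced with model.summary(), and test accuracy can be tracked per epoch by passing validation_data to fit; with Keras 2.x and metrics=["accuracy"], the per-epoch validation accuracy appears under the "val_acc" key.

# Sketch only: print the layer table and monitor test accuracy each epoch.
# Reuses the model and data arrays defined in the script above.
model.summary()  # prints the "Layer (type) / Output Shape / Param #" table
history = model.fit(x_train, y_train,
                    batch_size=64, epochs=30,
                    validation_data=(x_test, y_test)).history
print("last val accuracy :", history["val_acc"][-1])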