Last active
October 15, 2020 20:32
-
-
Save esmitt/5c99cb29b8e96d02792adcb0a7176de3 to your computer and use it in GitHub Desktop.
Sequential design using Tensorflow 2
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
from tensorflow.keras import Sequential | |
from tensorflow.keras.layers import Conv2D, MaxPool2D, BatchNormalization, Flatten, Dropout, Dense | |
from tensorflow.keras.regularizers import l2 | |
from tensorflow.keras.activations import relu, sigmoid | |
from tensorflow.keras.initializers import GlorotNormal | |
#this configuration uses backend.set_image_data_format('channels_first') | |
""" | |
This function creates a model composed by two convolutional + max pooling layers. | |
After, to standardize the input a batch normalization is applied. | |
This is put into a single one-dimensional layer using the Flatten layer. | |
Next, the dropout regularization technique (50%) is perfomed. | |
Finally, Dense layer output for binary classification | |
""" | |
def get_model_design(filters: list, input_shape: tuple) -> Sequential: | |
model = Sequential([Conv2D(filters[0], (5, 5), padding='same', kernel_regularizer=l2(0.001), activation=relu, input_shape=input_shape), | |
Conv2D(filters[1], (3, 3), padding='same', kernel_regularizer=l2(0.001), activation=relu), | |
MaxPool2D(pool_size=(2, 2)), | |
BatchNormalization(), | |
Flatten(), | |
Dropout(0.5), | |
Dense(1, kernel_initializer=GlorotNormal(), activation=sigmoid ) | |
]) | |
return model | |
# Demo: 128 filters in the first conv layer, 64 in the second,
# with 3-channel 64x64 input images.
# NOTE(review): (3, 64, 64) is a channels-first shape — this assumes
# backend.set_image_data_format('channels_first') was applied; confirm.
model = get_model_design([128, 64], (3, 64, 64))
model.summary()
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment