@montardon
Created July 20, 2019 09:02
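
Deeplearning4j ComputationGraph summary of a U-Net style segmentation network: five downsampling stages, five upsampling stages with skip concatenations, and a single-channel output. The Keras-style layer names (conv2d_1, batch_normalization_1, ...) suggest the model was imported from Keras.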
=====================================================================================================================================================================
VertexName (VertexType) nIn,nOut TotalParams ParamsShape Vertex Inputs
=====================================================================================================================================================================
input_1 (InputVertex) -,- - - -
conv2d_1 (ConvolutionLayer) 3,32 896 W:{32,3,3,3}, b:{1,32} [input_1]
batch_normalization_1 (BatchNormalization) 32,32 128 gamma:{1,32}, beta:{1,32}, mean:{1,32}, var:{1,32} [conv2d_1]
activation_1 (ActivationLayer) -,- 0 - [batch_normalization_1]
conv2d_2 (ConvolutionLayer) 32,32 9248 W:{32,32,3,3}, b:{1,32} [activation_1]
batch_normalization_2 (BatchNormalization) 32,32 128 gamma:{1,32}, beta:{1,32}, mean:{1,32}, var:{1,32} [conv2d_2]
activation_2 (ActivationLayer) -,- 0 - [batch_normalization_2]
max_pooling2d_1 (SubsamplingLayer) -,- 0 - [activation_2]
conv2d_3 (ConvolutionLayer) 32,64 18496 W:{64,32,3,3}, b:{1,64} [max_pooling2d_1]
batch_normalization_3 (BatchNormalization) 64,64 256 gamma:{1,64}, beta:{1,64}, mean:{1,64}, var:{1,64} [conv2d_3]
activation_3 (ActivationLayer) -,- 0 - [batch_normalization_3]
conv2d_4 (ConvolutionLayer) 64,64 36928 W:{64,64,3,3}, b:{1,64} [activation_3]
batch_normalization_4 (BatchNormalization) 64,64 256 gamma:{1,64}, beta:{1,64}, mean:{1,64}, var:{1,64} [conv2d_4]
activation_4 (ActivationLayer) -,- 0 - [batch_normalization_4]
max_pooling2d_2 (SubsamplingLayer) -,- 0 - [activation_4]
conv2d_5 (ConvolutionLayer) 64,128 73856 W:{128,64,3,3}, b:{1,128} [max_pooling2d_2]
batch_normalization_5 (BatchNormalization) 128,128 512 gamma:{1,128}, beta:{1,128}, mean:{1,128}, var:{1,128} [conv2d_5]
activation_5 (ActivationLayer) -,- 0 - [batch_normalization_5]
conv2d_6 (ConvolutionLayer) 128,128 147584 W:{128,128,3,3}, b:{1,128} [activation_5]
batch_normalization_6 (BatchNormalization) 128,128 512 gamma:{1,128}, beta:{1,128}, mean:{1,128}, var:{1,128} [conv2d_6]
activation_6 (ActivationLayer) -,- 0 - [batch_normalization_6]
max_pooling2d_3 (SubsamplingLayer) -,- 0 - [activation_6]
conv2d_7 (ConvolutionLayer) 128,256 295168 W:{256,128,3,3}, b:{1,256} [max_pooling2d_3]
batch_normalization_7 (BatchNormalization) 256,256 1024 gamma:{1,256}, beta:{1,256}, mean:{1,256}, var:{1,256} [conv2d_7]
activation_7 (ActivationLayer) -,- 0 - [batch_normalization_7]
conv2d_8 (ConvolutionLayer) 256,256 590080 W:{256,256,3,3}, b:{1,256} [activation_7]
batch_normalization_8 (BatchNormalization) 256,256 1024 gamma:{1,256}, beta:{1,256}, mean:{1,256}, var:{1,256} [conv2d_8]
activation_8 (ActivationLayer) -,- 0 - [batch_normalization_8]
max_pooling2d_4 (SubsamplingLayer) -,- 0 - [activation_8]
conv2d_9 (ConvolutionLayer) 256,512 1180160 W:{512,256,3,3}, b:{1,512} [max_pooling2d_4]
batch_normalization_9 (BatchNormalization) 512,512 2048 gamma:{1,512}, beta:{1,512}, mean:{1,512}, var:{1,512} [conv2d_9]
activation_9 (ActivationLayer) -,- 0 - [batch_normalization_9]
conv2d_10 (ConvolutionLayer) 512,512 2359808 W:{512,512,3,3}, b:{1,512} [activation_9]
batch_normalization_10 (BatchNormalization) 512,512 2048 gamma:{1,512}, beta:{1,512}, mean:{1,512}, var:{1,512} [conv2d_10]
activation_10 (ActivationLayer) -,- 0 - [batch_normalization_10]
max_pooling2d_5 (SubsamplingLayer) -,- 0 - [activation_10]
conv2d_11 (ConvolutionLayer) 512,1024 4719616 W:{1024,512,3,3}, b:{1,1024} [max_pooling2d_5]
batch_normalization_11 (BatchNormalization) 1024,1024 4096 gamma:{1,1024}, beta:{1,1024}, mean:{1,1024}, var:{1,1024} [conv2d_11]
activation_11 (ActivationLayer) -,- 0 - [batch_normalization_11]
conv2d_12 (ConvolutionLayer) 1024,1024 9438208 W:{1024,1024,3,3}, b:{1,1024} [activation_11]
batch_normalization_12 (BatchNormalization) 1024,1024 4096 gamma:{1,1024}, beta:{1,1024}, mean:{1,1024}, var:{1,1024} [conv2d_12]
activation_12 (ActivationLayer) -,- 0 - [batch_normalization_12]
up_sampling2d_1 (Upsampling2D) -,- 0 - [activation_12]
concatenate_1 (MergeVertex) -,- - - [up_sampling2d_1, activation_10]
conv2d_13 (ConvolutionLayer) 1536,512 7078400 W:{512,1536,3,3}, b:{1,512} [concatenate_1]
batch_normalization_13 (BatchNormalization) 512,512 2048 gamma:{1,512}, beta:{1,512}, mean:{1,512}, var:{1,512} [conv2d_13]
activation_13 (ActivationLayer) -,- 0 - [batch_normalization_13]
conv2d_14 (ConvolutionLayer) 512,512 2359808 W:{512,512,3,3}, b:{1,512} [activation_13]
batch_normalization_14 (BatchNormalization) 512,512 2048 gamma:{1,512}, beta:{1,512}, mean:{1,512}, var:{1,512} [conv2d_14]
activation_14 (ActivationLayer) -,- 0 - [batch_normalization_14]
up_sampling2d_2 (Upsampling2D) -,- 0 - [activation_14]
concatenate_2 (MergeVertex) -,- - - [up_sampling2d_2, activation_8]
conv2d_15 (ConvolutionLayer) 768,256 1769728 W:{256,768,3,3}, b:{1,256} [concatenate_2]
batch_normalization_15 (BatchNormalization) 256,256 1024 gamma:{1,256}, beta:{1,256}, mean:{1,256}, var:{1,256} [conv2d_15]
activation_15 (ActivationLayer) -,- 0 - [batch_normalization_15]
conv2d_16 (ConvolutionLayer) 256,256 590080 W:{256,256,3,3}, b:{1,256} [activation_15]
batch_normalization_16 (BatchNormalization) 256,256 1024 gamma:{1,256}, beta:{1,256}, mean:{1,256}, var:{1,256} [conv2d_16]
activation_16 (ActivationLayer) -,- 0 - [batch_normalization_16]
up_sampling2d_3 (Upsampling2D) -,- 0 - [activation_16]
concatenate_3 (MergeVertex) -,- - - [up_sampling2d_3, activation_6]
conv2d_17 (ConvolutionLayer) 384,128 442496 W:{128,384,3,3}, b:{1,128} [concatenate_3]
batch_normalization_17 (BatchNormalization) 128,128 512 gamma:{1,128}, beta:{1,128}, mean:{1,128}, var:{1,128} [conv2d_17]
activation_17 (ActivationLayer) -,- 0 - [batch_normalization_17]
conv2d_18 (ConvolutionLayer) 128,128 147584 W:{128,128,3,3}, b:{1,128} [activation_17]
batch_normalization_18 (BatchNormalization) 128,128 512 gamma:{1,128}, beta:{1,128}, mean:{1,128}, var:{1,128} [conv2d_18]
activation_18 (ActivationLayer) -,- 0 - [batch_normalization_18]
up_sampling2d_4 (Upsampling2D) -,- 0 - [activation_18]
concatenate_4 (MergeVertex) -,- - - [up_sampling2d_4, activation_4]
conv2d_19 (ConvolutionLayer) 192,64 110656 W:{64,192,3,3}, b:{1,64} [concatenate_4]
batch_normalization_19 (BatchNormalization) 64,64 256 gamma:{1,64}, beta:{1,64}, mean:{1,64}, var:{1,64} [conv2d_19]
activation_19 (ActivationLayer) -,- 0 - [batch_normalization_19]
conv2d_20 (ConvolutionLayer) 64,64 36928 W:{64,64,3,3}, b:{1,64} [activation_19]
batch_normalization_20 (BatchNormalization) 64,64 256 gamma:{1,64}, beta:{1,64}, mean:{1,64}, var:{1,64} [conv2d_20]
activation_20 (ActivationLayer) -,- 0 - [batch_normalization_20]
up_sampling2d_5 (Upsampling2D) -,- 0 - [activation_20]
concatenate_5 (MergeVertex) -,- - - [up_sampling2d_5, activation_2]
conv2d_21 (ConvolutionLayer) 96,32 27680 W:{32,96,3,3}, b:{1,32} [concatenate_5]
batch_normalization_21 (BatchNormalization) 32,32 128 gamma:{1,32}, beta:{1,32}, mean:{1,32}, var:{1,32} [conv2d_21]
activation_21 (ActivationLayer) -,- 0 - [batch_normalization_21]
conv2d_22 (ConvolutionLayer) 32,32 9248 W:{32,32,3,3}, b:{1,32} [activation_21]
batch_normalization_22 (BatchNormalization) 32,32 128 gamma:{1,32}, beta:{1,32}, mean:{1,32}, var:{1,32} [conv2d_22]
activation_22 (ActivationLayer) -,- 0 - [batch_normalization_22]
dropout_1 (DropoutLayer) -,- 0 - [activation_22]
conv2d_23 (ConvolutionLayer) 32,1 33 W:{1,32,1,1}, b:{1,1} [dropout_1]
output (CnnLossLayer) -,- 0 - [conv2d_23]
---------------------------------------------------------------------------------------------------------------------------------------------------------------------
Total Parameters: 31466753
Trainable Parameters: 31466753
Frozen Parameters: 0
=====================================================================================================================================================================
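
The TotalParams column follows directly from the ParamsShape column: a 3x3 convolution with nIn input channels and nOut output channels has nOut*nIn*3*3 weights plus nOut biases, and each BatchNormalization layer carries 4*nOut parameters (gamma, beta, running mean, running variance). A minimal sketch checking a few rows of the table with plain Java arithmetic (class and method names are illustrative, not from the gist):

public class ParamCountCheck {

    // 3x3 convolution: W:{nOut,nIn,3,3} weights plus b:{1,nOut} biases.
    static long conv3x3(long nIn, long nOut) {
        return nOut * nIn * 3 * 3 + nOut;
    }

    // Batch normalization: gamma, beta, running mean, and running variance,
    // each of shape {1,nOut}.
    static long batchNorm(long nOut) {
        return 4 * nOut;
    }

    public static void main(String[] args) {
        System.out.println(conv3x3(3, 32));     // 896     -> conv2d_1
        System.out.println(conv3x3(1536, 512)); // 7078400 -> conv2d_13
        System.out.println(batchNorm(1024));    // 4096    -> batch_normalization_11
    }
}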
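
A vertex table like the one above is what Deeplearning4j's ComputationGraph.summary() prints. A minimal sketch of how such a summary might be produced, assuming the model was imported from a Keras HDF5 file (the file name unet.h5 is a placeholder, not taken from the gist):

import org.deeplearning4j.nn.graph.ComputationGraph;
import org.deeplearning4j.nn.modelimport.keras.KerasModelImport;

public class PrintUnetSummary {
    public static void main(String[] args) throws Exception {
        // Import a Keras functional model (architecture plus weights)
        // as a DL4J ComputationGraph.
        ComputationGraph graph =
                KerasModelImport.importKerasModelAndWeights("unet.h5");

        // summary() renders the vertex/parameter table shown above.
        System.out.println(graph.summary());
    }
}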