Created
February 24, 2022 08:59
-
-
Save jS5t3r/fde796a3154c39f961f0d3686b88b722 to your computer and use it in GitHub Desktop.
Original ReLU
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
WideResNet( | |
(init_conv): Conv2d(3, 16, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) | |
(conv2): Sequential( | |
(0): WideBasic( | |
(residual): Sequential( | |
(0): BatchNorm2d(16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) | |
(1): ReLU(inplace=True) | |
(2): Conv2d(16, 160, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) | |
(3): BatchNorm2d(160, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) | |
(4): ReLU(inplace=True) | |
(5): Dropout(p=0.5, inplace=False) | |
(6): Conv2d(160, 160, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) | |
) | |
(shortcut): Sequential( | |
(0): Conv2d(16, 160, kernel_size=(1, 1), stride=(1, 1)) | |
) | |
) | |
(1): WideBasic( | |
(residual): Sequential( | |
(0): BatchNorm2d(160, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) | |
(1): ReLU(inplace=True) | |
(2): Conv2d(160, 160, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) | |
(3): BatchNorm2d(160, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) | |
(4): ReLU(inplace=True) | |
(5): Dropout(p=0.5, inplace=False) | |
(6): Conv2d(160, 160, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) | |
) | |
(shortcut): Sequential() | |
) | |
(2): WideBasic( | |
(residual): Sequential( | |
(0): BatchNorm2d(160, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) | |
(1): ReLU(inplace=True) | |
(2): Conv2d(160, 160, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) | |
(3): BatchNorm2d(160, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) | |
(4): ReLU(inplace=True) | |
(5): Dropout(p=0.5, inplace=False) | |
(6): Conv2d(160, 160, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) | |
) | |
(shortcut): Sequential() | |
) | |
(3): WideBasic( | |
(residual): Sequential( | |
(0): BatchNorm2d(160, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) | |
(1): ReLU(inplace=True) | |
(2): Conv2d(160, 160, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) | |
(3): BatchNorm2d(160, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) | |
(4): ReLU(inplace=True) | |
(5): Dropout(p=0.5, inplace=False) | |
(6): Conv2d(160, 160, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) | |
) | |
(shortcut): Sequential() | |
) | |
) | |
(conv3): Sequential( | |
(0): WideBasic( | |
(residual): Sequential( | |
(0): BatchNorm2d(160, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) | |
(1): ReLU(inplace=True) | |
(2): Conv2d(160, 320, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1)) | |
(3): BatchNorm2d(320, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) | |
(4): ReLU(inplace=True) | |
(5): Dropout(p=0.5, inplace=False) | |
(6): Conv2d(320, 320, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) | |
) | |
(shortcut): Sequential( | |
(0): Conv2d(160, 320, kernel_size=(1, 1), stride=(2, 2)) | |
) | |
) | |
(1): WideBasic( | |
(residual): Sequential( | |
(0): BatchNorm2d(320, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) | |
(1): ReLU(inplace=True) | |
(2): Conv2d(320, 320, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) | |
(3): BatchNorm2d(320, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) | |
(4): ReLU(inplace=True) | |
(5): Dropout(p=0.5, inplace=False) | |
(6): Conv2d(320, 320, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) | |
) | |
(shortcut): Sequential() | |
) | |
(2): WideBasic( | |
(residual): Sequential( | |
(0): BatchNorm2d(320, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) | |
(1): ReLU(inplace=True) | |
(2): Conv2d(320, 320, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) | |
(3): BatchNorm2d(320, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) | |
(4): ReLU(inplace=True) | |
(5): Dropout(p=0.5, inplace=False) | |
(6): Conv2d(320, 320, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) | |
) | |
(shortcut): Sequential() | |
) | |
(3): WideBasic( | |
(residual): Sequential( | |
(0): BatchNorm2d(320, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) | |
(1): ReLU(inplace=True) | |
(2): Conv2d(320, 320, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) | |
(3): BatchNorm2d(320, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) | |
(4): ReLU(inplace=True) | |
(5): Dropout(p=0.5, inplace=False) | |
(6): Conv2d(320, 320, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) | |
) | |
(shortcut): Sequential() | |
) | |
) | |
(conv4): Sequential( | |
(0): WideBasic( | |
(residual): Sequential( | |
(0): BatchNorm2d(320, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) | |
(1): ReLU(inplace=True) | |
(2): Conv2d(320, 640, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1)) | |
(3): BatchNorm2d(640, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) | |
(4): ReLU(inplace=True) | |
(5): Dropout(p=0.5, inplace=False) | |
(6): Conv2d(640, 640, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) | |
) | |
(shortcut): Sequential( | |
(0): Conv2d(320, 640, kernel_size=(1, 1), stride=(2, 2)) | |
) | |
) | |
(1): WideBasic( | |
(residual): Sequential( | |
(0): BatchNorm2d(640, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) | |
(1): ReLU(inplace=True) | |
(2): Conv2d(640, 640, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) | |
(3): BatchNorm2d(640, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) | |
(4): ReLU(inplace=True) | |
(5): Dropout(p=0.5, inplace=False) | |
(6): Conv2d(640, 640, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) | |
) | |
(shortcut): Sequential() | |
) | |
(2): WideBasic( | |
(residual): Sequential( | |
(0): BatchNorm2d(640, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) | |
(1): ReLU(inplace=True) | |
(2): Conv2d(640, 640, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) | |
(3): BatchNorm2d(640, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) | |
(4): ReLU(inplace=True) | |
(5): Dropout(p=0.5, inplace=False) | |
(6): Conv2d(640, 640, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) | |
) | |
(shortcut): Sequential() | |
) | |
(3): WideBasic( | |
(residual): Sequential( | |
(0): BatchNorm2d(640, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) | |
(1): ReLU(inplace=True) | |
(2): Conv2d(640, 640, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) | |
(3): BatchNorm2d(640, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) | |
(4): ReLU(inplace=True) | |
(5): Dropout(p=0.5, inplace=False) | |
(6): Conv2d(640, 640, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) | |
) | |
(shortcut): Sequential() | |
) | |
) | |
(bn): BatchNorm2d(640, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) | |
(relu): ReLU(inplace=True) | |
(avg_pool): AdaptiveAvgPool2d(output_size=(1, 1)) | |
(linear): Linear(in_features=640, out_features=10, bias=True) | |
) | |
extract_characteristics.py | |
################### | |
# Register forward hooks on every ReLU inside the WideResNet residual branches,
# plus the final top-level ReLU, so activations can be captured during a forward
# pass (e.g. for characteristic extraction / adversarial-detection statistics).
#
# Each WideBasic block's `residual` Sequential has ReLUs at indices 1 and 4
# (see the model printout above). Hook names match the original hand-written
# scheme exactly: "<stage>_<block>_relu_<index>", e.g. "conv2_0_relu_1".
# Registration order is also preserved: conv2 blocks 0-3, then conv3, then
# conv4, then the final "relu".
#
# NOTE(review): assumes `model` is the WideResNet instance and `get_activation`
# is a hook factory defined elsewhere in this file — confirm both are in scope.
for stage_name in ("conv2", "conv3", "conv4"):
    stage = getattr(model, stage_name)
    for block_idx, block in enumerate(stage):
        # Indices 1 and 4 are the two ReLU layers in each residual branch.
        for relu_idx in (1, 4):
            block.residual[relu_idx].register_forward_hook(
                get_activation(f"{stage_name}_{block_idx}_relu_{relu_idx}")
            )

# Final ReLU after the last BatchNorm, before average pooling.
model.relu.register_forward_hook(get_activation('relu'))
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment