@ikhlestov
Created February 7, 2017 10:44
import tensorflow as tf


def composite_function(self, _input, out_features, kernel_size=3):
    """Function from paper H_l that performs:
    - batch normalization
    - ReLU nonlinearity
    - convolution with required kernel
    - dropout, if required
    """
    with tf.variable_scope("composite_function"):
        # BN
        output = self.batch_norm(_input)
        # ReLU
        output = tf.nn.relu(output)
        # convolution
        output = self.conv2d(
            output, out_features=out_features, kernel_size=kernel_size)
        # dropout (only during training and only if keep_prob is not 1.0)
        output = self.dropout(output)
    return output
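

# self.dropout is referenced above but is not included in this gist. Below is a
# minimal sketch of such a helper, assuming the class exposes self.keep_prob
# (a float) and self.is_training (a boolean tensor): dropout is applied only in
# training mode and only when keep_prob is not 1.
def dropout(self, _input):
    if self.keep_prob < 1:
        output = tf.cond(
            self.is_training,
            lambda: tf.nn.dropout(_input, self.keep_prob),
            lambda: _input)
    else:
        output = _input
    return output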


def conv2d(self, _input, out_features, kernel_size,
           strides=[1, 1, 1, 1], padding='SAME'):
    in_features = int(_input.get_shape()[-1])
    kernel = self.weight_variable_msra(
        [kernel_size, kernel_size, in_features, out_features],
        name='kernel')
    output = tf.nn.conv2d(_input, kernel, strides, padding)
    return output
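

# self.weight_variable_msra is also not part of this gist. A minimal sketch,
# assuming it creates a variable with MSRA (He) initialization via
# tf.contrib.layers.variance_scaling_initializer:
def weight_variable_msra(self, shape, name):
    return tf.get_variable(
        name=name,
        shape=shape,
        initializer=tf.contrib.layers.variance_scaling_initializer())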


def batch_norm(self, _input):
    output = tf.contrib.layers.batch_norm(
        _input, scale=True, is_training=self.is_training,
        updates_collections=None)
    return output


# example of chaining composite_function inside a dense block
_input = 'some_3_dims'  # placeholder for the incoming feature map tensor
layers = 3
for layer in range(layers):
    l_output = composite_function(_input, layer_out_features)
    # concatenate along the channel axis so each layer sees all previous outputs
    _input = tf.concat(values=(_input, l_output), axis=3)
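
# Channel bookkeeping for the loop above (illustrative numbers, not from the
# gist): every iteration concatenates layer_out_features new feature maps onto
# the input, so a block that starts with k0 channels and uses growth rate
# k = layer_out_features ends with k0 + layers * k channels,
# e.g. 16 + 3 * 12 = 52.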