Skip to content

Instantly share code, notes, and snippets.

@gudgud96
Created November 25, 2020 08:37
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save gudgud96/72d6530a5a4ecaece09532e0ed1b3e01 to your computer and use it in GitHub Desktop.
Parameterized Pooling Methods
import tensorflow as tf
from tensorflow.keras import initializers
from tensorflow.keras import constraints
from tensorflow.keras import regularizers
from tensorflow.keras import backend as K
class GeneralizedMeanPooling(tf.keras.layers.Layer):
    """Applies a 2D power-average (generalized mean) pooling over an input.

    Refer: https://github.com/JDAI-CV/fast-reid/fastreid/layers/pooling.py

    The function computed is: ``f(X) = pow(mean(pow(X, p)), 1/p)`` where the
    exponent ``p`` is a trainable scalar multiple of ``norm``:
      - as p -> infinity, this approaches max pooling
      - at p = 1, this is average pooling

    The spatial axes (1, 2) are pooled away; the number of output features
    equals the number of input channels.
    NOTE(review): assumes input is ``(batch, H, W, channels)`` with static
    H and W — `x.shape[1] * x.shape[2]` fails for dynamic spatial dims.
    NOTE(review): ``K.pow`` of a negative value with a fractional exponent
    yields NaN — presumably inputs are post-ReLU activations; confirm.

    Args:
        norm: initial value of the exponent ``p``; must be positive.
    """

    def __init__(self, norm=3, **kwargs):
        super(GeneralizedMeanPooling, self).__init__(**kwargs)
        if norm <= 0:
            # Raise instead of assert: asserts are stripped under `python -O`.
            raise ValueError("norm must be positive, got %r" % (norm,))
        self.norm = float(norm)

    def build(self, input_shapes):
        # Single trainable scalar that scales the base exponent `norm`.
        self.kernel = self.add_weight(name='kernel',
                                      shape=(1,),
                                      initializer='ones',
                                      trainable=True)
        # Bug fix: forward the actual input shape to the base class
        # (the original passed None).
        super(GeneralizedMeanPooling, self).build(input_shapes)

    def call(self, x):
        # Effective exponent. It starts at `norm` (kernel initialized to
        # ones) but is NOT bounded during training.
        p = self.kernel * self.norm
        x_pow = K.pow(x, p)
        # Mean over the spatial axes (1, 2); axis 3 is the channel axis.
        x_pow_mean = K.sum(x_pow, axis=(1, 2)) / (x.shape[1] * x.shape[2])
        return K.pow(x_pow_mean, 1 / p)

    def get_config(self):
        # Include `norm` so the layer survives serialization round-trips.
        config = {'norm': self.norm}
        base_config = super(GeneralizedMeanPooling, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
class AutoPool1D(tf.keras.layers.Layer):
    '''Automatically tuned soft-max pooling.

    This layer automatically adapts the pooling behavior to interpolate
    between mean- and max-pooling for each dimension: a per-feature
    trainable scaling is applied before a softmax along `axis`, so a zero
    scaling yields uniform (mean) weights while a large scaling approaches
    max pooling.

    Taken and adapted from Brian McFee's implementation:
    https://github.com/marl/autopool/blob/master/autopool/autopool.py
    In this adapted version, the output can be aggregated or not using
    the `is_aggregate` argument.
    '''

    def __init__(self, axis=0, is_aggregate=False,
                 kernel_initializer='zeros',
                 kernel_constraint=None,
                 kernel_regularizer=None,
                 **kwargs):
        '''
        Parameters
        ----------
        axis : int
            Axis along which to perform the pooling. By default 0 (should be time).
        is_aggregate : bool
            If True, return the pooled (weighted-sum) values with `axis`
            removed; if False, return only the pooling weights (same shape
            as the input).
        kernel_initializer: Initializer for the weights matrix
        kernel_regularizer: Regularizer function applied to the weights matrix
        kernel_constraint: Constraint function applied to the weights matrix
        kwargs
        '''
        # Support the Keras `input_dim` shorthand for `input_shape`.
        if 'input_shape' not in kwargs and 'input_dim' in kwargs:
            kwargs['input_shape'] = (kwargs.pop('input_dim'),)
        super(AutoPool1D, self).__init__(**kwargs)
        self.axis = axis
        self.is_aggregate = is_aggregate
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.kernel_constraint = constraints.get(kernel_constraint)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.input_spec = tf.keras.layers.InputSpec(min_ndim=3)
        self.supports_masking = True

    def build(self, input_shape):
        assert len(input_shape) >= 3
        input_dim = input_shape[-1]
        # One trainable scaling coefficient per feature (last) dimension.
        self.kernel = self.add_weight(shape=(1, input_dim),
                                      initializer=self.kernel_initializer,
                                      name='kernel',
                                      regularizer=self.kernel_regularizer,
                                      constraint=self.kernel_constraint)
        self.input_spec = tf.keras.layers.InputSpec(min_ndim=2, axes={-1: input_dim})
        self.built = True

    def compute_output_shape(self, input_shape):
        return self.get_output_shape_for(input_shape)

    def get_output_shape_for(self, input_shape):
        # Bug fix: when not aggregating, `call` returns the weights, which
        # have the same shape as the input — the pooled axis must NOT be
        # removed in that case.
        if not self.is_aggregate:
            return tuple(input_shape)
        shape = list(input_shape)
        del shape[self.axis]
        return tuple(shape)

    def get_config(self):
        config = {'kernel_initializer': initializers.serialize(self.kernel_initializer),
                  'kernel_constraint': constraints.serialize(self.kernel_constraint),
                  'kernel_regularizer': regularizers.serialize(self.kernel_regularizer),
                  'axis': self.axis,
                  # Bug fix: `is_aggregate` was missing, so a deserialized
                  # layer silently reverted to returning weights only.
                  'is_aggregate': self.is_aggregate}
        base_config = super(AutoPool1D, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))

    def call(self, x, mask=None):
        scaled = self.kernel * x
        # Numerically stable softmax along the pooling axis: subtract the
        # per-slice max before exponentiating.
        max_val = K.max(scaled, axis=self.axis, keepdims=True)
        softmax = K.exp(scaled - max_val)
        weights = softmax / K.sum(softmax, axis=self.axis, keepdims=True)
        if self.is_aggregate:
            # Weighted sum collapses the pooled axis.
            return K.sum(x * weights, axis=self.axis, keepdims=False)
        return weights
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment