Skip to content

Instantly share code, notes, and snippets.

@emilemathieu
Last active August 9, 2018 12:05
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save emilemathieu/f640d0a7d368196f39f25202b1f95d6d to your computer and use it in GitHub Desktop.
class Module(object):
    """Base class for a neural network layer.

    Subclasses implement ``forward`` and ``backward``.  Layers with
    trainable parameters store them in ``self._weight`` / ``self._bias``
    and accumulate their gradients in ``self._grad_weight`` /
    ``self._grad_bias`` during ``backward``; ``step`` then applies an
    optimizer update to both.
    """

    def forward(self, X):
        """Apply the layer function to the input data.

        Parameters
        ----------
        X : array-like, shape = [n_samples, depth_in, height_in, width_in]

        Returns
        -------
        transformed data : array-like,
            shape = [n_samples, depth_out, height_out, width_out]
        """
        raise NotImplementedError()

    def __call__(self, X):
        # Convenience: ``layer(X)`` is shorthand for ``layer.forward(X)``.
        return self.forward(X)

    def backward(self, output_grad):
        """Compute the gradient of the loss w.r.t. parameters and input.

        Parameters
        ----------
        output_grad : array-like,
            shape = [n_samples, depth_out, height_out, width_out]
            gradient returned by the above layer.

        Returns
        -------
        gradient : array-like,
            shape = [n_samples, depth_in, height_in, width_in]
            gradient to be forwarded to bottom layers.
        """
        raise NotImplementedError()

    def step(self, optimizer):
        """Do an optimization step in the direction given by the optimizer.

        Layers without trainable parameters (no ``_weight`` attribute)
        are silently skipped instead of raising ``AttributeError``.

        Parameters
        ----------
        optimizer : instance of Optimizer
            Callable as ``optimizer(layer_id, param_name, param, grad)``
            and returning the updated parameter.
        """
        if not hasattr(self, '_weight'):
            # Parameter-free layer (e.g. an activation): nothing to update.
            return
        # ``id(self)`` gives the optimizer a stable per-layer key so it can
        # keep per-parameter state (e.g. momentum buffers) across steps.
        self._bias = optimizer(id(self), 'bias', self._bias, self._grad_bias)
        self._weight = optimizer(id(self), 'weight', self._weight, self._grad_weight)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment