from coremltools.proto import NeuralNetwork_pb2
import numpy as np


def create_instance_normalization_spec(layer):
    """Convert an InstanceNormalization Keras layer to a Core ML layer spec.

    Args:
        layer (keras.layers.Layer): An Instance Normalization Keras layer.

    Returns:
        spec_layer (NeuralNetwork_pb2.NeuralNetworkLayer): a Core ML layer spec.
    """
    # Extract the layer inputs and outputs from Keras and create
    # equivalent names for Core ML.
    input_name = layer._inbound_nodes[0].inbound_layers[0].name
    input_name += '_output'
    output_name = layer.name + '_output'

    # Create a new NeuralNetworkLayer object from the
    # Core ML protobuf spec and set its properties.
    spec_layer = NeuralNetwork_pb2.NeuralNetworkLayer()
    spec_layer.name = layer.name
    spec_layer.input.append(input_name)
    spec_layer.output.append(output_name)

    # Layer types in Core ML are defined by the parameters provided
    # to the layer. To make this a normalization layer, we populate
    # the batchnorm layer params and flag instance normalization below.
    spec_layer_params = spec_layer.batchnorm

    # Extract parameters from the Keras layer.
    # Weight arrangement in Keras: gamma, beta, mean, variance.
    weights = layer.get_weights()
    channels = weights[0].shape[0]

    # Default to an identity scale/shift when the Keras layer was
    # built without them (scale=False or center=False).
    idx = 0
    gamma = np.ones(channels)
    beta = np.zeros(channels)
    if layer.scale:
        gamma = weights[idx]
        idx += 1
    if layer.center:
        beta = weights[idx]
        idx += 1
    epsilon = layer.epsilon or 1e-5

    # Set the parameters.
    spec_layer_params.channels = channels
    spec_layer_params.gamma.floatValue.extend(map(float, gamma.flatten()))
    spec_layer_params.beta.floatValue.extend(map(float, beta.flatten()))
    spec_layer_params.epsilon = epsilon
    spec_layer_params.computeMeanVar = True
    spec_layer_params.instanceNormalization = True
    return spec_layer
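
A minimal usage sketch follows. It assumes the InstanceNormalization layer comes from keras_contrib and an older Keras stack where inbound_layers is indexable as above; the toy model and its layer sizes are purely illustrative and not part of the original gist.

from keras.layers import Input, Conv2D
from keras.models import Model
from keras_contrib.layers import InstanceNormalization  # assumption: layer provided by keras_contrib

# Build a toy model containing an InstanceNormalization layer.
inputs = Input(shape=(256, 256, 3))
x = Conv2D(8, 3, padding='same')(inputs)
x = InstanceNormalization(axis=-1)(x)
toy_model = Model(inputs, x)

# Convert each InstanceNormalization layer to a Core ML spec layer.
# The resulting spec layer can then be spliced into a converted model's
# spec.neuralNetwork.layers in place of a custom-layer placeholder.
for keras_layer in toy_model.layers:
    if isinstance(keras_layer, InstanceNormalization):
        spec_layer = create_instance_normalization_spec(keras_layer)
        print(spec_layer.name, spec_layer.batchnorm.channels)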