Skip to content

Instantly share code, notes, and snippets.

@jamesonthecrow
jamesonthecrow / ViewController.swift
Last active June 11, 2019 22:30
Pet Segmentation iOS View Controller with Fritz (www.fritz.ai)
import UIKit
import AVFoundation
import Fritz
// Demo view controller for pet segmentation with Fritz. Conforms to the
// image-picker delegate protocols, presumably so the user can choose a photo
// to segment — confirm against the full gist; the class body is not visible here.
class ViewController: UIViewController, UIImagePickerControllerDelegate,
UINavigationControllerDelegate {
// Shows the source photo (connected in Interface Builder).
@IBOutlet var imageView: UIImageView!
// NOTE(review): presumably overlays the predicted segmentation mask — TODO confirm.
var maskView: UIImageView!
// NOTE(review): presumably holds a replacement background image — TODO confirm.
var backgroundView: UIImageView!
// NOTE(review): the remainder of this class is truncated by the page scrape.
# Create a model with a normal convolution.
inpt = keras.layers.Input(shape=(500, 500, 3))
out = keras.layers.Conv2D(10, 10)(inpt)
model = keras.models.Model(inpt, out)
# Convert the Keras model to a Core ML model and save it to disk.
mlmodel = coremltools.converters.keras.convert(model)
mlmodel.save('convolution.mlmodel')
# Create a model with a dilated (atrous) convolution.
# NOTE(review): the conversion/save lines for this second model appear to be
# truncated by the page scrape — compare with the first example above.
inpt = keras.layers.Input(shape=(500, 500, 3))
out = keras.layers.Conv2D(10, 10, dilation_rate=4)(inpt)
// PUT A BREAKPOINT HERE
// Compile the raw .mlmodel asset into the on-device .mlmodelc format.
// NOTE(review): `assetPath` is defined outside this snippet; the force
// unwraps (`try!`, `!`) will crash if the asset is missing or compilation
// fails — acceptable in a demo, but use do/catch and guard-let in production.
let compiledModelURL = try! MLModel.compileModel(at: assetPath!)
// Initialize the model for use on a specific set of hardware
let config = MLModelConfiguration()
config.computeUnits = .all // can be .all, .cpuAndGPU, or .cpuOnly
let mlmodel = try! MLModel(contentsOf: compiledModelURL, configuration: config)
from coremltools.models.neural_network import quantization_utils
def quantize_model(mlmodel, nbits, method='linear'):
    """Quantize the weights of an mlmodel to a specific number of bits.

    Args:
        mlmodel (coremltools.models.MLModel): A Core ML model
        nbits (int): the bit depth of the quantized weights.
        method (string): the quantization method.
    """
    # NOTE(review): the function body is missing from this snippet (truncated
    # by the page scrape); it presumably delegates to
    # quantization_utils — recover the original from the gist before editing.
from coremltools.models.neural_network import flexible_shape_utils

def make_mlmodel_flexible(spec, size_range=(100, 1920)):
    """Make input and output sizes of a Core ML model flexible.

    Args:
        spec (NeuralNetwork_pb2): a Core ML neural network spec
        size_range ((int, int)): a tuple containing the min and max input sizes.
    """
    # Fixed: the original signature was missing the closing parenthesis on the
    # `size_range` default value (`size_range=(100, 1920):`), a syntax error.
    size_range_spec = flexible_shape_utils.NeuralNetworkImageSizeRange()
    # NOTE(review): the rest of the body (setting the width/height range on
    # `size_range_spec` and applying it to the spec's inputs/outputs via
    # flexible_shape_utils) appears truncated by the page scrape — recover it
    # from the original gist.
# NOTE(review): this snippet is garbled by the page scrape — lines from an
# unrelated instance-normalization example are spliced into the docstring
# below, and the actual function body is not visible. Recover the original
# from the gist before making any code change here.
def add_reflective_padding_and_crop(mlmodel, padding_size=20):
"""Add reflective padding and crop layers to remove edge artifacts.
Because the convolution layers rely on 'same' padding, stylized images have
a small ring of distortion around the outer edge. This can be eliminated
with reflective padding on the input image. This method modifies the
original MLModel spec to add a padding layer after the input and a crop
layer before the output to remove the padding at the end.
Args:
instance_norm_spec = create_instance_normalization_spec(keras_model.layers[-1])
# Hook the layer up to the global model input and output
instance_norm_spec.input[:] = ["input1"]
instance_norm_spec.output[:] = ["output1"]
# Replace the custom layer placeholder with the new instance norm layer
mlmodel._spec.neuralNetwork.layers[-1].CopyFrom(instance_norm_spec)
mlmodel.get_spec()
"""
def create_instance_normalization_spec(layer):
"""Convert a DeprocessStylizedImage Keras layer to Core ML.
Args:
layer (keras.layers.Layer): An Instance Normalization Keras layer.
Returns:
spec (NeuralNetwork_pb2.NeuralNetworkLayer): a core ml layer spec
"""
# NOTE(review): only the signature and docstring survive in this snippet —
# the function body was lost in the page scrape. Recover it from the
# original gist before editing.
# Instance Norm
# Build a minimal Keras model whose only layer is an InstanceNormalization
# (from keras_contrib), then convert it to Core ML. With
# `add_custom_layers=True` and no conversion function registered for the
# layer, the converter inserts a custom-layer placeholder into the spec
# (presumably replaced later by create_instance_normalization_spec — the
# scrape order makes this hard to confirm).
inpt = keras.layers.Input(shape=(500, 500, 3))
out = keras_contrib.layers.InstanceNormalization(axis=-1)(inpt)
keras_model = keras.models.Model(inpt, out)
mlmodel = coremltools.converters.keras.convert(
keras_model,
add_custom_layers=True,
custom_conversion_functions={}
)
// Get the first pose
Pose pose = poseResult.getPoses().get(0);
// Get the body keypoints
Keypoints[] keypoints = pose.getKeypoints();
// Get the name of the keypoint
String partName = keypoints[0].getPartName();
PointF keypointPoisition = keypoints[0].getPosition()