import coremltools

# Convert the Caffe age and gender nets (weights + deploy prototxt + mean image) to Core ML.
# Passing image_input_names='data' makes the generated models take an image (CVPixelBuffer)
# as input rather than a raw multi-array.
# https://apple.github.io/coremltools/generated/coremltools.converters.caffe.convert.html
coreml_modelAge = coremltools.converters.caffe.convert(('age_net.caffemodel', 'deploy_age.prototxt', 'mean.binaryproto'), image_input_names='data')
coreml_modelAge.save('Age.mlmodel')

coreml_modelGender = coremltools.converters.caffe.convert(('gender_net.caffemodel', 'deploy_gender.prototxt', 'mean.binaryproto'), image_input_names='data')
coreml_modelGender.save('Gender.mlmodel')
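Once the two .mlmodel files are added to the Xcode project, Xcode generates matching Swift classes (Age and Gender) that the app code below builds on. As a minimal sketch of how the converted Age model can be driven through Vision, mirroring the Gender request used later in CoreMLExtension.swift; the predictAge helper name and the assumption that the softmax scores arrive as an MLMultiArray are mine, not part of the original gist:

import UIKit
import CoreML
import Vision

// Sketch only: assumes Age.mlmodel was added to the project (so Xcode generated an `Age` class)
// and that the net's softmax output is reported as an MLMultiArray feature value.
func predictAge(on cgImage: CGImage, completion: @escaping (MLMultiArray?) -> Void) {
    guard let visionModel = try? VNCoreMLModel(for: Age().model) else {
        completion(nil)
        return
    }
    let request = VNCoreMLRequest(model: visionModel) { request, _ in
        let first = (request.results as? [VNCoreMLFeatureValueObservation])?.first
        completion(first?.featureValue.multiArrayValue)
    }
    let handler = VNImageRequestHandler(cgImage: cgImage, options: [:])
    try? handler.perform([request])
}

Vision takes care of scaling and converting the CGImage to the model's expected input, which is the main convenience over the manual pixel-buffer path shown in predictImageUIGraphics below.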
@IBAction func camera(_ sender: Any) {
    let cameraPicker = UIImagePickerController()
    cameraPicker.delegate = self
    if cameraMode {
        cameraPicker.sourceType = .camera
        cameraPicker.allowsEditing = true
        cameraPicker.cameraCaptureMode = .photo
        cameraPicker.cameraDevice = .rear
        cameraPicker.showsCameraControls = true
    } else {
        // Assumed fallback: pick an existing photo when camera mode is off
        cameraPicker.sourceType = .photoLibrary
    }
    present(cameraPicker, animated: true, completion: nil)
}
import UIKit
import CoreML
import Vision

class ViewController: UIViewController, UINavigationControllerDelegate, UIImagePickerControllerDelegate {

    //MARK: - UIImagePickerControllerDelegate

    func imagePickerControllerDidCancel(_ picker: UIImagePickerController) {
        picker.dismiss(animated: true, completion: nil)
    }

    func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [String : Any]) {
        picker.dismiss(animated: true, completion: nil)
        guard let image = info["UIImagePickerControllerOriginalImage"] as? UIImage else {
            self.resetOnError()
            return
        }
        // Run the Vision-based gender prediction (defined in CoreMLExtension.swift)
        predictImageVision(image: image)
    }
}
//
// CoreMLExtension.swift
// GenderAge
//
// Created by Daniele on 12/09/17.
// Copyright © 2017 nexor. All rights reserved.
//
import UIKit
import CoreML
import Vision

// Assumed structure: these helpers extend ViewController, which owns the imgPicture/lblGender outlets and the analyzing/noPixels status strings used below.
extension ViewController {

    func predictImageUIGraphics(image: UIImage) {
        // Plain Core ML path: turn the UIImage into the CVPixelBuffer the model expects
        let (pixelBuffer, _) = image.pixelBuffer()
        guard let pixels = pixelBuffer else {
            self.resetOnError()
            return
        }
        self.lblGender?.text = analyzing
        // Assumption: the generated Gender class exposes prediction(data:) with a "prob" multi-array output
        guard let output = try? Gender().prediction(data: pixels) else { return self.resetOnError() }
        self.setGender(prob: output.prob)
    }
    func predictImageVision(image: UIImage) {
        // Vision path: wrap the generated Gender model and let Vision scale the image
        let genderModel = try! VNCoreMLModel(for: Gender().model)
        let genderRequest = VNCoreMLRequest(model: genderModel) { (req, err) in
            guard let results = req.results as? [VNCoreMLFeatureValueObservation], let firstResult = results.first else {
                self.resetOnError()
                return
            }
            self.setGender(prob: firstResult.featureValue.multiArrayValue)
        }
        // Perform the request on the picked image
        guard let cgImage = image.cgImage else { return self.resetOnError() }
        let handler = VNImageRequestHandler(cgImage: cgImage, options: [:])
        try? handler.perform([genderRequest])
    }
    func resetOnError() {
        DispatchQueue.main.async {
            self.imgPicture?.image = nil
            self.lblGender?.text = self.noPixels
        }
    }
    func setGender(prob: MLMultiArray?) {
        DispatchQueue.main.async {
            guard let probArray = prob, probArray.count >= 2 else { return self.resetOnError() }
            // Assumed class order from the original gender_net labels: index 0 = male, index 1 = female
            self.lblGender?.text = probArray[0].doubleValue > probArray[1].doubleValue ? "Male" : "Female"
        }
    }
}

<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="13529" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" colorMatched="YES" initialViewController="BYZ-38-t0r">
    <device id="retina5_9" orientation="portrait">
        <adaptation id="fullscreen"/>
    </device>
    <dependencies>
        <deployment identifier="iOS"/>
        <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="13527"/>
        <capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
    </dependencies>