giln / AsynchronousOperation.swift
Created April 20, 2017 08:39 — forked from calebd/AsynchronousOperation.swift
Concurrent NSOperation in Swift
import Foundation
/// An abstract class that makes building simple asynchronous operations easy.
/// Subclasses must implement `execute()` to perform any work and call
/// `finish()` when they are done. All `NSOperation` work will be handled
/// automatically.
open class AsynchronousOperation: Operation {
// MARK: - Properties
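The preview stops at the properties mark. A minimal sketch of how the rest of the class typically looks, based on the widely used pattern from calebd's original gist (the thread-safe state machinery and exact names below are assumptions, not a verbatim copy):

import Foundation

open class AsynchronousOperation: Operation {
    // MARK: - Properties

    /// Lifecycle states, exposed to KVO through the `state` property.
    @objc private enum State: Int {
        case ready, executing, finished
    }

    /// Serializes access to `rawState` so `state` is thread safe.
    private let stateQueue = DispatchQueue(label: "AsynchronousOperation.state", attributes: .concurrent)
    private var rawState: State = .ready

    @objc private dynamic var state: State {
        get { return stateQueue.sync { rawState } }
        set { stateQueue.sync(flags: .barrier) { rawState = newValue } }
    }

    public final override var isReady: Bool { return state == .ready && super.isReady }
    public final override var isExecuting: Bool { return state == .executing }
    public final override var isFinished: Bool { return state == .finished }
    public final override var isAsynchronous: Bool { return true }

    // Make KVO observers of isReady/isExecuting/isFinished fire when `state` changes.
    open override class func keyPathsForValuesAffectingValue(forKey key: String) -> Set<String> {
        if ["isReady", "isExecuting", "isFinished"].contains(key) {
            return ["state"]
        }
        return super.keyPathsForValuesAffectingValue(forKey: key)
    }

    // MARK: - NSOperation

    public final override func start() {
        if isCancelled {
            finish()
            return
        }
        state = .executing
        execute()
    }

    /// Subclasses must override this, perform their work, and call `finish()`.
    open func execute() {
        fatalError("Subclasses must implement `execute()`.")
    }

    /// Moves the operation into its finished state.
    public final func finish() {
        state = .finished
    }
}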
giln / ARViewController.swift
Created March 4, 2019 14:32
ARViewController #1
import ARKit
class ARViewController: UIViewController {
// MARK: - Variables
let sceneView = ARSCNView()
// MARK: - Lifecycle
override func loadView() {
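The snippet is cut off inside loadView(). A plausible completion, assuming the configuration is created and run directly in loadView(), as the later SessionConfiguration snippet shows:

import ARKit

class ARViewController: UIViewController {
    // MARK: - Variables
    let sceneView = ARSCNView()

    // MARK: - Lifecycle
    override func loadView() {
        super.loadView()
        // Use the AR scene view as the controller's root view.
        view = sceneView

        // Create and run a basic world-tracking session.
        let configuration = ARWorldTrackingConfiguration()
        sceneView.session.run(configuration)
    }
}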
giln / ARViewController.swift
Created March 4, 2019 14:34
ARViewController #2
import ARKit
class ARViewController: UIViewController, ARSessionDelegate {
// MARK: - Variables
let sceneView = ARSCNView()
// MARK: - Lifecycle
override func loadView() {
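The only visible change from #1 is the ARSessionDelegate conformance. A sketch of the completed version, assuming the controller registers itself as the session delegate in loadView(), as the SessionConfiguration snippet below does:

import ARKit

class ARViewController: UIViewController, ARSessionDelegate {
    // MARK: - Variables
    let sceneView = ARSCNView()

    // MARK: - Lifecycle
    override func loadView() {
        super.loadView()
        view = sceneView

        let configuration = ARWorldTrackingConfiguration()
        // Register as session delegate to receive the captured video frames.
        sceneView.session.delegate = self
        sceneView.session.run(configuration)
    }
}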
giln / ARSessionDelegate.swift
Created March 4, 2019 14:36
ARSessionDelegate
// MARK: - ARSessionDelegate
var currentBuffer: CVPixelBuffer?
func session(_: ARSession, didUpdate frame: ARFrame) {
// Return early if currentBuffer is not nil or the camera's tracking state is not normal
guard currentBuffer == nil, case .normal = frame.camera.trackingState else {
return
}
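The preview ends before the delegate method does. A sketch of the rest, assuming the method retains the captured image and hands off to the startDetection() defined in the later snippets:

func session(_: ARSession, didUpdate frame: ARFrame) {
    // Return early if currentBuffer is not nil or the camera's tracking state is not normal
    guard currentBuffer == nil, case .normal = frame.camera.trackingState else {
        return
    }
    // Retain the image buffer for Vision processing.
    currentBuffer = frame.capturedImage
    startDetection()
}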
giln / PredictionRequest.swift
Created March 4, 2019 14:38
predictionRequest
private lazy var predictionRequest: VNCoreMLRequest = {
// Load the ML model through its generated class and create a Vision request for it.
do {
let model = try VNCoreMLModel(for: HandModel().model)
let request = VNCoreMLRequest(model: model)
// This setting determines whether images are scaled or cropped to fit the 224x224 input size. We use scaleFill so no part of the image is cut off.
request.imageCropAndScaleOption = .scaleFill
return request
} catch {
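The preview cuts off inside the catch. A plausible completion of the property (failing hard when the model cannot load is an assumption; a missing bundled model is a programmer error):

private lazy var predictionRequest: VNCoreMLRequest = {
    // Load the ML model through its generated class and create a Vision request for it.
    do {
        let model = try VNCoreMLModel(for: HandModel().model)
        let request = VNCoreMLRequest(model: model)
        // scaleFill stretches the image to the 224x224 input size instead of cropping it.
        request.imageCropAndScaleOption = .scaleFill
        return request
    } catch {
        fatalError("Can't load Vision ML model: \(error)")
    }
}()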
giln / VNImageRequestHandler.swift
Created March 4, 2019 14:39
VNImageRequestHandler
let visionQueue = DispatchQueue(label: "com.viseo.ARML.visionqueue")
private func startDetection() {
// To avoid a force unwrap in VNImageRequestHandler
guard let buffer = currentBuffer else { return }
// Use .right orientation because the pixel data for an image captured by an iOS device is encoded in the camera sensor's native landscape orientation
let requestHandler = VNImageRequestHandler(cvPixelBuffer: buffer, orientation: .right)
// We perform our Core ML requests asynchronously.
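The asynchronous part is cut off. A sketch of the rest of startDetection(), assuming currentBuffer is released in a defer so the session delegate can accept a new frame once the request completes:

private func startDetection() {
    // To avoid a force unwrap in VNImageRequestHandler
    guard let buffer = currentBuffer else { return }

    let requestHandler = VNImageRequestHandler(cvPixelBuffer: buffer, orientation: .right)

    // Perform the Core ML request off the main thread.
    visionQueue.async {
        do {
            // Release the buffer when done so the delegate can process the next frame.
            defer { self.currentBuffer = nil }
            try requestHandler.perform([self.predictionRequest])
        } catch {
            print("Error performing Vision request: \(error)")
        }
    }
}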
giln / HandDetector.swift
Created March 4, 2019 14:40
HandDetector
import CoreML
import Vision
public class HandDetector {
// MARK: - Variables
private let visionQueue = DispatchQueue(label: "com.viseo.ARML.visionqueue")
private lazy var predictionRequest: VNCoreMLRequest = {
// Load the ML model through its generated class and create a Vision request for it.
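The class preview stops at the lazy request. A sketch of the full detector, assuming HandModel is an image-to-image Core ML model whose output Vision surfaces as a VNPixelBufferObservation, and a performDetection(inputBuffer:completion:) method shaped to match the call site in the Output snippet below:

import CoreML
import Vision

public class HandDetector {
    // MARK: - Variables
    private let visionQueue = DispatchQueue(label: "com.viseo.ARML.visionqueue")

    private lazy var predictionRequest: VNCoreMLRequest = {
        // Load the ML model through its generated class and create a Vision request for it.
        do {
            let model = try VNCoreMLModel(for: HandModel().model)
            let request = VNCoreMLRequest(model: model)
            request.imageCropAndScaleOption = .scaleFill
            return request
        } catch {
            fatalError("Can't load Vision ML model: \(error)")
        }
    }()

    // MARK: - Public functions
    public func performDetection(inputBuffer: CVPixelBuffer,
                                 completion: @escaping (_ outputBuffer: CVPixelBuffer?, _ error: Error?) -> Void) {
        // .right orientation: iOS camera pixel data is encoded in the sensor's native landscape orientation.
        let requestHandler = VNImageRequestHandler(cvPixelBuffer: inputBuffer, orientation: .right)

        // Run the request off the caller's thread.
        visionQueue.async {
            do {
                try requestHandler.perform([self.predictionRequest])
                // An image-to-image model yields pixel-buffer observations.
                guard let observation = self.predictionRequest.results?.first as? VNPixelBufferObservation else {
                    completion(nil, nil)
                    return
                }
                completion(observation.pixelBuffer, nil)
            } catch {
                completion(nil, error)
            }
        }
    }
}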
giln / Preview.swift
Created March 4, 2019 14:41
Preview
var previewView = UIImageView()
// MARK: - Lifecycle
override func loadView() {
super.loadView()
view = sceneView
// Create a session configuration
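The snippet ends before previewView is added to the hierarchy. A sketch of the layout, pinning the image view to a corner of the scene view with Auto Layout (the exact anchors and sizing are assumptions):

override func loadView() {
    super.loadView()
    view = sceneView

    // Overlay the preview image view on top of the AR scene.
    view.addSubview(previewView)
    previewView.translatesAutoresizingMaskIntoConstraints = false
    NSLayoutConstraint.activate([
        previewView.bottomAnchor.constraint(equalTo: view.bottomAnchor),
        previewView.rightAnchor.constraint(equalTo: view.rightAnchor)
    ])
}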
giln / Output.swift
Created March 4, 2019 14:42
Output
// MARK: - Private functions
let handDetector = HandDetector()
private func startDetection() {
// To avoid a force unwrap in VNImageRequestHandler
guard let buffer = currentBuffer else { return }
handDetector.performDetection(inputBuffer: buffer) { outputBuffer, _ in
// Here we are on a background thread
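The completion handler is truncated. A sketch of its body, assuming the output buffer is wrapped in a UIImage, published to previewView on the main thread, and currentBuffer released so the next frame can be processed:

handDetector.performDetection(inputBuffer: buffer) { outputBuffer, _ in
    // Here we are on a background thread
    var previewImage: UIImage?
    defer {
        DispatchQueue.main.async {
            // UIKit work belongs on the main thread; releasing the buffer
            // lets the session delegate accept a new frame.
            self.previewView.image = previewImage
            self.currentBuffer = nil
        }
    }
    guard let outputBuffer = outputBuffer else { return }
    // Wrap the model's output pixel buffer in an image for display.
    previewImage = UIImage(ciImage: CIImage(cvPixelBuffer: outputBuffer))
}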
giln / SessionConfiguration.swift
Created March 4, 2019 14:43
SessionConfiguration
// Create a session configuration
let configuration = ARWorldTrackingConfiguration()
// Enable horizontal plane detection
configuration.planeDetection = .horizontal
// We want to receive the frames from the video feed
sceneView.session.delegate = self
// Run the session with the configuration
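The last comment is left hanging in the preview; the call it announces is the standard one:

sceneView.session.run(configuration)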