import UIKit
import AVFoundation
import Core
import Permission
import RxCocoa
import RxOptional
import RxSwift
import Vision
typealias CameraViewControllerDependencies = ViewControllerFactory
// swiftlint:disable file_length
// swiftlint:disable:next type_body_length
class CameraViewController: UIViewController {
typealias Dependencies = CameraContainer
& GalleryContainer
& ViewControllerFactoryContainer
& ClarityClassifierContainer
& APIContainer
& ApplicationEnvironment
// KAO: Setter injection
var dependencies: Dependencies!
var viewControllerFactory: ViewControllerFactory {
return dependencies.viewControllerFactory
}
private var clarityClassifier: ClarityClassifier {
return dependencies.clarityClassifier
}
private var api: HLAPIClient {
return dependencies.api
}
private var smartCaptureIsActive: Bool = true {
didSet {
updateCaptureControls()
}
}
private var smartCaptureWanted: Bool = true {
didSet {
smartCaptureIsActive = smartCaptureWanted
}
}
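// `smartCaptureWanted` records the user's preference; `smartCaptureIsActive`
// is the effective state, which can be suspended independently (for example
// while a stacked presentation is on screen — see `stackDismissed()` below,
// which restores the wanted value).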
private var stability: Int = 0
private var cameraManager: CameraManager { return dependencies.camera }
fileprivate var gallery: Gallery { return dependencies.gallery }
private final let hasShownInstruction: String = "hasShownInstruction"
let disposeBag = DisposeBag()
var customTransition: StackedSegue!
var canShowMessages: Bool { return true }
private let lesion = Lesion()
// Disable this to see the energy impact of just running the neural net,
// otherwise it also counts the GPU activity of drawing the bounding boxes.
let drawBoundingBoxes = true
// How many predictions we can do concurrently.
static let maxInflightBuffers = 3
var requests = [VNCoreMLRequest]()
var startTimes: [CFTimeInterval] = []
var boundingBoxes = [BoundingBox]()
var colors: [UIColor] = [.white]
let ciContext = CIContext()
var resizedPixelBuffers: [CVPixelBuffer?] = []
var inflightBuffer = 0
let semaphore = DispatchSemaphore(value: CameraViewController.maxInflightBuffers)
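// Pairs with `videoCapture(didCaptureVideoFrame:)`: each incoming frame calls
// `wait()` and each finished prediction calls `signal()`, so at most
// `maxInflightBuffers` predictions run at once and excess frames are dropped.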
// contains Camera Preview Layer, cropped
@IBOutlet private var cameraPreviewView: CameraPreviewView!
// contains video images, zoomed and scaled up to fill the screen, blurred and placed under the camera preview
@IBOutlet private var backgroundVideoView: UIImageView!
@IBOutlet var cameraButtonContainer: UIView!
@IBOutlet var captureButton: HapticButton!
@IBOutlet var enableSmartCapture: UIButton!
@IBOutlet var disableSmartCapture: UIButton!
@IBOutlet var menuButton: UIButton!
@IBOutlet var messageButton: UIButton!
@IBOutlet var messageCountLabel: UILabel!
@IBOutlet var galleryButton: UIButton!
@IBOutlet var galleryIcon: UIImageView!
@IBOutlet var galleryView: UICollectionView!
@IBOutlet var galleryTop: NSLayoutConstraint!
@IBOutlet var flashContainer: UIView!
@IBOutlet var flashImage: UIImageView!
@IBOutlet var rotateCameraContainer: UIView!
@IBOutlet var rotateCameraImage: UIImageView!
@IBOutlet var zoomContainer: UIView!
@IBOutlet var zoomImage: UIImageView!
// Instruction text
@IBOutlet var instructionContainer: UIView!
@IBOutlet var instructionHeaderLabel: UILabel!
@IBOutlet var instructionBodyLabel: UILabel!
@IBOutlet var instructionCloseButton: UIButton!
/// Determines whether the view controller shows the gallery view; checked only when the view loads.
var hasGallery: Bool { return true }
private var galleryIsOpen = BehaviorRelay(value: false)
private lazy var galleryDataSource = GalleryCollectionDataSource(gallery: gallery)
private lazy var captureHaptic = UINotificationFeedbackGenerator()
private lazy var smartHaptic = UISelectionFeedbackGenerator()
private var cameraEnabled: Bool = false
init(dependencies: Dependencies) {
self.dependencies = dependencies
super.init(nibName: "\(CameraViewController.self)", bundle: .main)
}
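// Storyboard/nib decoding path: `dependencies` is not set here, so it must be
// injected before the view loads (see the setter-injection note above).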
required init?(coder: NSCoder) {
super.init(coder: coder)
}
@IBAction func showMenu() {
guard let menu = sideMenuController else {
fatalError("expected menu controller to be available")
}
menu.revealMenu(animated: true)
}
@IBAction func showMessage() {
let messageCenter = viewControllerFactory.createMessageCenterViewController()
let navController = UINavigationController(rootViewController: messageCenter)
pauseCamera()
present(navController, animated: true, completion: {
navController.presentationController?.delegate = self
})
}
@IBAction func focusAtCenter() {
cameraManager.focusAtCenter()
}
override var preferredStatusBarStyle: UIStatusBarStyle {
return .lightContent
}
override func viewDidLoad() {
super.viewDidLoad()
navigationController?.isNavigationBarHidden = true
// camera observers
configureCameraPreview()
// configureClarityMonitor()
configureGallery()
observeMessageBadge()
smartCaptureWanted = false
captureButton?.mode = .manual
configureButtons()
showInstructionsIfRequired()
instructionCloseButton?.rx
.tap
.subscribe(onNext: closeInstructions)
.disposed(by: disposeBag)
// move to VM later
// used to update the notification badge
// TODO: KAO: This is rubbish. Does a full checkup pull to update the
// badge as a side effect. #wtf
_ = dependencies.store.patients
api.checkup.fetchAllCheckups(patientIds: dependencies.patientStore.allPatients().map { $0.id })
.take(1).subscribe().disposed(by: disposeBag)
NotificationCenter.default.rx.notification(Notification.Name("restartCamera"))
.subscribe { [weak self] _ in
self?.resumeCamera()
}
.disposed(by: disposeBag)
setUpBoundingBoxes()
setUpCoreImage()
}
func showInstructionsIfRequired() {
instructionContainer?.isHidden = true
switch captureButton?.mode {
case .smart:
guard UserDefaults.standard.bool(forKey: "hasSeenSmartModeInstructions") == false else { return }
instructionContainer?.isHidden = false
instructionHeaderLabel?.text = "Smart Mode:"
instructionBodyLabel?.text = """
Hover the camera 10-15 cm over a mole and move it around \
until the AI button changes colors. When it turns green, \
your picture is taken automatically.
"""
case .manual:
guard UserDefaults.standard.bool(forKey: "hasSeenManualModeInstructions") == false else { return }
instructionContainer?.isHidden = false
instructionHeaderLabel?.text = "Manual Mode:"
instructionBodyLabel?.text = """
Hold the camera 10-15 cm over a mole and when you \
have a clear, in-focus shot, tap the white \
button to take a picture.
"""
case .labeled, .none:
break
}
}
@objc public func closeInstructions() {
switch captureButton?.mode {
case .smart:
UserDefaults.standard.set(true, forKey: "hasSeenSmartModeInstructions")
case .manual:
UserDefaults.standard.set(true, forKey: "hasSeenManualModeInstructions")
case .labeled, .none:
break
}
instructionContainer?.isHidden = true
}
private func configureCameraOptions() {
let tapFlashImage = UITapGestureRecognizer(target: self, action: #selector(didTapFlash))
let tapRotateCamera = UITapGestureRecognizer(target: self, action: #selector(didTapRotateCamera))
let tapZoom = UITapGestureRecognizer(target: self, action: #selector(didTapZoom))
flashContainer?.addGestureRecognizer(tapFlashImage)
rotateCameraContainer?.addGestureRecognizer(tapRotateCamera)
zoomContainer?.addGestureRecognizer(tapZoom)
updateRotateImage()
updateZoomImage()
updateFlashImage()
}
@objc func didTapRotateCamera() {
cameraManager.flipCamera()
updateRotateImage()
updateZoomImage()
updateFlashImage()
}
private func updateRotateImage() {
UIView.animate(withDuration: 1.0) { [weak self] in
switch self?.cameraManager.cameraPosition {
case .front:
self?.rotateCameraImage?.image = UIImage(named: "rotate_front")?.withRenderingMode(.alwaysTemplate)
case .back:
self?.rotateCameraImage?.image = UIImage(named: "rotate_back")?.withRenderingMode(.alwaysTemplate)
case .none, .unspecified:
break
case .some:
break
}
}
}
@objc func didTapZoom() {
cameraManager.toggleZoom()
updateZoomImage()
}
private func updateZoomImage() {
UIView.animate(withDuration: 1.0) { [weak self] in
switch self?.cameraManager.zoomLevel {
case .normal:
self?.zoomImage?.image = UIImage(named: "zoom_in")?.withRenderingMode(.alwaysTemplate)
case .magnify:
self?.zoomImage?.image = UIImage(named: "zoom_out")?.withRenderingMode(.alwaysTemplate)
case .none:
break
}
self?.zoomImage?.tintColor = .white
}
}
@objc func didTapFlash() {
cameraManager.toggleFlashSetting()
updateFlashImage()
}
private func updateFlashImage() {
UIView.animate(withDuration: 1.0) { [weak self] in
switch self?.cameraManager.flashState {
case .off:
self?.flashImage?.image = UIImage(named: "flash_off")?.withRenderingMode(.alwaysTemplate)
case .on:
self?.flashImage?.image = UIImage(named: "flash_on")?.withRenderingMode(.alwaysTemplate)
case .auto:
self?.flashImage?.image = UIImage(named: "flash_auto")?.withRenderingMode(.alwaysTemplate)
case .unavailable, .none:
self?.flashImage?.image = UIImage(named: "flash_off")?.withRenderingMode(.alwaysTemplate)
}
}
}
func hideControls() {
cameraButtonContainer.isHidden = true
cameraButtonContainer.alpha = 0.0
}
func showControls() {
cameraButtonContainer.isHidden = false
cameraButtonContainer.alpha = 0.0
captureButton?.setNeedsLayout()
UIView.animate(
withDuration: 0.2,
delay: 0.15,
animations: { [weak self] in
self?.cameraButtonContainer.alpha = 1.0
self?.showInstructionsIfRequired()
self?.updateRotateImage()
self?.updateZoomImage()
self?.updateFlashImage()
},
completion: { [weak self] completed in
self?.cameraButtonContainer.isUserInteractionEnabled = completed
self?.cameraEnabled = completed
}
)
}
private func observe(gallery: Gallery) {
let galleryIsEmpty = gallery.rx.images
.map { $0.isEmpty }
.share()
galleryIsEmpty
.bind(to: galleryIcon.rx.isHidden)
.disposed(by: disposeBag)
galleryIsEmpty
.map { !$0 }
.bind(to: galleryButton.rx.isEnabled)
.disposed(by: disposeBag)
}
private func observeMessageBadge() {
guard canShowMessages else {
messageCountLabel.isHidden = true
return
}
api.unreadMessageCount.asObservable()
.filter({ $0 >= 0 })
.do(onNext: { [weak self] in
self?.messageCountLabel.text = "\($0)"
UIApplication.shared.applicationIconBadgeNumber = $0
}).map { $0 == 0 }
.bind(to: messageCountLabel.rx.isHidden)
.disposed(by: disposeBag)
}
private func configureCameraPreview(isHidden: Bool = true) {
cameraPreviewView.makeRound(radius: 5)
backgroundVideoView.alpha = isHidden ? 0 : 1
cameraPreviewView.alpha = isHidden ? 0 : 1
cameraPreviewView.videoGravity = .resizeAspectFill
cameraPreviewView.videoOrientation = .portrait
cameraPreviewView.videoPreviewLayer = cameraManager.previewLayer
cameraManager.videoImage.asObservable()
.map { UIImage(ciImage: $0.0) }
.observeOn(MainScheduler.asyncInstance)
.bind(to: backgroundVideoView.rx.image)
.disposed(by: disposeBag)
}
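// Clarity pipeline (currently disabled; see the commented-out call in
// viewDidLoad): each frame is scored by the clarity classifier, the score
// drives the capture-button color, frames scoring >= 0.85 feed the stability
// counter, and a sustained run of significant frames triggers an automatic
// capture.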
private func configureClarityMonitor() {
cameraManager.videoImage.asObservable()
.flatMap { self.classify(image: $0.0, imageBuffer: $0.1) }
.observeOn(MainScheduler.asyncInstance)
.do(onNext: { self.handle(value: $0.0, imageBuffer: $0.1) })
.map { self.handle(significance: $0.0, imageBuffer: $0.1) }
.throttle(.milliseconds(5), scheduler: ConcurrentMainScheduler.instance)
.do(onNext: observe(significant:)) // look at the stability of the significance
.subscribe(onNext: handle(capture:))
.disposed(by: disposeBag)
}
private func classify(image: CIImage, imageBuffer: CVImageBuffer) -> Observable<(Float, CVImageBuffer)> {
guard smartCaptureIsActive else { return Observable.empty() }
return clarityClassifier.rx.clarity(image: image)
.map { ($0, imageBuffer) }.asObservable()
}
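// Thresholds for the capture-button feedback: >= 0.85 reads as capture-ready,
// > 0.2 as marginal, anything lower as poor. Note that the RiskClassifier
// palette is reused here, so the `.low`-risk color marks *high* clarity.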
private func handle(value: Float, imageBuffer _: CVImageBuffer) {
guard smartCaptureIsActive else { return }
UIView.animate(
withDuration: 0.25,
delay: 0,
options: [.beginFromCurrentState, .curveEaseIn],
animations: {
let low = RiskClassifier.RiskProfile.ObservationName.high.color
let medium = RiskClassifier.RiskProfile.ObservationName.medium.color
let high = RiskClassifier.RiskProfile.ObservationName.low.color
let color = value >= 0.85 ? high : value > 0.2 ? medium : low
self.captureButton?.smartModeColor = color
self.captureButton?.setNeedsLayout()
self.captureButton?.setNeedsDisplay()
},
completion: nil
)
}
private func handle(significance: Float, imageBuffer _: CVImageBuffer) -> Bool {
return significance >= 0.85
}
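// The stability counter acts as a debounce: it climbs on significant frames
// and decays (never below zero) otherwise, so a capture only fires after a
// sustained run of sharp frames (>= 20; see `handle(capture:)`).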
private func observe(significant: Bool) {
if significant {
stability += 1
} else {
stability = max(stability - 1, 0)
}
}
private func handle(capture: Bool) {
guard capture, cameraEnabled, stability >= 20 else { return }
cameraEnabled = false
captureButton?.animateCapture()
captureImage()
}
/// Shows camera preview and background video view once both are available.
private func showCameraPreview(animated: Bool) {
// do not display until both the camera preview and video data output are available
backgroundVideoView.alpha = 0
cameraPreviewView.alpha = 0
let animation: (() -> Void) = { [weak self] in
self?.backgroundVideoView.alpha = 1
self?.cameraPreviewView.alpha = 1
}
cameraManager.videoImage.map { _ in }
.take(1)
.observeOn(MainScheduler.asyncInstance)
.subscribe(onNext: {
if animated {
let animator = UIViewPropertyAnimator(duration: 0.5, curve: .easeInOut, animations: animation)
animator.startAnimation()
} else {
animation()
}
})
.disposed(by: disposeBag)
}
private func configureButtons() {
messageButton.rx.tap
.subscribe(onNext: { [unowned self] in self.showMessage() })
.disposed(by: disposeBag)
menuButton.rx.tap
.subscribe(onNext: { [unowned self] in self.showMenu() })
.disposed(by: disposeBag)
configureCaptureControls()
}
private func configureCaptureControls() {
captureButton.alpha = 0.8
captureButton.rx
.controlEvent(.primaryActionTriggered)
.subscribe(onNext: { [weak self] in
self?.captureImage()
})
.disposed(by: disposeBag)
enableSmartCapture.rx.tap
.subscribe(onNext: { [weak self] in
self?.smartHaptic.selectionChanged()
self?.smartCaptureWanted = true
self?.captureButton?.mode = .smart
self?.captureButton?.setNeedsDisplay()
self?.showInstructionsIfRequired()
})
.disposed(by: disposeBag)
disableSmartCapture.rx.tap
.subscribe(onNext: { [weak self] in
self?.smartHaptic.selectionChanged()
self?.smartCaptureWanted = false
self?.captureButton?.mode = .manual
self?.captureButton?.setNeedsDisplay()
self?.showInstructionsIfRequired()
})
.disposed(by: disposeBag)
configureSmartCapture()
}
func configureSmartCapture() {
// TODO: KAO, check device type and disable smart capture
// features if the device type isn't capable of realtime assessment
updateCaptureControls()
}
private func updateCaptureControls() {
guard isViewLoaded else { return }
if smartCaptureIsActive {
UIView.animate(
withDuration: 0.3,
delay: 0,
options: [.beginFromCurrentState, .curveEaseOut],
animations: {
self.enableSmartCapture.tintColor = UIColor.scheme.secondaryColor
self.disableSmartCapture.tintColor = #colorLiteral(red: 0.501960814, green: 0.501960814, blue: 0.501960814, alpha: 1)
},
completion: nil
)
UIView.animate(
withDuration: 0.9,
delay: 0,
options: [.beginFromCurrentState, .curveEaseInOut, .autoreverse, .repeat, .allowUserInteraction],
animations: {
self.captureButton.alpha = 0.5
},
completion: nil
)
} else {
captureButton.layer.removeAllAnimations()
UIView.animate(
withDuration: 0.3,
delay: 0,
options: [.beginFromCurrentState, .curveEaseInOut],
animations: {
self.captureButton.alpha = 0.8
},
completion: nil
)
UIView.animate(
withDuration: 0.3,
delay: 0,
options: [.beginFromCurrentState, .curveEaseOut],
animations: {
self.enableSmartCapture.tintColor = #colorLiteral(red: 0.501960814, green: 0.501960814, blue: 0.501960814, alpha: 1)
self.disableSmartCapture.tintColor = UIColor.scheme.secondaryColor
},
completion: nil
)
}
}
private func captureImage() {
captureHaptic.notificationOccurred(.success)
stability = 0
cameraManager.captureImage()
}
/// Checks whether the app has permission to use the camera and, if so, starts the capture session.
private func activateCamera() {
let startCamera = { [weak self] in
do {
try self?.cameraManager.startSession()
} catch {
env.log.error("Failed to start the camera manager: \(error)")
}
}
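// Already authorized: start the session immediately; otherwise fall through
// and request permission below.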
guard Permission.camera.status != .authorized else {
startCamera()
return
}
Permission.camera.request({ [unowned self] status in
if status == .denied {
let message = "Access to your device's Camera is needed for taking Helfies"
let alert = UIAlertController(title: "Access to Camera Denied",
message: message,
preferredStyle: .alert)
alert.addAction(UIAlertAction(title: "Ok", style: .default, handler: { _ in
alert.dismiss(animated: true)
}))
alert.addAction(UIAlertAction(title: "Settings", style: .default, handler: { _ in
let url = URL(string: UIApplication.openSettingsURLString)!
UIApplication.shared.open(url, options: [:], completionHandler: nil)
}))
self.present(alert, animated: true)
return
}
startCamera()
})
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
navigationController?.isNavigationBarHidden = true
cameraEnabled = true
// TODO: PPP clean the interface and the way of using it
resumeCamera()
configureCameraOptions()
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
cameraEnabled = false
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
resumeCamera()
}
private func resumeCamera() {
cameraEnabled = true
cameraManager.delegate = self
cameraPreviewView.videoPreviewLayer = cameraManager.previewLayer
cameraManager.setVideoOutputEnabled(true)
activateCamera()
showCameraPreview(animated: cameraManager.isCaptureSessionRunning.value == false)
}
override func viewDidDisappear(_ animated: Bool) {
super.viewDidDisappear(animated)
pauseCamera()
}
private func pauseCamera() {
cameraManager.delegate = nil
cameraPreviewView.videoPreviewLayer = nil
cameraManager.setVideoOutputEnabled(false)
}
override func viewWillLayoutSubviews() {
super.viewWillLayoutSubviews()
messageCountLabel.makeRound()
messageCountLabel.addBorder(width: 1, color: .white)
}
private func configureGallery() {
guard hasGallery else {
galleryIcon.isHidden = true
return
}
galleryIcon.transform = CGAffineTransform(rotationAngle: CGFloat.pi)
// layout and data source
guard let layoutManager = galleryView.collectionViewLayout as? UICollectionViewFlowLayout else {
fatalError("expected gallery to have flow layout")
}
layoutManager.itemSize = CGSize(width: 60, height: 60)
layoutManager.minimumLineSpacing = 10
layoutManager.scrollDirection = .horizontal
layoutManager.sectionInset = UIEdgeInsets(top: 10, left: 10, bottom: 10, right: 10)
galleryView.backgroundColor = .clear
galleryView.core.register(cellWithClass: GalleryImageCell.self)
galleryView.delegate = self
galleryView.dataSource = galleryDataSource
galleryDataSource.collectionView = galleryView
galleryDataSource.longPressed
.subscribe(onNext: { [weak self] image in
self?.displayDeleteSheet(for: image)
})
.disposed(by: disposeBag)
// toggle gallery view
galleryButton.rx.tap
.filter { [unowned self] in !self.galleryIcon.isHidden }
.map { [unowned self] in !self.galleryTop.isActive }
.bind(to: galleryIsOpen)
.disposed(by: disposeBag)
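// `galleryTop.isActive` doubles as the open/closed state: the binding above
// toggles it, and the animation below slides the gallery in or out.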
// reload gallery when open
galleryIsOpen
.distinctUntilChanged()
.observeOn(MainScheduler.instance)
.subscribe(onNext: { [unowned self] isOpen in
if isOpen {
self.galleryView.reloadData()
}
self.view.layoutIfNeeded()
UIView.animate(withDuration: 0.3, animations: {
self.galleryIcon.transform = isOpen ? .identity : CGAffineTransform(rotationAngle: CGFloat.pi)
self.galleryTop.isActive = isOpen
self.galleryView.alpha = isOpen ? 1 : 0
self.view.layoutIfNeeded()
})
})
.disposed(by: disposeBag)
// observe gallery changes
observe(gallery: gallery)
}
func closeGallery() {
galleryIsOpen.accept(false)
}
func showDetails(with image: GalleryImage) {
guard let container = cameraPreviewView else {
fatalError("expected camera container to exist, has the view been initialised?")
}
let containerTop = view.convert(container.frame, to: view.window).origin.y
let topMargin = containerTop - view.safeAreaInsets.top
customTransition = StackedSegue(topMargin: topMargin, interactiveDismissal: false)
customTransition.lifecycleDelegate = self
let detailsVC = viewControllerFactory.createDiagnoseDetailsController(with: image)
let navController = UINavigationController(rootViewController: detailsVC)
navController.modalPresentationStyle = .custom
navController.transitioningDelegate = customTransition
present(navController, animated: true) { [weak self] in
self?.captureButton?.resetAnimation()
}
closeGallery()
}
// MARK: - Initialization
func setUpBoundingBoxes() {
for _ in 0 ..< Lesion.maxBoundingBoxes {
boundingBoxes.append(BoundingBox())
}
}
func setUpCoreImage() {
// Since we might be running several requests in parallel, we also need
// to do the resizing in different pixel buffers or we might overwrite a
// pixel buffer that's already in use.
for _ in 0 ..< CameraViewController.maxInflightBuffers {
var resizedPixelBuffer: CVPixelBuffer?
let status = CVPixelBufferCreate(nil, Lesion.inputWidth, Lesion.inputHeight,
kCVPixelFormatType_32BGRA, nil,
&resizedPixelBuffer)
if status != kCVReturnSuccess {
print("Error: could not create resized pixel buffer", status)
}
resizedPixelBuffers.append(resizedPixelBuffer)
}
}
// MARK: - Doing inference
func predict(image: UIImage) {
if let pixelBuffer = image.pixelBuffer(width: Lesion.inputWidth, height: Lesion.inputHeight) {
predict(pixelBuffer: pixelBuffer, inflightIndex: 0)
}
}
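// Note: this UIImage path always reuses buffer 0 and ends up signalling the
// semaphore without a matching wait(), so it is presumably intended for
// one-off debugging rather than the live video path.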
func predict(pixelBuffer: CVPixelBuffer, inflightIndex: Int) {
// This is an alternative way to resize the image (using vImage):
// if let resizedPixelBuffer = resizePixelBuffer(pixelBuffer,
// width: Lesion.inputWidth,
// height: Lesion.inputHeight) {
// Resize the input with Core Image to 416x416.
if let resizedPixelBuffer = resizedPixelBuffers[inflightIndex] {
let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
let sx = CGFloat(Lesion.inputWidth) / CGFloat(CVPixelBufferGetWidth(pixelBuffer))
let sy = CGFloat(Lesion.inputHeight) / CGFloat(CVPixelBufferGetHeight(pixelBuffer))
let scaleTransform = CGAffineTransform(scaleX: sx, y: sy)
let scaledImage = ciImage.transformed(by: scaleTransform)
ciContext.render(scaledImage, to: resizedPixelBuffer)
// Give the resized input to our model.
do {
let boundingBoxes = try lesion.predict(image: resizedPixelBuffer)
showOnMainThread(boundingBoxes)
} catch {
env.log.error(error)
}
}
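// Always release the semaphore, even if no resize buffer was available,
// so the capture queue is never blocked permanently.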
semaphore.signal()
}
func showOnMainThread(_ boundingBoxes: [Lesion.Prediction]) {
if drawBoundingBoxes {
DispatchQueue.main.async {
// For debugging, to make sure the resized CVPixelBuffer is correct.
// var debugImage: CGImage?
// VTCreateCGImageFromCVPixelBuffer(resizedPixelBuffer, nil, &debugImage)
// self.debugImageView.image = UIImage(cgImage: debugImage!)
self.show(predictions: boundingBoxes)
}
}
}
func show(predictions: [Lesion.Prediction]) {
for i in 0 ..< boundingBoxes.count {
if i < predictions.count {
let prediction = predictions[i]
// The predicted bounding box is in the coordinate space of the input
// image, which is a square image of 416x416 pixels. We want to show it
// on the video preview, which is as wide as the screen and has a 16:9
// aspect ratio. The video preview also may be letterboxed at the top
// and bottom.
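// For example, on a 375-pt-wide screen (assuming Lesion.inputWidth == 416):
// height = 375 * 16 / 9 ≈ 666.7 pt and scaleX = 375 / 416 ≈ 0.90.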
let width = view.bounds.width
let height = width * 16 / 9
let scaleX = width / CGFloat(Lesion.inputWidth)
let scaleY = height / CGFloat(Lesion.inputHeight)
let top = (view.bounds.height - height) / 2
// Translate and scale the rectangle to our own coordinate system.
var rect = prediction.rect
rect.origin.x *= scaleX
rect.origin.y *= scaleY
rect.origin.y += top
rect.size.width *= scaleX
rect.size.height *= scaleY
// Show the bounding box.
let label = String(format: "%@ %.1f", labels[prediction.classIndex], prediction.score * 100)
let color = colors[prediction.classIndex]
boundingBoxes[i].show(frame: rect, label: label, color: color)
} else {
boundingBoxes[i].hide()
}
}
}
}
extension CameraViewController: CameraManagerDelegate {
func didCapturedImage(_ image: CapturedImage) {
DispatchQueue.global(qos: .userInitiated).async { [weak self] in
guard let self = self else { return }
do {
let image = try self.gallery.save(image)
DispatchQueue.main.async {
self.showDetails(with: image)
}
} catch {
env.log.error("Failed to save image to gallery: \(error)")
}
}
}
func videoCapture(didCaptureVideoFrame: CVPixelBuffer) {
// For debugging.
// predict(image: UIImage(named: "dog416")!); return
let pixelBuffer = didCaptureVideoFrame
// The semaphore will block the capture queue and drop frames when
// Core ML can't keep up with the camera.
semaphore.wait()
// For better throughput, we want to schedule multiple prediction requests
// in parallel. These need to be separate instances, and inflightBuffer is
// the index of the current request.
let inflightIndex = inflightBuffer
inflightBuffer += 1
if inflightBuffer >= CameraViewController.maxInflightBuffers {
inflightBuffer = 0
}
// For better throughput, perform the prediction on a concurrent
// background queue instead of on the serial VideoCapture queue.
DispatchQueue.global().async {
self.predict(pixelBuffer: pixelBuffer, inflightIndex: inflightIndex)
}
}
func didStartRecord() {
// Add the bounding box layers to the UI, on top of the video preview.
for box in boundingBoxes {
box.addToLayer(cameraPreviewView.layer)
}
}
}
extension CameraViewController: UICollectionViewDelegate {
public func collectionView(_: UICollectionView, didSelectItemAt indexPath: IndexPath) {
guard let selected = galleryDataSource.itemAt(path: indexPath) else { return }
showDetails(with: selected)
}
}
private extension CameraViewController {
func displayDeleteSheet(for image: GalleryImage) {
let sheet = UIAlertController(title: nil, message: nil, preferredStyle: .actionSheet)
let delete = UIAlertAction(title: "Delete", style: .destructive) { _ in try? self.gallery.delete(image: image) }
let cancel = UIAlertAction(title: "Cancel", style: .cancel)
sheet.addAction(delete)
sheet.addAction(cancel)
present(sheet, animated: true)
}
}
extension CameraViewController: StackedPresenstationDelegate {
func stackPresented() {}
func stackDismissed() {
smartCaptureIsActive = smartCaptureWanted
}
}
class ClarityIndicator: UIView {
override func layoutSubviews() {
super.layoutSubviews()
clipsToBounds = true
layer.cornerRadius = bounds.size.height / 2
}
}
extension CameraViewController: UIAdaptivePresentationControllerDelegate {
func presentationControllerDidDismiss(_: UIPresentationController) {
resumeCamera()
}
}