Last active
August 29, 2015 14:20
-
-
Save ayman/ff1237244494e497fbdf to your computer and use it in GitHub Desktop.
Simply take one photo and remember it, then count the RGB diff from the next photo. Ideally one should do this via histograms but this was quick and simple.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
//
//  RGBCountrViewController.swift
//  RGBCountr
//
//  Created by David A. Shamma on 4/9/15.
//  Some parts taken from around the web.
//
//  Simply take one photo and remember it,
//  then count the RGB diff from the next photo.
//  Ideally one should do this via histograms but this was quick.
import UIKit
import AVFoundation
class ViewController: UIViewController {

    let captureSession = AVCaptureSession()
    var previewLayer: AVCaptureVideoPreviewLayer?
    var stillImageOutput: AVCaptureStillImageOutput?
    // Mean R/G/B fractions (0...1) remembered by the last Calibrate press;
    // nil until the user has calibrated once.
    var cr: CGFloat!
    var cg: CGFloat!
    var cb: CGFloat!
    var captureDevice: AVCaptureDevice?

    @IBOutlet weak var rLabel: UILabel!
    @IBOutlet weak var gLabel: UILabel!
    @IBOutlet weak var bLabel: UILabel!
    @IBOutlet weak var rtLabel: UILabel!
    @IBOutlet weak var gtLabel: UILabel!
    @IBOutlet weak var btLabel: UILabel!
    @IBOutlet weak var countButton: UIButton!

    /// Locate the back-facing video camera and, if found, start the session.
    override func viewDidLoad() {
        super.viewDidLoad()
        captureSession.sessionPreset = AVCaptureSessionPresetHigh
        // Loop through all the capture devices on this phone.
        let devices = AVCaptureDevice.devices()
        for device in devices {
            // Make sure this particular device supports video.
            if (device.hasMediaType(AVMediaTypeVideo)) {
                // Finally check the position and confirm it is the back camera.
                if (device.position == AVCaptureDevicePosition.Back) {
                    captureDevice = device as? AVCaptureDevice
                    if captureDevice != nil {
                        println("Capture device found")
                        beginSession()
                    }
                }
            }
        }
    }

    /// Keep the preview layer's video orientation in sync with rotation.
    override func willAnimateRotationToInterfaceOrientation(toInterfaceOrientation: UIInterfaceOrientation,
        duration: NSTimeInterval) {
        if self.previewLayer != nil {
            if toInterfaceOrientation == UIInterfaceOrientation.LandscapeLeft {
                self.previewLayer?.connection.videoOrientation =
                    AVCaptureVideoOrientation.LandscapeLeft
            } else {
                self.previewLayer?.connection.videoOrientation =
                    AVCaptureVideoOrientation.LandscapeRight
            }
        }
    }

    /// Lock focus at `focusValue` (0...1 lens position) and set a custom ISO.
    /// `isoValue` (0...1) is mapped linearly onto the active format's
    /// [minISO, maxISO] range.
    func updateDeviceSettings(focusValue: Float, isoValue: Float) {
        if let device = captureDevice {
            if (device.lockForConfiguration(nil)) {
                device.setFocusModeLockedWithLensPosition(focusValue,
                    completionHandler: { (time) -> Void in
                        // Nothing to do once the lens settles.
                    })
                // Clamp the ISO between minISO and maxISO of the active format.
                let minISO = device.activeFormat.minISO
                let maxISO = device.activeFormat.maxISO
                let clampedISO = isoValue * (maxISO - minISO) + minISO
                device.setExposureModeCustomWithDuration(AVCaptureExposureDurationCurrent,
                    ISO: clampedISO,
                    completionHandler: { (time) -> Void in
                        // Nothing to do once exposure is applied.
                    })
                device.unlockForConfiguration()
            }
        }
    }

    /// Convert a touch location into fractional (0...1) coordinates relative
    /// to the 75%-scaled preview layer (see beginSession), clamped at 1.
    func touchPercent(touch: UITouch) -> CGPoint {
        // Get the dimensions of the screen in points.
        let screenSize = UIScreen.mainScreen().bounds.size
        // Both components are assigned below, so start from zero.
        var touchPer = CGPointZero
        // Normalize against the reduced (75%) preview extent, not the full
        // screen, so dragging across the visible preview spans 0...1.
        touchPer.x = touch.locationInView(self.view).x / (0.75 * screenSize.width)
        touchPer.y = touch.locationInView(self.view).y / (0.75 * screenSize.height)
        if (touchPer.x > 1) { touchPer.x = 1 }
        if (touchPer.y > 1) { touchPer.y = 1 }
        return touchPer
    }

    override func touchesBegan(touches: Set<NSObject>, withEvent event: UIEvent) {
        let touchPer = touchPercent(touches.first! as! UITouch)
        updateDeviceSettings(Float(touchPer.x), isoValue: Float(touchPer.y))
    }

    override func touchesMoved(touches: Set<NSObject>, withEvent event: UIEvent) {
        let touchPer = touchPercent(touches.first! as! UITouch)
        updateDeviceSettings(Float(touchPer.x), isoValue: Float(touchPer.y))
    }

    /// Lock the focus mode before the session starts.
    func configureDevice() {
        if let device = captureDevice {
            device.lockForConfiguration(nil)
            device.focusMode = .Locked
            device.unlockForConfiguration()
        }
    }

    /// Wire the camera into the session, attach a JPEG still-image output and
    /// a preview layer (scaled to 75% and nudged by (5, -40)), then start it.
    func beginSession() {
        configureDevice()
        var err: NSError? = nil
        captureSession.addInput(AVCaptureDeviceInput(device: captureDevice,
            error: &err))
        if err != nil {
            println("error: \(err?.localizedDescription)")
        }
        stillImageOutput = AVCaptureStillImageOutput()
        stillImageOutput!.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
        if captureSession.canAddOutput(stillImageOutput) {
            captureSession.addOutput(stillImageOutput)
        }
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        self.view.layer.addSublayer(previewLayer)
        self.previewLayer?.frame = self.view.bounds
        let orientation = UIDevice.currentDevice().orientation
        if orientation == UIDeviceOrientation.LandscapeLeft {
            self.previewLayer?.connection.videoOrientation =
                AVCaptureVideoOrientation.LandscapeLeft
        } else {
            self.previewLayer?.connection.videoOrientation =
                AVCaptureVideoOrientation.LandscapeRight
        }
        // Shrink the preview to 75% so the labels/buttons stay visible;
        // touchPercent() mirrors this scale factor.
        let w = 0.75 * previewLayer!.frame.width
        let h = 0.75 * previewLayer!.frame.height
        previewLayer!.frame.size = CGSize(width: w, height: h)
        previewLayer!.frame.offset(dx: 5, dy: -40)
        captureSession.startRunning()
    }

    /// Grab one still frame and hand the decoded UIImage to `handler` on the
    /// main queue. AVFoundation delivers the completion on an arbitrary queue
    /// and both callers mutate UIKit views, so the hop to main is required.
    private func captureStillImage(handler: (UIImage) -> Void) {
        let videoConnection =
            stillImageOutput!.connectionWithMediaType(AVMediaTypeVideo)
        if (videoConnection != nil) {
            videoConnection.videoOrientation = AVCaptureVideoOrientation.Portrait
            stillImageOutput?.captureStillImageAsynchronouslyFromConnection(
                videoConnection,
                completionHandler: { (sampleBuffer, error) in
                    if (sampleBuffer != nil) {
                        let imageData =
                            AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)
                        let dataProvider = CGDataProviderCreateWithCFData(imageData)
                        let cgImageRef = CGImageCreateWithJPEGDataProvider(
                            dataProvider, nil, true, kCGRenderingIntentDefault)
                        if let image = UIImage(CGImage: cgImageRef,
                            scale: 1.0,
                            orientation: UIImageOrientation.Right) {
                            dispatch_async(dispatch_get_main_queue()) {
                                handler(image)
                            }
                        }
                    }
                })
        }
    }

    /// Average the R, G and B channels of `image`, each returned as a 0...1
    /// fraction. Indexing uses the image's actual bytes-per-row stride (rows
    /// may be padded past width*4) and assumes 4 bytes per pixel — TODO
    /// confirm the decoded JPEG is always 8-bit RGBA/BGRA on-device.
    /// Returns (0, 0, 0) for an empty image instead of dividing by zero.
    private func meanRGB(image: UIImage) -> (r: CGFloat, g: CGFloat, b: CGFloat) {
        let w = Int(image.size.width)
        let h = Int(image.size.height)
        if w <= 0 || h <= 0 {
            return (0, 0, 0)
        }
        let pixelData = CGDataProviderCopyData(CGImageGetDataProvider(image.CGImage))
        let data: UnsafePointer<UInt8> = CFDataGetBytePtr(pixelData)
        let bytesPerRow = Int(CGImageGetBytesPerRow(image.CGImage))
        var r: CGFloat = 0
        var g: CGFloat = 0
        var b: CGFloat = 0
        for y in 0..<h {
            let row = y * bytesPerRow
            for x in 0..<w {
                let pixelInfo = row + x * 4
                r += CGFloat(data[pixelInfo]) / CGFloat(255.0)
                g += CGFloat(data[pixelInfo + 1]) / CGFloat(255.0)
                b += CGFloat(data[pixelInfo + 2]) / CGFloat(255.0)
            }
        }
        // Divide by the pixel count to get per-channel means.
        let t = CGFloat(w * h)
        return (r / t, g / t, b / t)
    }

    /// Capture a reference frame, remember its mean RGB in cr/cg/cb, show the
    /// values, and enable the Count button.
    @IBAction func didPressCalibrate(sender: AnyObject) {
        captureStillImage { image in
            let avg = self.meanRGB(image)
            self.cr = avg.r
            self.cg = avg.g
            self.cb = avg.b
            self.rLabel.text = String(format: "%.2f", Double(avg.r))
            self.gLabel.text = String(format: "%.2f", Double(avg.g))
            self.bLabel.text = String(format: "%.2f", Double(avg.b))
            self.rtLabel.text = String(format: "%.2f", Double(avg.r))
            self.gtLabel.text = String(format: "%.2f", Double(avg.g))
            self.btLabel.text = String(format: "%.2f", Double(avg.b))
            self.countButton.enabled = true
        }
    }

    /// Capture a new frame and show its mean RGB minus the calibrated values
    /// (top labels) alongside the absolute means (bottom labels).
    @IBAction func didPressCount(sender: AnyObject) {
        self.countButton.enabled = false
        captureStillImage { image in
            let avg = self.meanRGB(image)
            self.rLabel.text = String(format: "%.2f", Double(avg.r - self.cr))
            self.gLabel.text = String(format: "%.2f", Double(avg.g - self.cg))
            self.bLabel.text = String(format: "%.2f", Double(avg.b - self.cb))
            self.rtLabel.text = String(format: "%.2f", Double(avg.r))
            self.gtLabel.text = String(format: "%.2f", Double(avg.g))
            self.btLabel.text = String(format: "%.2f", Double(avg.b))
            self.countButton.enabled = true
        }
    }

    /// Explain the gestures and the calibrate-then-count workflow.
    @IBAction func didPressInfo(sender: AnyObject) {
        var mess = "Slide Left and Right to adjust focus. "
        mess += "Slide Up and Down to adjust ISO. "
        mess += "Click Calibrate, then click Count to see the percent change."
        let alert = UIAlertController(title: "About Color Counter",
            message: mess,
            preferredStyle: UIAlertControllerStyle.Alert)
        alert.addAction(UIAlertAction(title: "Got it!",
            style: UIAlertActionStyle.Default,
            handler: nil))
        self.presentViewController(alert, animated: true, completion: nil)
    }
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment