Skip to content

Instantly share code, notes, and snippets.

@jtremback
Last active August 29, 2015 14:24
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save jtremback/b4996f4e884f4ce0748e to your computer and use it in GitHub Desktop.
Save jtremback/b4996f4e884f4ce0748e to your computer and use it in GitHub Desktop.
quick and dirty mod of react-native-camera
#import "RCTCameraManager.h"
#import "RCTCamera.h"
#import "RCTBridge.h"
#import "RCTEventDispatcher.h"
#import "RCTUtils.h"
#import "RCTLog.h"
#import "UIView+React.h"
#import <AssetsLibrary/ALAssetsLibrary.h>
#import <AVFoundation/AVFoundation.h>
#import <ImageIO/ImageIO.h>
@implementation RCTCameraManager
RCT_EXPORT_MODULE();
// RCTViewManager hook: returns a fresh native camera view for each
// React component instance, wired back to this manager.
- (UIView *)view
{
  RCTCamera *cameraView = [[RCTCamera alloc] initWithManager:self];
  return cameraView;
}
// Props exposed to JS on the <Camera> component. Each NSInteger carries one of
// the enum values published by -constantsToExport below (Aspect, Type,
// Orientation, FlashMode).
RCT_EXPORT_VIEW_PROPERTY(aspect, NSInteger);
RCT_EXPORT_VIEW_PROPERTY(type, NSInteger);
RCT_EXPORT_VIEW_PROPERTY(orientation, NSInteger);
RCT_EXPORT_VIEW_PROPERTY(flashMode, NSInteger);
// Constants mirrored into JS as Camera.constants.*. The numeric values match
// the native enums, so JS can pass them straight back into the exported props
// and methods.
- (NSDictionary *)constantsToExport
{
  NSDictionary *aspects = @{@"stretch": @(RCTCameraAspectStretch),
                            @"fit": @(RCTCameraAspectFit),
                            @"fill": @(RCTCameraAspectFill)};

  NSDictionary *types = @{@"front": @(RCTCameraTypeFront),
                          @"back": @(RCTCameraTypeBack)};

  NSDictionary *captureModes = @{@"still": @(RCTCameraCaptureModeStill),
                                 @"video": @(RCTCameraCaptureModeVideo)};

  NSDictionary *captureTargets = @{@"memory": @(RCTCameraCaptureTargetMemory),
                                   @"disk": @(RCTCameraCaptureTargetDisk),
                                   @"cameraRoll": @(RCTCameraCaptureTargetCameraRoll)};

  NSDictionary *orientations = @{@"auto": @(RCTCameraOrientationAuto),
                                 @"landscapeLeft": @(RCTCameraOrientationLandscapeLeft),
                                 @"landscapeRight": @(RCTCameraOrientationLandscapeRight),
                                 @"portrait": @(RCTCameraOrientationPortrait),
                                 @"portraitUpsideDown": @(RCTCameraOrientationPortraitUpsideDown)};

  NSDictionary *flashModes = @{@"off": @(RCTCameraFlashModeOff),
                               @"on": @(RCTCameraFlashModeOn),
                               @"auto": @(RCTCameraFlashModeAuto)};

  return @{@"Aspect": aspects,
           @"Type": types,
           @"CaptureMode": captureModes,
           @"CaptureTarget": captureTargets,
           @"Orientation": orientations,
           @"FlashMode": flashModes};
}
// Initializes the capture session, preview layer, still-image and metadata
// outputs, then starts the session. All session configuration is deferred to a
// private serial queue so it never blocks the caller.
// Fix vs. original: return instancetype (not id), and check the object
// returned by +deviceInputWithDevice:error: rather than the error out-param —
// on failure the input is nil and must not be handed to the session.
- (instancetype)init {
  if ((self = [super init])) {
    self.session = [AVCaptureSession new];
    self.session.sessionPreset = AVCaptureSessionPresetHigh;

    self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
    self.previewLayer.needsDisplayOnBoundsChange = YES;

    // Serial queue: serializes session mutation, -startRunning and the
    // metadata-output delegate callbacks.
    self.sessionQueue = dispatch_queue_create("cameraManagerQueue", DISPATCH_QUEUE_SERIAL);

    dispatch_async(self.sessionQueue, ^{
      NSError *error = nil;

      if (self.presetCamera == AVCaptureDevicePositionUnspecified) {
        self.presetCamera = AVCaptureDevicePositionBack;
      }

      // Video input — back camera by default.
      AVCaptureDevice *captureDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:self.presetCamera];
      if (captureDevice != nil) {
        AVCaptureDeviceInput *captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
        if (captureDeviceInput == nil) {
          // Creation failed (e.g. no camera permission); do not add nil input.
          NSLog(@"%@", error);
        }
        else if ([self.session canAddInput:captureDeviceInput]) {
          [self.session addInput:captureDeviceInput];
          self.captureDeviceInput = captureDeviceInput;
        }
      }

      // JPEG still-image output used by -captureStill:metadata:callback:.
      AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
      if ([self.session canAddOutput:stillImageOutput]) {
        stillImageOutput.outputSettings = @{AVVideoCodecKey : AVVideoCodecJPEG};
        [self.session addOutput:stillImageOutput];
        self.stillImageOutput = stillImageOutput;
      }

      // Metadata output drives barcode detection; its delegate callbacks
      // arrive on the same session queue.
      AVCaptureMetadataOutput *metadataOutput = [[AVCaptureMetadataOutput alloc] init];
      if ([self.session canAddOutput:metadataOutput]) {
        [metadataOutput setMetadataObjectsDelegate:self queue:self.sessionQueue];
        [self.session addOutput:metadataOutput];
        [metadataOutput setMetadataObjectTypes:metadataOutput.availableMetadataObjectTypes];
        self.metadataOutput = metadataOutput;
      }

      // Restart the session if it dies with a runtime error. The weak/strong
      // dance keeps this long-lived notification block from retaining self.
      __weak RCTCameraManager *weakSelf = self;
      [self setRuntimeErrorHandlingObserver:[NSNotificationCenter.defaultCenter addObserverForName:AVCaptureSessionRuntimeErrorNotification object:self.session queue:nil usingBlock:^(NSNotification *note) {
        RCTCameraManager *strongSelf = weakSelf;
        dispatch_async(strongSelf.sessionQueue, ^{
          // Manually restarting the session since it must have been stopped due to an error.
          [strongSelf.session startRunning];
        });
      }]];

      [self.session startRunning];
    });
  }
  return self;
}
// Asks for (or reads, if already decided) camera permission and reports the
// BOOL result to JS as the second callback argument.
RCT_EXPORT_METHOD(checkDeviceAuthorizationStatus:(RCTResponseSenderBlock) callback)
{
  [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
                           completionHandler:^(BOOL granted) {
    callback(@[[NSNull null], @(granted)]);
  }];
}
// Applies the requested flash mode to whichever device currently feeds the
// session; unsupported modes are filtered out by -setFlashMode:forDevice:.
RCT_EXPORT_METHOD(changeFlashMode:(NSInteger)flashMode) {
  [self setFlashMode:flashMode forDevice:self.captureDeviceInput.device];
}
// Swaps the session's video input to the camera at the given position
// (front/back), moving the subject-area-change observer along with it.
// Fix vs. original: test the input object returned by
// +deviceInputWithDevice:error: instead of the NSError out-param — the input
// is nil exactly when creation failed.
RCT_EXPORT_METHOD(changeCamera:(NSInteger)camera) {
  AVCaptureDevice *currentCaptureDevice = [self.captureDeviceInput device];
  AVCaptureDevicePosition position = (AVCaptureDevicePosition)camera;
  AVCaptureDevice *captureDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:position];
  if (captureDevice == nil) {
    return;
  }

  NSError *error = nil;
  AVCaptureDeviceInput *captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
  if (captureDeviceInput == nil) {
    NSLog(@"%@", error);
    return;
  }

  [self.session beginConfiguration];
  [self.session removeInput:self.captureDeviceInput];
  if ([self.session canAddInput:captureDeviceInput]) {
    // Re-target the subject-area observer from the old device to the new one.
    [NSNotificationCenter.defaultCenter removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:currentCaptureDevice];
    [NSNotificationCenter.defaultCenter addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
    [self.session addInput:captureDeviceInput];
    self.captureDeviceInput = captureDeviceInput;
  }
  else {
    // Could not add the new input: restore the previous one so the session is
    // not left without any video source.
    [self.session addInput:self.captureDeviceInput];
  }
  [self.session commitConfiguration];
}
// Sets how the preview fills its bounds. `aspect` is an AVLayerVideoGravity
// string (e.g. AVLayerVideoGravityResizeAspectFill) passed straight through
// from JS to the preview layer.
RCT_EXPORT_METHOD(changeAspect:(NSString *)aspect) {
self.previewLayer.videoGravity = aspect;
}
// Rotates the preview. The NSInteger from JS is assigned directly to the
// connection's AVCaptureVideoOrientation, so the JS-side values must match
// that enum's raw values (see the Orientation constants exported above).
RCT_EXPORT_METHOD(changeOrientation:(NSInteger)orientation) {
self.previewLayer.connection.videoOrientation = orientation;
}
// JS entry point for taking a picture. `options` carries "mode"
// (still/video), "target" (memory/disk/cameraRoll) and an optional "metadata"
// dictionary (e.g. a geolocation fix under the "location" key).
RCT_EXPORT_METHOD(capture:(NSDictionary *)options callback:(RCTResponseSenderBlock)callback) {
  // Use keyed subscripting rather than -valueForKey:: on NSDictionary,
  // -valueForKey: has KVC semantics (keys beginning with "@" are treated as
  // collection operators) — subscripting is the plain lookup intended here.
  NSInteger captureMode = [options[@"mode"] intValue];
  NSInteger captureTarget = [options[@"target"] intValue];
  NSDictionary *metadata = options[@"metadata"];

  if (captureMode == RCTCameraCaptureModeStill) {
    [self captureStill:captureTarget metadata:metadata callback:callback];
  }
  else if (captureMode == RCTCameraCaptureModeVideo) {
    // Video capture is not implemented yet ("waiting for incoming PRs").
    // NOTE(review): the callback is never invoked in this branch, so the JS
    // promise/callback dangles — confirm whether an error reply is wanted.
  }
}
// Captures a still JPEG from the session, injects a GPS EXIF dictionary built
// from inputMetadata[@"location"], strips the EXIF orientation flag (rotation
// is applied to the pixels instead — currently 0°), and delivers the result to
// the requested target (base64 in memory, file on disk, or the camera roll).
//
// Fixes vs. original: every Create/Copy-rule CF object (image source, both
// CGImages, image destination) is now released — the original leaked all of
// them per shot; ObjC<->CF casts use __bridge as ARC requires; an unknown
// `target` no longer puts nil into an NSArray literal (which would throw).
-(void)captureStill:(NSInteger)target metadata:(NSDictionary *)inputMetadata callback:(RCTResponseSenderBlock)callback {
  [[self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo] setVideoOrientation:self.previewLayer.connection.videoOrientation];

  [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:[self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo] completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
    if (!imageDataSampleBuffer) {
      callback(@[RCTMakeError(error.description, nil, nil)]);
      return;
    }

    NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];

    // Read the metadata already baked into the JPEG so we can augment it.
    CGImageSourceRef source = CGImageSourceCreateWithData((__bridge CFDataRef)imageData, NULL);
    NSDictionary *imageMetadata = (NSDictionary *)CFBridgingRelease(CGImageSourceCopyPropertiesAtIndex(source, 0, NULL));
    NSMutableDictionary *imageMetadataAsMutable = [imageMetadata mutableCopy];

    // Attach the GPS dictionary derived from the JS-side location fix.
    NSMutableDictionary *GPSDictionary = [self getGPSDictionaryForLocation:[inputMetadata objectForKey:@"location"]];
    if (GPSDictionary != nil) {
      imageMetadataAsMutable[(NSString *)kCGImagePropertyGPSDictionary] = GPSDictionary;
    }

    // NOTE(review): orientation notifications are started but never stopped,
    // and the device orientation was read but unused in the original — kept
    // only the side-effectful call for behavioral parity; confirm intent.
    [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];

    // Drop the EXIF orientation flag; any rotation is applied to the pixels.
    [imageMetadataAsMutable removeObjectForKey:(NSString *)kCGImagePropertyOrientation];

    // Re-encode: decode, rotate (currently by 0°), write with new metadata.
    CGImageRef cgImage = CGImageSourceCreateImageAtIndex(source, 0, NULL);
    CGFloat angle = 0;
    CGImageRef cgRotatedImage = [self CGImageRotatedByAngle:cgImage angle:angle];

    NSMutableData *destinationData = [NSMutableData data];
    CFStringRef UTI = CGImageSourceGetType(source); // e.g. public.jpeg
    CGImageDestinationRef destination = CGImageDestinationCreateWithData((__bridge CFMutableDataRef)destinationData, UTI, 1, NULL);
    CGImageDestinationAddImage(destination, cgRotatedImage, (__bridge CFDictionaryRef)imageMetadataAsMutable);
    CGImageDestinationFinalize(destination);

    // Balance every Create/Copy above (Core Foundation "Create rule").
    if (destination) CFRelease(destination);
    CGImageRelease(cgRotatedImage);
    CGImageRelease(cgImage);
    if (source) CFRelease(source);

    if (target == RCTCameraCaptureTargetMemory) {
      callback(@[[NSNull null], [destinationData base64EncodedStringWithOptions:0]]);
    }
    else if (target == RCTCameraCaptureTargetDisk) {
      callback(@[[NSNull null], [self saveImage:destinationData withName:[[NSUUID UUID] UUIDString]]]);
    }
    else if (target == RCTCameraCaptureTargetCameraRoll) {
      [[[ALAssetsLibrary alloc] init] writeImageDataToSavedPhotosAlbum:destinationData metadata:nil completionBlock:^(NSURL *assetURL, NSError *saveError) {
        if (saveError == nil) {
          callback(@[[NSNull null], [assetURL absoluteString]]);
        }
        else {
          callback(@[RCTMakeError(saveError.description, nil, nil)]);
        }
      }];
    }
  }];
}
// Writes the image bytes into the app's Documents directory under `name`
// and returns the absolute path of the created file.
- (NSString *)saveImage:(NSData *)imageData withName:(NSString *)name {
  NSString *documentsDirectory =
      [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject];
  NSString *fullPath = [documentsDirectory stringByAppendingPathComponent:name];
  [[NSFileManager defaultManager] createFileAtPath:fullPath contents:imageData attributes:nil];
  return fullPath;
}
// Returns a new CGImage containing imgRef rotated by `angle` degrees, drawn
// centered into the rotated bounding box.
// Ownership: follows the Core Foundation Create rule — the caller must
// release the returned image with CGImageRelease. Returns NULL when a bitmap
// context cannot be created (e.g. zero-sized input).
- (CGImageRef)CGImageRotatedByAngle:(CGImageRef)imgRef angle:(CGFloat)angle
{
  CGFloat angleInRadians = angle * (M_PI / 180);
  CGFloat width = CGImageGetWidth(imgRef);
  CGFloat height = CGImageGetHeight(imgRef);

  // Bounding box of the source rect after rotation.
  CGRect imgRect = CGRectMake(0, 0, width, height);
  CGAffineTransform transform = CGAffineTransformMakeRotation(angleInRadians);
  CGRect rotatedRect = CGRectApplyAffineTransform(imgRect, transform);

  CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
  CGContextRef bmContext = CGBitmapContextCreate(NULL,
                                                 rotatedRect.size.width,
                                                 rotatedRect.size.height,
                                                 8,
                                                 0,
                                                 colorSpace,
                                                 (CGBitmapInfo) kCGImageAlphaPremultipliedFirst);
  CGColorSpaceRelease(colorSpace);
  if (bmContext == NULL) {
    // Invalid parameters — nothing to draw into. Original would have logged
    // CG errors and produced a NULL image anyway.
    return NULL;
  }

  CGContextSetAllowsAntialiasing(bmContext, TRUE);
  CGContextSetInterpolationQuality(bmContext, kCGInterpolationNone);

  // Rotate about the destination center, then draw the source centered in it.
  CGContextTranslateCTM(bmContext,
                        +(rotatedRect.size.width/2),
                        +(rotatedRect.size.height/2));
  CGContextRotateCTM(bmContext, angleInRadians);
  CGContextTranslateCTM(bmContext,
                        -(rotatedRect.size.width/2),
                        -(rotatedRect.size.height/2));
  CGContextDrawImage(bmContext,
                     CGRectMake((rotatedRect.size.width-width)/2.0f,
                                (rotatedRect.size.height-height)/2.0f,
                                width,
                                height),
                     imgRef);

  CGImageRef rotatedImage = CGBitmapContextCreateImage(bmContext);
  CFRelease(bmContext);
  return rotatedImage;
}
//- (CGImageRef)CGImageRotatedByAngle:(CGImageRef)imgRef angle:(CGFloat)angle
//{
// CGFloat angleInRadians = angle * (M_PI / 180);
// CGFloat width = CGImageGetWidth(imgRef);
// CGFloat height = CGImageGetHeight(imgRef);
//
// CGRect imgRect = CGRectMake(0, 0, width, height);
// CGAffineTransform transform = CGAffineTransformMakeRotation(angleInRadians);
// CGRect rotatedRect = CGRectApplyAffineTransform(imgRect, transform);
//
// CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
// CGContextRef bmContext = CGBitmapContextCreate(NULL, rotatedRect.size.width, rotatedRect.size.height, 8, 0, colorSpace, (CGBitmapInfo) kCGImageAlphaPremultipliedFirst);
// CGContextSetAllowsAntialiasing(bmContext, FALSE);
// CGContextSetInterpolationQuality(bmContext, kCGInterpolationNone);
// CGColorSpaceRelease(colorSpace);
// CGContextTranslateCTM(bmContext,
// +(rotatedRect.size.width/2),
// +(rotatedRect.size.height/2));
// CGContextRotateCTM(bmContext, angleInRadians);
// CGContextTranslateCTM(bmContext,
// -(rotatedRect.size.width/2),
// -(rotatedRect.size.height/2));
// CGContextDrawImage(bmContext, CGRectMake(0, 0,
// rotatedRect.size.width,
// rotatedRect.size.height),
// imgRef);
//
// CGImageRef rotatedImage = CGBitmapContextCreateImage(bmContext);
// CFRelease(bmContext);
//
// return rotatedImage;
//}
// Builds an EXIF GPS dictionary (kCGImagePropertyGPS* keys) from a JS-side
// geolocation fix of the form:
//   @{ @"timestamp": secondsSinceEpoch,
//      @"coords": @{ latitude, longitude, altitude, speed (m/s), heading } }
// Always returns a (possibly sparsely populated) dictionary; a nil `location`
// degrades to zeroed values, matching the original behavior.
//
// Fixes vs. original: coordinates/altitude/speed/heading are boxed as doubles
// (numberWithFloat truncated GPS coordinates to ~7 significant digits, about
// a metre of error); the date formatter is pinned to en_US_POSIX so the
// device's 12/24-hour setting cannot corrupt the fixed-format EXIF stamps.
- (NSMutableDictionary *)getGPSDictionaryForLocation:(NSDictionary *)location {
  NSMutableDictionary *gps = [NSMutableDictionary dictionary];
  NSDictionary *coords = location[@"coords"];

  // GPS tag version
  gps[(NSString *)kCGImagePropertyGPSVersion] = @"2.2.0.0";

  // Timestamp: EXIF wants separate UTC time and date stamps.
  double timestamp = floor([location[@"timestamp"] doubleValue]);
  NSDate *date = [NSDate dateWithTimeIntervalSince1970:timestamp];
  NSDateFormatter *formatter = [[NSDateFormatter alloc] init];
  formatter.locale = [NSLocale localeWithLocaleIdentifier:@"en_US_POSIX"];
  formatter.timeZone = [NSTimeZone timeZoneWithAbbreviation:@"UTC"];
  formatter.dateFormat = @"HH:mm:ss.SSSSSS";
  gps[(NSString *)kCGImagePropertyGPSTimeStamp] = [formatter stringFromDate:date];
  formatter.dateFormat = @"yyyy:MM:dd";
  gps[(NSString *)kCGImagePropertyGPSDateStamp] = [formatter stringFromDate:date];

  // Latitude: EXIF stores magnitude plus an N/S reference.
  double latitude = [coords[@"latitude"] doubleValue];
  gps[(NSString *)kCGImagePropertyGPSLatitudeRef] = (latitude < 0) ? @"S" : @"N";
  gps[(NSString *)kCGImagePropertyGPSLatitude] = @(fabs(latitude));

  // Longitude: magnitude plus an E/W reference.
  double longitude = [coords[@"longitude"] doubleValue];
  gps[(NSString *)kCGImagePropertyGPSLongitudeRef] = (longitude < 0) ? @"W" : @"E";
  gps[(NSString *)kCGImagePropertyGPSLongitude] = @(fabs(longitude));

  // Altitude: magnitude plus a below/above-sea-level flag; NaN means unknown.
  double altitude = [coords[@"altitude"] doubleValue];
  if (!isnan(altitude)) {
    gps[(NSString *)kCGImagePropertyGPSAltitudeRef] = (altitude < 0) ? @"1" : @"0";
    gps[(NSString *)kCGImagePropertyGPSAltitude] = @(fabs(altitude));
  }

  // Speed: JS supplies m/s, EXIF "K" means km/h; negative means unavailable.
  double speed = [coords[@"speed"] doubleValue];
  if (speed >= 0) {
    gps[(NSString *)kCGImagePropertyGPSSpeedRef] = @"K";
    gps[(NSString *)kCGImagePropertyGPSSpeed] = @(speed * 3.6);
  }

  // Heading: true-north track in degrees; negative means unavailable.
  double heading = [coords[@"heading"] doubleValue];
  if (heading >= 0) {
    gps[(NSString *)kCGImagePropertyGPSTrackRef] = @"T";
    gps[(NSString *)kCGImagePropertyGPSTrack] = @(heading);
  }

  return gps;
}
// AVCaptureMetadataOutputObjectsDelegate: emits a "CameraBarCodeRead" device
// event to JS for every recognized barcode in the frame, with the decoded
// string and the code's bounds in the metadata coordinate space.
//
// Fixes vs. original: membership test uses value equality (-containsObject:)
// instead of pointer comparison (==) on the type strings, and a nil
// stringValue (possible for some code types) is skipped — it would otherwise
// throw inside the dictionary literal.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection {
  NSSet *barcodeTypes = [NSSet setWithArray:@[
    AVMetadataObjectTypeUPCECode,
    AVMetadataObjectTypeCode39Code,
    AVMetadataObjectTypeCode39Mod43Code,
    AVMetadataObjectTypeEAN13Code,
    AVMetadataObjectTypeEAN8Code,
    AVMetadataObjectTypeCode93Code,
    AVMetadataObjectTypeCode128Code,
    AVMetadataObjectTypePDF417Code,
    AVMetadataObjectTypeQRCode,
    AVMetadataObjectTypeAztecCode
  ]];

  for (AVMetadataMachineReadableCodeObject *metadata in metadataObjects) {
    if (![barcodeTypes containsObject:metadata.type]) {
      continue;
    }
    NSString *stringValue = metadata.stringValue;
    if (stringValue == nil) {
      continue;
    }
    [self.bridge.eventDispatcher sendDeviceEventWithName:@"CameraBarCodeRead"
                                                    body:@{
      @"data": stringValue,
      @"bounds": @{
        @"origin": @{
          @"x": [NSString stringWithFormat:@"%f", metadata.bounds.origin.x],
          @"y": [NSString stringWithFormat:@"%f", metadata.bounds.origin.y]
        },
        @"size": @{
          @"height": [NSString stringWithFormat:@"%f", metadata.bounds.size.height],
          @"width": [NSString stringWithFormat:@"%f", metadata.bounds.size.width],
        }
      }
    }];
  }
}
// Returns the capture device at `position` for the given media type, or the
// first available device of that type when no position match exists
// (nil when the device has no such hardware at all).
- (AVCaptureDevice *)deviceWithMediaType:(NSString *)mediaType preferringPosition:(AVCaptureDevicePosition)position
{
  NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType];
  for (AVCaptureDevice *candidate in devices) {
    if (candidate.position == position) {
      return candidate;
    }
  }
  return devices.firstObject;
}
// Sets the flash mode on `device`, locking it for configuration first.
// Silently does nothing when the device lacks a flash or the mode is
// unsupported; logs and bails if the configuration lock cannot be taken.
- (void)setFlashMode:(AVCaptureFlashMode)flashMode forDevice:(AVCaptureDevice *)device
{
  if (!device.hasFlash || ![device isFlashModeSupported:flashMode]) {
    return;
  }

  NSError *lockError = nil;
  if (![device lockForConfiguration:&lockError]) {
    NSLog(@"%@", lockError);
    return;
  }

  [device setFlashMode:flashMode];
  [device unlockForConfiguration];
}
// Subject-area-change notification handler: re-centers continuous
// autofocus and auto-exposure on the middle of the frame, and stops
// monitoring until focus is explicitly re-armed.
- (void)subjectAreaDidChange:(NSNotification *)notification
{
  [self focusWithMode:AVCaptureFocusModeContinuousAutoFocus
       exposeWithMode:AVCaptureExposureModeContinuousAutoExposure
        atDevicePoint:CGPointMake(0.5, 0.5)
monitorSubjectAreaChange:NO];
}
// Points focus and exposure at `point` (device coordinate space, 0..1) on the
// current capture device and toggles subject-area-change monitoring. Runs
// asynchronously on the session queue; unsupported capabilities are skipped.
- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposeWithMode:(AVCaptureExposureMode)exposureMode atDevicePoint:(CGPoint)point monitorSubjectAreaChange:(BOOL)monitorSubjectAreaChange
{
  dispatch_async(self.sessionQueue, ^{
    AVCaptureDevice *device = self.captureDeviceInput.device;

    NSError *lockError = nil;
    if (![device lockForConfiguration:&lockError]) {
      NSLog(@"%@", lockError);
      return;
    }

    if (device.isFocusPointOfInterestSupported && [device isFocusModeSupported:focusMode]) {
      device.focusMode = focusMode;
      device.focusPointOfInterest = point;
    }
    if (device.isExposurePointOfInterestSupported && [device isExposureModeSupported:exposureMode]) {
      device.exposureMode = exposureMode;
      device.exposurePointOfInterest = point;
    }
    device.subjectAreaChangeMonitoringEnabled = monitorSubjectAreaChange;

    [device unlockForConfiguration];
  });
}
@end
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment