Skip to content

Instantly share code, notes, and snippets.

@SunXiaoShan
Created March 22, 2018 03:04
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save SunXiaoShan/b023fb0c7602a24325bf3155072679a7 to your computer and use it in GitHub Desktop.
Save SunXiaoShan/b023fb0c7602a24325bf3155072679a7 to your computer and use it in GitHub Desktop.
FD
// Configures the AVFoundation capture pipeline: capture session, camera
// input (front camera preferred), a BGRA video-data output feeding the
// sample-buffer delegate on a serial queue, and a preview layer attached
// to previewView. On input-creation failure, tears down any partial setup
// and reports the NSError to the delegate on the main queue.
- (void)setupAVCapture {
    NSError *error = nil;

    // 640x480 on phones keeps per-frame processing cheap; photo-quality
    // preset elsewhere (matches original behavior).
    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPhone) {
        [session setSessionPreset:AVCaptureSessionPreset640x480];
    } else {
        [session setSessionPreset:AVCaptureSessionPresetPhoto];
    }

    // Prefer the front camera; fall back to the default video device.
    AVCaptureDevice *device = [self findFrontCamera];
    if (device == nil) {
        self.isUsingFrontFacingCamera = NO;
        device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    }

    // Create the input. Per Cocoa convention, test the returned object for
    // nil rather than the error pointer — the error is only meaningful when
    // the call actually fails.
    AVCaptureDeviceInput *deviceInput =
        [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (deviceInput == nil) {
        [self teardownAVCapture];
        if ([self.delegate respondsToSelector:@selector(FaceDetectionComponentError:error:)]) {
            // Weak capture: don't let the async report extend self's lifetime.
            __weak typeof(self) weakSelf = self;
            dispatch_async(dispatch_get_main_queue(), ^{
                [weakSelf.delegate FaceDetectionComponentError:weakSelf error:error];
            });
        }
        return;
    }

    if ([session canAddInput:deviceInput]) {
        [session addInput:deviceInput];
    }

    // Video-data output delivering BGRA frames — both CoreGraphics and
    // OpenGL work well with this pixel format.
    self.videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
    NSDictionary *rgbOutputSettings =
        @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCMPixelFormat_32BGRA)};
    [self.videoDataOutput setVideoSettings:rgbOutputSettings];
    // Drop late frames instead of queueing them if processing falls behind.
    [self.videoDataOutput setAlwaysDiscardsLateVideoFrames:YES];

    // Serial queue: sample buffers arrive in order, off the main thread.
    self.videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL);
    [self.videoDataOutput setSampleBufferDelegate:self queue:self.videoDataOutputQueue];

    if ([session canAddOutput:self.videoDataOutput]) {
        [session addOutput:self.videoDataOutput];
    }
    [[self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo] setEnabled:YES];

    // Preview layer sized to fill the host view's bounds.
    self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    self.previewLayer.backgroundColor = [[UIColor blackColor] CGColor];
    self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspect;

    CALayer *rootLayer = [self.previewView layer];
    [rootLayer setMasksToBounds:YES];
    [self.previewLayer setFrame:[rootLayer bounds]];
    [rootLayer addSublayer:self.previewLayer];

    [session startRunning];
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment