@bellbind
Last active June 19, 2023 01:21
[objective-c][macosx] Capturing an image from a webcam
// capture an image from the webcam (e.g. the built-in FaceTime camera)
// for OS X 10.9 (uses the AVFoundation API instead of the deprecated QTKit)
// build with:
//   clang -fobjc-arc -Wall -Wextra -pedantic avcapture.m
//     -framework Cocoa -framework AVFoundation -framework CoreMedia
//     -framework QuartzCore -o avcapture
#import <AVFoundation/AVFoundation.h>
#import <AppKit/AppKit.h>
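// Delegate object: receives captured frames, keeps only the newest one,
// and saves the 6th frame to disk as result.jpg.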
@interface Capture : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>
@property (weak) AVCaptureSession* session;
- (void) captureOutput: (AVCaptureOutput*) output
  didOutputSampleBuffer: (CMSampleBufferRef) buffer
  fromConnection: (AVCaptureConnection*) connection;
//- (void) captureOutput: (AVCaptureOutput*) output
//  didDropSampleBuffer: (CMSampleBufferRef) buffer
//  fromConnection: (AVCaptureConnection*) connection;
@end
@interface Capture ()
{
    CVImageBufferRef head;  // most recently captured frame
    CFRunLoopRef runLoop;   // run loop to stop once the image is saved
    int count;              // number of frames received so far
}
- (void) save;
@end
@implementation Capture
@synthesize session;
- (id) init
{
    self = [super init];
    if (self) {
        runLoop = CFRunLoopGetCurrent();
        head = nil;
        count = 0;
    }
    return self;
}
- (void) dealloc
{
    @synchronized (self) {
        CVBufferRelease(head);
    }
    NSLog(@"capture released");
}
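// Convert the most recent frame to a JPEG file on disk.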
- (void) save
{
    @synchronized (self) {
        CIImage* ciImage =
            [CIImage imageWithCVImageBuffer: head];
        NSBitmapImageRep* bitmapRep =
            [[NSBitmapImageRep alloc] initWithCIImage: ciImage];
        NSData* jpgData =
            [bitmapRep representationUsingType: NSJPEGFileType properties: nil];
        [jpgData writeToFile: @"result.jpg" atomically: NO];
        //NSData* pngData =
        //    [bitmapRep representationUsingType: NSPNGFileType properties: nil];
        //[pngData writeToFile: @"result.png" atomically: NO];
    }
    NSLog(@"Saved");
}
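// Called on the delegate queue for every frame the camera delivers:
// retain the new frame, swap it into head, and release the previous one.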
- (void) captureOutput: (AVCaptureOutput*) output
  didOutputSampleBuffer: (CMSampleBufferRef) buffer
  fromConnection: (AVCaptureConnection*) connection
{
#pragma unused (output)
#pragma unused (connection)
    CVImageBufferRef frame = CMSampleBufferGetImageBuffer(buffer);
    CVImageBufferRef prev;
    CVBufferRetain(frame);
    @synchronized (self) {
        prev = head;
        head = frame;
        count++;
        NSLog(@"Captured");
    }
    CVBufferRelease(prev);
    if (count == 6) {
        // after skipping 5 frames (the first frames are often dark
        // while the camera adjusts its exposure)
        [self save];
        [self.session stopRunning];
        CFRunLoopStop(runLoop);
    }
}
//- (void) captureOutput: (AVCaptureOutput*) output
//  didDropSampleBuffer: (CMSampleBufferRef) buffer
//  fromConnection: (AVCaptureConnection*) connection
//{
//#pragma unused (output)
//#pragma unused (buffer)
//#pragma unused (connection)
//}
@end
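// Log the error and return a nonzero exit status for use from main().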
int quit(NSError* error)
{
    NSLog(@"[error] %@", [error localizedDescription]);
    return 1;
}
int main()
{
    NSError* error = nil;
    Capture* capture = [[Capture alloc] init];
    // pick the default video device (or enumerate all of them)
    //NSArray* devices =
    //    [AVCaptureDevice devicesWithMediaType: AVMediaTypeVideo];
    //AVCaptureDevice* device = [devices objectAtIndex: 0];
    AVCaptureDevice* device =
        [AVCaptureDevice defaultDeviceWithMediaType: AVMediaTypeVideo];
    NSLog(@"[device] %@", device);
    AVCaptureDeviceInput* input =
        [AVCaptureDeviceInput deviceInputWithDevice: device error: &error];
    if (!input) return quit(error);
    NSLog(@"[input] %@", input);
    AVCaptureVideoDataOutput* output =
        [[AVCaptureVideoDataOutput alloc] init];
    // deliver frames on the main queue, which CFRunLoopRun() services
    [output setSampleBufferDelegate: capture queue: dispatch_get_main_queue()];
    NSLog(@"[output] %@", output);
    AVCaptureSession* session = [[AVCaptureSession alloc] init];
    [session addInput: input];
    [session addOutput: output];
    capture.session = session;
    [session startRunning];
    NSLog(@"Started");
    CFRunLoopRun();  // blocks until the delegate calls CFRunLoopStop()
    NSLog(@"Stopped");
    return 0;
}
// capture an image from the webcam (e.g. the built-in FaceTime camera)
// QTKit API version (QTKit is deprecated as of OS X 10.9)
// build with:
//   clang -fobjc-arc -Wall -Wextra -pedantic capture.m
//     -framework Cocoa -framework QTKit -framework CoreVideo
//     -framework QuartzCore -o capture
#import <QTKit/QTKit.h>
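// Delegate object: same behavior as the AVFoundation version above, but
// driven by QTCaptureDecompressedVideoOutput's delegate callback.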
@interface Capture : NSObject
@property (weak) QTCaptureSession* session;
- (void) captureOutput: (QTCaptureOutput*) output
  didOutputVideoFrame: (CVImageBufferRef) frame
  withSampleBuffer: (QTSampleBuffer*) buffer
  fromConnection: (QTCaptureConnection*) connection;
@end
@interface Capture ()
{
    CVImageBufferRef head;  // most recently captured frame
    CFRunLoopRef runLoop;   // run loop to stop once the image is saved
    int count;              // number of frames received so far
}
- (void) save;
@end
@implementation Capture
@synthesize session;
- (id) init
{
    self = [super init];
    if (self) {
        runLoop = CFRunLoopGetCurrent();
        head = nil;
        count = 0;
    }
    return self;
}
- (void) dealloc
{
    @synchronized (self) {
        CVBufferRelease(head);
    }
    NSLog(@"capture released");
}
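// Convert the most recent frame to a JPEG file on disk.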
- (void) save
{
    @synchronized (self) {
        CIImage* ciImage = [CIImage imageWithCVImageBuffer: head];
        NSBitmapImageRep* bitmapRep =
            [[NSBitmapImageRep alloc] initWithCIImage: ciImage];
        NSData* jpgData =
            [bitmapRep representationUsingType: NSJPEGFileType properties: nil];
        [jpgData writeToFile: @"result.jpg" atomically: NO];
        //NSData* pngData =
        //    [bitmapRep representationUsingType: NSPNGFileType properties: nil];
        //[pngData writeToFile: @"result.png" atomically: NO];
    }
    NSLog(@"Saved");
}
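// Called for every frame the camera delivers: retain the new frame,
// swap it into head, and release the previous one.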
- (void) captureOutput: (QTCaptureOutput*) output
  didOutputVideoFrame: (CVImageBufferRef) frame
  withSampleBuffer: (QTSampleBuffer*) buffer
  fromConnection: (QTCaptureConnection*) connection
{
#pragma unused (output)
#pragma unused (buffer)
#pragma unused (connection)
    CVImageBufferRef prev;
    CVBufferRetain(frame);
    @synchronized (self) {
        prev = head;
        head = frame;
        count++;
        NSLog(@"Captured");
    }
    CVBufferRelease(prev);
    if (count == 6) {
        // after skipping 5 frames (the first frames are often dark
        // while the camera adjusts its exposure)
        [self.session stopRunning];
        [self save];
        CFRunLoopStop(runLoop);
    }
}
@end
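// Log the error and return a nonzero exit status for use from main().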
int quit(NSError* error)
{
    NSLog(@"[error] %@", [error localizedDescription]);
    return 1;
}
int main()
{
    NSError* error = nil;
    Capture* capture = [[Capture alloc] init];
    // pick the default video device (or enumerate all of them)
    //NSArray* devices =
    //    [QTCaptureDevice inputDevicesWithMediaType: QTMediaTypeVideo];
    //QTCaptureDevice* device = [devices objectAtIndex: 0];
    QTCaptureDevice* device =
        [QTCaptureDevice defaultInputDeviceWithMediaType: QTMediaTypeVideo];
    NSLog(@"[device] %@", device);
    if (![device open: &error]) return quit(error);
    QTCaptureDeviceInput* input =
        [[QTCaptureDeviceInput alloc] initWithDevice: device];
    NSLog(@"[input] %@", input);
    QTCaptureDecompressedVideoOutput* output =
        [[QTCaptureDecompressedVideoOutput alloc] init];
    [output setDelegate: capture];
    NSLog(@"[output] %@", output);
    QTCaptureSession* session = [[QTCaptureSession alloc] init];
    if (![session addInput: input error: &error]) return quit(error);
    if (![session addOutput: output error: &error]) return quit(error);
    capture.session = session;
    [session startRunning];
    NSLog(@"Started");
    CFRunLoopRun();  // blocks until the delegate calls CFRunLoopStop()
    NSLog(@"Stopped");
    return 0;
}
eanon commented Mar 15, 2018
Hi, I tried your code (a simple copy/paste) on OS X 10.9, but captureOutput is never called, so the session never stops. Do you know why?
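One possible cause, offered as a hedged guess rather than a confirmed diagnosis: the AVFoundation main() never verifies that the device and input are non-nil, and on macOS 10.14 and later the process must also be authorized for camera access, otherwise the session runs but no sample buffers are delivered. A minimal pre-flight sketch (hypothetical, not from the original gist) that could run before [session startRunning]:

    // hypothetical pre-flight checks; not part of the original gist
    AVCaptureDevice* device =
        [AVCaptureDevice defaultDeviceWithMediaType: AVMediaTypeVideo];
    if (!device) {
        NSLog(@"[error] no video capture device found");
        return 1;
    }
    NSError* error = nil;
    AVCaptureDeviceInput* input =
        [AVCaptureDeviceInput deviceInputWithDevice: device error: &error];
    if (!input) return quit(error);
    // On macOS 10.14+ the camera authorization status matters too; the
    // respondsToSelector: check avoids calling the API on older systems
    // (assumes the tool is built against a 10.14+ SDK).
    if ([AVCaptureDevice respondsToSelector:
            @selector(authorizationStatusForMediaType:)]) {
        NSLog(@"[camera authorization] %ld", (long)
            [AVCaptureDevice authorizationStatusForMediaType: AVMediaTypeVideo]);
    }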
