Skip to content

Instantly share code, notes, and snippets.

@leeprobert
Created February 24, 2012 23:04
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save leeprobert/1904416 to your computer and use it in GitHub Desktop.
Cocoa class for capturing the webcam and building up an image from rows of pixels.
//
// CaptureViewController.m
// TimeSmear
//
// Created by Lee Probert on 23/02/2012.
// Copyright (c) 2012 LyraSpace Ltd. All rights reserved.
//
#import "CaptureViewController.h"
@implementation CaptureViewController

@synthesize captureButton, imageView, captureView, captureSession, captureDeviceInput, captureDeviceOutput;

#pragma mark - Actions

// Starts the "time smear" capture. Lazily builds the QTKit capture pipeline
// (default video device -> decompressed frame output -> preview view) on the
// first press, then starts streaming. Decoded frames arrive through the
// -captureOutput:didOutputVideoFrame:... delegate callback below.
- (IBAction)captureButtonHandler:(id)sender
{
    NSLog(@"capture button pressed");

    // Reset the smear state so composition restarts from row 0.
    ypos = 0;
    isSmearing = YES;

    if (!captureSession) {
        captureSession = [[QTCaptureSession alloc] init];

        QTCaptureDevice *videoDevice = [QTCaptureDevice defaultInputDeviceWithMediaType:QTMediaTypeVideo];

        // Initialize to nil: QTKit only writes the out-param on failure, so an
        // uninitialized pointer here could hand garbage to alertWithError:.
        NSError *error = nil;

        // Idiom: test the BOOL return directly rather than via a temp; the
        // return value, not the error pointer, is the authoritative signal.
        if (![videoDevice open:&error]) {
            [[NSAlert alertWithError:error] runModal];
            return;
        }

        captureDeviceInput = [[QTCaptureDeviceInput alloc] initWithDevice:videoDevice];

        // Previously error:nil with no check — a failed addInput left a
        // half-configured session running with no frames and no diagnostics.
        // Now handled the same way as the open/addOutput failures.
        if (![captureSession addInput:captureDeviceInput error:&error]) {
            [[NSAlert alertWithError:error] runModal];
            return;
        }

        // Add a decompressed video output that returns raw frames to the session.
        captureDeviceOutput = [[QTCaptureDecompressedVideoOutput alloc] init];
        [captureDeviceOutput setDelegate:self];
        if (![captureSession addOutput:captureDeviceOutput error:&error]) {
            [[NSAlert alertWithError:error] runModal];
            return;
        }

        [captureView setCaptureSession:captureSession];
        [captureSession startRunning];
    }
}

#pragma mark - QTCaptureDecompressedVideoOutput delegate

// Called by QTKit for every decoded camera frame — NOT on the main thread.
// While isSmearing is set, draws the one-pixel-high strip of the current
// frame at y == ypos into compositeImage at the same y, then advances ypos.
// Once every row has been filled, capture stops, leaving a slit-scan image
// in which each row was sampled at a different moment in time.
- (void)captureOutput:(QTCaptureOutput *)captureOutput didOutputVideoFrame:(CVImageBufferRef)videoFrame withSampleBuffer:(QTSampleBuffer *)sampleBuffer fromConnection:(QTCaptureConnection *)connection
{
    // NOTE(review): this @synchronized block only copies the pointer; it does
    // not CVBufferRetain the frame, so the buffer is only valid for the
    // duration of this callback. That is fine while all use stays inline
    // below, but the synchronization itself adds no safety as written.
    CVImageBufferRef imageBufferRef;
    @synchronized (self)
    {
        imageBufferRef = videoFrame;
    }

    if (imageBufferRef && isSmearing)
    {
        NSCIImageRep *imageRep = [NSCIImageRep imageRepWithCIImage:[CIImage imageWithCVImageBuffer:imageBufferRef]];

        // Lazily size the composite to match the camera's frame size.
        if (!compositeImage) compositeImage = [[NSImage alloc] initWithSize:[imageRep size]];

        // FIXME: under manual reference counting this NSImage leaks once per
        // frame (alloc with no matching release/autorelease). Left unchanged
        // here to avoid breaking an ARC build — confirm the project's memory
        // model and release/autorelease accordingly.
        NSImage *videoFrameImage = [[NSImage alloc] initWithSize:[imageRep size]];
        [videoFrameImage addRepresentation:imageRep];

        // One-pixel-high strip of the frame at the current scan line.
        NSRect r = NSMakeRect(0.0f, ypos, [imageRep size].width, 1);
        [compositeImage lockFocus];
        [videoFrameImage drawAtPoint:NSMakePoint(0.0f, ypos) fromRect:r operation:NSCompositeSourceOver fraction:1.0f];
        [compositeImage unlockFocus];

        // FIXME: this delegate fires on a background thread; AppKit view
        // updates (setImage:, setNeedsDisplay:) should be dispatched to the
        // main thread.
        [imageView setImage:compositeImage];
        ypos++;
        [[self view] setNeedsDisplay:YES];

        // Every row captured — freeze the result and stop the camera.
        if (ypos >= [imageRep size].height)
        {
            isSmearing = NO;
            [captureSession stopRunning];
        }
    }
}

@end
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment