@DarcyRayner
Created March 7, 2013 23:43
GPUImageMovie with audio playback
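Below are the modified GPUImageMovie.h and GPUImageMovie.m. As a minimal usage sketch, assuming the rest of Brad Larson's GPUImage framework is available (the GPUImageSepiaFilter, GPUImageView, and owning view controller here are illustrative, not part of this gist):

// Hypothetical setup, e.g. inside a view controller that adopts GPUImageMovieDelegate.
NSURL *movieURL = [[NSBundle mainBundle] URLForResource:@"sample" withExtension:@"m4v"];
GPUImageMovie *movie = [[GPUImageMovie alloc] initWithURL:movieURL];
movie.playAtActualSpeed = YES; // pace frames at the movie's native timing
movie.playSound = YES;         // also play the audio channel (see setupSound below)
movie.delegate = self;         // receive -didCompletePlayingMovie

GPUImageSepiaFilter *sepiaFilter = [[GPUImageSepiaFilter alloc] init];
[movie addTarget:sepiaFilter];
[sepiaFilter addTarget:filterView]; // filterView: a GPUImageView already in the view hierarchy

[movie startProcessing];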
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import "GPUImageOpenGLESContext.h"
#import "GPUImageOutput.h"
/** Protocol for getting Movie played callback.
*/
@protocol GPUImageMovieDelegate <NSObject>
- (void)didCompletePlayingMovie;
@end
/** Source object for filtering movies
*/
@interface GPUImageMovie : GPUImageOutput
@property (readwrite, retain) AVAsset *asset;
@property(readwrite, retain) NSURL *url;
/** This enables the benchmarking mode, which logs out instantaneous and average frame times to the console
*/
@property(readwrite, nonatomic) BOOL runBenchmark;
/** This determines whether to play back a movie as fast as the frames can be processed, or if the original speed of the movie should be respected. Defaults to NO.
*/
@property(readwrite, nonatomic) BOOL playAtActualSpeed;
/** This determines whether to play the sound channel of a movie. Defaults to NO.
*/
@property (readwrite, nonatomic) BOOL playSound;
/** The delegate is notified via -didCompletePlayingMovie when the movie finishes playing
*/
@property (readwrite, nonatomic, assign) id<GPUImageMovieDelegate> delegate;
/// @name Initialization and teardown
- (id)initWithAsset:(AVAsset *)asset;
- (id)initWithURL:(NSURL *)url;
- (void)textureCacheSetup;
/// @name Movie processing
- (void)enableSynchronizedEncodingUsingMovieWriter:(GPUImageMovieWriter *)movieWriter;
- (void)readNextVideoFrameFromOutput:(AVAssetReaderTrackOutput *)readerVideoTrackOutput;
- (void)readNextAudioSampleFromOutput:(AVAssetReaderTrackOutput *)readerAudioTrackOutput;
- (void)startProcessing;
- (void)resumeProcessing;
- (void)endProcessing;
- (void)processMovieFrame:(CMSampleBufferRef)movieSampleBuffer;
@end
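For illustration, a hypothetical adopter of GPUImageMovieDelegate (the MoviePlayerViewController class is assumed, not part of this gist):

#import <UIKit/UIKit.h>
#import "GPUImageMovie.h"

@interface MoviePlayerViewController : UIViewController <GPUImageMovieDelegate>
@end

@implementation MoviePlayerViewController
// Invoked by GPUImageMovie once the asset reader finishes and endProcessing has run.
- (void)didCompletePlayingMovie
{
    NSLog(@"Movie finished playing");
}
@end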
#import "GPUImageMovie.h"
#import "GPUImageMovieWriter.h"
@interface GPUImageMovie ()
{
BOOL audioEncodingIsFinished, videoEncodingIsFinished, hasAudioTrack;
GPUImageMovieWriter *synchronizedMovieWriter;
CVOpenGLESTextureCacheRef coreVideoTextureCache;
AVAssetReader *reader;
AVAudioPlayer *audioPlayer;
CFAbsoluteTime startActualFrameTime;
CGFloat currentVideoTime;
}
- (void)processAsset;
- (void)setupProcessing;
- (void)setupSound;
@end
@implementation GPUImageMovie
@synthesize url = _url;
@synthesize asset = _asset;
@synthesize runBenchmark = _runBenchmark;
@synthesize playAtActualSpeed = _playAtActualSpeed;
@synthesize playSound = _playSound;
@synthesize delegate = _delegate;
#pragma mark -
#pragma mark Initialization and teardown
- (id)initWithURL:(NSURL *)url;
{
if (!(self = [super init]))
{
return nil;
}
[self textureCacheSetup];
self.url = url;
self.asset = nil;
return self;
}
- (id)initWithAsset:(AVAsset *)asset;
{
if (!(self = [super init]))
{
return nil;
}
[self textureCacheSetup];
self.url = nil;
self.asset = asset;
return self;
}
- (void)textureCacheSetup;
{
if ([GPUImageOpenGLESContext supportsFastTextureUpload])
{
runSynchronouslyOnVideoProcessingQueue(^{
[GPUImageOpenGLESContext useImageProcessingContext];
#if defined(__IPHONE_6_0)
CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, [[GPUImageOpenGLESContext sharedImageProcessingOpenGLESContext] context], NULL, &coreVideoTextureCache);
#else
CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, (__bridge void *)[[GPUImageOpenGLESContext sharedImageProcessingOpenGLESContext] context], NULL, &coreVideoTextureCache);
#endif
if (err)
{
NSAssert(NO, @"Error at CVOpenGLESTextureCacheCreate %d", err);
}
// Need to remove the initially created texture
[self deleteOutputTexture];
});
}
}
- (void)dealloc
{
if ([GPUImageOpenGLESContext supportsFastTextureUpload])
{
CFRelease(coreVideoTextureCache);
}
}
#pragma mark -
#pragma mark Movie processing
- (void)enableSynchronizedEncodingUsingMovieWriter:(GPUImageMovieWriter *)movieWriter;
{
synchronizedMovieWriter = movieWriter;
movieWriter.encodingLiveVideo = NO;
}
- (void)startProcessing
{
currentVideoTime = 0.0f;
[self setupProcessing];
}
- (void)resumeProcessing
{
[self setupProcessing];
}
- (void)setupProcessing
{
if(self.url == nil)
{
[self processAsset];
return;
}
NSDictionary *inputOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
AVURLAsset *inputAsset = [[AVURLAsset alloc] initWithURL:self.url options:inputOptions];
if (self.playSound)
{
[self setupSound];
}
[inputAsset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"tracks"] completionHandler: ^{
NSError *error = nil;
AVKeyValueStatus tracksStatus = [inputAsset statusOfValueForKey:@"tracks" error:&error];
if (tracksStatus != AVKeyValueStatusLoaded)
{
return;
}
self.asset = inputAsset;
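// Offset the wall-clock reference so frame pacing resumes from currentVideoTime rather than from zero.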
startActualFrameTime = CFAbsoluteTimeGetCurrent() - currentVideoTime;
[self processAsset];
}];
}
- (void)processAsset
{
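// Capture self without retaining it so the reader callbacks below don't create a retain cycle.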
__unsafe_unretained GPUImageMovie *weakSelf = self;
NSError *error = nil;
reader = [AVAssetReader assetReaderWithAsset:self.asset error:&error];
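// Start reading from the last played position, so resumeProcessing picks up where playback was paused.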
CMTimeRange timeRange = CMTimeRangeMake(CMTimeMakeWithSeconds(currentVideoTime, 1000), kCMTimePositiveInfinity);
NSMutableDictionary *outputSettings = [NSMutableDictionary dictionary];
[outputSettings setObject: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey: (NSString*)kCVPixelBufferPixelFormatTypeKey];
// Maybe set alwaysCopiesSampleData to NO on iOS 5.0 for faster video decoding
AVAssetReaderTrackOutput *readerVideoTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:[[self.asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] outputSettings:outputSettings];
[readerVideoTrackOutput setAlwaysCopiesSampleData:NO];
[reader addOutput:readerVideoTrackOutput];
[reader setTimeRange:timeRange];
NSArray *audioTracks = [self.asset tracksWithMediaType:AVMediaTypeAudio];
hasAudioTrack = [audioTracks count] > 0;
BOOL shouldRecordAudioTrack = (hasAudioTrack && (weakSelf.audioEncodingTarget != nil) );
AVAssetReaderTrackOutput *readerAudioTrackOutput = nil;
if (shouldRecordAudioTrack)
{
audioEncodingIsFinished = NO;
// This might need to be extended to handle movies with more than one audio track
AVAssetTrack* audioTrack = [audioTracks objectAtIndex:0];
readerAudioTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
[reader addOutput:readerAudioTrackOutput];
}
if ([reader startReading] == NO)
{
NSLog(@"Error reading from file at URL: %@", weakSelf.url);
return;
}
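// Seek the audio player to the current video time before starting it, keeping sound aligned with the reader.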
if (self.playSound && hasAudioTrack)
{
[audioPlayer setCurrentTime:currentVideoTime];
[audioPlayer play];
}
if (synchronizedMovieWriter != nil)
{
[synchronizedMovieWriter setVideoInputReadyCallback:^{
[weakSelf readNextVideoFrameFromOutput:readerVideoTrackOutput];
}];
[synchronizedMovieWriter setAudioInputReadyCallback:^{
[weakSelf readNextAudioSampleFromOutput:readerAudioTrackOutput];
}];
[synchronizedMovieWriter enableSynchronizationCallbacks];
}
else
{
while (reader.status == AVAssetReaderStatusReading)
{
[weakSelf readNextVideoFrameFromOutput:readerVideoTrackOutput];
if ( (shouldRecordAudioTrack) && (!audioEncodingIsFinished) )
{
[weakSelf readNextAudioSampleFromOutput:readerAudioTrackOutput];
}
}
if (reader.status == AVAssetReaderStatusCompleted) {
[weakSelf endProcessing];
if ([self.delegate respondsToSelector:@selector(didCompletePlayingMovie)]) {
[self.delegate didCompletePlayingMovie];
}
}
}
}
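// The sound channel is played through a separate AVAudioPlayer instead of being routed through an AVAssetReader audio output.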
- (void)setupSound
{
NSError *error = nil;
audioPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:self.url error:&error];
if (audioPlayer == nil) {
NSLog(@"Failed to initialise sound with error: %@", error);
}
[audioPlayer prepareToPlay];
}
- (void)readNextVideoFrameFromOutput:(AVAssetReaderTrackOutput *)readerVideoTrackOutput;
{
if (reader.status == AVAssetReaderStatusReading)
{
CMSampleBufferRef sampleBufferRef = [readerVideoTrackOutput copyNextSampleBuffer];
if (sampleBufferRef)
{
CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBufferRef);
if (_playAtActualSpeed)
{
// Do this outside of the video processing queue to not slow that down while waiting
CFAbsoluteTime currentActualTime = CFAbsoluteTimeGetCurrent();
CGFloat frameTimeOffset = CMTimeGetSeconds(currentSampleTime);
CGFloat actualTimeOffset = currentActualTime - startActualFrameTime;
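// When sound is playing, use the audio player's clock as the master clock so video stays lip-synced.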
if (self.playSound && hasAudioTrack)
{
actualTimeOffset = [audioPlayer currentTime];
}
if (frameTimeOffset - actualTimeOffset > 0.0f)
{
usleep(1000000.0 * (frameTimeOffset - actualTimeOffset));
}
}
currentVideoTime = CMTimeGetSeconds(currentSampleTime);
__unsafe_unretained GPUImageMovie *weakSelf = self;
runSynchronouslyOnVideoProcessingQueue(^{
[weakSelf processMovieFrame:sampleBufferRef];
});
CMSampleBufferInvalidate(sampleBufferRef);
CFRelease(sampleBufferRef);
}
else
{
videoEncodingIsFinished = YES;
[self endProcessing];
}
}
else if (synchronizedMovieWriter != nil)
{
if (reader.status == AVAssetReaderStatusCompleted)
{
[self endProcessing];
}
}
}
- (void)readNextAudioSampleFromOutput:(AVAssetReaderTrackOutput *)readerAudioTrackOutput;
{
if (audioEncodingIsFinished)
{
return;
}
CMSampleBufferRef audioSampleBufferRef = [readerAudioTrackOutput copyNextSampleBuffer];
if (audioSampleBufferRef)
{
runSynchronouslyOnVideoProcessingQueue(^{
[self.audioEncodingTarget processAudioBuffer:audioSampleBufferRef];
CMSampleBufferInvalidate(audioSampleBufferRef);
CFRelease(audioSampleBufferRef);
});
}
else
{
audioEncodingIsFinished = YES;
}
}
- (void)processMovieFrame:(CMSampleBufferRef)movieSampleBuffer;
{
CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(movieSampleBuffer);
CVImageBufferRef movieFrame = CMSampleBufferGetImageBuffer(movieSampleBuffer);
int bufferHeight = (int)CVPixelBufferGetHeight(movieFrame);
#if TARGET_IPHONE_SIMULATOR
int bufferWidth = (int)CVPixelBufferGetBytesPerRow(movieFrame) / 4; // This works around certain movie frame types on the Simulator (see https://github.com/BradLarson/GPUImage/issues/424)
#else
int bufferWidth = (int)CVPixelBufferGetWidth(movieFrame);
#endif
#endif
CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();
if ([GPUImageOpenGLESContext supportsFastTextureUpload])
{
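// Fast path: map the pixel buffer directly into an OpenGL ES texture via the texture cache, avoiding a glTexImage2D copy.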
CVPixelBufferLockBaseAddress(movieFrame, 0);
[GPUImageOpenGLESContext useImageProcessingContext];
CVOpenGLESTextureRef texture = NULL;
CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, coreVideoTextureCache, movieFrame, NULL, GL_TEXTURE_2D, GL_RGBA, bufferWidth, bufferHeight, GL_BGRA, GL_UNSIGNED_BYTE, 0, &texture);
if (!texture || err) {
CVPixelBufferUnlockBaseAddress(movieFrame, 0);
NSLog(@"Movie CVOpenGLESTextureCacheCreateTextureFromImage failed (error: %d)", err);
return;
}
outputTexture = CVOpenGLESTextureGetName(texture);
// glBindTexture(CVOpenGLESTextureGetTarget(texture), outputTexture);
glBindTexture(GL_TEXTURE_2D, outputTexture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
for (id<GPUImageInput> currentTarget in targets)
{
NSInteger indexOfObject = [targets indexOfObject:currentTarget];
NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
[currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:targetTextureIndex];
[currentTarget setInputTexture:outputTexture atIndex:targetTextureIndex];
[currentTarget setTextureDelegate:self atIndex:targetTextureIndex];
[currentTarget newFrameReadyAtTime:currentSampleTime atIndex:targetTextureIndex];
}
CVPixelBufferUnlockBaseAddress(movieFrame, 0);
// Flush the CVOpenGLESTexture cache and release the texture
CVOpenGLESTextureCacheFlush(coreVideoTextureCache, 0);
CFRelease(texture);
outputTexture = 0;
}
else
{
// Upload to texture
CVPixelBufferLockBaseAddress(movieFrame, 0);
glBindTexture(GL_TEXTURE_2D, outputTexture);
// Using BGRA extension to pull in video frame data directly
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bufferWidth, bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(movieFrame));
CGSize currentSize = CGSizeMake(bufferWidth, bufferHeight);
for (id<GPUImageInput> currentTarget in targets)
{
NSInteger indexOfObject = [targets indexOfObject:currentTarget];
NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
[currentTarget setInputSize:currentSize atIndex:targetTextureIndex];
[currentTarget newFrameReadyAtTime:currentSampleTime atIndex:targetTextureIndex];
}
CVPixelBufferUnlockBaseAddress(movieFrame, 0);
}
if (_runBenchmark)
{
CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);
NSLog(@"Current frame time : %f ms", 1000.0 * currentFrameTime);
}
}
- (void)endProcessing;
{
for (id<GPUImageInput> currentTarget in targets)
{
[currentTarget endProcessing];
}
if (synchronizedMovieWriter != nil)
{
[synchronizedMovieWriter setVideoInputReadyCallback:^{}];
[synchronizedMovieWriter setAudioInputReadyCallback:^{}];
}
if (audioPlayer != nil)
{
[audioPlayer stop];
}
}
@end