+ (void)createMovieFromImages:(NSString *)folderName file:(NSString *)fileName withSize:(CGSize)imageSize imageArray:(NSMutableArray *)imageArray {
    int duration = 60; // Movie duration in seconds
    // Build the output path inside the Documents directory (optionally inside folderName).
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectoryPath = [paths objectAtIndex:0];
    NSString *myFilePath;
    if ([folderName length] == 0) {
        myFilePath = [documentsDirectoryPath stringByAppendingPathComponent:fileName];
    } else {
        myFilePath = [[documentsDirectoryPath stringByAppendingPathComponent:folderName]
                      stringByAppendingPathComponent:fileName];
    }
    NSError *error = nil;

    // Prepare the asset writer (AVFileTypeQuickTimeMovie or AVFileTypeMPEG4).
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:myFilePath]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:imageSize.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:imageSize.height], AVVideoHeightKey,
                                   nil];
    AVAssetWriterInput *writerInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                          outputSettings:videoSettings] retain];
    AVAssetWriterInputPixelBufferAdaptor *adaptor =
        [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                                          sourcePixelBufferAttributes:nil];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);
    [videoWriter addInput:writerInput];

    // Start writing.
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];
    CVPixelBufferRef buffer = NULL;
    /* buffer = [self pixelBufferFromCGImage:[[imageArray objectAtIndex:0] CGImage] andSize:CGSizeMake(imageSize.width, imageSize.height)];
    CVPixelBufferPoolCreatePixelBuffer(NULL, adaptor.pixelBufferPool, &buffer);
    [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero]; */
    // Decide how the images map onto the timeline. If there are at least as many
    // images as seconds, framePerSecond is the number of images shown per second;
    // otherwise it is the number of seconds each image stays on screen.
    int y = (int)[imageArray count];
    int x = 0;
    int framePerSecond = 0;
    if (duration <= y) {
        framePerSecond = lroundf((float)y / (float)duration);
    } else {
        framePerSecond = duration / y;
    }
    NSLog(@"framePerSecond:%d total image:%d duration:%d", framePerSecond, y, duration);
    // Loop through the image array, appending one pixel buffer per frame.
    while (x < y) {
        if (writerInput.readyForMoreMediaData == YES) {
            if (duration > y) {
                // Fewer images than seconds: write one frame per second and hold
                // each image on screen for framePerSecond seconds.
                for (int i = 1; i <= duration; i++) {
                    CMTime frameTime = CMTimeMake(1, 1);
                    CMTime lastTime = CMTimeMake(i, 1);
                    CMTime presentTime = CMTimeAdd(lastTime, frameTime);
                    // Global is an app-specific helper that loads the image for the given file name.
                    UIImage *image = [Global getImageFromAlbum:folderName file:[imageArray objectAtIndex:x]];
                    buffer = [self pixelBufferFromCGImage:image.CGImage andSize:CGSizeMake(imageSize.width, imageSize.height)];
                    [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
                    CVBufferRelease(buffer);
                    // Advance to the next image after framePerSecond frames.
                    if (i % framePerSecond == 0) {
                        x = x + 1;
                        if (x >= y) break;
                    }
                }
            } else {
                // At least as many images as seconds: show framePerSecond images per second.
                CMTime frameTime = CMTimeMake(1, framePerSecond);
                CMTime lastTime = CMTimeMake(x, framePerSecond);
                CMTime presentTime = CMTimeAdd(lastTime, frameTime);
                UIImage *image = [Global getImageFromAlbum:folderName file:[imageArray objectAtIndex:x]];
                buffer = [self pixelBufferFromCGImage:image.CGImage andSize:CGSizeMake(imageSize.width, imageSize.height)];
                [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
                CVBufferRelease(buffer);
                x = x + 1;
            }
        }
    }
    // Finish writing.
    [writerInput markAsFinished];
    [videoWriter finishWriting]; // Synchronous; deprecated in iOS 6 in favor of finishWritingWithCompletionHandler:.

    // Clean up (manual reference counting).
    CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
    [videoWriter release];
    [writerInput release];
    [imageArray removeAllObjects];
}
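
// The method above relies on a -pixelBufferFromCGImage:andSize: helper that is not
// included in this gist. The sketch below is one common way to implement it under
// manual reference counting; the original author's version may differ.
+ (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image andSize:(CGSize)size {
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], (id)kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], (id)kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
                                          (size_t)size.width, (size_t)size.height,
                                          kCVPixelFormatType_32ARGB,
                                          (CFDictionaryRef)options, &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);

    // Draw the CGImage into the pixel buffer's backing memory.
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata,
                                                 (size_t)size.width, (size_t)size.height,
                                                 8, CVPixelBufferGetBytesPerRow(pxbuffer),
                                                 rgbColorSpace, kCGImageAlphaNoneSkipFirst);
    CGContextDrawImage(context, CGRectMake(0, 0, size.width, size.height), image);
    CGContextRelease(context);
    CGColorSpaceRelease(rgbColorSpace);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    // The caller owns the returned buffer and releases it with CVBufferRelease,
    // as createMovieFromImages: does above.
    return pxbuffer;
}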