Skip to content

Instantly share code, notes, and snippets.

@ipreencekmr
Last active August 29, 2015 14:04
Show Gist options
  • Save ipreencekmr/a4c1b8c065b26771700e to your computer and use it in GitHub Desktop.
#import <UIKit/UIKit.h>
#import <Foundation/Foundation.h>
#import <CoreMedia/CoreMedia.h>
#import <CoreVideo/CoreVideo.h>
#import <CoreGraphics/CoreGraphics.h>
#import <AVFoundation/AVFoundation.h>
#import <QuartzCore/QuartzCore.h>
#import <MobileCoreServices/UTCoreTypes.h>
#import <AssetsLibrary/AssetsLibrary.h>
/// Invoked when merging/export finishes; receives the file URL of the merged movie.
typedef void(^CompletionHandler)(NSURL *finalVideoURL);

/// Merges a list of video clips into a single movie, optionally mixing in a
/// background audio track and burning in a subtitle, exports the result to the
/// Documents directory, and saves it to the photo library.
@interface VMerge : NSObject

// NOTE(review): neither of these appears to be assigned in the visible
// implementation — confirm they are still needed before removing.
@property(nonatomic,retain)AVAssetWriter *videoWriter;
@property(nonatomic,retain)AVAssetExportSession* assetExport;

// `copy` (was `retain`): an NSString property must be copied so a caller
// passing an NSMutableString cannot mutate the path mid-export.
@property(nonatomic, copy)NSString *finalVideoPath;

/// Merges the videos at the given paths in order (empty-string entries are skipped).
/// @param videoPathArray File-system paths (NSString) of the source clips.
/// @param audioURL Optional background audio file; pass nil for none.
/// @param subtitle Optional subtitle text; pass nil or @"" for none.
/// @param handler Called on the main queue with the merged movie's file URL.
-(void)mergeArrayOfVideos:(NSArray*)videoPathArray withAudio:(NSURL*)audioURL andSubtitle:(NSString*)subtitle andCompletionHandler:(CompletionHandler)handler;
@end
#import "VMerge.h"
@implementation VMerge
-(void)mergeArrayOfVideos:(NSArray*)videoPathArray withAudio:(NSURL*)audioURL andSubtitle:(NSString*)subtitle andCompletionHandler:(CompletionHandler)handler
{
    // Optional background audio.
    AVURLAsset *audioAsset = nil;
    if (audioURL != nil) {
        audioAsset = [[AVURLAsset alloc] initWithURL:audioURL options:nil];
    }

    AVMutableComposition *composition = [AVMutableComposition composition];
    AVMutableCompositionTrack *compositionVideoTrack =
        [composition addMutableTrackWithMediaType:AVMediaTypeVideo
                                 preferredTrackID:kCMPersistentTrackID_Invalid];

    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.frameDuration = CMTimeMake(1, 30); // 30 fps (set once; was duplicated)
    videoComposition.renderScale = 1.0;

    AVMutableVideoCompositionInstruction *instruction =
        [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    AVMutableVideoCompositionLayerInstruction *layerInstruction =
        [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];

    // Keep only the paths the user actually selected (drop empty entries).
    NSMutableArray *selectedPaths = [NSMutableArray array];
    for (NSString *path in videoPathArray) {
        if (![path isEqualToString:@""]) {
            [selectedPaths addObject:path];
        }
    }
    videoPathArray = selectedPaths;

    // Running offset (seconds) at which each clip's layer transform takes effect.
    float time = 0;
    for (NSUInteger i = 0; i < videoPathArray.count; i++) {
        AVURLAsset *sourceAsset =
            [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:[videoPathArray objectAtIndex:i]]
                                options:@{AVURLAssetPreferPreciseDurationAndTimingKey : @YES}];

        // BUGFIX: guard against assets with no video track instead of crashing
        // on objectAtIndex:0.
        AVAssetTrack *sourceVideoTrack = [[sourceAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
        if (sourceVideoTrack == nil) {
            NSLog(@"skipping asset with no video track: %@", [videoPathArray objectAtIndex:i]);
            continue;
        }

        // The preferred transform encodes rotation, so apply it to the natural
        // size before deciding landscape vs portrait. fabs (not fabsf): the
        // components are CGFloat, which is double on 64-bit.
        CGSize oriented = CGSizeApplyAffineTransform(sourceVideoTrack.naturalSize,
                                                     sourceVideoTrack.preferredTransform);
        CGSize size = CGSizeMake(fabs(oriented.width), fabs(oriented.height));

        // (Removed the per-clip videoComposition.renderSize assignment — it was
        // unconditionally overwritten with 320x480 after the loop.)
        CGFloat scaleToFitRatio = 320.0 / sourceVideoTrack.naturalSize.width;
        if (size.width > size.height) {
            // Landscape: scale to 320pt wide, then push down 160pt to center
            // vertically in the fixed 320x480 render canvas.
            CGAffineTransform scale = CGAffineTransformMakeScale(scaleToFitRatio, scaleToFitRatio);
            [layerInstruction setTransform:CGAffineTransformConcat(
                                               CGAffineTransformConcat(sourceVideoTrack.preferredTransform, scale),
                                               CGAffineTransformMakeTranslation(0, 160))
                                    atTime:CMTimeMakeWithSeconds(time, 30)];
        } else {
            // Portrait: scale to 320pt tall.
            scaleToFitRatio = 320.0 / sourceVideoTrack.naturalSize.height;
            CGAffineTransform scale = CGAffineTransformMakeScale(scaleToFitRatio, scaleToFitRatio);
            [layerInstruction setTransform:CGAffineTransformConcat(sourceVideoTrack.preferredTransform, scale)
                                    atTime:CMTimeMakeWithSeconds(time, 30)];
        }

        // Append this clip at the end of the composition built so far.
        NSError *error = nil;
        BOOL ok = [compositionVideoTrack insertTimeRange:sourceVideoTrack.timeRange
                                                 ofTrack:sourceVideoTrack
                                                  atTime:[composition duration]
                                                   error:&error];
        if (!ok) {
            // BUGFIX: surface the actual error instead of a generic message.
            NSLog(@"failed to append video track: %@", error);
        }
        time += CMTimeGetSeconds(sourceVideoTrack.timeRange.duration);
    }

    /*---- add background audio track ------*/
    if (audioAsset != nil) {
        AVAssetTrack *sourceAudioTrack = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
        if (sourceAudioTrack != nil) {
            AVMutableCompositionTrack *audioTrack =
                [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                         preferredTrackID:kCMPersistentTrackID_Invalid];
            NSError *audioError = nil;
            // BUGFIX: insert at kCMTimeZero — the original passed kCMTimeInvalid,
            // which is not a valid insertion point. Also stop discarding the error.
            if (![audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAsset.duration)
                                     ofTrack:sourceAudioTrack
                                      atTime:kCMTimeZero
                                       error:&audioError]) {
                NSLog(@"failed to add audio track: %@", audioError);
            }
        }
    }
    /*--------- end audio track ----*/

    instruction.layerInstructions = @[layerInstruction];
    instruction.timeRange = compositionVideoTrack.timeRange;
    videoComposition.instructions = @[instruction];
    // Final output canvas; overrides any per-clip natural size.
    videoComposition.renderSize = CGSizeMake(320.0, 480.0);

    /*----- burn the subtitle into the video, if any ---------*/
    if (subtitle != nil && ![subtitle isEqualToString:@""]) {
        [self applyVideoEffectsToComposition:videoComposition
                                        size:CGSizeMake(320.0, 480.0)
                                 writingText:subtitle];
    }
    /*---- end subtitle ----*/

    // Export to a randomized file name in Documents, replacing any stale file.
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    NSString *outputPath = [documentsDirectory stringByAppendingPathComponent:
                               [NSString stringWithFormat:@"FINAL_DESTINATION-%d.mov", arc4random() % 1000]];
    self.finalVideoPath = outputPath;
    NSFileManager *fm = [NSFileManager defaultManager];
    if ([fm fileExistsAtPath:outputPath]) {
        [fm removeItemAtPath:outputPath error:nil];
    }

    AVAssetExportSession *exporter =
        [[AVAssetExportSession alloc] initWithAsset:composition
                                         presetName:AVAssetExportPresetHighestQuality];
    exporter.outputURL = [NSURL fileURLWithPath:outputPath];
    exporter.outputFileType = AVFileTypeQuickTimeMovie;
    exporter.videoComposition = videoComposition;
    exporter.shouldOptimizeForNetworkUse = YES;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        // The export callback arrives on an arbitrary queue; hop to main
        // before invoking caller-facing completion work.
        dispatch_async(dispatch_get_main_queue(), ^{
            [self exportDidFinish:exporter withHandler:handler];
        });
    }];
}
#pragma mark- Apply Video Effects
/// Overlays `writingTxt` as a centered text banner on the rendered video by
/// attaching a Core Animation tool to the video composition.
/// @param composition The video composition to decorate (mutated in place).
/// @param size The render canvas size; layers are sized to fill it.
/// @param writingTxt The subtitle text to draw.
- (void)applyVideoEffectsToComposition:(AVMutableVideoComposition *)composition size:(CGSize)size writingText:(NSString*)writingTxt
{
    // 1 - Set up the text layer.
    CATextLayer *subtitle1Text = [[CATextLayer alloc] init];
    // BUGFIX: CATextLayer.font is a CFTypeRef; an NSString font name must be
    // bridged explicitly under ARC or this does not compile.
    [subtitle1Text setFont:(__bridge CFTypeRef)@"Helvetica-Bold"];
    [subtitle1Text setFontSize:18];
    [subtitle1Text setFrame:CGRectMake(0, 0, size.width, 100)];
    [subtitle1Text setString:writingTxt];
    [subtitle1Text setAlignmentMode:kCAAlignmentCenter];
    [subtitle1Text setForegroundColor:[[UIColor whiteColor] CGColor]];

    // 2 - The overlay layer that holds the text, clipped to the canvas.
    CALayer *overlayLayer = [CALayer layer];
    [overlayLayer addSublayer:subtitle1Text];
    overlayLayer.frame = CGRectMake(0, 0, size.width, size.height);
    [overlayLayer setMasksToBounds:YES];

    // 3 - Parent layer: video at the bottom, overlay on top.
    CALayer *parentLayer = [CALayer layer];
    CALayer *videoLayer = [CALayer layer];
    parentLayer.frame = CGRectMake(0, 0, size.width, size.height);
    videoLayer.frame = CGRectMake(0, 0, size.width, size.height);
    [parentLayer addSublayer:videoLayer];
    [parentLayer addSublayer:overlayLayer];

    composition.animationTool = [AVVideoCompositionCoreAnimationTool
        videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
}
#pragma mark- Export Video
/// Completion step for the export session: saves the finished movie to the
/// photo library (when compatible) and invokes the caller's handler.
/// The handler is invoked exactly once on every path (success, save failure,
/// incompatible file, or failed/cancelled export).
/// @param session The finished (or failed) export session.
/// @param handler Caller's completion block; receives the exported file URL,
///        or nil when the export itself did not complete.
- (void)exportDidFinish:(AVAssetExportSession*)session withHandler:(CompletionHandler)handler
{
    NSURL *destinationURL = [NSURL fileURLWithPath:self.finalVideoPath];

    if (session.status == AVAssetExportSessionStatusCompleted) {
        NSURL *outputURL = session.outputURL;
        ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
        if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL]) {
            [library writeVideoAtPathToSavedPhotosAlbum:outputURL
                                        completionBlock:^(NSURL *assetURL, NSError *error){
                dispatch_async(dispatch_get_main_queue(), ^{
                    if (error) {
                        NSLog(@"Error Saving File");
                    } else {
                        NSLog(@"Video Successfully saved in photo Library");
                    }
                    // BUGFIX: the original kicked off a SECOND photo-library
                    // save of the same file on success; save once, then report.
                    if (handler) handler(destinationURL);
                });
            }];
        } else {
            // BUGFIX: the original never invoked the handler when the file was
            // not photo-library compatible — the export still succeeded, so
            // hand the file back.
            if (handler) handler(destinationURL);
        }
    } else {
        // BUGFIX: the original silently swallowed failed/cancelled exports,
        // leaving the caller waiting forever. Report failure with nil.
        NSLog(@"export failed with status %ld: %@", (long)session.status, session.error);
        if (handler) handler(nil);
    }
}
@end
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment