@javiersuazo
Created March 9, 2017 01:40
Merge videos on iOS with audio and the right orientation preserved.
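
The implementation imports MKOVideoMerge.h, which is not included in the gist. A minimal interface consistent with the call sites below might look like this (the declaration is an assumption; only the method name and signature are taken from the .m file):

    // MKOVideoMerge.h — assumed minimal interface; not part of this gist.
    #import <Foundation/Foundation.h>

    @interface MKOVideoMerge : NSObject

    // Merges the given movie files in order and calls back on the main queue
    // with the merged file's URL, or an error.
    + (void)mergeVideoFiles:(NSArray *)fileURLs
                 completion:(void (^)(NSURL *mergedVideoFile, NSError *error))completion;

    @end
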
#import "MKOVideoMerge.h"
#import <AVFoundation/AVFoundation.h>
@implementation MKOVideoMerge
+ (void)mergeVideoFiles:(NSArray *)fileURLs
             completion:(void (^)(NSURL *mergedVideoFile, NSError *error))completion {
    NSLog(@"Start merging video files ...");

    // One composition with a single video track and a single audio track;
    // each source clip is appended to both.
    AVMutableComposition *composition = [[AVMutableComposition alloc] init];
    AVMutableCompositionTrack *videoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                     preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *audioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                     preferredTrackID:kCMPersistentTrackID_Invalid];

    NSMutableArray *instructions = [NSMutableArray new];
    __block BOOL errorOccurred = NO;
    __block CMTime currentTime = kCMTimeZero;
    __block CGSize size = CGSizeZero;
    __block int32_t highestFrameRate = 0;
    __block BOOL isPortrait_ = NO;
    [fileURLs enumerateObjectsUsingBlock:^(NSURL *fileURL, NSUInteger idx, BOOL *stop) {
        NSDictionary *options = @{AVURLAssetPreferPreciseDurationAndTimingKey: @YES};
        AVURLAsset *sourceAsset = [AVURLAsset URLAssetWithURL:fileURL options:options];

        // First video and audio track of the source clip.
        AVAssetTrack *videoAsset = [[sourceAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
        AVAssetTrack *audioAsset = [[sourceAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];

        size = videoAsset.naturalSize;
        NSLog(@"Video #%lu => width: %f height: %f", (unsigned long)(idx + 1), size.width, size.height);

        int32_t currentFrameRate = (int32_t)roundf(videoAsset.nominalFrameRate);
        highestFrameRate = (currentFrameRate > highestFrameRate) ? currentFrameRate : highestFrameRate;
        NSLog(@"* %@ (%dfps)", [fileURL lastPathComponent], currentFrameRate);

        // timescale / fps is the duration of one frame in the track's time
        // units, so this trims roughly one frame off the start of each clip
        // (and shortens the duration to match) to avoid a stray black or
        // duplicate frame at the joins.
        CMTime trimmingTime = CMTimeMake(lround(videoAsset.naturalTimeScale / videoAsset.nominalFrameRate),
                                         videoAsset.naturalTimeScale);
        CMTimeRange timeRange = CMTimeRangeMake(trimmingTime,
                                                CMTimeSubtract(videoAsset.timeRange.duration, trimmingTime));

        NSError *videoError;
        BOOL videoResult = [videoTrack insertTimeRange:timeRange ofTrack:videoAsset atTime:currentTime error:&videoError];
        NSError *audioError;
        BOOL audioResult = [audioTrack insertTimeRange:timeRange ofTrack:audioAsset atTime:currentTime error:&audioError];

        if (!videoResult || !audioResult || videoError || audioError) {
            if (completion) completion(nil, videoError ?: audioError);
            errorOccurred = YES;
            *stop = YES;
        } else {
            isPortrait_ = [self isVideoPortrait:videoAsset];

            // Carry the source track's preferredTransform into the composition
            // so the merged video keeps the original orientation.
            AVMutableVideoCompositionInstruction *videoCompositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
            videoCompositionInstruction.timeRange = CMTimeRangeMake(currentTime, timeRange.duration);
            AVMutableVideoCompositionLayerInstruction *videoLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
            [videoLayerInstruction setTransform:videoAsset.preferredTransform atTime:currentTime];
            videoCompositionInstruction.layerInstructions = @[videoLayerInstruction];
            [instructions addObject:videoCompositionInstruction];

            currentTime = CMTimeAdd(currentTime, timeRange.duration);
        }
    }];
    if (errorOccurred == NO) {
        AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition
                                                                               presetName:AVAssetExportPresetHighestQuality];
        NSString *fileName = [MKOVideoMerge generateFileName];
        NSString *filePath = [MKOVideoMerge documentsPathWithFilePath:fileName];
        exportSession.outputURL = [NSURL fileURLWithPath:filePath];
        exportSession.outputFileType = AVFileTypeMPEG4;
        exportSession.shouldOptimizeForNetworkUse = YES;

        AVMutableVideoComposition *mutableVideoComposition = [AVMutableVideoComposition videoComposition];
        mutableVideoComposition.instructions = instructions;
        // Render at the highest frame rate found across the source clips.
        // (The original also reassigned frameDuration to a hard-coded 1/30
        // further down, which silently overrode this; that line is dropped.)
        mutableVideoComposition.frameDuration = CMTimeMake(1, highestFrameRate);

        // For portrait clips, swap width and height so the render size
        // matches the rotated output.
        if (isPortrait_) {
            mutableVideoComposition.renderSize = CGSizeMake(size.height, size.width);
        } else {
            mutableVideoComposition.renderSize = size;
        }
        exportSession.videoComposition = mutableVideoComposition;

        NSLog(@"Composition Duration: %ld seconds", lround(CMTimeGetSeconds(composition.duration)));
        NSLog(@"Composition Framerate: %d fps", highestFrameRate);

        // Always deliver the result on the main queue.
        void (^exportCompletion)(void) = ^{
            dispatch_async(dispatch_get_main_queue(), ^{
                if (completion) completion(exportSession.outputURL, exportSession.error);
            });
        };

        [exportSession exportAsynchronouslyWithCompletionHandler:^{
            switch (exportSession.status) {
                case AVAssetExportSessionStatusFailed:
                case AVAssetExportSessionStatusCancelled:
                    exportCompletion();
                    break;
                case AVAssetExportSessionStatusCompleted:
                    NSLog(@"Successfully merged video files into: %@", fileName);
                    exportCompletion();
                    break;
                case AVAssetExportSessionStatusUnknown:
                    NSLog(@"Export Status: Unknown");
                    break;
                case AVAssetExportSessionStatusExporting:
                    NSLog(@"Export Status: Exporting");
                    break;
                case AVAssetExportSessionStatusWaiting:
                    NSLog(@"Export Status: Waiting");
                    break;
            }
        }];
    }
}
+ (NSURL *)applicationDocumentsDirectory {
    return [[[NSFileManager defaultManager] URLsForDirectory:NSDocumentDirectory inDomains:NSUserDomainMask] lastObject];
}

+ (NSString *)documentsPathWithFilePath:(NSString *)filePath {
    return [[MKOVideoMerge applicationDocumentsDirectory].path stringByAppendingPathComponent:filePath];
}

+ (NSString *)generateFileName {
    return [NSString stringWithFormat:@"video-%@.mp4", [[NSProcessInfo processInfo] globallyUniqueString]];
}
+ (BOOL)isVideoPortrait:(AVAssetTrack *)videoTrack {
    BOOL isPortrait = NO;
    // The preferredTransform encodes the recording orientation; compare its
    // rotation components against the four known device orientations.
    CGAffineTransform t = videoTrack.preferredTransform;

    // Portrait (rotated 90°)
    if (t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0) {
        isPortrait = YES;
    }
    // PortraitUpsideDown (rotated -90°)
    if (t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
        isPortrait = YES;
    }
    // LandscapeRight (identity)
    if (t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0) {
        isPortrait = NO;
    }
    // LandscapeLeft (rotated 180°)
    if (t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0) {
        isPortrait = NO;
    }
    return isPortrait;
}
@end
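
A minimal usage sketch (the file paths are placeholders; per the implementation above, the completion block is invoked on the main queue):

    NSArray *clips = @[[NSURL fileURLWithPath:@"/path/to/first.mp4"],
                       [NSURL fileURLWithPath:@"/path/to/second.mp4"]];
    [MKOVideoMerge mergeVideoFiles:clips completion:^(NSURL *mergedVideoFile, NSError *error) {
        if (error) {
            NSLog(@"Merge failed: %@", error);
            return;
        }
        NSLog(@"Merged video written to: %@", mergedVideoFile);
    }];
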
@nezhyborets

Could you please explain this part?

CMTime trimmingTime = CMTimeMake(lround(videoAsset.naturalTimeScale / videoAsset.nominalFrameRate), videoAsset.naturalTimeScale);
CMTimeRange timeRange = CMTimeRangeMake(trimmingTime, CMTimeSubtract(videoAsset.timeRange.duration, trimmingTime));
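
A likely reading: naturalTimeScale / nominalFrameRate is the length of one frame expressed in the track's time units, so trimmingTime is roughly one frame. For example, with a timescale of 600 and a 30 fps clip, trimmingTime = CMTimeMake(20, 600) = 1/30 s. The time range then starts one frame into the clip and is shortened by the same amount, i.e. the first frame of every source video is dropped, presumably to avoid a black or duplicate frame at the joins.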
