This is a quick lift of code from Apple's AV Composition Debug Viewer sample application, reworked into a standalone function that generates a CGImage map of a composition. Works on both OS X and iOS. The original's text drawing is commented out, so the image contains no labels.
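A minimal usage sketch, assuming ARC. The variables movieURL and outputURL are hypothetical placeholders, and writing the PNG needs ImageIO plus the kUTTypePNG constant from MobileCoreServices (iOS) or CoreServices (OS X):

// movieURL: NSURL of a source movie; outputURL: NSURL for the PNG (both hypothetical).
AVMutableComposition *composition = [AVMutableComposition composition];
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:movieURL options:nil];
NSError *error = nil;
[composition insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                     ofAsset:asset
                      atTime:kCMTimeZero
                       error:&error];
CGImageRef map = MIAVCreateImageOfCompositionMap(composition, nil, nil);
CGImageDestinationRef dest = CGImageDestinationCreateWithURL(
    (__bridge CFURLRef)outputURL, kUTTypePNG, 1, NULL);
CGImageDestinationAddImage(dest, map, NULL);
CGImageDestinationFinalize(dest);
CFRelease(dest);
CGImageRelease(map);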
//
// MIAVCreateImageOfCompositionMap.m
// MovingImagesFramework
//
// Created by Kevin Meaney on 08/03/2015.
//
// Lifted from Apple's Composition Debug Viewer application.
@import Foundation;
@import AVFoundation;
#import "MIAVFoundationUtilities.h"
enum { kLeftInsetToMatchTimeSlider = 70, kRightInsetToMatchTimeSlider = 35,
       kLeftMarginInset = 4 };
enum { kBannerHeight = 20, kIdealRowHeight = 36, kGapAfterRows = 4 };
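// Plain record objects (public ivars, no properties) used to stash the
// information gathered from the composition, video composition and audio mix
// before drawing.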
@interface APLCompositionTrackSegmentInfo : NSObject
{
@public
    CMTimeRange timeRange;
    BOOL empty;
    NSString *mediaType;
    NSString *description;
}
@end

@implementation APLCompositionTrackSegmentInfo

- (void)dealloc
{
    mediaType = nil;
    description = nil;
}

@end
@interface APLVideoCompositionStageInfo : NSObject
{
@public
    CMTimeRange timeRange;
    NSArray *layerNames; // for videoComposition only
    NSDictionary *opacityRamps;
}
@end

@implementation APLVideoCompositionStageInfo

- (void)dealloc
{
    layerNames = nil;
    opacityRamps = nil;
}

@end
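// Renders a schematic map of a composition into a 1000 x 600 bitmap: one row
// per composition track, a row of stages for the video composition, and one
// row per audio mix input with its volume ramp drawn as a polyline. Any of the
// three arguments may be nil; the caller owns the returned CGImage.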
CGImageRef MIAVCreateImageOfCompositionMap(AVComposition *composition,
                                           AVVideoComposition *videoComposition,
                                           AVAudioMix *audioMix)
{
    NSArray *compositionTracks = nil;
    NSArray *audioMixTracks = nil;
    NSArray *videoCompositionStages = nil;
    CGFloat compositionRectWidth;
    CGFloat scaledDurationToWidth;
    CMTime duration = CMTimeMake(1, 1); // avoid division by zero later

    if (composition) {
        NSMutableArray *tracks = [[NSMutableArray alloc] init];
        for (AVCompositionTrack *t in composition.tracks) {
            NSMutableArray *segments = [[NSMutableArray alloc] init];
            for (AVCompositionTrackSegment *s in t.segments) {
                APLCompositionTrackSegmentInfo *segment =
                    [[APLCompositionTrackSegmentInfo alloc] init];
                if (s.isEmpty)
                    segment->timeRange = s.timeMapping.target; // only used for duration
                else
                    segment->timeRange = s.timeMapping.source; // assumes non-scaled edit
                segment->empty = s.isEmpty;
                segment->mediaType = t.mediaType;
                if (!segment->empty) {
                    NSMutableString *description = [[NSMutableString alloc] init];
                    [description appendFormat:@"%1.1f - %1.1f: \"%@\" ",
                        CMTimeGetSeconds(segment->timeRange.start),
                        CMTimeGetSeconds(CMTimeRangeGetEnd(segment->timeRange)),
                        [s.sourceURL lastPathComponent]];
                    if ([segment->mediaType isEqual:AVMediaTypeVideo])
                        [description appendString:@"(v)"];
                    else if ([segment->mediaType isEqual:AVMediaTypeAudio])
                        [description appendString:@"(a)"];
                    else
                        [description appendFormat:@"('%@')", segment->mediaType];
                    segment->description = description;
                }
                [segments addObject:segment];
            }
            [tracks addObject:segments];
        }
        compositionTracks = tracks;
        duration = CMTimeMaximum(duration, composition.duration);
    }
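    // Gather the audio mix volume ramps. Each input's ramps are flattened
    // into a list of (seconds, volume) points: getVolumeRampForTime: is
    // walked forward from kCMTimeZero until it reports no further ramp, and
    // a trailing point at the full duration holds the last volume.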
    if (audioMix) {
        NSMutableArray *mixTracks = [[NSMutableArray alloc] init];
        for (AVAudioMixInputParameters *input in audioMix.inputParameters) {
            NSMutableArray *ramp = [[NSMutableArray alloc] init];
            CMTime startTime = kCMTimeZero;
            float startVolume, endVolume = 1.0;
            CMTimeRange timeRange;
            while ([input getVolumeRampForTime:startTime startVolume:&startVolume
                                     endVolume:&endVolume timeRange:&timeRange]) {
                if (CMTIME_COMPARE_INLINE(startTime, ==, kCMTimeZero) &&
                    CMTIME_COMPARE_INLINE(timeRange.start, >, kCMTimeZero)) {
#if TARGET_OS_IPHONE
                    [ramp addObject:[NSValue valueWithCGPoint:CGPointMake(0, 1.0)]];
                    [ramp addObject:[NSValue valueWithCGPoint:CGPointMake(
                        CMTimeGetSeconds(timeRange.start), 1.0)]];
#else
                    [ramp addObject:[NSValue valueWithPoint:NSMakePoint(0, 1.0)]];
                    [ramp addObject:[NSValue valueWithPoint:NSMakePoint(
                        CMTimeGetSeconds(timeRange.start), 1.0)]];
#endif
                }
#if TARGET_OS_IPHONE
                [ramp addObject:[NSValue valueWithCGPoint:CGPointMake(
                    CMTimeGetSeconds(timeRange.start), startVolume)]];
                [ramp addObject:[NSValue valueWithCGPoint:CGPointMake(
                    CMTimeGetSeconds(CMTimeRangeGetEnd(timeRange)), endVolume)]];
#else
                [ramp addObject:[NSValue valueWithPoint:NSMakePoint(
                    CMTimeGetSeconds(timeRange.start), startVolume)]];
                [ramp addObject:[NSValue valueWithPoint:NSMakePoint(
                    CMTimeGetSeconds(CMTimeRangeGetEnd(timeRange)), endVolume)]];
#endif
                startTime = CMTimeRangeGetEnd(timeRange);
            }
            if (CMTIME_COMPARE_INLINE(startTime, <, duration)) {
#if TARGET_OS_IPHONE
                [ramp addObject:[NSValue valueWithCGPoint:CGPointMake(
                    CMTimeGetSeconds(duration), endVolume)]];
#else
                [ramp addObject:[NSValue valueWithPoint:NSMakePoint(
                    CMTimeGetSeconds(duration), endVolume)]];
#endif
            }
            [mixTracks addObject:ramp];
        }
        audioMixTracks = mixTracks;
    }
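    // Gather the video composition stages: one APLVideoCompositionStageInfo
    // per instruction, recording the layer (track ID) names and, when a stage
    // composites more than one layer, the opacity ramps keyed by layer name.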
    if (videoComposition) {
        NSMutableArray *stages = [[NSMutableArray alloc] init];
        for (AVVideoCompositionInstruction *instruction in videoComposition.instructions) {
            APLVideoCompositionStageInfo *stage = [[APLVideoCompositionStageInfo alloc] init];
            stage->timeRange = instruction.timeRange;
            NSMutableDictionary *rampsDictionary = [[NSMutableDictionary alloc] init];
            if ([instruction isKindOfClass:[AVVideoCompositionInstruction class]]) {
                NSMutableArray *layerNames = [[NSMutableArray alloc] init];
                for (AVVideoCompositionLayerInstruction *layerInstruction in
                     instruction.layerInstructions) {
                    NSMutableArray *ramp = [[NSMutableArray alloc] init];
                    CMTime startTime = kCMTimeZero;
                    float startOpacity, endOpacity = 1.0;
                    CMTimeRange timeRange;
                    while ([layerInstruction getOpacityRampForTime:startTime
                               startOpacity:&startOpacity endOpacity:&endOpacity
                                  timeRange:&timeRange]) {
                        if (CMTIME_COMPARE_INLINE(startTime, ==, kCMTimeZero) &&
                            CMTIME_COMPARE_INLINE(timeRange.start, >, kCMTimeZero)) {
#if TARGET_OS_IPHONE
                            [ramp addObject:[NSValue valueWithCGPoint:CGPointMake(
                                CMTimeGetSeconds(timeRange.start), startOpacity)]];
#else
                            [ramp addObject:[NSValue valueWithPoint:NSMakePoint(
                                CMTimeGetSeconds(timeRange.start), startOpacity)]];
#endif
                        }
#if TARGET_OS_IPHONE
                        [ramp addObject:[NSValue valueWithCGPoint:CGPointMake(
                            CMTimeGetSeconds(CMTimeRangeGetEnd(timeRange)), endOpacity)]];
#else
                        [ramp addObject:[NSValue valueWithPoint:NSMakePoint(
                            CMTimeGetSeconds(CMTimeRangeGetEnd(timeRange)), endOpacity)]];
#endif
                        startTime = CMTimeRangeGetEnd(timeRange);
                    }
                    NSString *name = [NSString stringWithFormat:@"%d",
                        layerInstruction.trackID];
                    [layerNames addObject:name];
                    [rampsDictionary setObject:ramp forKey:name];
                }
                if ([layerNames count] > 1) {
                    stage->opacityRamps = rampsDictionary;
                }
                stage->layerNames = layerNames;
                [stages addObject:stage];
            }
        }
        videoCompositionStages = stages;
    }
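    // Create the bitmap context and work out the layout: a banner plus a run
    // of rows for each of the three sections that is present, fitted into a
    // fixed 1000 x 600 canvas with the same horizontal insets as the debug
    // viewer's time slider.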
    size_t width = 1000;
    size_t height = 600;
    size_t bytesPerRow = width * 4;
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(NULL, width, height, 8, bytesPerRow,
        colorSpace, kCGImageAlphaPremultipliedFirst | kCGBitmapByteOrder32Big);
    CGColorSpaceRelease(colorSpace);
    CGFloat margin = 4.0;
    CGRect rect = CGRectMake(margin, margin, width - 2 * margin, height - 2 * margin);
/*
    NSMutableParagraphStyle *style = [[NSParagraphStyle defaultParagraphStyle] mutableCopy];
    [style setAlignment:NSCenterTextAlignment];
    NSDictionary *textAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
        [NSColor whiteColor], NSForegroundColorAttributeName,
        style, NSParagraphStyleAttributeName, nil];
*/
    int numBanners = (compositionTracks != nil) + (audioMixTracks != nil) +
        (videoCompositionStages != nil);
    // The video composition is drawn as a single row, hence the != nil term.
    int numRows = (int)[compositionTracks count] + (int)[audioMixTracks count] +
        (videoCompositionStages != nil);
    CGFloat totalBannerHeight = numBanners * (kBannerHeight + kGapAfterRows);
    CGFloat rowHeight = kIdealRowHeight;
    if (numRows > 0) {
        CGFloat maxRowHeight = (rect.size.height - totalBannerHeight) / numRows;
        rowHeight = MIN(rowHeight, maxRowHeight);
    }
    CGFloat runningTop = rect.size.height - 15;
    CGRect bannerRect = rect;
    bannerRect.size.height = kBannerHeight;
    bannerRect.origin.y = runningTop;
    CGRect rowRect = rect;
    rowRect.size.height = rowHeight;
    rowRect.origin.x += kLeftInsetToMatchTimeSlider;
    rowRect.size.width -= (kLeftInsetToMatchTimeSlider + kRightInsetToMatchTimeSlider);
    compositionRectWidth = rowRect.size.width;
    scaledDurationToWidth = compositionRectWidth / CMTimeGetSeconds(duration);
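    // Draw the composition tracks (top section). Each segment becomes a
    // filled, stroked rectangle whose width is its duration in seconds times
    // scaledDurationToWidth (pixels per second); empty segments leave a gap.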
    if (compositionTracks) {
        bannerRect.origin.y = runningTop;
        CGContextSetRGBFillColor(context, 0.00, 0.00, 0.00, 1.00); // black
        // [[NSString stringWithFormat:@"AVComposition"] drawInRect:bannerRect
        //     withAttributes:[NSDictionary dictionaryWithObjectsAndKeys:
        //         [NSColor whiteColor], NSForegroundColorAttributeName, nil]];
        runningTop -= bannerRect.size.height;
        for (NSArray *track in compositionTracks) {
            rowRect.origin.y = runningTop;
            CGRect segmentRect = rowRect;
            for (APLCompositionTrackSegmentInfo *segment in track) {
                segmentRect.size.width = CMTimeGetSeconds(segment->timeRange.duration) *
                    scaledDurationToWidth;
                if (segment->empty) {
                    CGContextSetRGBFillColor(context, 0.00, 0.00, 0.00, 1.00);
                    // [[NSString stringWithFormat:@"empty"]
                    //     drawInRect:segmentRect withAttributes:textAttributes];
                }
                else {
                    if ([segment->mediaType isEqual:AVMediaTypeVideo]) {
                        CGContextSetRGBFillColor(context, 0.00, 0.36, 0.36, 1.00); // blue-green
                        CGContextSetRGBStrokeColor(context, 0.00, 0.50, 0.50, 1.00); // brighter blue-green
                    }
                    else {
                        CGContextSetRGBFillColor(context, 0.00, 0.24, 0.36, 1.00); // bluer-green
                        CGContextSetRGBStrokeColor(context, 0.00, 0.33, 0.60, 1.00); // brighter bluer-green
                    }
                    CGContextSetLineWidth(context, 2.0);
                    CGContextAddRect(context, CGRectInset(segmentRect, 3.0, 3.0));
                    CGContextDrawPath(context, kCGPathFillStroke);
                    CGContextSetRGBFillColor(context, 0.00, 0.00, 0.00, 1.00);
                    // [[NSString stringWithFormat:@"%@", segment->description]
                    //     drawInRect:segmentRect withAttributes:textAttributes];
                }
                segmentRect.origin.x += segmentRect.size.width;
            }
            runningTop -= rowRect.size.height;
        }
        runningTop -= kGapAfterRows;
    }
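    // Draw the video composition stages (middle section). Each stage's rect
    // is split vertically between its layers, coloured by odd/even track ID,
    // and any opacity ramp is overlaid as a yellow polyline.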
    if (videoCompositionStages) {
        bannerRect.origin.y = runningTop;
        CGContextSetRGBFillColor(context, 0.00, 0.00, 0.00, 1.00); // black
        // [[NSString stringWithFormat:@"AVVideoComposition"] drawInRect:bannerRect
        //     withAttributes:[NSDictionary dictionaryWithObjectsAndKeys:
        //         [NSColor whiteColor], NSForegroundColorAttributeName, nil]];
        runningTop -= bannerRect.size.height;
        rowRect.origin.y = runningTop;
        CGRect stageRect = rowRect;
        for (APLVideoCompositionStageInfo *stage in videoCompositionStages) {
            double seconds = 0.0;
            CMTime time = stage->timeRange.start;
            if (CMTIME_IS_NUMERIC(time) && CMTIME_COMPARE_INLINE(time, >, kCMTimeZero)) {
                seconds = CMTimeGetSeconds(time);
            }
            stageRect.origin.x = seconds * scaledDurationToWidth +
                kLeftInsetToMatchTimeSlider + kLeftMarginInset;
            // stageRect.origin.x = [self horizontalPositionForTime:stage->timeRange.start];
            stageRect.size.width = CMTimeGetSeconds(stage->timeRange.duration) *
                scaledDurationToWidth;
            CGFloat layerCount = [stage->layerNames count];
            CGRect layerRect = stageRect;
            if (layerCount > 0)
                layerRect.size.height /= layerCount;
            if (layerCount > 1)
                layerRect.origin.y += layerRect.size.height;
            for (NSString *layerName in stage->layerNames) {
                if ([layerName intValue] % 2 == 1) {
                    CGContextSetRGBFillColor(context, 0.55, 0.02, 0.02, 1.00); // darker red
                    CGContextSetRGBStrokeColor(context, 0.87, 0.10, 0.10, 1.00); // brighter red
                }
                else {
                    CGContextSetRGBFillColor(context, 0.00, 0.40, 0.76, 1.00); // darker blue
                    CGContextSetRGBStrokeColor(context, 0.00, 0.67, 1.00, 1.00); // brighter blue
                }
                CGContextSetLineWidth(context, 2.0);
                CGContextAddRect(context, CGRectInset(layerRect, 3.0, 1.0));
                CGContextDrawPath(context, kCGPathFillStroke);
                // (If there are two layers, the first should ideally have a gradient fill.)
                CGContextSetRGBFillColor(context, 0.00, 0.00, 0.00, 1.00); // black
                // [[NSString stringWithFormat:@"%@", layerName]
                //     drawInRect:layerRect withAttributes:textAttributes];
                // Draw the opacity ramps for each layer as per the layerInstructions.
                NSArray *rampArray = [stage->opacityRamps objectForKey:layerName];
                if ([rampArray count] > 0) {
                    CGRect rampRect = layerRect;
                    rampRect.size.width = CMTimeGetSeconds(duration) * scaledDurationToWidth;
                    rampRect = CGRectInset(rampRect, 3.0, 3.0);
                    CGContextBeginPath(context);
                    CGContextSetRGBStrokeColor(context, 0.95, 0.68, 0.09, 1.00); // yellow
                    CGContextSetLineWidth(context, 2.0);
                    BOOL firstPoint = YES;
                    for (NSValue *pointValue in rampArray) {
#if TARGET_OS_IPHONE
                        CGPoint timeVolumePoint = [pointValue CGPointValue];
#else
                        CGPoint timeVolumePoint = [pointValue pointValue];
#endif
                        CGPoint pointInRow;
                        double rampSeconds = 0.0; // renamed to avoid shadowing the stage's seconds/time above
                        CMTime rampTime = CMTimeMakeWithSeconds(timeVolumePoint.x, 1);
                        if (CMTIME_IS_NUMERIC(rampTime) &&
                            CMTIME_COMPARE_INLINE(rampTime, >, kCMTimeZero)) {
                            rampSeconds = CMTimeGetSeconds(rampTime);
                        }
                        pointInRow.x = rampSeconds * scaledDurationToWidth +
                            kLeftInsetToMatchTimeSlider + kLeftMarginInset - 3.0;
                        // pointInRow.x = [self horizontalPositionForTime:
                        //     CMTimeMakeWithSeconds(timeVolumePoint.x, 1)] - 3.0;
                        pointInRow.y = rampRect.origin.y + rampRect.size.height -
                            (0.9 - 0.8 * timeVolumePoint.y) * rampRect.size.height;
                        pointInRow.x = MAX(pointInRow.x, CGRectGetMinX(rampRect));
                        pointInRow.x = MIN(pointInRow.x, CGRectGetMaxX(rampRect));
                        if (firstPoint) {
                            CGContextMoveToPoint(context, pointInRow.x, pointInRow.y);
                            firstPoint = NO;
                        }
                        else {
                            CGContextAddLineToPoint(context, pointInRow.x, pointInRow.y);
                        }
                    }
                    CGContextStrokePath(context);
                }
                layerRect.origin.y -= layerRect.size.height;
            }
        }
        runningTop -= rowRect.size.height;
        runningTop -= kGapAfterRows;
    }
    if (audioMixTracks) {
        bannerRect.origin.y = runningTop;
        CGContextSetRGBFillColor(context, 0.00, 0.00, 0.00, 1.00); // black
        // [[NSString stringWithFormat:@"AVAudioMix"] drawInRect:bannerRect
        //     withAttributes:[NSDictionary dictionaryWithObjectsAndKeys:
        //         [NSColor whiteColor], NSForegroundColorAttributeName, nil]];
        runningTop -= bannerRect.size.height;
        for (NSArray *mixTrack in audioMixTracks) {
            rowRect.origin.y = runningTop;
            CGRect rampRect = rowRect;
            rampRect.size.width = CMTimeGetSeconds(duration) * scaledDurationToWidth;
            rampRect = CGRectInset(rampRect, 3.0, 3.0);
            CGContextSetRGBFillColor(context, 0.55, 0.02, 0.02, 1.00); // darker red
            CGContextSetRGBStrokeColor(context, 0.87, 0.10, 0.10, 1.00); // brighter red
            CGContextSetLineWidth(context, 2.0);
            CGContextAddRect(context, rampRect);
            CGContextDrawPath(context, kCGPathFillStroke);
            CGContextBeginPath(context);
            CGContextSetRGBStrokeColor(context, 0.95, 0.68, 0.09, 1.00); // yellow
            CGContextSetLineWidth(context, 3.0);
            BOOL firstPoint = YES;
            for (NSValue *pointValue in mixTrack) {
#if TARGET_OS_IPHONE
                CGPoint timeVolumePoint = [pointValue CGPointValue];
#else
                CGPoint timeVolumePoint = [pointValue pointValue];
#endif
                CGPoint pointInRow;
                pointInRow.x = rampRect.origin.x + timeVolumePoint.x * scaledDurationToWidth;
                pointInRow.y = rampRect.origin.y + rampRect.size.height -
                    (0.9 - 0.8 * timeVolumePoint.y) * rampRect.size.height;
                pointInRow.x = MAX(pointInRow.x, CGRectGetMinX(rampRect));
                pointInRow.x = MIN(pointInRow.x, CGRectGetMaxX(rampRect));
                if (firstPoint) {
                    CGContextMoveToPoint(context, pointInRow.x, pointInRow.y);
                    firstPoint = NO;
                }
                else {
                    CGContextAddLineToPoint(context, pointInRow.x, pointInRow.y);
                }
            }
            CGContextStrokePath(context);
            runningTop -= rowRect.size.height;
        }
        runningTop -= kGapAfterRows;
    }
    CGImageRef image = CGBitmapContextCreateImage(context);
    CGContextRelease(context);
    return image;
}