@simonkim
Created July 7, 2017 08:43
Swift version of AVCustomEdit sample clone without Custom Compositor
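
/// AVFEditor.swift
/// Clone of Apple's AVCustomEdit sample in Swift, built on stock AVMutableVideoComposition
/// layer instructions (opacity ramps) instead of a custom compositor. Clips are laid out on
/// two alternating composition tracks so adjacent clips can overlap during cross-dissolves.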
import Foundation
import AVFoundation

struct AVFTransition {
    let duration: CMTime = CMTime(seconds: 1.0, preferredTimescale: 1000)

    /// Builds a cross-dissolve instruction starting at `startTime`.
    /// `tracks` must contain exactly 2 elements: the outgoing track fades out while the incoming track fades in.
    func instruction(at startTime: CMTime, tracks: [AVAssetTrack]) -> AVMutableVideoCompositionInstruction {
        let timeRange = CMTimeRangeMake(startTime, self.duration)
        let instTrans = AVMutableVideoCompositionInstruction()
        instTrans.timeRange = timeRange

        let from = AVMutableVideoCompositionLayerInstruction(assetTrack: tracks[0])
        from.setOpacityRamp(fromStartOpacity: 1.0, toEndOpacity: 0.0, timeRange: timeRange)

        let to = AVMutableVideoCompositionLayerInstruction(assetTrack: tracks[1])
        to.setOpacityRamp(fromStartOpacity: 0.0, toEndOpacity: 1.0, timeRange: timeRange)

        instTrans.layerInstructions = [from, to]
        return instTrans
    }
}

struct AVFClip {
    let asset: AVAsset
    private let _timeRange: CMTimeRange
    var transition: AVFTransition?
    var startTime: CMTime = kCMTimeInvalid

    var timeRange: CMTimeRange {
        let duration = CMTIME_IS_INDEFINITE(_timeRange.duration) ? asset.duration : _timeRange.duration
        return CMTimeRange(start: _timeRange.start, duration: duration)
    }

    var videoTrack: AVAssetTrack? {
        let tracks = asset.tracks(withMediaType: AVMediaTypeVideo)
        return tracks.count > 0 ? tracks[0] : nil
    }

    var audioTrack: AVAssetTrack? {
        let tracks = asset.tracks(withMediaType: AVMediaTypeAudio)
        return tracks.count > 0 ? tracks[0] : nil
    }

    var transitionDuration: CMTime {
        return transition?.duration ?? kCMTimeZero
    }

    var hasTransition: Bool {
        return transition != nil
    }

    init(_ asset: AVAsset, timeRange: CMTimeRange = CMTimeRange(start: kCMTimeZero, duration: kCMTimeIndefinite)) {
        self.asset = asset
        _timeRange = timeRange
    }
}
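
/// Assembles `clips` into an AVMutableComposition plus a matching AVMutableVideoComposition.
/// Clips alternate between two video/audio tracks (A/B roll) so each transition can overlap
/// the tail of one clip with the head of the next.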
class AVFEditor {
    let clips: [AVFClip]
    var composition: AVMutableComposition?
    var videoComposition: AVMutableVideoComposition?
    var overlayClips: [AVFClip] = []

    enum EditorError: Error {
        case failedToLoadKey(String)
        case assetNotComposible
        case videoTrackNotFound
        case noClipsProvided
    }

    var playerItem: AVPlayerItem? {
        guard let composition = composition else {
            return nil
        }
        guard let videoComposition = videoComposition else {
            return nil
        }
        let result = AVPlayerItem(asset: composition)
        result.videoComposition = videoComposition
        return result
    }

    init(clips: [AVFClip]) {
        self.clips = clips
    }

    /// Synchronously loads the given keys for every clip's asset.
    /// Returns the clips that failed to load (empty on success) and the first error encountered, if any.
    private func loadAssets(clips: [AVFClip]) -> ([AVFClip], Error?) {
        let keys = ["tracks", "duration", "composable"]
        var unloaded: [AVFClip] = []
        var error: EditorError? = nil
        let dg = DispatchGroup()
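        // Use a DispatchGroup so we can block below (dg.wait()) until every asset
        // finishes loading its keys asynchronously.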
        for clip in clips {
            let asset = clip.asset
            dg.enter()
            asset.loadValuesAsynchronously(forKeys: keys, completionHandler: {
                var nserror: NSError? = nil
                var failed = false
                let failedKeys = keys.filter({ asset.statusOfValue(forKey: $0, error: &nserror) == AVKeyValueStatus.failed })
                if failedKeys.count != 0 {
                    failed = true
                    print("Failed loading values for keys \(failedKeys) from asset: \(asset)")
                    error = .failedToLoadKey(failedKeys[0])
                } else if !asset.isComposable {
                    failed = true
                    print("Not composable asset: \(asset)")
                    error = .assetNotComposible
                } else if clip.videoTrack == nil {
                    failed = true
                    error = .videoTrackNotFound
                    print("Video track not found asset: \(asset)")
                }
                if failed {
                    unloaded.append(clip)
                }
                dg.leave()
            })
        }
        dg.wait()
        return (unloaded, error)
    }
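
    /// Pass-through instruction: displays `assetTrack` as-is over `timeRange`, with no transition.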
    func instrBypass(_ timeRange: CMTimeRange, assetTrack: AVAssetTrack) -> AVMutableVideoCompositionInstruction {
        let result = AVMutableVideoCompositionInstruction()
        result.timeRange = timeRange
        let layerInst = AVMutableVideoCompositionLayerInstruction(assetTrack: assetTrack)
        result.layerInstructions = [layerInst]
        return result
    }

    /// Load assets and build composition
    func buildComposition(_ complete: (_ editor: AVFEditor, _ error: Error?) -> Void) {
        if self.clips.count == 0 {
            complete(self, EditorError.noClipsProvided)
            return
        }
        let (failedAssets, assetError) = loadAssets(clips: self.clips + overlayClips)
        if failedAssets.count > 0 {
            complete(self, assetError)
            return
        }

        var error: Error? = nil
        let composition = AVMutableComposition()
        let vtracks = [
            composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid),
            composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid),
        ]
        let atracks = [
            composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid),
            composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid),
        ]

        var nextTrackTime = kCMTimeZero
        var durLastTransition: CMTime = kCMTimeZero
        var instructions: [AVMutableVideoCompositionInstruction] = []
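
        // Walk the clips, alternating between the two composition tracks.
        // `nextTrackTime` is where the next clip starts in composition time;
        // `durLastTransition` is how far the previous clip overlaps into the current one.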
        for (index, clip) in clips.enumerated() {
            let trackIdx = index % 2
            let timeRange = clip.timeRange
            do {
                try vtracks[trackIdx].insertTimeRange(timeRange, of: clip.videoTrack!, at: nextTrackTime)
                if let audioTrack = clip.audioTrack {
                    try atracks[trackIdx].insertTimeRange(timeRange, of: audioTrack, at: nextTrackTime)
                }

                // passthrough
                let durTransition = clip.transitionDuration
                let rangeBypass = CMTimeRange(start: nextTrackTime + durLastTransition,
                                              duration: timeRange.duration - durTransition - durLastTransition)
                instructions.append(instrBypass(rangeBypass, assetTrack: vtracks[trackIdx]))

                // transition
                if let transition = clip.transition {
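                    // The dissolve starts at rangeBypass.end, where the next clip's head
                    // is inserted on the other track, overlapping this clip's tail.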
                    let instrTransition = transition.instruction(at: rangeBypass.end, tracks: [vtracks[trackIdx], vtracks[1 - trackIdx]])
                    instructions.append(instrTransition)
                }
                nextTrackTime = rangeBypass.end
                durLastTransition = durTransition
            } catch let e {
                error = e
                break
            }
        }

        if overlayClips.count > 0 {
            // Only one overlay clip supported for now
            let overlayClip = overlayClips[0]
            let overlayTrack = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
            var timeRange = overlayClip.timeRange
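            // If the overlay would run past the end of the last video instruction,
            // trim its duration by the overshoot (durOver).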
            let durOver = (overlayClip.startTime + timeRange.duration) - instructions.last!.timeRange.end
            if durOver > kCMTimeZero {
                timeRange.duration = timeRange.duration - durOver
            }
            do {
                try overlayTrack.insertTimeRange(timeRange, of: overlayClip.videoTrack!, at: overlayClip.startTime)
                let instrOverlayLayer = overlayClip.instrOverlayLayer(track: overlayTrack)
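                // Layer instructions are composited in array order with index 0 on top,
                // so prepending keeps the overlay above every segment.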
                for instruction in instructions {
                    instruction.layerInstructions = [instrOverlayLayer] + instruction.layerInstructions
                }
            } catch let e {
                print(e)
            }
        }

        if error == nil {
            let videoComposition = AVMutableVideoComposition()
            videoComposition.instructions = instructions
            videoComposition.renderSize = clips[0].videoTrack!.naturalSize
            videoComposition.frameDuration = CMTime(value: 1, timescale: 30)
            self.composition = composition
            self.videoComposition = videoComposition
        }
        complete(self, error)
    }
}

/// Overlay Layer Instruction
extension AVFClip {
    func instrOverlayLayer(track: AVAssetTrack) -> AVMutableVideoCompositionLayerInstruction {
        let instr = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
        let transform = CGAffineTransform(scaleX: 0.5, y: 0.5)
        instr.setTransform(transform, at: kCMTimeZero)
        return instr
    }
}

/// AVFVideoCompositionSamples.swift
/// Usage sample of AVFEditor and Tests
import XCTest
import AVFoundation
import UIKit

class AVFVideoCompositionSamples: XCTestCase {
    var player: AVPlayer!
    var playerLayer: AVPlayerLayer!
    // Assumed to be connected to a UIView of the Host App before the test runs
    var preview: UIView!

    override func setUp() {
        super.setUp()
        player = AVPlayer()
        playerLayer = AVPlayerLayer(player: player)
        playerLayer.frame = self.preview.layer.bounds
        self.preview.layer.addSublayer(playerLayer)
    }

    override func tearDown() {
        playerLayer.removeFromSuperlayer()
        super.tearDown()
    }

    func testEditorTwoClipTrimTransition() {
        let documentsUrl = ...  // URL of the directory containing the test clips
        let clipNames = ["video1.mp4", "video2.mp4"]
        var clips: [AVFClip] = []

        // Clip 1: 5 s starting at 3 s into video1.mp4, with a 1 s cross-dissolve into the next clip
        var clip1 = AVFClip(AVURLAsset(url: documentsUrl.appendingPathComponent(clipNames[0])),
                            timeRange: CMTimeRange(start: CMTime(seconds: 3.0, preferredTimescale: 1000),
                                                   duration: CMTime(seconds: 5.0, preferredTimescale: 1000)))
        clip1.transition = AVFTransition()
        clips.append(clip1)

        // Clip 2: 5 s starting at 7 s into video2.mp4
        let clip2 = AVFClip(AVURLAsset(url: documentsUrl.appendingPathComponent(clipNames[1])),
                            timeRange: CMTimeRange(start: CMTime(seconds: 7.0, preferredTimescale: 1000),
                                                   duration: CMTime(seconds: 5.0, preferredTimescale: 1000)))
        clips.append(clip2)

        let editor = AVFEditor(clips: clips)
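
        // Expected timeline: clip1 (5 s) and clip2 (5 s) overlap by the 1 s cross-dissolve,
        // so the composed duration should come out to 9 s.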
        editor.buildComposition { (editor, error) in
            XCTAssert(error == nil, "Failed building composition")
            if error == nil {
                XCTAssert(editor.composition!.tracks[0].segments[0].timeMapping.target.start.seconds == kCMTimeZero.seconds)
                XCTAssert(editor.composition!.tracks[0].segments[0].timeMapping.target.duration.seconds == clip1.timeRange.duration.seconds)
                XCTAssert(editor.composition!.tracks[1].segments[1].timeMapping.target.start.seconds == clip1.timeRange.duration.seconds - clip1.transitionDuration.seconds)
                XCTAssert(editor.composition!.tracks[1].segments[1].timeMapping.target.duration.seconds == clip2.timeRange.duration.seconds)

                XCTAssert(editor.videoComposition!.instructions[0].timeRange.start.seconds == kCMTimeZero.seconds)
                XCTAssert(editor.videoComposition!.instructions[0].timeRange.duration.seconds == clip1.timeRange.duration.seconds - clip1.transitionDuration.seconds)
                XCTAssert(editor.videoComposition!.instructions[1].timeRange.start.seconds == editor.videoComposition!.instructions[0].timeRange.end.seconds)
                XCTAssert(editor.videoComposition!.instructions[1].timeRange.duration.seconds == clip1.transitionDuration.seconds)
                XCTAssert(editor.videoComposition!.instructions[2].timeRange.start.seconds == clip1.timeRange.duration.seconds, "\(editor.videoComposition!.instructions[2].timeRange.start.seconds) != expected \(clip1.timeRange.duration.seconds)")
                XCTAssert(editor.videoComposition!.instructions[2].timeRange.duration.seconds == clip2.timeRange.duration.seconds - clip1.transitionDuration.seconds)

                let playerItem = editor.playerItem
                XCTAssert(playerItem!.duration.seconds == clip1.timeRange.duration.seconds + clip2.timeRange.duration.seconds - clip1.transitionDuration.seconds, "Duration \(playerItem!.duration.seconds)")
                player.replaceCurrentItem(with: playerItem)
                player.play()
            }
        }
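
        // Spin the run loop until playback finishes so the result can be observed
        // in the host app's preview view.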
        repeat {
            RunLoop.current.run(until: Date(timeIntervalSinceNow: 0.05))
        } while player.rate > 0.0
    }
}