@woxtu
Last active January 31, 2020 13:31
Time-lapse -ify in Swift
DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
Version 2, December 2004
Copyright (C) 2004 Sam Hocevar <sam@hocevar.net>
Everyone is permitted to copy and distribute verbatim or modified
copies of this license document, and changing it is allowed as long
as the name is changed.
DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
0. You just DO WHAT THE FUCK YOU WANT TO.
#!/usr/bin/env xcrun swift
import AVFoundation
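// Bounds-checked element access: returns nil instead of trapping on an out-of-range index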
extension Array {
    func nth(_ index: Int) -> Array.Element? {
        return (self.indices ~= index) ? self[index] : nil
    }
}
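// The image's full bounds, used as the destination rect when drawing into a pixel buffer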
extension CGImage {
    var frame: CGRect {
        return CGRect(x: 0, y: 0, width: self.width, height: self.height)
    }
}
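// The natural size of the asset's first video track, or .zero when it has no video track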
extension AVAsset {
    var size: CGSize {
        return self.tracks(withMediaType: AVMediaTypeVideo).nth(0)?.naturalSize ?? CGSize.zero
    }
}
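// Renders a CGImage into a pixel buffer from the adaptor's pool and appends it at the given presentation time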
extension AVAssetWriterInputPixelBufferAdaptor {
    func append(image: CGImage, withPresentationTime presentationTime: CMTime) -> Bool {
        guard let pixelBufferPool = self.pixelBufferPool else {
            fatalError("Failed to allocate the PixelBufferPool")
        }
        var pixelBufferOut: CVPixelBuffer? = nil
        CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferPool, &pixelBufferOut)
        guard let pixelBuffer = pixelBufferOut else {
            fatalError("Failed to create the PixelBuffer")
        }
        CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
        let context = CGContext(
            data: CVPixelBufferGetBaseAddress(pixelBuffer),
            width: image.width,
            height: image.height,
            bitsPerComponent: image.bitsPerComponent,
            bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer), // use the buffer's own row stride, which may include alignment padding
            space: CGColorSpaceCreateDeviceRGB(),
            bitmapInfo: image.bitmapInfo.rawValue)
        context?.draw(image, in: image.frame)
        CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
        return self.append(pixelBuffer, withPresentationTime: presentationTime)
    }
}
// ₍₍ (ง╹◡╹)ว ⁾⁾
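// Parse command-line arguments; only the input path is required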
guard let inputPath = CommandLine.arguments.nth(1) else {
print("USAGE: timelapse <input-path> [output-path] [sampling-interval] [frame-rate]")
exit(0)
}
let outputPath = CommandLine.arguments.nth(2) ?? "output.mp4"
let interval = CommandLine.arguments.nth(3).flatMap { Double($0) } ?? 1
let frameRate = CommandLine.arguments.nth(4).flatMap { Int32($0) } ?? 15
let outputUrl = URL(fileURLWithPath: outputPath)
let semaphore = DispatchSemaphore(value: 0)
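// Remove any file already present at the output path so the writer can create a fresh one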
do {
    if FileManager.default.fileExists(atPath: outputPath) {
        try FileManager.default.removeItem(at: outputUrl)
    }
} catch let error {
    fatalError(error.localizedDescription)
}
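// Open the source video; its first video track's natural size determines the output dimensions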
let asset = AVAsset(url: URL(fileURLWithPath: inputPath))
let writer: AVAssetWriter
do {
    writer = try AVAssetWriter(outputURL: outputUrl, fileType: AVFileTypeMPEG4)
} catch let error {
    fatalError(error.localizedDescription)
}
let input = AVAssetWriterInput(
    mediaType: AVMediaTypeVideo,
    outputSettings: [
        AVVideoCodecKey: AVVideoCodecH264,
        AVVideoWidthKey: asset.size.width,
        AVVideoHeightKey: asset.size.height,
    ])
let adaptor = AVAssetWriterInputPixelBufferAdaptor(
    assetWriterInput: input,
    sourcePixelBufferAttributes: [
        kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32ARGB),
        kCVPixelBufferWidthKey as String: asset.size.width,
        kCVPixelBufferHeightKey as String: asset.size.height,
    ])
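// Collect the times to sample: one every interval seconds, from zero up to the asset's duration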
var times = [kCMTimeZero]
while let current = times.last, current < asset.duration {
    times.append(current + CMTimeMakeWithSeconds(interval, 100))
}
writer.add(input)
writer.startWriting()
writer.startSession(atSourceTime: kCMTimeZero)
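// Extract a still image at each sampled time and append it to the output as one frame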
AVAssetImageGenerator(asset: asset)
    .generateCGImagesAsynchronously(forTimes: times.map { NSValue(time: $0) }) { time, image, _, _, _ in
        if let image = image {
            let _ = adaptor.append(image: image, withPresentationTime: CMTimeMake(Int64(times.index(of: time)!), frameRate))
        }
        if times.last == time {
            input.markAsFinished()
            writer.endSession(atSourceTime: CMTimeMake(Int64(times.count), frameRate))
            writer.finishWriting {
                semaphore.signal()
            }
        }
    }
let _ = semaphore.wait(timeout: DispatchTime.distantFuture)
@MobileAppVault

Hi,

I am not able to use this nice piece of code with the following command.

swift timelapse.swift /images/ output.mp4 1 15

I get an exception: [AVAssetWriterInput initWithMediaType:outputSettings:sourceFormatHint:] 'AVVideoSettings dictionary must specify a positive width'

I see that the video size is derived from the input (let asset = AVAsset(url: URL(fileURLWithPath: inputPath))), but this will not work because my inputs are images instead of a video file. :(

Could you please add some instructions on how to use this code?

Many Thanks!
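
For reference, the width in that exception comes from asset.size: the script reads the natural size of the input's first video track, and the extension above falls back to CGSize.zero when there is no video track (for example, when the path points at a directory of images). A minimal sketch of an early check that makes this failure mode explicit, assuming the same inputPath and asset.size extension as the script (the guard itself is not part of the gist):

let asset = AVAsset(url: URL(fileURLWithPath: inputPath))
guard asset.size != CGSize.zero else {
    // asset.size is .zero when the input has no video track, e.g. a folder of still images
    fatalError("The input must be a video file; a directory of images is not supported")
}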
