@Starmel
Created April 26, 2019 15:28
import AVFoundation
import CoreGraphics
import Foundation

open class ImageVideoGenerator {

    /// Writes a single still image into an H.264 .mp4 of the requested duration and frame size.
    public static func generateVideo(image: CGImage, duration: TimeInterval, frameSize: CGSize) throws -> URL {
        // Write to a unique temporary file.
        let outputURL = FileManager.default.temporaryDirectory
            .appendingPathComponent(UUID().uuidString)
            .appendingPathExtension("mp4")

        let videoWriter = try AVAssetWriter(outputURL: outputURL, fileType: .mp4)
        let videoSettings: [String: Any] = [AVVideoCodecKey: AVVideoCodecType.h264,
                                            AVVideoWidthKey: frameSize.width,
                                            AVVideoHeightKey: frameSize.height]
        let videoWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings)
        let adaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriterInput,
                                                           sourcePixelBufferAttributes: nil)
        guard videoWriter.canAdd(videoWriterInput) else {
            throw ImageVideoGeneratorError.writerInputNotAdded
        }
        videoWriter.add(videoWriterInput)
        videoWriter.startWriting()

        let timeScale: Int32 = 600 // Timescale commonly recommended for movie files.
        let startFrameTime = CMTime(seconds: 0, preferredTimescale: timeScale)
        let endFrameTime = CMTime(seconds: duration / 2, preferredTimescale: timeScale)
        videoWriter.startSession(atSourceTime: startFrameTime)

        // Append the same pixel buffer at the start and at the midpoint of the clip,
        // then pin the end of the session to the full duration so the track runs
        // for `duration` seconds.
        let buffer = try self.pixelBuffer(fromImage: image, size: frameSize)
        while !adaptor.assetWriterInput.isReadyForMoreMediaData { usleep(10) }
        adaptor.append(buffer, withPresentationTime: startFrameTime)
        while !adaptor.assetWriterInput.isReadyForMoreMediaData { usleep(10) }
        adaptor.append(buffer, withPresentationTime: endFrameTime)
        videoWriter.endSession(atSourceTime: CMTime(seconds: duration, preferredTimescale: timeScale))
        videoWriterInput.markAsFinished()

        // AVAssetWriter finishes asynchronously; block until the file is fully written.
        let semaphore = DispatchSemaphore(value: 0)
        videoWriter.finishWriting { semaphore.signal() }
        semaphore.wait()

        return outputURL
    }
    /// Renders a CGImage into a newly allocated 32-bit ARGB CVPixelBuffer of the given size.
    private static func pixelBuffer(fromImage image: CGImage, size: CGSize) throws -> CVPixelBuffer {
        // Ask for a buffer that Core Graphics can draw into directly.
        let options = [kCVPixelBufferCGImageCompatibilityKey: true,
                       kCVPixelBufferCGBitmapContextCompatibilityKey: true] as CFDictionary

        // Allocate the buffer.
        var pixelBuffer: CVPixelBuffer? = nil
        let status = CVPixelBufferCreate(kCFAllocatorDefault,
                                         Int(size.width),
                                         Int(size.height),
                                         kCVPixelFormatType_32ARGB,
                                         options,
                                         &pixelBuffer)
        guard let buffer = pixelBuffer, status == kCVReturnSuccess else {
            throw ImageVideoGeneratorError.pixelBufferNotCreated(status: status)
        }

        // Lock the buffer while drawing; unlock on every exit path, including throws.
        CVPixelBufferLockBaseAddress(buffer, [])
        defer { CVPixelBufferUnlockBaseAddress(buffer, []) }

        // Get the address to write to.
        guard let pixelData = CVPixelBufferGetBaseAddress(buffer) else {
            throw ImageVideoGeneratorError.pixelPointerNotFound
        }
        let bytesPerRow = CVPixelBufferGetBytesPerRow(buffer)
        let rgbColorSpace = CGColorSpaceCreateDeviceRGB()

        // Create a bitmap context backed by the buffer's memory.
        guard let context = CGContext(data: pixelData,
                                      width: Int(size.width),
                                      height: Int(size.height),
                                      bitsPerComponent: 8,
                                      bytesPerRow: bytesPerRow,
                                      space: rgbColorSpace,
                                      bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue) else {
            throw ImageVideoGeneratorError.contextNotCreated
        }

        // Draw the image to fill the buffer.
        context.draw(image, in: CGRect(x: 0, y: 0, width: size.width, height: size.height))
        return buffer
    }
}
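
The code above throws cases of an ImageVideoGeneratorError type that the gist itself does not define. A minimal sketch that would satisfy those throw sites, with case names inferred from them rather than taken from the original, could be:

// Assumed error type; the cases mirror the throw sites in ImageVideoGenerator.
public enum ImageVideoGeneratorError: Error {
    case writerInputNotAdded
    case pixelBufferNotCreated(status: CVReturn)
    case pixelPointerNotFound
    case contextNotCreated
}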
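
For illustration, a hypothetical call site might look like the following; the stillImage constant and the chosen duration and frame size are assumptions, not part of the gist:

// Illustrative usage; error handling kept minimal.
do {
    let url = try ImageVideoGenerator.generateVideo(image: stillImage,
                                                    duration: 3.0,
                                                    frameSize: CGSize(width: 1280, height: 720))
    print("Video written to \(url.path)")
} catch {
    print("Failed to generate video: \(error)")
}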