@Willib
Last active May 28, 2023 13:24
Export images (or a single image) to a video in Swift 3
//
//  CXEImageToVideo.swift
//  VideoAPPTest
//
//  Created by Wulei on 16/12/14.
//  Copyright © 2016 wulei. All rights reserved.
//

import Foundation
import AVFoundation
import UIKit

typealias CXEMovieMakerCompletion = (URL) -> Void
typealias CXEMovieMakerUIImageExtractor = (AnyObject) -> UIImage?

class CXEImageToVideo: NSObject {

    // MARK: Private Properties

    private var assetWriter: AVAssetWriter!
    private var writeInput: AVAssetWriterInput!
    private var bufferAdapter: AVAssetWriterInputPixelBufferAdaptor!
    private var videoSettings: [String: Any]!
    private var frameTime: CMTime!
    private var fileURL: URL!
    private var completionBlock: CXEMovieMakerCompletion?

    // MARK: Class Method

    class func videoSettings(codec: String, width: Int, height: Int) -> [String: Any] {
        if width % 16 != 0 {
            print("warning: video settings width must be divisible by 16")
        }
        let videoSettings: [String: Any] = [AVVideoCodecKey: AVVideoCodecH264,
                                            AVVideoWidthKey: width,
                                            AVVideoHeightKey: height]
        return videoSettings
    }

    // MARK: Public methods

    init(videoSettings: [String: Any]) {
        super.init()

        // Write to a unique random file name under Documents.
        let paths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
        var tempPath: String
        repeat {
            let random = arc4random()
            tempPath = paths[0] + "/\(random).mp4"
        } while FileManager.default.fileExists(atPath: tempPath)
        // let tempPath = paths[0] + "/exprotvideo.mp4"
        // if FileManager.default.fileExists(atPath: tempPath) {
        //     guard (try? FileManager.default.removeItem(atPath: tempPath)) != nil else {
        //         print("remove path failed")
        //         return
        //     }
        // }

        self.fileURL = URL(fileURLWithPath: tempPath)
        self.assetWriter = try! AVAssetWriter(url: self.fileURL, fileType: AVFileTypeQuickTimeMovie)

        self.videoSettings = videoSettings
        self.writeInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
        assert(self.assetWriter.canAdd(self.writeInput), "add failed")
        self.assetWriter.add(self.writeInput)

        let bufferAttributes: [String: Any] = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32ARGB)]
        self.bufferAdapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: self.writeInput, sourcePixelBufferAttributes: bufferAttributes)
        self.frameTime = CMTimeMake(1, 1)
    }

    func createMovieFrom(url: URL, duration: Int, withCompletion: @escaping CXEMovieMakerCompletion) {
        var urls = [URL]()
        var index = duration
        while index > 0 {
            urls.append(url)
            index -= 1
        }
        self.createMovieFromSource(images: urls as [AnyObject], extractor: { (inputObject: AnyObject) -> UIImage? in
            return UIImage(data: try! Data(contentsOf: inputObject as! URL)) }, withCompletion: withCompletion)
    }

    func createMovieFrom(image: UIImage, duration: Int, withCompletion: @escaping CXEMovieMakerCompletion) {
        var images = [UIImage]()
        var index = duration
        while index > 0 {
            images.append(image)
            index -= 1
        }
        self.createMovieFromSource(images: images, extractor: { (inputObject: AnyObject) -> UIImage? in
            return inputObject as? UIImage }, withCompletion: withCompletion)
    }

    // MARK: Private methods

    private func createMovieFromSource(images: [AnyObject], extractor: @escaping CXEMovieMakerUIImageExtractor, withCompletion: @escaping CXEMovieMakerCompletion) {
        self.completionBlock = withCompletion

        self.assetWriter.startWriting()
        self.assetWriter.startSession(atSourceTime: kCMTimeZero)

        let mediaInputQueue = DispatchQueue(label: "mediaInputQueue")
        var i = 0
        let frameNumber = images.count

        // Append frames on a background queue; presentation times are spaced by frameTime (1 s).
        self.writeInput.requestMediaDataWhenReady(on: mediaInputQueue) {
            while true {
                if i >= frameNumber {
                    break
                }
                if self.writeInput.isReadyForMoreMediaData {
                    var sampleBuffer: CVPixelBuffer?
                    autoreleasepool {
                        if let img = extractor(images[i]) {
                            sampleBuffer = self.newPixelBufferFrom(cgImage: img.cgImage!)
                        } else {
                            i += 1
                            print("Warning: could not extract one of the frames")
                        }
                    }
                    if sampleBuffer != nil {
                        if i == 0 {
                            self.bufferAdapter.append(sampleBuffer!, withPresentationTime: kCMTimeZero)
                        } else {
                            let value = i - 1
                            let lastTime = CMTimeMake(Int64(value), self.frameTime.timescale)
                            let presentTime = CMTimeAdd(lastTime, self.frameTime)
                            self.bufferAdapter.append(sampleBuffer!, withPresentationTime: presentTime)
                        }
                        i = i + 1
                    }
                }
            }
            self.writeInput.markAsFinished()
            self.assetWriter.finishWriting {
                DispatchQueue.main.sync {
                    self.completionBlock!(self.fileURL)
                }
            }
        }
    }

    private func newPixelBufferFrom(cgImage: CGImage) -> CVPixelBuffer? {
        let options: [String: Any] = [kCVPixelBufferCGImageCompatibilityKey as String: true,
                                      kCVPixelBufferCGBitmapContextCompatibilityKey as String: true]
        var pxbuffer: CVPixelBuffer?
        let frameWidth = self.videoSettings[AVVideoWidthKey] as! Int
        let frameHeight = self.videoSettings[AVVideoHeightKey] as! Int

        let status = CVPixelBufferCreate(kCFAllocatorDefault, frameWidth, frameHeight, kCVPixelFormatType_32ARGB, options as CFDictionary?, &pxbuffer)
        assert(status == kCVReturnSuccess && pxbuffer != nil, "newPixelBuffer failed")

        CVPixelBufferLockBaseAddress(pxbuffer!, CVPixelBufferLockFlags(rawValue: 0))
        let pxdata = CVPixelBufferGetBaseAddress(pxbuffer!)
        let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
        let context = CGContext(data: pxdata, width: frameWidth, height: frameHeight, bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(pxbuffer!), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue)
        assert(context != nil, "context is nil")

        context!.concatenate(CGAffineTransform.identity)
        context!.draw(cgImage, in: CGRect(x: 0, y: 0, width: cgImage.width, height: cgImage.height))
        CVPixelBufferUnlockBaseAddress(pxbuffer!, CVPixelBufferLockFlags(rawValue: 0))
        return pxbuffer
    }
}
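For reference, calling this callback-based writer might look like the sketch below. The 320x480 output size and the someImage placeholder are assumptions, not part of the gist, and the snippet assumes the same imports as the file above.

// Hypothetical usage sketch for the callback-based writer above.
// `someImage` is a placeholder UIImage; 320x480 is an arbitrary size divisible by 16.
let settings = CXEImageToVideo.videoSettings(codec: AVVideoCodecH264, width: 320, height: 480)
let maker = CXEImageToVideo(videoSettings: settings)
maker.createMovieFrom(image: someImage, duration: 5) { fileURL in
    // Called on the main queue once writing finishes; fileURL points at the generated file in Documents.
    print("movie written to \(fileURL)")
}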
//
//  CXEImageToAssetURL.swift
//  CXEngine
//
//  Created by wulei on 16/12/14.
//  Copyright © 2016 wulei. All rights reserved.
//

import Foundation
import AVFoundation
import UIKit

typealias CXEMovieMakerUIImageExtractor = (AnyObject) -> UIImage?

class CXEImageToVideo: NSObject {

    // MARK: Private Properties

    private var assetWriter: AVAssetWriter!
    private var writeInput: AVAssetWriterInput!
    private var bufferAdapter: AVAssetWriterInputPixelBufferAdaptor!
    private var videoSettings: [String: Any]!
    private var frameTime: CMTime!
    private var fileURL: URL!

    // MARK: Class Method

    class func videoSettings(codec: String, width: Int, height: Int) -> [String: Any] {
        if width % 16 != 0 {
            print("warning: video settings width must be divisible by 16")
        }
        let videoSettings: [String: Any] = [AVVideoCodecKey: AVVideoCodecH264,
                                            AVVideoWidthKey: width,
                                            AVVideoHeightKey: height]
        return videoSettings
    }

    // MARK: Public methods

    init(videoSettings: [String: Any]) {
        super.init()

        // Write to a unique random file name under Documents.
        let paths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
        var tempPath: String
        repeat {
            let random = arc4random()
            tempPath = paths[0] + "/\(random).mp4"
        } while FileManager.default.fileExists(atPath: tempPath)

        self.fileURL = URL(fileURLWithPath: tempPath)
        self.assetWriter = try! AVAssetWriter(url: self.fileURL, fileType: AVFileTypeQuickTimeMovie)

        self.videoSettings = videoSettings
        self.writeInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
        assert(self.assetWriter.canAdd(self.writeInput), "add failed")
        self.assetWriter.add(self.writeInput)

        let bufferAttributes: [String: Any] = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32ARGB)]
        self.bufferAdapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: self.writeInput, sourcePixelBufferAttributes: bufferAttributes)
        self.frameTime = CMTimeMake(600, 600)
    }

    func createMovieFrom(url: URL, duration: Int) -> URL {
        var urls = [URL]()
        var index = duration
        while index > 0 {
            urls.append(url)
            index -= 1
        }
        return self.createMovieFromSource(images: urls as [AnyObject], extractor: { (inputObject: AnyObject) -> UIImage? in
            return UIImage(data: try! Data(contentsOf: inputObject as! URL)) })
    }

    func createMovieFrom(image: UIImage, duration: Int) -> URL {
        var images = [UIImage]()
        var index = duration
        while index > 0 {
            images.append(image)
            index -= 1
        }
        return self.createMovieFromSource(images: images, extractor: { (inputObject: AnyObject) -> UIImage? in
            return inputObject as? UIImage })
    }

    // MARK: Private methods

    private func createMovieFromSource(images: [AnyObject], extractor: @escaping CXEMovieMakerUIImageExtractor) -> URL {
        self.assetWriter.startWriting()
        // self.assetWriter.startSession(atSourceTime: kCMTimeZero)
        let zeroTime = CMTimeMake(Int64(0), self.frameTime.timescale)
        self.assetWriter.startSession(atSourceTime: zeroTime)

        var i = 0
        let frameNumber = images.count

        while !self.writeInput.isReadyForMoreMediaData {}

        while true {
            if i >= frameNumber {
                break
            }
            if self.writeInput.isReadyForMoreMediaData {
                var sampleBuffer: CVPixelBuffer?
                autoreleasepool {
                    if let img = extractor(images[i]) {
                        sampleBuffer = self.newPixelBufferFrom(cgImage: img.cgImage!)
                    } else {
                        i += 1
                        print("Warning: could not extract one of the frames")
                    }
                }
                if sampleBuffer != nil {
                    if i == 0 {
                        self.bufferAdapter.append(sampleBuffer!, withPresentationTime: kCMTimeZero)
                    } else {
                        let value = i - 1
                        let lastTime = CMTimeMake(Int64(value), self.frameTime.timescale)
                        let presentTime = CMTimeAdd(lastTime, self.frameTime)
                        self.bufferAdapter.append(sampleBuffer!, withPresentationTime: presentTime)
                    }
                    i = i + 1
                }
            }
        }

        self.writeInput.markAsFinished()
        self.assetWriter.finishWriting {}

        // Poll until the writer reaches a terminal state, then hand back the file URL.
        var isSuccess: Bool = false
        while !isSuccess {
            switch self.assetWriter.status {
            case .completed:
                isSuccess = true
                print("completed")
            case .writing:
                sleep(1)
                print("writing")
            case .failed:
                isSuccess = true
                print("failed")
            case .cancelled:
                isSuccess = true
                print("cancelled")
            default:
                isSuccess = true
                print("unknown")
            }
        }
        return self.fileURL
    }

    private func newPixelBufferFrom(cgImage: CGImage) -> CVPixelBuffer? {
        let options: [String: Any] = [kCVPixelBufferCGImageCompatibilityKey as String: true,
                                      kCVPixelBufferCGBitmapContextCompatibilityKey as String: true]
        var pxbuffer: CVPixelBuffer?
        let frameWidth = self.videoSettings[AVVideoWidthKey] as! Int
        let frameHeight = self.videoSettings[AVVideoHeightKey] as! Int

        let status = CVPixelBufferCreate(kCFAllocatorDefault, frameWidth, frameHeight, kCVPixelFormatType_32ARGB, options as CFDictionary?, &pxbuffer)
        assert(status == kCVReturnSuccess && pxbuffer != nil, "newPixelBuffer failed")

        CVPixelBufferLockBaseAddress(pxbuffer!, CVPixelBufferLockFlags(rawValue: 0))
        let pxdata = CVPixelBufferGetBaseAddress(pxbuffer!)
        let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
        let context = CGContext(data: pxdata, width: frameWidth, height: frameHeight, bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(pxbuffer!), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue)
        assert(context != nil, "context is nil")

        context!.concatenate(CGAffineTransform.identity)
        context!.draw(cgImage, in: CGRect(x: 0, y: 0, width: cgImage.width, height: cgImage.height))
        CVPixelBufferUnlockBaseAddress(pxbuffer!, CVPixelBufferLockFlags(rawValue: 0))
        return pxbuffer
    }
}
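This second variant returns the URL synchronously and polls the writer's status in a loop, so a caller would presumably invoke it off the main thread. A minimal sketch, with someImage and the 640x480 size as assumptions:

// Hypothetical usage sketch for the synchronous variant above.
// It blocks until the writer reports a terminal status, so run it off the main thread.
DispatchQueue.global().async {
    let settings = CXEImageToVideo.videoSettings(codec: AVVideoCodecH264, width: 640, height: 480)
    let maker = CXEImageToVideo(videoSettings: settings)
    // frameTime is 600/600 = 1 s, so 10 repeated frames give roughly a 10-second clip.
    let movieURL = maker.createMovieFrom(image: someImage, duration: 10)
    print("movie written to \(movieURL)")
}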
//
//  CXEImageToAssetURL.swift
//  CXEngine
//
//  Created by wulei on 16/12/14.
//  Copyright © 2016 wulei. All rights reserved.
//

import Foundation
import AVFoundation
import UIKit
import Photos

fileprivate extension UIImage {

    func normalizedImage() -> UIImage? {
        if self.imageOrientation == .up {
            return self
        }
        UIGraphicsBeginImageContextWithOptions(self.size, false, self.scale)
        self.draw(in: CGRect(x: 0, y: 0, width: self.size.width, height: self.size.height))
        let normalImage = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
        return normalImage
    }

    // func clipImage() -> UIImage {
    //     var x = CGFloat(0)
    //     var y = CGFloat(0)
    //     let imageHeight = (self.size.width * 9) / 16
    //     y = (self.size.height - imageHeight) / 2
    //     var rcTmp = CGRect(origin: CGPoint(x: x, y: y), size: self.size)
    //     if self.scale > 1.0 {
    //         rcTmp = CGRect(x: rcTmp.origin.x * self.scale, y: rcTmp.origin.y * self.scale, width: rcTmp.size.width * self.scale, height: rcTmp.size.height * self.scale)
    //     }
    //     rcTmp.size.height = imageHeight
    //     let imageRef = self.cgImage!.cropping(to: rcTmp)
    //     let result = UIImage(cgImage: imageRef!, scale: self.scale, orientation: self.imageOrientation)
    //     return result
    // }
}

public typealias CXEImageToVideoProgress = (Float) -> Void
typealias CXEMovieMakerUIImageExtractor = (AnyObject) -> UIImage?

public class CXEImageToVideo: NSObject {

    // MARK: Private Properties

    private var assetWriter: AVAssetWriter!
    private var writeInput: AVAssetWriterInput!
    private var bufferAdapter: AVAssetWriterInputPixelBufferAdaptor!
    private var videoSettings: [String: Any]!
    private var frameTime: CMTime!
    private var fileURL: URL!
    private var duration: Int = 0

    // MARK: Class Method

    private func videoSettingsFunc(width: Int, height: Int) -> [String: Any] {
        if width % 16 != 0 {
            print("warning: video settings width must be divisible by 16")
        }
        let videoSettings: [String: Any] = [AVVideoCodecKey: AVVideoCodecH264,
                                            AVVideoWidthKey: width,
                                            AVVideoHeightKey: height]
        return videoSettings
    }

    // MARK: Public methods

    public init(fileURL: URL, videoWidth: Int, videoHeight: Int) {
        super.init()

        self.videoSettings = videoSettingsFunc(width: videoWidth, height: videoHeight)
        self.fileURL = fileURL
        self.assetWriter = try! AVAssetWriter(url: self.fileURL, fileType: AVFileTypeQuickTimeMovie)

        self.writeInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
        assert(self.assetWriter.canAdd(self.writeInput), "add failed")
        self.assetWriter.add(self.writeInput)

        let bufferAttributes: [String: Any] = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32ARGB)]
        self.bufferAdapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: self.writeInput, sourcePixelBufferAttributes: bufferAttributes)
        self.frameTime = CMTimeMake(1, 25)
    }

    // public func createMovieFrom(url: URL, duration: Int, progressExtractor: CXEImageToVideoProgress) {
    //     self.duration = duration
    //     self.createMovieFromSource(image: url as AnyObject, extractor: { (inputObject: AnyObject) -> UIImage? in
    //         return UIImage(data: try! Data(contentsOf: inputObject as! URL)) }, progressExtractor: progressExtractor)
    // }

    public func createMovieFrom(imageData: Data, duration: Int, progressExtractor: CXEImageToVideoProgress) {
        var image = UIImage(data: imageData)
        image = image?.normalizedImage()
        assert(image != nil)
        self.duration = duration
        self.createMovieFromSource(image: image!, extractor: { (inputObject: AnyObject) -> UIImage? in
            return inputObject as? UIImage }, progressExtractor: progressExtractor)
    }

    // MARK: Private methods

    private func createMovieFromSource(image: AnyObject, extractor: @escaping CXEMovieMakerUIImageExtractor, progressExtractor: CXEImageToVideoProgress) {
        self.assetWriter.startWriting()
        let zeroTime = CMTimeMake(Int64(0), self.frameTime.timescale)
        self.assetWriter.startSession(atSourceTime: zeroTime)

        while !self.writeInput.isReadyForMoreMediaData {
            usleep(100)
        }

        var sampleBuffer: CVPixelBuffer?
        var pxDataBuffer: CVPixelBuffer?

        let img = extractor(image)
        assert(img != nil)

        let options: [String: Any] = [kCVPixelBufferCGImageCompatibilityKey as String: true,
                                      kCVPixelBufferCGBitmapContextCompatibilityKey as String: true]
        let frameHeight = self.videoSettings[AVVideoHeightKey] as! Int
        let frameWidth = self.videoSettings[AVVideoWidthKey] as! Int
        let originHeight = frameWidth * img!.cgImage!.height / img!.cgImage!.width
        let heightDifference = originHeight - frameHeight

        let frameCounts = self.duration * Int(self.frameTime.timescale)
        let spacingOfHeight = heightDifference / frameCounts

        // Render the whole image once into a tall buffer; each frame is a window into it, shifted by stepRows rows.
        sampleBuffer = self.newPixelBufferFrom(cgImage: img!.cgImage!)
        assert(sampleBuffer != nil)

        var presentTime = CMTimeMake(1, self.frameTime.timescale)
        var stepRows = 0

        for i in 0..<frameCounts {
            progressExtractor(Float(i) / Float(frameCounts))

            CVPixelBufferLockBaseAddress(sampleBuffer!, CVPixelBufferLockFlags(rawValue: 0))
            let pointer = CVPixelBufferGetBaseAddress(sampleBuffer!)
            var pxData = pointer?.assumingMemoryBound(to: UInt8.self)
            let bytes = CVPixelBufferGetBytesPerRow(sampleBuffer!) * stepRows
            pxData = pxData?.advanced(by: bytes)

            // Note: CVPixelBufferCreateWithBytes wraps the bytes of sampleBuffer without copying them.
            let status = CVPixelBufferCreateWithBytes(kCFAllocatorDefault, frameWidth, frameHeight, kCVPixelFormatType_32ARGB, pxData!, CVPixelBufferGetBytesPerRow(sampleBuffer!), nil, nil, options as CFDictionary?, &pxDataBuffer)
            assert(status == kCVReturnSuccess && pxDataBuffer != nil, "newPixelBuffer failed")

            CVPixelBufferUnlockBaseAddress(sampleBuffer!, CVPixelBufferLockFlags(rawValue: 0))

            while !self.writeInput.isReadyForMoreMediaData {
                usleep(100)
            }
            if self.writeInput.isReadyForMoreMediaData {
                if i == 0 {
                    self.bufferAdapter.append(pxDataBuffer!, withPresentationTime: zeroTime)
                } else {
                    self.bufferAdapter.append(pxDataBuffer!, withPresentationTime: presentTime)
                }
                presentTime = CMTimeAdd(presentTime, self.frameTime)
            }

            stepRows += spacingOfHeight
        }

        self.writeInput.markAsFinished()
        self.assetWriter.finishWriting {}

        // Poll until the writer reaches a terminal state.
        var isSuccess: Bool = false
        while !isSuccess {
            switch self.assetWriter.status {
            case .completed:
                isSuccess = true
                print("completed")
            case .writing:
                usleep(100)
                print("writing")
            case .failed:
                isSuccess = true
                print("failed")
            case .cancelled:
                isSuccess = true
                print("cancelled")
            default:
                isSuccess = true
                print("unknown")
            }
        }
    }

    private func newPixelBufferFrom(cgImage: CGImage) -> CVPixelBuffer? {
        let options: [String: Any] = [kCVPixelBufferCGImageCompatibilityKey as String: true,
                                      kCVPixelBufferCGBitmapContextCompatibilityKey as String: true]
        var pxbuffer: CVPixelBuffer?
        let frameWidth = self.videoSettings[AVVideoWidthKey] as! Int
        let frameHeight = self.videoSettings[AVVideoHeightKey] as! Int
        // The buffer is tall enough to hold the whole image scaled to the output width.
        let originHeight = frameWidth * cgImage.height / cgImage.width

        let status = CVPixelBufferCreate(kCFAllocatorDefault, frameWidth, originHeight, kCVPixelFormatType_32ARGB, options as CFDictionary?, &pxbuffer)
        assert(status == kCVReturnSuccess && pxbuffer != nil, "newPixelBuffer failed")

        CVPixelBufferLockBaseAddress(pxbuffer!, CVPixelBufferLockFlags(rawValue: 0))
        let pxdata = CVPixelBufferGetBaseAddress(pxbuffer!)
        let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
        let context = CGContext(data: pxdata, width: frameWidth, height: originHeight, bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(pxbuffer!), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue)
        assert(context != nil, "context is nil")

        context!.concatenate(CGAffineTransform.identity)
        context!.draw(cgImage, in: CGRect(x: 0, y: 0, width: frameWidth, height: originHeight))
        CVPixelBufferUnlockBaseAddress(pxbuffer!, CVPixelBufferLockFlags(rawValue: 0))
        return pxbuffer
    }
}
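The public variant writes a vertical pan over a single image at 25 frames per second. A minimal sketch of driving it is below; the output URL, the 640x480 size, and someImage are assumptions, and the image is assumed to be tall enough that, scaled to the output width, it is at least as tall as the output frame (otherwise spacingOfHeight goes negative).

// Hypothetical usage sketch for the public variant above.
// The call blocks until writing finishes, so it is dispatched to a background queue here.
let outputURL = URL(fileURLWithPath: NSTemporaryDirectory() + "pan.mov")
let maker = CXEImageToVideo(fileURL: outputURL, videoWidth: 640, videoHeight: 480)
DispatchQueue.global().async {
    let data = UIImagePNGRepresentation(someImage)!   // someImage is a placeholder UIImage
    maker.createMovieFrom(imageData: data, duration: 6) { progress in
        print("progress: \(progress)")   // fraction of frames written so far
    }
}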
@jatinRB commented Dec 22, 2020

I tried code like yours, but a memory issue occurs while creating the video. Is there any possible solution for releasing the buffers?
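One possible direction, sketched here as an assumption rather than a confirmed fix: pull each frame's pixel buffer from the adaptor's pixelBufferPool (available after startWriting/startSession) and keep the per-frame work inside autoreleasepool, so Core Video can recycle buffers instead of allocating a fresh one per frame.

// Sketch only: reuse pooled buffers rather than calling CVPixelBufferCreate for every frame.
// `presentTime` and the drawing step are placeholders for the gist's existing per-frame logic.
var pooledBuffer: CVPixelBuffer?
if let pool = bufferAdapter.pixelBufferPool {
    autoreleasepool {
        if CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pool, &pooledBuffer) == kCVReturnSuccess,
            let buffer = pooledBuffer {
            // ... draw the current frame into `buffer` ...
            bufferAdapter.append(buffer, withPresentationTime: presentTime)
        }
    }
}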
