@Coder-ACJHP
Last active August 26, 2021 06:58
This code creates an MPEG-4 video from a given UIImage array by exporting it in multiple small video parts, then merges all of the parts into a single video and saves it to the user's photo library.
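The snippet is an excerpt from a UIViewController subclass and relies on the UIKit, AVFoundation, and Photos frameworks plus a few helper types that are not included in the gist (VideoSettings, VideoExporter, the timeline content view, videoFormat, and the g_engine rendering object). A minimal sketch of what the assumed settings and exporter interfaces could look like, with the property names taken from how the code below uses them and everything else an assumption:

import UIKit
import AVFoundation
import Photos

// Assumed shape of the settings object used below (not part of this gist)
struct VideoSettings {
    var fps: Int32 = 30
    var size: CGSize = .zero
    var queue: DispatchQueue = .main
    var videoFilename: String = "output"
    // Hypothetical: the exporter is expected to write each part to this URL
    var outputURL: URL {
        FileManager.default.temporaryDirectory.appendingPathComponent("\(videoFilename).mp4")
    }
}

// Assumed interface of the image-sequence exporter used below (not part of this gist)
final class VideoExporter {
    var images: [UIImage] = []
    private let settings: VideoSettings
    init(settings: VideoSettings) { self.settings = settings }
    // A real implementation would write `images` to settings.outputURL with AVAssetWriter
    // at settings.fps and call `completion` when the file is finished.
    func export(completion: @escaping () -> Void) { completion() }
}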
@IBAction func onApply(_ sender: Any) {
    if selectedAssetInfo.isEmpty {
        return
    }
    var tempVideoURLs: Array<URL> = []
    waiting.startAnimating()
    waiting.isHidden = false
    let serialQueue = DispatchQueue(label: "mediaInputQueue")
    var outputSettings = VideoSettings()
    outputSettings.fps = 24
    outputSettings.size = CGSize(width: 1920, height: 1080)
    outputSettings.queue = serialQueue
    // Fit the source video format into the 1920x1080 output while preserving its aspect ratio
    var frameSize = outputSettings.size
    frameSize.height = videoFormat.height * (outputSettings.size.width / videoFormat.width)
    if frameSize.height >= outputSettings.size.height {
        frameSize.width = videoFormat.width * (outputSettings.size.height / videoFormat.height)
        frameSize.height = outputSettings.size.height
    }
    let contentView = self.timelineView.scrollView.getContentView()
    var nextPhoto = contentView.prepareExporting(frameSize: frameSize)
    g_engine.setVideoFormat(Float(frameSize.width), height: Float(frameSize.height))
    g_engine.setExportMode(true)
    // Collect frames in batches until memory usage approaches half of the device's
    // physical memory (minus a 200 MB safety margin), then export each batch as a
    // temporary video part.
    let semaphore = DispatchSemaphore(value: 1)
    var imageList: Array<UIImage> = []
    let bytesInMegabyte = 1024.0 * 1024.0
    let physicalMemorySpace = Double(ProcessInfo.processInfo.physicalMemory) / bytesInMegabyte
    let allowedMemorySize: Double = (physicalMemorySpace / 2) - 200
    var usedMemory: Double = 0
    var videoCreator: VideoExporter!
    repeat {
        // Block until the previous batch has finished exporting
        semaphore.wait()
        serialQueue.async { [self] in
            outputSettings.videoFilename = "anylightVideo_temp_\(tempVideoURLs.count)"
            while nextPhoto != nil && usedMemory < allowedMemorySize {
                imageList.append(nextPhoto!)
                nextPhoto = nil
                contentView.getImageForExporting()?.downsample(to: outputSettings.size, completionHandler: { (resultImage) in
                    nextPhoto = resultImage
                    usedMemory = memoryUsage()
                })
            }
            print("Dividing photos to new part...")
            print("Collected photo count: \(imageList.count)")
            guard imageList.count > 0 else {
                semaphore.signal()
                print("No more photos to create video.")
                return
            }
            print("Creating video creator...")
            videoCreator = VideoExporter(settings: outputSettings)
            videoCreator.images = imageList
            videoCreator.export {
                imageList = []
                usedMemory = .zero
                tempVideoURLs.append(outputSettings.outputURL)
                print("Part of video exported.")
                semaphore.signal()
            }
        }
    } while contentView.currentIndexReachedLimit() != true
    DispatchQueue.main.async { [self] in
        self.waiting.stopAnimating()
        self.waiting.isHidden = true
        g_engine.setVideoFormat(Float(self.imageView.frame.width), height: Float(self.imageView.frame.height))
        g_engine.setExportMode(false)
        print("Engine released")
        imageList.removeAll()
        merge(withSettings: outputSettings, tempVideoURLs: tempVideoURLs)
    }
}
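The frame-collection loop above relies on a downsample(to:completionHandler:) helper on UIImage that is not included in the gist; for the while-loop to keep running it must call its completion synchronously. A minimal sketch under that assumption (the implementation details are hypothetical):

extension UIImage {
    // Hypothetical helper (not part of this gist): scales the image to the target size
    // and hands the result back synchronously, as the collection loop above expects.
    func downsample(to targetSize: CGSize, completionHandler: (UIImage) -> Void) {
        let renderer = UIGraphicsImageRenderer(size: targetSize)
        let scaled = renderer.image { _ in
            self.draw(in: CGRect(origin: .zero, size: targetSize))
        }
        completionHandler(scaled)
    }
}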
func merge(withSettings settings: VideoSettings, tempVideoURLs: Array<URL>) {
    guard tempVideoURLs.count > 0 else {
        print("No video to merge!")
        return
    }
    var assetList = Array<AVAsset>()
    tempVideoURLs.forEach { assetList.append(AVAsset(url: $0)) }
    let mixedComposition = AVMutableComposition()
    var insertTime = CMTime.zero
    var trackList: Array<AVMutableCompositionTrack> = []
    for videoAsset in assetList {
        guard let videoTrack = mixedComposition.addMutableTrack(
            withMediaType: .video,
            preferredTrackID: Int32(kCMPersistentTrackID_Invalid)
        ) else { continue }
        // Append the part's video track at the current end of the composition
        do {
            let ctTimeRange = CMTimeRangeMake(start: .zero, duration: videoAsset.duration)
            guard let videoAssetTrack = videoAsset.tracks(withMediaType: .video).first else { return }
            try videoTrack.insertTimeRange(ctTimeRange, of: videoAssetTrack, at: insertTime)
        } catch {
            print("Failed to load video track!")
            return
        }
        trackList.append(videoTrack)
        // Append the part's audio track, if it has one
        let audioTracks = videoAsset.tracks(withMediaType: .audio)
        if audioTracks.isEmpty == false {
            guard let audioTrack = mixedComposition.addMutableTrack(
                withMediaType: .audio,
                preferredTrackID: Int32(kCMPersistentTrackID_Invalid)
            ) else { return }
            do {
                let ctTimeRange = CMTimeRangeMake(start: .zero, duration: videoAsset.duration)
                guard let audioAssetTrack = audioTracks.first else { return }
                try audioTrack.insertTimeRange(ctTimeRange, of: audioAssetTrack, at: insertTime)
            } catch {
                print("Failed to load audio track!")
                return
            }
        }
        insertTime = CMTimeAdd(insertTime, videoAsset.duration)
    }
    let mainInstruction = AVMutableVideoCompositionInstruction()
    mainInstruction.timeRange = CMTimeRangeMake(
        start: .zero,
        duration: insertTime
    )
    // Hide each part's track once it has finished playing so the next part shows through
    var partEndTime = CMTime.zero
    for (index, track) in trackList.enumerated() {
        let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
        partEndTime = CMTimeAdd(partEndTime, assetList[index].duration)
        if index < trackList.count - 1 {
            instruction.setOpacity(0.0, at: partEndTime)
        }
        mainInstruction.layerInstructions.append(instruction)
    }
    let mainComposition = AVMutableVideoComposition()
    mainComposition.instructions = [mainInstruction]
    mainComposition.frameDuration = CMTimeMake(value: 1, timescale: settings.fps)
    mainComposition.renderSize = settings.size
    // Build a unique, timestamped output URL in the documents directory
    guard let documentDirectory = FileManager.default.urls(
        for: .documentDirectory,
        in: .userDomainMask).first
    else { return }
    let dateFormatter = DateFormatter()
    dateFormatter.dateStyle = .long
    dateFormatter.timeStyle = .short
    let date = dateFormatter.string(from: Date())
    let outputFileURL = documentDirectory.appendingPathComponent("anylightVideo-\(date).mp4")
    let fileManager = FileManager()
    if fileManager.fileExists(atPath: outputFileURL.path) {
        try? fileManager.removeItem(at: outputFileURL)
    }
    guard let exporter = AVAssetExportSession(
        asset: mixedComposition,
        presetName: AVAssetExportPresetHighestQuality)
    else { return }
    exporter.outputURL = outputFileURL
    exporter.outputFileType = AVFileType.mp4
    exporter.shouldOptimizeForNetworkUse = true
    exporter.videoComposition = mainComposition
    exporter.exportAsynchronously {
        DispatchQueue.main.async { [self] in
            // Save video in user library
            exportDidFinish(exporter)
            // Release sources
            trackList.removeAll()
            cleanUpTempFiles(tempVideoURLs)
        }
    }
    trackExportingProgress(exporter)
}
func exportDidFinish(_ session: AVAssetExportSession) {
    // Only proceed when the export finished successfully
    guard session.status == AVAssetExportSession.Status.completed,
          let outputURL = session.outputURL
    else { return }
    let saveVideoToPhotos = {
        // Create a new photo-library asset from the exported file
        let changes: () -> Void = {
            PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputURL)
        }
        PHPhotoLibrary.shared().performChanges(changes) { saved, error in
            DispatchQueue.main.async {
                let success = saved && (error == nil)
                let title = success ? "Success" : "Error"
                let message = success ? "Video saved" : "Failed to save video"
                let alert = UIAlertController(
                    title: title,
                    message: message,
                    preferredStyle: .alert)
                alert.addAction(UIAlertAction(title: "OK", style: .cancel, handler: nil))
                self.present(alert, animated: true, completion: nil)
            }
        }
    }
    // Ask for photo-library permission first if it has not been granted yet
    if PHPhotoLibrary.authorizationStatus() != .authorized {
        PHPhotoLibrary.requestAuthorization { status in
            if status == .authorized {
                saveVideoToPhotos()
            }
        }
    } else {
        saveVideoToPhotos()
    }
}
func cleanUpTempFiles(_ files: Array<URL>) {
    let fileManager = FileManager()
    for file in files {
        if fileManager.fileExists(atPath: file.path) {
            try? fileManager.removeItem(at: file)
            print("\(file.lastPathComponent) removed successfully.")
        }
    }
}
func trackExportingProgress(_ session: AVAssetExportSession) {
    DispatchQueue.main.async {
        // Poll the export session every 100 ms and log its progress
        Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true) { timer in
            switch session.status {
            case .exporting:
                print("Video exporter progress: \(session.progress * 100)")
            case .completed:
                print("Video exporter progress: \(session.progress * 100)")
                timer.invalidate()
            case .failed:
                print("Video exporter operation failed!")
                timer.invalidate()
            case .cancelled:
                print("Video exporter operation cancelled by user.")
                timer.invalidate()
            default:
                break
            }
        }
    }
}
// Returns the app's current physical memory footprint in megabytes
func memoryUsage() -> Double {
    var taskInfo = task_vm_info_data_t()
    var count = mach_msg_type_number_t(MemoryLayout<task_vm_info>.size) / 4
    let result: kern_return_t = withUnsafeMutablePointer(to: &taskInfo) {
        $0.withMemoryRebound(to: integer_t.self, capacity: 1) {
            task_info(mach_task_self_, task_flavor_t(TASK_VM_INFO), $0, &count)
        }
    }
    var used: UInt64 = 0
    if result == KERN_SUCCESS {
        used = UInt64(taskInfo.phys_footprint)
    }
    let bytesInMegabyte = 1024.0 * 1024.0
    let usedMemory = Double(used) / bytesInMegabyte
    return usedMemory
}