Skip to content

Instantly share code, notes, and snippets.

@bananafish911
Created March 5, 2019 14:36
Show Gist options
  • Save bananafish911/4a2d2a30cd4772a557a43d127ebc5940 to your computer and use it in GitHub Desktop.
Save bananafish911/4a2d2a30cd4772a557a43d127ebc5940 to your computer and use it in GitHub Desktop.
Video Converter Swift 4.2
// Created by Victor on 1/16/19.
// Copyright © 2019 tchop. All rights reserved.
//
// https://medium.com/samkirkiles/swift-using-avassetwriter-to-compress-video-files-for-network-transfer-4dcc7b4288c5
import Foundation
import AVFoundation
class VideoConverter {

    /// Returns the size of the file at `url` in megabytes (decimal: 1 MB = 1,000,000 bytes),
    /// or 0.0 when the file attributes cannot be read (e.g. the file does not exist).
    func fileSizeInMB(url: URL) -> Double {
        do {
            let attributes = try FileManager.default.attributesOfItem(atPath: url.path)
            if let size = attributes[FileAttributeKey.size] as? NSNumber {
                return size.doubleValue / 1_000_000.0
            }
        } catch {
            print("Error: \(error)")
        }
        return 0.0
    }

    /// Compress video file.
    ///
    /// Re-encodes the video track (H.264, downscaled — see `videoRendererOutputSettings`)
    /// and passes the audio track through unchanged. Assets without an audio track are
    /// supported: the audio stage is simply skipped.
    ///
    /// - Parameters:
    ///   - sourceUrl: URL of an initial file to be compressed.
    ///   - progress: called on the main queue with values in 0.0...1.0.
    ///   - completion: returns compressed-file url, or `sourceUrl` if there was an error.
    func compressVideo(sourceUrl: URL,
                       progress: @escaping ((Double) -> Void),
                       completion: @escaping ((URL) -> Void)) {
        let onErrorHandler: (String) -> Void = { message in
            debugPrint("VideoConverter failed: " + message)
            completion(sourceUrl) // leave without compression
        }
        var audioFinished = false
        var videoFinished = false
        let outputFileType = AVFileType.mov
        let outputUrl = prepareOutputUrl(filetype: outputFileType)
        let assetToCompress = AVAsset(url: sourceUrl)

        guard let assetReader = try? AVAssetReader(asset: assetToCompress) else {
            return onErrorHandler("can't initialize AVAssetReader")
        }

        // BUGFIX: the original force-unwrapped both tracks; assets without an audio
        // track (screen recordings, muted clips) crashed. Video is required, audio is
        // optional.
        guard let videoTrack = assetToCompress.tracks(withMediaType: AVMediaType.video).first else {
            return onErrorHandler("source asset has no video track")
        }
        let audioTrack = assetToCompress.tracks(withMediaType: AVMediaType.audio).first

        // Decode video frames to a raw pixel format the writer can re-encode.
        let videoReaderSettings: [String: Any] =
            [(kCVPixelBufferPixelFormatTypeKey as String): kCVPixelFormatType_32ARGB]
        let assetReaderVideoOutput = AVAssetReaderTrackOutput(track: videoTrack,
                                                              outputSettings: videoReaderSettings)
        guard assetReader.canAdd(assetReaderVideoOutput) else {
            return onErrorHandler("Couldn't add video output reader")
        }
        assetReader.add(assetReaderVideoOutput)

        // Audio is read with nil settings (compressed pass-through samples).
        var assetReaderAudioOutput: AVAssetReaderTrackOutput?
        if let audioTrack = audioTrack {
            let audioOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: nil)
            guard assetReader.canAdd(audioOutput) else {
                return onErrorHandler("Couldn't add audio output reader")
            }
            assetReader.add(audioOutput)
            assetReaderAudioOutput = audioOutput
        } else {
            audioFinished = true // nothing to transcode; let the video side close the writer
        }

        let videoOutputSettings = videoRendererOutputSettings(originalVideoSize: videoTrack.naturalSize)
        let videoInput = AVAssetWriterInput(mediaType: AVMediaType.video,
                                            outputSettings: videoOutputSettings)
        // Preserve the source orientation (e.g. portrait phone footage).
        videoInput.transform = videoTrack.preferredTransform
        let videoInputQueue = DispatchQueue(label: "videoQueue")
        let audioInputQueue = DispatchQueue(label: "audioQueue")

        guard let assetWriter = try? AVAssetWriter(outputURL: outputUrl, fileType: outputFileType) else {
            return onErrorHandler("can't initialize AVAssetWriter")
        }
        assetWriter.shouldOptimizeForNetworkUse = true
        assetWriter.add(videoInput)

        var audioInput: AVAssetWriterInput?
        if assetReaderAudioOutput != nil {
            let input = AVAssetWriterInput(mediaType: AVMediaType.audio, outputSettings: nil)
            assetWriter.add(input)
            audioInput = input
        }

        assetWriter.startWriting()
        assetReader.startReading()
        assetWriter.startSession(atSourceTime: kCMTimeZero)

        // Finishes the writer once BOTH tracks are done. The finished flags are only
        // mutated on the main queue, so this check is race-free.
        let closeWriter: () -> Void = {
            if audioFinished && videoFinished {
                assetWriter.finishWriting {
                    progress(1.0)
                    completion(assetWriter.outputURL)
                }
                assetReader.cancelReading()
            }
        }

        if let audioInput = audioInput, let audioOutput = assetReaderAudioOutput {
            audioInput.requestMediaDataWhenReady(on: audioInputQueue) {
                while audioInput.isReadyForMoreMediaData {
                    if let sample = audioOutput.copyNextSampleBuffer() {
                        audioInput.append(sample)
                    } else {
                        // Reader exhausted — mark the track finished and try to close.
                        audioInput.markAsFinished()
                        DispatchQueue.main.async {
                            audioFinished = true
                            closeWriter()
                        }
                        break
                    }
                }
            }
        }

        let assetToCompressDurationTime = CMTimeGetSeconds(assetToCompress.duration)
        videoInput.requestMediaDataWhenReady(on: videoInputQueue) {
            while videoInput.isReadyForMoreMediaData {
                if let sample = assetReaderVideoOutput.copyNextSampleBuffer() {
                    // Progress is derived from the presentation timestamp of the frame
                    // currently being written, relative to the asset's total duration.
                    let timeStamp = CMSampleBufferGetPresentationTimeStamp(sample)
                    let progressPercentage = CMTimeGetSeconds(timeStamp) / assetToCompressDurationTime
                    videoInput.append(sample)
                    DispatchQueue.main.async {
                        progress(progressPercentage)
                    }
                } else {
                    videoInput.markAsFinished()
                    DispatchQueue.main.async {
                        videoFinished = true
                        closeWriter()
                    }
                    break
                }
            }
        }
    }

    /// Builds the temporary output URL for the compressed movie, deleting any stale
    /// file left over from a previous run.
    ///
    /// BUGFIX: the original appended `filetype.rawValue` as the path extension, but
    /// `AVFileType.rawValue` is a UTI string ("com.apple.quicktime-movie" for .mov),
    /// not a file extension — the output file got a bogus extension.
    private func prepareOutputUrl(filetype: AVFileType) -> URL {
        let fileExtension: String
        switch filetype {
        case .mov: fileExtension = "mov"
        case .mp4: fileExtension = "mp4"
        case .m4v: fileExtension = "m4v"
        default:   fileExtension = "mov" // sensible fallback for other container types
        }
        let outputUrl = URL(fileURLWithPath: NSTemporaryDirectory())
            .appendingPathComponent("tchopCompressedMovie")
            .appendingPathExtension(fileExtension)
        if FileManager.default.fileExists(atPath: outputUrl.path) {
            try? FileManager.default.removeItem(at: outputUrl)
        }
        return outputUrl
    }

    // MARK: - Compression goes here:

    /// Renderer settings. Returns a dictionary of output settings to be used for sample output.
    ///
    /// Downscales so the smaller dimension (works for portrait and landscape alike) is
    /// at most 480 lines, preserving aspect ratio, and encodes H.264 at ~2 Mbit/s.
    func videoRendererOutputSettings(originalVideoSize: CGSize) -> [String: Any] {
        // Reduce video frame size, keeping aspect ratio.
        var reducedSize: CGSize
        let linesLimit: CGFloat = 480.0
        let initialLines = min(originalVideoSize.width, originalVideoSize.height) // for vertical videos as well
        let ratio = initialLines / linesLimit
        if ratio > 1 {
            reducedSize = CGSize(width: originalVideoSize.width / ratio,
                                 height: originalVideoSize.height / ratio)
        } else {
            reducedSize = originalVideoSize // video is already small
        }
        // Codec settings: H.264 at a fixed 2 Mbit/s average bitrate.
        let videoSettings: [String: Any] = [
            AVVideoCompressionPropertiesKey: [AVVideoAverageBitRateKey: NSNumber(value: 2 * 1024 * 1024)],
            AVVideoCodecKey: AVVideoCodecH264,
            AVVideoHeightKey: reducedSize.height,
            AVVideoWidthKey: reducedSize.width
        ]
        return videoSettings
    }
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment