@odemolliens
Forked from jaumevn/compression.swift
Created February 8, 2018 13:24
Video Compression in Swift
import AVFoundation

func compress(videoPath: String, exportVideoPath: String, renderSize: CGSize, completion: @escaping (Bool) -> ()) {
    // Bail out early if the source file does not exist.
    let videoUrl = URL(fileURLWithPath: videoPath)
    guard FileManager.default.fileExists(atPath: videoPath) else {
        completion(false)
        return
    }

    // The asset must contain at least one video track and one audio track.
    let videoAsset = AVURLAsset(url: videoUrl, options: nil)
    guard let videoAssetTrack = videoAsset.tracks(withMediaType: .video).first else {
        completion(false)
        return
    }
    guard let audioAssetTrack = videoAsset.tracks(withMediaType: .audio).first else {
        completion(false)
        return
    }

    // Writer: re-encodes the decoded samples into a QuickTime movie at the target size and bitrate.
    let outputUrl = URL(fileURLWithPath: exportVideoPath)
    let videoWriter = try! AVAssetWriter(outputURL: outputUrl, fileType: .mov)
    videoWriter.shouldOptimizeForNetworkUse = true

    let vSettings = videoSettings(size: renderSize)
    let videoWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: vSettings)
    videoWriterInput.expectsMediaDataInRealTime = false
    videoWriterInput.transform = videoAssetTrack.preferredTransform
    videoWriter.add(videoWriterInput)

    let aSettings = audioSettings()
    let audioWriterInput = AVAssetWriterInput(mediaType: .audio, outputSettings: aSettings)
    audioWriterInput.expectsMediaDataInRealTime = false
    audioWriterInput.transform = audioAssetTrack.preferredTransform
    videoWriter.add(audioWriterInput)

    // Readers: decode the source video track to pixel buffers and the audio track to linear PCM.
    let videoReaderSettings: [String: Any] = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)]
    let videoReaderOutput = AVAssetReaderTrackOutput(track: videoAssetTrack, outputSettings: videoReaderSettings)
    let videoReader = try! AVAssetReader(asset: videoAsset)
    videoReader.add(videoReaderOutput)

    let audioReaderSettings: [String: Any] = [AVFormatIDKey: Int(kAudioFormatLinearPCM)]
    let audioReaderOutput = AVAssetReaderTrackOutput(track: audioAssetTrack, outputSettings: audioReaderSettings)
    let audioReader = try! AVAssetReader(asset: videoAsset)
    audioReader.add(audioReaderOutput)

    videoWriter.startWriting()
    videoReader.startReading()
    videoWriter.startSession(atSourceTime: .zero)

    // First pass: pull video samples from the reader and append them to the writer input.
    let processingVideoQueue = DispatchQueue(label: "processingVideoCompressionQueue")
    videoWriterInput.requestMediaDataWhenReady(on: processingVideoQueue) {
        while videoWriterInput.isReadyForMoreMediaData {
            if videoReader.status == .reading, let sampleVideoBuffer = videoReaderOutput.copyNextSampleBuffer() {
                videoWriterInput.append(sampleVideoBuffer)
            } else {
                videoWriterInput.markAsFinished()
                if videoReader.status == .completed {
                    // Second pass: the writing session is already open, so just copy the audio samples.
                    audioReader.startReading()
                    let processingAudioQueue = DispatchQueue(label: "processingAudioCompressionQueue")
                    audioWriterInput.requestMediaDataWhenReady(on: processingAudioQueue) {
                        while audioWriterInput.isReadyForMoreMediaData {
                            if audioReader.status == .reading, let sampleAudioBuffer = audioReaderOutput.copyNextSampleBuffer() {
                                audioWriterInput.append(sampleAudioBuffer)
                            } else {
                                audioWriterInput.markAsFinished()
                                if audioReader.status == .completed {
                                    videoWriter.finishWriting {
                                        completion(true)
                                    }
                                }
                                break
                            }
                        }
                    }
                }
                break
            }
        }
    }
}
func videoSettings(size: CGSize) -> [String: Any] {
    // H.264 at roughly 425 kbps, scaled to the requested render size.
    let compressionSettings: [String: Any] = [AVVideoAverageBitRateKey: 425000]
    return [
        AVVideoCodecKey: AVVideoCodecType.h264,
        AVVideoCompressionPropertiesKey: compressionSettings,
        AVVideoWidthKey: size.width,
        AVVideoHeightKey: size.height
    ]
}
func audioSettings() -> [String: Any] {
    // Mono channel layout for the AAC encoder (AudioChannelLayout() is zero-initialized by Swift).
    var channelLayout = AudioChannelLayout()
    channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Mono
    let channelLayoutData = Data(bytes: &channelLayout, count: MemoryLayout<AudioChannelLayout>.size)

    // AAC, 44.1 kHz, mono, 96 kbps.
    return [
        AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
        AVSampleRateKey: 44100,
        AVNumberOfChannelsKey: 1,
        AVEncoderBitRateKey: 96000,
        AVChannelLayoutKey: channelLayoutData
    ]
}
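
For reference, a minimal call-site sketch (not part of the original gist): it assumes the three functions above are in scope, and the source path, export path, and 640×360 render size are placeholder values. The completion closure runs on a background queue once both the video and the audio track have been copied, so any UI work should hop back to the main queue.

let sourcePath = NSTemporaryDirectory() + "input.mov"       // placeholder: an existing movie file
let exportPath = NSTemporaryDirectory() + "compressed.mov"  // placeholder: destination for the compressed copy

compress(videoPath: sourcePath,
         exportVideoPath: exportPath,
         renderSize: CGSize(width: 640, height: 360)) { success in
    DispatchQueue.main.async {
        print(success ? "Compressed video written to \(exportPath)" : "Compression failed")
    }
}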