LF Custom Live Content
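A helper class that streams custom GPUImage2 content over RTMP with the lf (HaishinKit) library: each rendered frame is copied into a CVPixelBuffer, wrapped in a timestamped CMSampleBuffer, and appended to the RTMPStream at a fixed 30 fps.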
import UIKit
import AVFoundation
import lf
import GPUImage
import VideoToolbox
class G8LiveStreamer: NSObject {
    var streamUrl: String?
    var streamName: String?
    var streamOrientation: UIDeviceOrientation?
    var inputVideoData: G8RawDataOutput?
    var rtmpConnection: RTMPConnection = RTMPConnection()
    var rtmpStream: RTMPStream!
    var GPUPIXELBUFF: CVPixelBuffer? = nil // latest frame delivered by GPUImage2
    var PTS = kCMTimeZero
    var timer: Timer?
    var FPS: Int = 30
    static let NOTIFSTATUS = "G8LiveStreamerStatus" // used for the stream-status notification ('open' is not valid on static stored properties)
    fileprivate let semaphore: DispatchSemaphore = DispatchSemaphore(value: 1)
    fileprivate let lockQueue: DispatchQueue = DispatchQueue(
        label: "com.generate.HaishinKit.GPUImageOutput.lock", qos: DispatchQoS.userInteractive, attributes: []
    )
    // Dimensions of the streamed video, not the source image resolution (the actual
    // resolution comes from the GPUImage camera setup). Redefined on every start();
    // it cannot be changed once the stream is running.
    fileprivate var streamedVideoSize: CGSize = CGSize(width: 1280, height: 720)
    override init() {
        super.init()
    }

    init(streamUrl: String, streamName: String) {
        super.init()
        self.streamUrl = streamUrl
        self.streamName = streamName
        self.streamOrientation = UIDevice.current.orientation

        let sampleRate: Double = 44_100
        do {
            try AVAudioSession.sharedInstance().setPreferredSampleRate(sampleRate)
            try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayAndRecord)
            try AVAudioSession.sharedInstance().setMode(AVAudioSessionModeDefault)
            try AVAudioSession.sharedInstance().setActive(true)
        } catch {
            Swift.print("Failed to configure AVAudioSession: \(error)")
        }
        self.prepareRTMP()
    }
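    // The audio session is configured for PlayAndRecord so that attachAudio() in
    // prepareRTMP() can capture the microphone while audio playback continues.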
    func prepareRTMP() {
        rtmpConnection = RTMPConnection()
        rtmpStream = RTMPStream(connection: rtmpConnection)
        rtmpStream.syncOrientation = true
        rtmpStream.attachAudio(AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio), automaticallyConfiguresApplicationAudioSession: false)
        setLockedOrientation()
        rtmpStream.audioSettings["bitrate"] = 48 * 1024 // 48 kbps; the default is 32 kbps
        rtmpStream.captureSettings["fps"] = 30.0 // probably only affects the native camera capture, not this custom feed
    }
    func setLockedOrientation() {
        self.streamOrientation = UIDevice.current.orientation
        if !rtmpConnection.connected {
            if let orientation = self.streamOrientation {
                if orientation.isPortrait {
                    self.streamedVideoSize.width = 720
                    self.streamedVideoSize.height = 1280
                } else if orientation.isLandscape {
                    self.streamedVideoSize.width = 1280
                    self.streamedVideoSize.height = 720
                }
            }
            rtmpStream.videoSettings = [
                "width": streamedVideoSize.width,
                "height": streamedVideoSize.height,
                "bitrate": 500 * 1024, // 500 kbps; the default is 160 kbps
                "profileLevel": kVTProfileLevel_H264_Baseline_AutoLevel, // H264 profile levels require "import VideoToolbox"
                "maxKeyFrameIntervalDuration": 2, // one key frame every 2 seconds
                "scalingMode": kVTScalingMode_Normal
            ]
        } else {
            assertionFailure("You can't change the streaming video dimensions once the stream has started!")
        }
    }
    func start() {
        if self.rtmpConnection.connected == false {
            UIApplication.shared.isIdleTimerDisabled = true
            self.prepareRTMP()
            rtmpConnection.addEventListener(Event.RTMP_STATUS, selector: #selector(rtmpStatusHandler(_:)), observer: self)
            rtmpConnection.connect(self.streamUrl!)
            Swift.print(" \n-G8Live =========================\nConnecting to \(self.streamUrl!)\n\n")
            self.startPushingCustomContent()
        }
    }

    func stop() {
        UIApplication.shared.isIdleTimerDisabled = false
        self.rtmpConnection.close()
        self.stopPushingCustomContent()
        self.rtmpConnection.removeEventListener(Event.RTMP_STATUS, selector: #selector(rtmpStatusHandler(_:)), observer: self)
    }
    func rtmpStatusHandler(_ notification: Notification) {
        let e: Event = Event.from(notification)
        NotificationCenter.default.post(
            name: Notification.Name(rawValue: G8LiveStreamer.NOTIFSTATUS), object: self, userInfo: ["event": e]
        )
        if let data: ASObject = e.data as? ASObject, let code: String = data["code"] as? String {
            switch code {
            case RTMPConnection.Code.connectSuccess.rawValue:
                Swift.print("\n-G8Live =========================\nConnected...\n\n")
                rtmpStream!.publish(self.streamName!)
            case RTMPStream.Code.publishStart.rawValue:
                Swift.print("\n-G8Live =========================\nWe are LIVE!! Publishing to \(self.streamName!)\n\n")
            default:
                Swift.print("\n-G8Live =========================\nFailed. Error: \(code)\n\n")
            }
        }
    }
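    // Elsewhere in the app, observers can track the stream status through the
    // notification posted above, e.g. (sketch):
    //
    //     NotificationCenter.default.addObserver(
    //         forName: Notification.Name(G8LiveStreamer.NOTIFSTATUS),
    //         object: nil, queue: .main) { note in
    //         if let event = note.userInfo?["event"] as? Event { /* inspect event.data */ }
    //     }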
    //MARK: Custom Content Stream
    func startPushingCustomContent() {
        self.setupGPUImageCallback()
        // Append the latest custom pixel buffer to the lf stream every 1/FPS seconds (30 fps)
        self.timer = Timer.scheduledTimer(timeInterval: 1.0 / Double(self.FPS),
                                          target: self,
                                          selector: #selector(sendGPUPixelBuffer),
                                          userInfo: nil,
                                          repeats: true)
    }

    func stopPushingCustomContent() {
        PTS = kCMTimeZero // reset the presentation timestamp so a restarted stream begins at zero
        self.timer?.invalidate()
    }
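    // Design note: frames are pushed on a fixed timer rather than from the GPUImage2
    // callback itself, so the encoder is fed at a steady 1/FPS cadence even when the
    // render rate fluctuates; the timer simply re-sends the most recent buffer.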
    func setupGPUImageCallback() {
        if let rawData = self.inputVideoData {
            // This callback fires every time GPUImage2 renders a new frame.
            rawData.dataAvailableCallbackWithSize = { [unowned self] dataArray, frameSize in
                let width = Int(frameSize.width)
                let height = Int(frameSize.height)
                let sourceBytesPerRow = width * 4 // 4 bytes per BGRA pixel
                // Allocate a pixel buffer that owns its backing memory and copy the
                // frame into it. The original code wrapped the Swift array's bytes with
                // CVPixelBufferCreateWithBytes, which leaves the buffer pointing at
                // freed memory once this closure returns; that is the likely cause of
                // the EXC_BAD_ACCESS later in appendSampleBuffer.
                var pixelBuffer: CVPixelBuffer? = nil
                let status = CVPixelBufferCreate(kCFAllocatorDefault, width, height,
                                                 kCVPixelFormatType_32BGRA, nil, &pixelBuffer)
                guard status == kCVReturnSuccess, let buffer = pixelBuffer else { return }
                CVPixelBufferLockBaseAddress(buffer, [])
                let destination = CVPixelBufferGetBaseAddress(buffer)!
                let destinationBytesPerRow = CVPixelBufferGetBytesPerRow(buffer) // may be padded past width * 4
                dataArray.withUnsafeBufferPointer { source in
                    for row in 0..<height {
                        memcpy(destination + row * destinationBytesPerRow,
                               source.baseAddress! + row * sourceBytesPerRow,
                               sourceBytesPerRow)
                    }
                }
                CVPixelBufferUnlockBaseAddress(buffer, [])
                self.GPUPIXELBUFF = buffer // picked up later by sendGPUPixelBuffer()
            }
        }
    } // end setupGPUImageCallback
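    // Caveat: GPUPIXELBUFF is written from the GPUImage2 callback's thread and read
    // on lockQueue without synchronization between the two, so a frame can be
    // swapped out mid-push. An atomic swap, or performing the write on lockQueue as
    // well, would make this airtight.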
    func sendGPUPixelBuffer() {
        // Skip this tick if the previous frame is still being pushed
        guard semaphore.wait(timeout: DispatchTime.now()) == .success else {
            return
        }
        lockQueue.async {
            self.pushPixelBufferToStream(pixelBuffer: self.GPUPIXELBUFF)
            self.semaphore.signal()
        }
    }
    func pushPixelBufferToStream(pixelBuffer: CVPixelBuffer?) {
        if let imageBuffer = pixelBuffer {
            var timingInfo: CMSampleTimingInfo = CMSampleTimingInfo(
                duration: CMTimeMake(1, Int32(self.FPS)),
                presentationTimeStamp: self.PTS,
                decodeTimeStamp: kCMTimeInvalid
            )
            var videoFormatDescription: CMVideoFormatDescription? = nil
            _ = CMVideoFormatDescriptionCreateForImageBuffer(
                kCFAllocatorDefault,
                imageBuffer,
                &videoFormatDescription
            )
            guard let formatDescription = videoFormatDescription else { return }
            var sampleBuffer: CMSampleBuffer? = nil
            _ = CMSampleBufferCreateForImageBuffer(
                kCFAllocatorDefault,
                imageBuffer,
                true,
                nil,
                nil,
                formatDescription,
                &timingInfo,
                &sampleBuffer
            )
            if let buffer: CMSampleBuffer = sampleBuffer {
                // The EXC_BAD_ACCESS originally reported here should be gone now that
                // setupGPUImageCallback() copies each frame into its own pixel buffer.
                rtmpStream.appendSampleBuffer(buffer, withType: .video)
            }
            // Advance the presentation timestamp by one frame duration
            self.PTS = CMTimeAdd(self.PTS, CMTimeMake(1, Int32(self.FPS)))
        }
    }
} // end class
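// Usage sketch -- assumes a GPUImage2 pipeline, and that G8RawDataOutput is a
// RawDataOutput subclass (not shown in this gist) delivering BGRA frames through
// dataAvailableCallbackWithSize:
//
//     let rawOutput = G8RawDataOutput()
//     camera --> filter --> rawOutput
//
//     let streamer = G8LiveStreamer(streamUrl: "rtmp://example.com/live",
//                                   streamName: "myStream")
//     streamer.inputVideoData = rawOutput
//     streamer.start() // connects, publishes, and starts the 30 fps push timer
//     ...
//     streamer.stop()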