Skip to content

Instantly share code, notes, and snippets.

@pbodsk
Last active September 9, 2019 01:06
Show Gist options
  • Save pbodsk/40bee78940385847b7e9780a70e2a937 to your computer and use it in GitHub Desktop.
/// Swift-side contract mirroring the C `AURenderCallback` signature, so a
/// class instance can receive per-buffer render notifications from AudioToolbox.
/// Declared `@objc` so its existential is a single object pointer, which the
/// C callback recovers from the opaque `inRefCon` context pointer.
@objc protocol AURenderCallbackDelegate {
/// Called for each render cycle (pre- and post-render, per `ioActionFlags`).
/// - Returns: `noErr` on success, otherwise an `OSStatus` error code.
func performRender(_ ioActionFlags: UnsafeMutablePointer<AudioUnitRenderActionFlags>,
inTimeStamp: UnsafePointer<AudioTimeStamp>,
inBusNumber: UInt32,
inNumberFrames: UInt32,
ioData: UnsafeMutablePointer<AudioBufferList>?) -> OSStatus
}
/// C-convention trampoline passed to `AudioUnitAddRenderNotify`; forwards every
/// render notification to the Swift delegate packed into `inRefCon`.
/// - Note: `unsafeBitCast` is only valid here because the context pointer was
///   produced with `Unmanaged.passUnretained(self).toOpaque()` on an object
///   conforming to the `@objc` protocol — the existential is the object pointer.
func renderCallback(_ inRefCon: UnsafeMutableRawPointer,
                    ioActionFlags: UnsafeMutablePointer<AudioUnitRenderActionFlags>,
                    inTimeStamp: UnsafePointer<AudioTimeStamp>,
                    inBusNumber: UInt32,
                    inNumberFrames: UInt32,
                    ioData: UnsafeMutablePointer<AudioBufferList>?) -> OSStatus {
    // Recover the delegate from the opaque context and hand the call straight through.
    let target = unsafeBitCast(inRefCon, to: AURenderCallbackDelegate.self)
    return target.performRender(ioActionFlags,
                                inTimeStamp: inTimeStamp,
                                inBusNumber: inBusNumber,
                                inNumberFrames: inNumberFrames,
                                ioData: ioData)
}
/// Owns and wires an `AUGraph` of file players feeding a mixer, a tempo
/// (varispeed) unit, and the platform output unit:
/// filePlayers -> mixer -> tempo -> output.
/// Conforms to `AURenderCallbackDelegate` so the C `renderCallback` trampoline
/// can forward per-buffer render notifications back into this object.
class AudioUnitGraphManager: NSObject, AURenderCallbackDelegate {
    let filePlayers: [AudioUnitFilePlayer]
    var inputFormat: AudioStreamBasicDescription = AudioStreamBasicDescription()
    var graph: AUGraph? = nil
    var outputNode: AUNode = AUNode()
    var tempoNode: AUNode = AUNode()
    var mixerNode: AUNode = AUNode()
    var mixerUnit: AudioUnit? = nil
    var tempoUnit: AudioUnit? = nil
    // Setting a new tempo immediately reconfigures the tempo unit.
    var currentTempo: AudioUnitTempo = .normal {
        didSet {
            changeTempo()
        }
    }
    var currentPlayerState: AudioPlayerState
    var loopStartPosition: TimeInterval?
    var loopEndPosition: TimeInterval?
    var masterVolume: Float = 1.0
    // Timestamp of the last broadcast; used by shouldBroadcast(_:) to throttle
    // time-value notifications from the render callback.
    var previousAudioTimeStamp = AudioTimeStamp()

    init(stemInitDataElements: [MultitrackPlayerStemInitData], normalizedOutputMultiplier: Float = 1.0) {
        //nothing fancy going on here
    }

    func initializeGraph() throws {
        //Again, nothing fancy here, open files and determine if this is a file format we support
    }

    /// Builds the processing graph: creates and opens the `AUGraph`, adds the
    /// output/tempo/mixer nodes, connects mixer -> tempo -> output, sizes the
    /// mixer's input bus count to the number of file players, raises the
    /// max-frames-per-slice for background playback, connects the players, and
    /// finally installs the render-notify callback.
    /// - Throws: `AudioUnitGraphManagerError.graphCreationError` carrying the
    ///   failing call's description and `OSStatus` whenever an AudioToolbox
    ///   call returns something other than `noErr`.
    func createGraph() throws {
        //Create graph
        var result = NewAUGraph(&graph)
        if result != noErr {
            throw AudioUnitGraphManagerError.graphCreationError(description: "failed NewAUGraph", returnCode: result)
        }
        result = AUGraphClearConnections(graph!)
        if result != noErr {
            throw AudioUnitGraphManagerError.graphCreationError(description: "failed AUGraphClearConnections", returnCode: result)
        }
        //Output device
        //Device specific instantiation
        var outputComponentDescription = PlatformSpecificAudioUtility.outputComponentDescription() //I needed to handle both iOS and macOS here
        result = AUGraphAddNode(graph!, &outputComponentDescription, &outputNode)
        if result != noErr {
            throw AudioUnitGraphManagerError.graphCreationError(description: "failed AUGraphAddNode - output", returnCode: result)
        }
        // Added various nodes here
        //Open graph
        result = AUGraphOpen(graph!)
        if result != noErr {
            throw AudioUnitGraphManagerError.graphCreationError(description: "failed AUGraphOpen", returnCode: result)
        }
        //prepare mixerUnit
        result = AUGraphNodeInfo(graph!, mixerNode, nil, &mixerUnit)
        if result != noErr {
            throw AudioUnitGraphManagerError.graphCreationError(description: "failed AUGraphNodeInfo - mixer", returnCode: result)
        }
        //prepare tempoUnit
        result = AUGraphNodeInfo(graph!, tempoNode, nil, &tempoUnit)
        if result != noErr {
            throw AudioUnitGraphManagerError.graphCreationError(description: "failed AUGraphNodeInfo - tempo", returnCode: result)
        }
        //connect tempo to output
        result = AUGraphConnectNodeInput(graph!, tempoNode, 0, outputNode, 0)
        if result != noErr {
            throw AudioUnitGraphManagerError.graphCreationError(description: "failed AUGraphConnectNodeInput - tempo -> output", returnCode: result)
        }
        //connect mixer to tempo
        result = AUGraphConnectNodeInput(graph!, mixerNode, 0, tempoNode, 0)
        if result != noErr {
            // BUGFIX: message previously said "mixer -> output"; this is the mixer -> tempo hop.
            throw AudioUnitGraphManagerError.graphCreationError(description: "failed AUGraphConnectNodeInput - mixer -> tempo", returnCode: result)
        }
        // One mixer input bus per file player.
        var busCount: UInt32 = UInt32(filePlayers.count)
        result = AudioUnitSetProperty(mixerUnit!, AudioUnitPropertyID(kAudioUnitProperty_ElementCount), AudioUnitScope(kAudioUnitScope_Input), 0, &busCount, UInt32(MemoryLayout<UInt32>.size))
        if result != noErr {
            throw AudioUnitGraphManagerError.graphCreationError(description: "failed kAudioUnitProperty_ElementCount mixer", returnCode: result)
        }
        //Set MaximumFramePerSlice to 4096 to handle playing in the background
        // https://developer.apple.com/library/ios/qa/qa1606/_index.html
        var maxFPS: UInt32 = UInt32(4096)
        result = AudioUnitSetProperty(mixerUnit!, AudioUnitPropertyID(kAudioUnitProperty_MaximumFramesPerSlice), AudioUnitScope(kAudioUnitScope_Global), 0, &maxFPS, UInt32(MemoryLayout<UInt32>.size))
        if result != noErr {
            // BUGFIX: message previously duplicated the ElementCount description;
            // this call sets kAudioUnitProperty_MaximumFramesPerSlice.
            throw AudioUnitGraphManagerError.graphCreationError(description: "failed kAudioUnitProperty_MaximumFramesPerSlice mixer", returnCode: result)
        }
        try connectAllFilePlayersToMixer()
        //BINGO!! Here we are
        // passUnretained: the graph must not keep `self` alive; the manager owns
        // the graph, so the pointer stays valid for the callback's lifetime.
        AudioUnitAddRenderNotify(mixerUnit!, renderCallback, Unmanaged.passUnretained(self).toOpaque())
    }

    //And here is the actual delegate callback
    //MARK: AURenderDelegateMethods
    /// Render-notify hook: on each pre-render pass, optionally broadcasts the
    /// current/total playback time via NotificationCenter (throttled by
    /// `shouldBroadcast(_:)` against `previousAudioTimeStamp`).
    /// - NOTE(review): this runs on the real-time render thread; posting to
    ///   NotificationCenter here does allocation/locking and may cause glitches —
    ///   consider deferring the post to the main queue. Confirm before shipping.
    func performRender(_ ioActionFlags: UnsafeMutablePointer<AudioUnitRenderActionFlags>, inTimeStamp: UnsafePointer<AudioTimeStamp>, inBusNumber: UInt32, inNumberFrames: UInt32, ioData: UnsafeMutablePointer<AudioBufferList>?) -> OSStatus {
        let ioActionFlagsValue = ioActionFlags.pointee
        //In my case I needed to broadcast the current time
        if ioActionFlagsValue == AudioUnitRenderActionFlags.unitRenderAction_PreRender {
            let inAudioTimeStamp = inTimeStamp.pointee
            if shouldBroadcast(inAudioTimeStamp) {
                previousAudioTimeStamp = inAudioTimeStamp
                let currentTimeValue = currentTime()
                let totalTimeValue = duration()
                let timeValues = MultitrackPlayerTimeValues(currentTime: currentTimeValue, totalTime: totalTimeValue)
                let userInfoDict = [NotificationCenterConstants.MultitrackPlayer.TimeKey : timeValues ]
                NotificationCenter.default.post(name: NotificationCenterConstants.MultitrackPlayer.TimeValueUpdated, object: nil, userInfo: userInfoDict)
            }
        }
        return noErr
    }
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment