@lewislepton
Created March 31, 2017 13:25
AudioKit on Audio Unit v3 (extension)
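Overview: these files bridge an AudioKit (v3) signal chain to an Audio Unit v3 extension. IAYBridge wraps a custom AUAudioUnit (IAYBridgeAudioUnit) that copies its input into a TPCircularBuffer on every render cycle; the extension's MyAudioUnit then drains that ring buffer from its own render block. BufferedAudioBus.hpp is the bus/buffer utility from Apple's AUv3 sample code.

A minimal host-side wiring sketch, assuming the AudioKit 3.x APIs this gist uses (AKOscillator is just a stand-in source, not part of the gist):

import AudioKit

let source = AKOscillator()
let bridge = IAYBridge(source)  // taps the signal into the ring buffer
AudioKit.output = bridge
AudioKit.start()
source.start()

// A consumer (here, the AUv3 extension's render block) later reads from
// bridge.ringBuffer via TPCircularBufferTail/TPCircularBufferConsume.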
/*
    BufferedAudioBus.hpp (from Apple's AUv3 sample code)

    Abstract:
    Utility classes to manage audio formats and buffers for an audio unit implementation's input and output audio busses.
*/
#import <AudioToolbox/AudioToolbox.h>
#import <AudioUnit/AudioUnit.h>
#import <AVFoundation/AVFoundation.h>
#pragma mark BufferedAudioBus Utility Class
// Utility classes to manage audio formats and buffers for an audio unit implementation's input and output audio busses.
// Reusable non-ObjC class, accessible from render thread.
struct BufferedAudioBus {
    AUAudioUnitBus* bus = nullptr;
    AUAudioFrameCount maxFrames = 0;
    AVAudioPCMBuffer* pcmBuffer = nullptr;
    AudioBufferList const* originalAudioBufferList = nullptr;
    AudioBufferList* mutableAudioBufferList = nullptr;

    void init(AVAudioFormat* defaultFormat, AVAudioChannelCount maxChannels) {
        maxFrames = 0;
        pcmBuffer = nullptr;
        originalAudioBufferList = nullptr;
        mutableAudioBufferList = nullptr;

        bus = [[AUAudioUnitBus alloc] initWithFormat:defaultFormat error:nil];
        bus.maximumChannelCount = maxChannels;
    }

    void allocateRenderResources(AUAudioFrameCount inMaxFrames) {
        maxFrames = inMaxFrames;
        pcmBuffer = [[AVAudioPCMBuffer alloc] initWithPCMFormat:bus.format frameCapacity:maxFrames];
        originalAudioBufferList = pcmBuffer.audioBufferList;
        mutableAudioBufferList = pcmBuffer.mutableAudioBufferList;
    }

    void deallocateRenderResources() {
        pcmBuffer = nullptr;
        originalAudioBufferList = nullptr;
        mutableAudioBufferList = nullptr;
    }
};
#pragma mark - BufferedOutputBus: BufferedAudioBus
#pragma mark prepareOutputBufferList()
/*
    BufferedOutputBus

    This class provides a prepareOutputBufferList method to copy the internal buffer pointers
    to the output buffer list in case the client passed in null buffer pointers.
*/
struct BufferedOutputBus : BufferedAudioBus {
    void prepareOutputBufferList(AudioBufferList* outBufferList, AVAudioFrameCount frameCount, bool zeroFill) {
        UInt32 byteSize = frameCount * sizeof(float);
        for (UInt32 i = 0; i < outBufferList->mNumberBuffers; ++i) {
            outBufferList->mBuffers[i].mNumberChannels = originalAudioBufferList->mBuffers[i].mNumberChannels;
            outBufferList->mBuffers[i].mDataByteSize = byteSize;
            if (outBufferList->mBuffers[i].mData == nullptr) {
                outBufferList->mBuffers[i].mData = originalAudioBufferList->mBuffers[i].mData;
            }
            if (zeroFill) {
                memset(outBufferList->mBuffers[i].mData, 0, byteSize);
            }
        }
    }
};
#pragma mark - BufferedInputBus: BufferedAudioBus
#pragma mark pullInput()
#pragma mark prepareInputBufferList()
/*
    BufferedInputBus

    This class manages a buffer into which an audio unit with input busses can
    pull its input data.
*/
struct BufferedInputBus : BufferedAudioBus {
    /*
        Gets input data for this input by preparing the input buffer list and pulling
        the pullInputBlock.
    */
    AUAudioUnitStatus pullInput(AudioUnitRenderActionFlags* actionFlags,
                                AudioTimeStamp const* timestamp,
                                AVAudioFrameCount frameCount,
                                NSInteger inputBusNumber,
                                AURenderPullInputBlock pullInputBlock) {
        if (pullInputBlock == nullptr) {
            return kAudioUnitErr_NoConnection;
        }
        prepareInputBufferList();
        return pullInputBlock(actionFlags, timestamp, frameCount, inputBusNumber, mutableAudioBufferList);
    }

    /*
        prepareInputBufferList populates the mutableAudioBufferList with the data
        pointers from the originalAudioBufferList.

        The upstream audio unit may overwrite these with its own pointers, so each
        render cycle this function needs to be called to reset them.
    */
    void prepareInputBufferList() {
        UInt32 byteSize = maxFrames * sizeof(float);
        mutableAudioBufferList->mNumberBuffers = originalAudioBufferList->mNumberBuffers;
        for (UInt32 i = 0; i < originalAudioBufferList->mNumberBuffers; ++i) {
            mutableAudioBufferList->mBuffers[i].mNumberChannels = originalAudioBufferList->mBuffers[i].mNumberChannels;
            mutableAudioBufferList->mBuffers[i].mData = originalAudioBufferList->mBuffers[i].mData;
            mutableAudioBufferList->mBuffers[i].mDataByteSize = byteSize;
        }
    }
};
//
// IAYBridge.swift
// Elliott
//
// Created by yellow on 06/10/2016.
// Copyright © 2016 iamyellow.net. All rights reserved.
//
import AVFoundation
import AudioKit
open class IAYBridge: AKNode {
    internal var internalAU: IAYBridgeAudioUnit?

    var ringBuffer: UnsafeMutableRawPointer? {
        return internalAU?.ringBuffer()
    }

    public init(_ input: AKNode) {
        var description = AudioComponentDescription()
        description.componentType = kAudioUnitType_Effect
        description.componentSubType = fourCC("brdg")
        description.componentManufacturer = fourCC("IAYe")
        description.componentFlags = 0
        description.componentFlagsMask = 0

        AUAudioUnit.registerSubclass(IAYBridgeAudioUnit.self,
                                     as: description,
                                     name: "Elliott Bridge",
                                     version: UInt32.max)

        super.init()

        AVAudioUnit.instantiate(with: description, options: []) { avAudioUnit, error in
            guard let avAudioUnitEffect = avAudioUnit else { return }
            self.avAudioNode = avAudioUnitEffect
            self.internalAU = avAudioUnitEffect.auAudioUnit as? IAYBridgeAudioUnit
            AudioKit.engine.attach(self.avAudioNode)
            input.addConnectionPoint(self)
        }
    }
}
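Aside: fourCC above packs a four-character code into the UInt32 component identifiers; the gist relies on AudioKit 3.x providing it. A minimal equivalent sketch if you need one outside AudioKit:

func fourCC(_ string: String) -> UInt32 {
    var result: UInt32 = 0
    // Pack up to four ASCII scalars, most significant byte first ("brdg" -> 0x62726467).
    for scalar in string.unicodeScalars {
        result = (result << 8) | (scalar.value & 0xFF)
    }
    return result
}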
//
// IAYBridgeAudioUnit.h
// Elliott
//
// Created by yellow on 06/10/2016.
// Copyright © 2016 iamyellow.net. All rights reserved.
//
#ifndef IAYBridgeAudioUnit_h
#define IAYBridgeAudioUnit_h
#import <AudioToolbox/AudioToolbox.h>
@interface IAYBridgeAudioUnit : AUAudioUnit
-(nullable void*)ringBuffer;
@end
#endif /* IAYBridgeAudioUnit_h */
//
// IAYBridgeAudioUnit.m
// Elliott
//
// Created by yellow on 06/10/2016.
// Copyright © 2016 iamyellow.net. All rights reserved.
//
#import "IAYBridgeAudioUnit.h"
#import <AVFoundation/AVFoundation.h>
#import <AudioKit/AudioKit-Swift.h>
#import <AudioKit/TPCircularBuffer.h>
#import "BufferedAudioBus.hpp"
@interface IAYBridgeAudioUnit ()
@property AUAudioUnitBus* outputBus;
@property AUAudioUnitBusArray* inputBusArray;
@property AUAudioUnitBusArray* outputBusArray;
@property (nonatomic, readwrite) AUParameterTree* parameterTree;
@end

@implementation IAYBridgeAudioUnit {
    BufferedInputBus _inputBus;
    TPCircularBuffer _ringBuffer;
    UInt32 _frames, _numberOfBuffers, _bytesPerBuffer;
}

@synthesize parameterTree = _parameterTree;

-(instancetype)initWithComponentDescription:(AudioComponentDescription)cd
                                    options:(AudioComponentInstantiationOptions)options
                                      error:(NSError**)outError
{
    self = [super initWithComponentDescription:cd options:options error:outError];
    if (!self) {
        return nil;
    }

    AVAudioFormat* defaultFormat = [[AVAudioFormat alloc] initStandardFormatWithSampleRate:AKSettings.sampleRate
                                                                                  channels:AKSettings.numberOfChannels];
    _parameterTree = [AUParameterTree createTreeWithChildren:@[]];
    _inputBus.init(defaultFormat, 8);
    _inputBusArray = [[AUAudioUnitBusArray alloc] initWithAudioUnit:self
                                                            busType:AUAudioUnitBusTypeInput
                                                             busses:@[_inputBus.bus]];
    _outputBus = [[AUAudioUnitBus alloc] initWithFormat:defaultFormat error:nil];
    _outputBusArray = [[AUAudioUnitBusArray alloc] initWithAudioUnit:self
                                                             busType:AUAudioUnitBusTypeOutput
                                                              busses:@[_outputBus]];
    self.maximumFramesToRender = 1024;
    return self;
}
-(nullable void*)ringBuffer
{
    return &_ringBuffer;
}

#pragma mark - AUAudioUnit Overrides

-(AUAudioUnitBusArray*)inputBusses
{
    return _inputBusArray;
}

-(AUAudioUnitBusArray*)outputBusses
{
    return _outputBusArray;
}
-(BOOL)allocateRenderResourcesAndReturnError:(NSError**)outError
{
    if (![super allocateRenderResourcesAndReturnError:outError]) {
        return NO;
    }
    if (self.outputBus.format.channelCount != _inputBus.bus.format.channelCount) {
        if (outError) {
            *outError = [NSError errorWithDomain:NSOSStatusErrorDomain
                                            code:kAudioUnitErr_FailedInitialization
                                        userInfo:nil];
        }
        self.renderResourcesAllocated = NO;
        return NO;
    }

    // Size the ring buffer from the session's I/O buffer duration: one render
    // cycle's worth of bytes per channel buffer, tripled for slack.
    _frames = ceil([AKSettings sampleRate] * [AVAudioSession sharedInstance].IOBufferDuration);
    const AudioStreamBasicDescription* asbd = [AKSettings audioFormat].streamDescription;
    _numberOfBuffers = (asbd->mFormatFlags & kAudioFormatFlagIsNonInterleaved) ? asbd->mChannelsPerFrame : 1;
    _bytesPerBuffer = asbd->mBytesPerFrame * _frames;
    TPCircularBufferInit(&_ringBuffer, 3 * _numberOfBuffers * _bytesPerBuffer);

    _inputBus.allocateRenderResources(_frames);
    return YES;
}

-(void)deallocateRenderResources
{
    [super deallocateRenderResources];
    TPCircularBufferCleanup(&_ringBuffer);
    _inputBus.deallocateRenderResources();
}
-(AUInternalRenderBlock)internalRenderBlock
{
    __block BufferedInputBus* input = &_inputBus;
    __block TPCircularBuffer* ringBuffer = &_ringBuffer;
    // Capture pointers to the ivars so the block does not implicitly retain self
    // (referencing ivars directly inside a render block captures self, which is
    // discouraged for AUv3 render code).
    UInt32* numberOfBuffersPtr = &_numberOfBuffers;
    UInt32* bytesPerBufferPtr = &_bytesPerBuffer;

    return ^AUAudioUnitStatus(AudioUnitRenderActionFlags* actionFlags,
                              const AudioTimeStamp* timestamp,
                              AVAudioFrameCount frameCount,
                              NSInteger outputBusNumber,
                              AudioBufferList* outputData,
                              const AURenderEvent* realtimeEventListHead,
                              AURenderPullInputBlock pullInputBlock) {
        AudioUnitRenderActionFlags pullFlags = 0;
        AUAudioUnitStatus err = input->pullInput(&pullFlags, timestamp, frameCount, 0, pullInputBlock);
        if (err != noErr) {
            return err;
        }

        AudioBufferList* inAudioBufferList = input->mutableAudioBufferList;
        UInt32 numberOfBuffers = *numberOfBuffersPtr;
        UInt32 bytesPerBuffer = *bytesPerBufferPtr;
        UInt32 neededBytes = numberOfBuffers * bytesPerBuffer;
        int32_t availableBytes;
        void* dstBuffer = TPCircularBufferHead(ringBuffer, &availableBytes);
        if (availableBytes >= (int32_t)neededBytes) {
            // Copy each non-interleaved channel buffer into the ring buffer back to back,
            // in the layout the consumer expects (buffer 1 directly after buffer 0).
            for (UInt32 i = 0; i < numberOfBuffers; ++i) {
                memcpy((char*)dstBuffer + i * bytesPerBuffer,
                       inAudioBufferList->mBuffers[i].mData,
                       bytesPerBuffer);
            }
            TPCircularBufferProduce(ringBuffer, neededBytes);
        }
        // Returns -1 rather than noErr: this unit only captures input into the
        // ring buffer and never fills outputData.
        return -1;
    };
}
@end
//
// MyAudioUnit.h
// ElliottAUExtension
//
// Created by yellow on 18/06/16.
// Copyright © 2016 iamyellow.net. All rights reserved.
//
#import <AudioToolbox/AudioToolbox.h>
@interface MyAudioUnit : AUAudioUnit
@end
//
// MyAudioUnit.m
// ElliottAUExtension
//
// Created by yellow on 18/06/16.
// Copyright © 2016 iamyellow.net. All rights reserved.
//
#import "MyAudioUnit.h"
#import <AVFoundation/AVFoundation.h>
#import <AudioUnit/AudioUnit.h>
#import <ElliottKit/ElliottKit-Swift.h>
#import <AudioKit/AudioKit-Swift.h>
#import <AudioKit/TPCircularBuffer.h>
@interface MyAudioUnit ()
@property AUAudioUnitBusArray* inputBusArray;
@property AUAudioUnitBus* outputBus;
@property AUAudioUnitBusArray* outputBusArray;
@property (nonatomic, readwrite) AUParameterTree* parameterTree;
@property (nonatomic, strong) IAYInstrument* instrument;
@end

@implementation MyAudioUnit

@synthesize parameterTree = _parameterTree;

-(instancetype)initWithComponentDescription:(AudioComponentDescription)componentDescription
                                    options:(AudioComponentInstantiationOptions)options
                                      error:(NSError**)outError
{
    self = [super initWithComponentDescription:componentDescription options:options error:outError];
    if (self == nil) {
        return nil;
    }

    self.instrument = [[IAYInstrument alloc] init:YES];
    _parameterTree = [AUParameterTree createTreeWithChildren:@[]];

    AVAudioFormat* defaultFormat = [[AVAudioFormat alloc] initStandardFormatWithSampleRate:[AKSettings sampleRate]
                                                                                  channels:[AKSettings numberOfChannels]];
    self.outputBus = [[AUAudioUnitBus alloc] initWithFormat:defaultFormat error:nil];
    self.outputBusArray = [[AUAudioUnitBusArray alloc] initWithAudioUnit:self
                                                                 busType:AUAudioUnitBusTypeOutput
                                                                  busses:@[self.outputBus]];
    self.maximumFramesToRender = 1024;
    return self;
}
#pragma mark - AUAudioUnit Overrides
-(AUAudioUnitBusArray*)outputBusses
{
    return self.outputBusArray;
}

-(BOOL)allocateRenderResourcesAndReturnError:(NSError**)outError
{
    if (![super allocateRenderResourcesAndReturnError:outError]) {
        return NO;
    }

    NSArray<NSURL*>* exsFilePaths = [FileUtil getExs24Instruments:[FileUtil sharedFolderURL]];
    if (exsFilePaths.count == 0) {
        NSLog(@">> no EXS24");
        return YES;
    }
    if (![self.instrument loadExs24At:exsFilePaths[0]]) {
        NSLog(@">> error loading %@", exsFilePaths[0]);
    }
    return YES;
}

-(void)deallocateRenderResources
{
    [super deallocateRenderResources];
}
#pragma mark - Render
-(AUInternalRenderBlock)internalRenderBlock
{
    // On blocks and capture semantics: http://rypress.com/tutorials/objective-c/blocks
    __block AVAudioUnitSampler* sampler = self.instrument.avAudioUnitSampler;
    __block TPCircularBuffer* ringBuffer = (TPCircularBuffer*)self.instrument.ringBuffer;

    return ^AUAudioUnitStatus(AudioUnitRenderActionFlags* actionFlags,
                              const AudioTimeStamp* timestamp,
                              AVAudioFrameCount frameCount,
                              NSInteger outputBusNumber,
                              AudioBufferList* data,
                              const AURenderEvent* realtimeEventListHead,
                              AURenderPullInputBlock pullInputBlock) {
        // Forward incoming MIDI events to the sampler.
        AURenderEvent const* event = realtimeEventListHead;
        while (event != NULL) {
            if (event->head.eventType == AURenderEventMIDI) {
                [sampler sendMIDIEvent:event->MIDI.data[0]
                                 data1:event->MIDI.data[1]
                                 data2:event->MIDI.data[2]];
            }
            event = event->head.next;
        }

        // Drain the bridge's ring buffer into the output buffers. The producer
        // wrote the channel buffers back to back, so buffer 1 (if present)
        // follows buffer 0 at an offset of buffer 0's byte size.
        if (ringBuffer) {
            UInt32 neededBytes = data->mBuffers[0].mDataByteSize;
            if (data->mNumberBuffers > 1) {
                neededBytes += data->mBuffers[1].mDataByteSize;
            }
            int32_t availableBytes;
            void* srcBuffer = TPCircularBufferTail(ringBuffer, &availableBytes);
            if (availableBytes >= (int32_t)neededBytes) {
                void* dstBuffer = data->mBuffers[0].mData;
                UInt32 offset = data->mBuffers[0].mDataByteSize;
                memcpy(dstBuffer, srcBuffer, offset);
                if (data->mNumberBuffers > 1) {
                    dstBuffer = data->mBuffers[1].mData;
                    memcpy(dstBuffer, (char*)srcBuffer + offset, data->mBuffers[1].mDataByteSize);
                }
                TPCircularBufferConsume(ringBuffer, neededBytes);
            }
        }
        return noErr;
    };
}
@end
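For completeness: an AUv3 extension vends its AUAudioUnit subclass from an AUViewController that adopts AUAudioUnitFactory. A minimal sketch, assuming MyAudioUnit is visible to Swift through the extension's bridging header (the view controller class name is hypothetical, not part of this gist):

import CoreAudioKit

class AudioUnitViewController: AUViewController, AUAudioUnitFactory {
    var audioUnit: MyAudioUnit?

    // The extension point calls this to create the audio unit for the host.
    func createAudioUnit(with componentDescription: AudioComponentDescription) throws -> AUAudioUnit {
        let au = try MyAudioUnit(componentDescription: componentDescription, options: [])
        audioUnit = au
        return au
    }
}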