OF_AudioBus_Link_Template 2019 (gist by @gwio, created January 22, 2019)
//
// SoundOutputStream.h
// Created by Lukasz Karluk on 13/06/13.
// http://julapy.com/blog
//
#pragma once
#import "SoundStream.h"
#import "ABLLink.h"
/*
 * Structure that stores engine-related data that can be changed from
 * the main thread.
 */
typedef struct {
    UInt64 outputLatency; // Hardware output latency in HostTime
    Float64 resetToBeatTime;
    BOOL requestStart;
    BOOL requestStop;
    Float64 proposeBpm;
    Float64 quantum;
} EngineData;
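/*
 * Cross-thread protocol used below: the main thread writes these fields
 * while holding a spin lock, using INVALID_BEAT_TIME / INVALID_BPM / NO as
 * "no request" sentinels; the audio thread drains them in pullEngineData()
 * without ever blocking.
 */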
/*
 * Structure that stores all data needed by the audio callback.
 */
typedef struct {
    ABLLinkRef ablLink;
    // Shared between threads. Only write when engine not running.
    Float64 sampleRate;
    // Shared between threads. Only write when engine not running.
    Float64 secondsToHostTime;
    // Shared between threads. Written by the main thread and only
    // read by the audio thread when doing so will not block.
    EngineData sharedEngineData;
    // Copy of sharedEngineData owned by audio thread.
    EngineData localEngineData;
    // Owned by audio thread.
    UInt64 timeAtLastClick;
    // Owned by audio thread.
    BOOL isPlaying;
} LinkData;
@interface SoundOutputStream : SoundStream

@property (nonatomic) Float64 bpm;
@property (readonly, nonatomic) Float64 beatTime;
@property (nonatomic) Float64 quantum;
@property (nonatomic) BOOL isPlaying;
@property (readonly, nonatomic) BOOL isLinkEnabled;
@property (readonly, nonatomic) ABLLinkRef linkRef;
@property LinkData linkData;
@property (nonatomic) double quantumCount;
@property (nonatomic) UInt64 bang;

- (ABLLinkRef)getLinkRef;
- (LinkData *)getLinkData;
- (const double *)getQuantumCountPtr;
- (UInt64 *)getBangPtr;

@end
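/*
 * Minimal usage sketch (not part of the original gist; assumes an
 * openFrameworks iOS app built without ARC, matching the MRC-style code
 * below). It only uses the interface declared above.
 *
 *   SoundOutputStream * stream =
 *       [[SoundOutputStream alloc] initWithNumOfChannels:2
 *                                          withSampleRate:44100
 *                                          withBufferSize:512];
 *   [stream start];          // creates the ABLLink instance and the audio unit
 *   stream.bpm = 120.0;      // proposes a tempo to the Link session
 *   stream.isPlaying = YES;  // requests transport start from the audio thread
 */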
//
// SoundOutputStream.m
// Created by Lukasz Karluk on 13/06/13.
// http://julapy.com/blog
//
// Original code by,
// Memo Akten, http://www.memo.tv
// Marek Bareza http://mrkbrz.com/
// Updated 2012 by Dan Wilcox <danomatika@gmail.com>
//
// references,
// http://www.cocoawithlove.com/2010/10/ios-tone-generator-introduction-to.html
// http://atastypixel.com/blog/using-remoteio-audio-unit/
// http://www.stefanpopp.de/2011/capture-iphone-microphone/
//
#import "SoundOutputStream.h"
// copied from the LinkHut example
#include <AudioToolbox/AudioToolbox.h>
#include <AVFoundation/AVFoundation.h>
#include <libkern/OSAtomic.h>
#include <mach/mach_time.h>
#define INVALID_BEAT_TIME DBL_MIN
#define INVALID_BPM DBL_MIN
static OSSpinLock lock;
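// Note: OSSpinLock has been deprecated since iOS 10 (os_unfair_lock is the
// suggested replacement); it is kept here as in the LinkHut example this
// code derives from.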
//-------
/*
 * Pull data from the main thread to the audio thread if the lock can be
 * obtained. Otherwise, just use the local copy of the data.
 */
static void pullEngineData(LinkData* linkData, EngineData* output) {
    // Always reset the signaling members to their default state
    output->resetToBeatTime = INVALID_BEAT_TIME;
    output->proposeBpm = INVALID_BPM;
    output->requestStart = NO;
    output->requestStop = NO;
    // Attempt to grab the lock guarding the shared engine data but
    // don't block if we can't get it.
    if (OSSpinLockTry(&lock)) {
        // Copy non-signaling members to the local thread cache
        linkData->localEngineData.outputLatency =
            linkData->sharedEngineData.outputLatency;
        linkData->localEngineData.quantum = linkData->sharedEngineData.quantum;
        // Copy signaling members directly to the output and reset
        output->resetToBeatTime = linkData->sharedEngineData.resetToBeatTime;
        linkData->sharedEngineData.resetToBeatTime = INVALID_BEAT_TIME;
        output->requestStart = linkData->sharedEngineData.requestStart;
        linkData->sharedEngineData.requestStart = NO;
        output->requestStop = linkData->sharedEngineData.requestStop;
        linkData->sharedEngineData.requestStop = NO;
        output->proposeBpm = linkData->sharedEngineData.proposeBpm;
        linkData->sharedEngineData.proposeBpm = INVALID_BPM;
        OSSpinLockUnlock(&lock);
    }
    // Copy from the thread-local copy to the output. This happens
    // whether or not we were able to grab the lock.
    output->outputLatency = linkData->localEngineData.outputLatency;
    output->quantum = linkData->localEngineData.quantum;
}
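/*
 * The main-thread counterpart is the setters below (setBpm:, setIsPlaying:),
 * which take the same lock unconditionally. Only the audio thread uses the
 * non-blocking OSSpinLockTry, so a contended render pass simply reuses the
 * values last copied into localEngineData.
 */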
/*
 * Render a metronome sound into the given buffer according to the
 * given session state and quantum.
 */
static void renderMetronomeIntoBuffer(
    const ABLLinkSessionStateRef sessionState,
    const Float64 quantum,
    const UInt64 beginHostTime,
    const Float64 sampleRate,
    const Float64 secondsToHostTime,
    const UInt32 bufferSize,
    UInt64* timeAtLastClick,
    SInt16* buffer,
    UInt64* whole)
{
    // Metronome frequencies
    static const Float64 highTone = 1567.98;
    static const Float64 lowTone = 1108.73;
    // 100ms click duration
    static const Float64 clickDuration = 0.1;
    // The number of host ticks that elapse between samples
    const Float64 hostTicksPerSample = secondsToHostTime / sampleRate;
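    // Worked example (illustrative numbers, not from the original): recent
    // ARM iOS hardware ticks mach host time at about 24 MHz, so at
    // sampleRate = 44100 Hz this gives roughly 24,000,000 / 44,100 ≈ 544
    // host ticks per sample.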
    for (UInt32 i = 0; i < bufferSize; ++i) {
        Float64 amplitude = 0.;
        // Compute the host time for this sample and for the previous one.
        const UInt64 hostTime = beginHostTime + llround(i * hostTicksPerSample);
        const UInt64 lastSampleHostTime = hostTime - llround(hostTicksPerSample);
        // Only make sound for positive beat magnitudes. Negative beat
        // magnitudes are count-in beats.
        if (ABLLinkBeatAtTime(sessionState, hostTime, quantum) >= 0.) {
            // If the phase wraps around between the last sample and the
            // current one with respect to a 1 beat quantum, then a click
            // should occur.
            if (ABLLinkPhaseAtTime(sessionState, hostTime, 1) <
                ABLLinkPhaseAtTime(sessionState, lastSampleHostTime, 1)) {
                *timeAtLastClick = hostTime;
            }
            const Float64 secondsAfterClick =
                (hostTime - *timeAtLastClick) / secondsToHostTime;
            // On the exact sample of each beat, bump the "bang" counter
            // instead of rendering a click tone.
            if (secondsAfterClick == 0.0) {
                *whole += 1;
            }
        }
        // ignore this, not making clicks ;)
        //buffer[i] = (SInt16)(32761. * amplitude);
    }
}
static OSStatus soundOutputStreamRenderCallback(void *inRefCon,
                                                AudioUnitRenderActionFlags *ioActionFlags,
                                                const AudioTimeStamp *inTimeStamp,
                                                UInt32 inBusNumber,
                                                UInt32 inNumberFrames,
                                                AudioBufferList *ioData) {
    SoundOutputStream * stream = (SoundOutputStream *)inRefCon;
    AudioBuffer * audioBuffer = &ioData->mBuffers[0];
    // Clear the buffer before handing it off to the user.
    // This saves us from horrible noises if the user chooses not to write anything.
    memset(audioBuffer->mData, 0, audioBuffer->mDataByteSize);
    int bufferSize = (audioBuffer->mDataByteSize / sizeof(Float32)) / audioBuffer->mNumberChannels;
    bufferSize = MIN(bufferSize, MAX_BUFFER_SIZE / audioBuffer->mNumberChannels);
    if([stream.delegate respondsToSelector:@selector(soundStreamRequested:output:bufferSize:numOfChannels:)]) {
        [stream.delegate soundStreamRequested:stream
                                       output:(float*)audioBuffer->mData
                                   bufferSize:bufferSize
                                numOfChannels:audioBuffer->mNumberChannels];
    }
    // copied from the LinkHut example
    //--------------
    //LinkData *linkData = (LinkData *)inRefCon;
    // Get a copy of the current Link session state.
    const ABLLinkSessionStateRef sessionState =
        ABLLinkCaptureAudioSessionState(stream.getLinkRef);
    // Get a copy of relevant engine parameters.
    EngineData engineData;
    pullEngineData(stream.getLinkData, &engineData);
    // The mHostTime member of the timestamp represents the time at
    // which the buffer is delivered to the audio hardware. The output
    // latency is the time from when the buffer is delivered to the
    // audio hardware to when the beginning of the buffer starts
    // reaching the output. We add those values to get the host time
    // at which the first sample of this buffer will reach the output.
    const UInt64 hostTimeAtBufferBegin =
        inTimeStamp->mHostTime + engineData.outputLatency;
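    // Worked example (illustrative, assumed values): with ~11 ms of hardware
    // output latency and 24e6 host ticks per second, engineData.outputLatency
    // is about 0.011 * 24,000,000 ≈ 264,000 ticks, so the first sample of this
    // buffer reaches the output ~264,000 ticks after mHostTime.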
    if (engineData.requestStart && !ABLLinkIsPlaying(sessionState)) {
        // Request starting playback at the beginning of this buffer.
        ABLLinkSetIsPlaying(sessionState, YES, hostTimeAtBufferBegin);
    }
    if (engineData.requestStop && ABLLinkIsPlaying(sessionState)) {
        // Request stopping playback at the beginning of this buffer.
        ABLLinkSetIsPlaying(sessionState, NO, hostTimeAtBufferBegin);
    }
    if (!stream.getLinkData->isPlaying && ABLLinkIsPlaying(sessionState)) {
        // Reset the session state's beat timeline so that the requested
        // beat time corresponds to the time the transport will start playing.
        // The returned beat time is the actual beat time mapped to the time
        // playback will start, which therefore may be less than the requested
        // beat time by up to a quantum.
        ABLLinkRequestBeatAtStartPlayingTime(sessionState, 0., engineData.quantum);
        stream.getLinkData->isPlaying = YES;
    }
    else if(stream.getLinkData->isPlaying && !ABLLinkIsPlaying(sessionState)) {
        stream.getLinkData->isPlaying = NO;
    }
    // Handle a tempo proposal
    if (engineData.proposeBpm != INVALID_BPM) {
        // Propose that the new tempo takes effect at the beginning of
        // this buffer.
        ABLLinkSetTempo(sessionState, engineData.proposeBpm, hostTimeAtBufferBegin);
    }
    ABLLinkCommitAudioSessionState(stream.getLinkData->ablLink, sessionState);
    // When playing, render the metronome sound
    if (stream.getLinkData->isPlaying) {
        // Only render the metronome sound to the first channel. This
        // might help with source separation for timing analysis.
        renderMetronomeIntoBuffer(
            sessionState, engineData.quantum, hostTimeAtBufferBegin, stream.getLinkData->sampleRate,
            stream.getLinkData->secondsToHostTime, inNumberFrames, &stream.getLinkData->timeAtLastClick,
            (SInt16*)ioData->mBuffers[0].mData, stream.getBangPtr);
    }
    //----------
    return noErr;
}
//----------------------------------------------------------------
@interface SoundOutputStream() {
    LinkData _linkData;
}
@end
@implementation SoundOutputStream
- (id)initWithNumOfChannels:(NSInteger)value0
             withSampleRate:(NSInteger)value1
             withBufferSize:(NSInteger)value2 {
    self = [super initWithNumOfChannels:value0
                         withSampleRate:value1
                         withBufferSize:value2];
    if(self) {
        streamType = SoundStreamTypeOutput;
    }
    return self;
}
- (UInt64 *)getBangPtr {
    return &_bang;
}
- (const double *)getQuantumCountPtr {
    return &_quantumCount;
}
- (BOOL)isPlaying {
    const ABLLinkSessionStateRef sessionState = ABLLinkCaptureAppSessionState(_linkData.ablLink);
    return ABLLinkIsPlaying(sessionState);
}
- (void)setIsPlaying:(BOOL)isPlaying {
    OSSpinLockLock(&lock);
    if (isPlaying) {
        _linkData.sharedEngineData.requestStart = YES;
    }
    else {
        _linkData.sharedEngineData.requestStop = YES;
    }
    OSSpinLockUnlock(&lock);
}
- (void)setBpm:(Float64)bpm {
    OSSpinLockLock(&lock);
    _linkData.sharedEngineData.proposeBpm = bpm;
    OSSpinLockUnlock(&lock);
}
- (void)initLinkData:(Float64)bpm {
    mach_timebase_info_data_t timeInfo;
    mach_timebase_info(&timeInfo);
    _bang = 0;
    lock = OS_SPINLOCK_INIT;
    _linkData.ablLink = ABLLinkNew(bpm);
    _linkData.sampleRate = [AVAudioSession sharedInstance].sampleRate;
    _linkData.secondsToHostTime = (1.0e9 * timeInfo.denom) / (Float64)timeInfo.numer;
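    // Worked example (illustrative): mach_timebase_info on recent ARM devices
    // typically reports numer = 125, denom = 3 (one tick = 125/3 ns), giving
    // secondsToHostTime = (1e9 * 3) / 125 = 24,000,000 host ticks per second.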
    _linkData.sharedEngineData.outputLatency =
        _linkData.secondsToHostTime * [AVAudioSession sharedInstance].outputLatency;
    _linkData.sharedEngineData.resetToBeatTime = INVALID_BEAT_TIME;
    _linkData.sharedEngineData.proposeBpm = INVALID_BPM;
    _linkData.sharedEngineData.requestStart = NO;
    _linkData.sharedEngineData.requestStop = NO;
    _linkData.sharedEngineData.quantum = 4; // quantize to 4 beats
    _linkData.localEngineData = _linkData.sharedEngineData;
    _linkData.timeAtLastClick = 0;
}
- (ABLLinkRef)getLinkRef {
    return _linkData.ablLink;
}
- (LinkData *)getLinkData {
    return &_linkData;
}
- (void)dealloc {
    [self stop];
    [super dealloc];
}
- (void)start {
    [super start];
    if([self isStreaming] == YES) {
        return; // already running.
    }
    // custom Ableton Link setup (done only when actually starting, so a
    // redundant start doesn't leak a second ABLLink instance).
    [self initLinkData:120];
    [self configureAudioSession];
    //---------------------------------------------------------- audio unit.
    // Configure the search parameters to find the default playback output unit
    // (called kAudioUnitSubType_RemoteIO on iOS but
    // kAudioUnitSubType_DefaultOutput on Mac OS X)
    AudioComponentDescription desc = {
        .componentType = kAudioUnitType_Output,
        .componentSubType = kAudioUnitSubType_RemoteIO,
        .componentManufacturer = kAudioUnitManufacturer_Apple
    };
    // get the component and create the audio unit.
    AudioComponent inputComponent = AudioComponentFindNext(NULL, &desc);
    [self checkStatus:AudioComponentInstanceNew(inputComponent, &audioUnit)];
    //---------------------------------------------------------- enable io.
    // enable output out of AudioUnit.
    UInt32 on = 1;
    [self checkStatus:AudioUnitSetProperty(audioUnit,
                                           kAudioOutputUnitProperty_EnableIO,
                                           kAudioUnitScope_Output,
                                           kOutputBus,
                                           &on,
                                           sizeof(on))];
    //---------------------------------------------------------- format.
    // Describe format
    AudioStreamBasicDescription audioFormat = {
        .mSampleRate = sampleRate,
        .mFormatID = kAudioFormatLinearPCM,
        .mFormatFlags = kAudioFormatFlagsNativeFloatPacked,
        .mFramesPerPacket = 1,
        .mChannelsPerFrame = numOfChannels,
        .mBytesPerFrame = sizeof(Float32) * numOfChannels,
        .mBytesPerPacket = sizeof(Float32) * numOfChannels,
        .mBitsPerChannel = sizeof(Float32) * 8
    };
    // Apply format
    [self checkStatus:AudioUnitSetProperty(audioUnit,
                                           kAudioUnitProperty_StreamFormat,
                                           kAudioUnitScope_Input,
                                           kOutputBus,
                                           &audioFormat,
                                           sizeof(AudioStreamBasicDescription))];
    //---------------------------------------------------------- render callback.
    AURenderCallbackStruct callback = {soundOutputStreamRenderCallback, self};
    [self checkStatus:AudioUnitSetProperty(audioUnit,
                                           kAudioUnitProperty_SetRenderCallback,
                                           kAudioUnitScope_Global,
                                           kOutputBus,
                                           &callback,
                                           sizeof(callback))];
    //---------------------------------------------------------- go!
    [self checkStatus:AudioUnitInitialize(audioUnit)];
    [self checkStatus:AudioOutputUnitStart(audioUnit)];
}
- (void)stop {
    [super stop];
    if([self isStreaming] == NO) {
        return;
    }
    [self checkStatus:AudioOutputUnitStop(audioUnit)];
    [self checkStatus:AudioUnitUninitialize(audioUnit)];
    [self checkStatus:AudioComponentInstanceDispose(audioUnit)];
    audioUnit = nil;
}
@end
//
// SoundStream.m
// Created by Lukasz Karluk on 13/06/13.
// http://julapy.com/blog
//
#import "SoundStream.h"
#import <AVFoundation/AVFoundation.h>
@interface SoundStream() {
//
}
@end
@implementation SoundStream
@synthesize delegate;
@synthesize streamType;
@synthesize numOfChannels;
@synthesize sampleRate;
@synthesize bufferSize;
@synthesize numOfBuffers;
@synthesize audioUnit;
@synthesize bInterruptedWhileRunning;
- (id)initWithNumOfChannels:(NSInteger)value0
             withSampleRate:(NSInteger)value1
             withBufferSize:(NSInteger)value2 {
    self = [super init];
    if(self) {
        numOfChannels = value0;
        sampleRate = value1;
        bufferSize = value2;
        numOfBuffers = 1; // always 1.
        audioUnit = nil;
        bInterruptedWhileRunning = NO;
        if([SoundStream shouldUseAudioSessionNotifications]) {
            [[NSNotificationCenter defaultCenter] addObserver:self
                                                     selector:@selector(handleInterruption:)
                                                         name:AVAudioSessionInterruptionNotification
                                                       object:nil];
        }
    }
    return self;
}
- (void)dealloc {
    if([SoundStream shouldUseAudioSessionNotifications]) {
        [[NSNotificationCenter defaultCenter] removeObserver:self
                                                        name:AVAudioSessionInterruptionNotification
                                                      object:nil];
    }
    [super dealloc]; // must come last under MRC
}
- (void)start {
}
- (void)stop {
    [[AVAudioSession sharedInstance] setActive:NO error:nil];
}
- (BOOL)isStreaming {
    return (audioUnit != nil);
}
#pragma mark - Audio Session Config
- (void)configureAudioSession {
    NSError * audioSessionError = nil;
    AVAudioSession * audioSession = [AVAudioSession sharedInstance];
    if(![audioSession setActive:YES error:&audioSessionError]) {
        [self reportError:audioSessionError];
        // if we can't even activate the session, we'd better abort early
        return;
    }
    // setting the sample rate (this has different selectors for iOS 5- and iOS 6+)
    double trueSampleRate = sampleRate;
    if([audioSession respondsToSelector:@selector(setPreferredSampleRate:error:)]) {
        if(![audioSession setPreferredSampleRate:sampleRate error:&audioSessionError]) {
            [self reportError:audioSessionError];
            audioSessionError = nil;
        }
        trueSampleRate = [audioSession sampleRate];
    }
    sampleRate = trueSampleRate;
    // setting the buffer size
    NSTimeInterval bufferDuration = bufferSize / trueSampleRate;
    if(![audioSession setPreferredIOBufferDuration:bufferDuration error:&audioSessionError]) {
        [self reportError:audioSessionError];
        audioSessionError = nil;
    }
}
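// Worked example (illustrative): requesting bufferSize = 512 frames at a true
// sample rate of 44100 Hz asks for an IO buffer duration of 512 / 44100
// ≈ 11.6 ms; the OS treats this as a preference and may round it.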
#pragma mark - Interruptions
- (void) handleInterruption:(NSNotification *)notification {
#ifdef __IPHONE_6_0
    NSUInteger interruptionType = [notification.userInfo[AVAudioSessionInterruptionTypeKey] unsignedIntegerValue];
    if(interruptionType == AVAudioSessionInterruptionTypeBegan) {
        // [self beginInterruption];
    } else if(interruptionType == AVAudioSessionInterruptionTypeEnded) {
        // [self endInterruption];
    }
#endif
}
- (void)beginInterruption {
    if([self isStreaming]) {
        self.bInterruptedWhileRunning = YES;
    }
    //[self stop];
    if([self.delegate respondsToSelector:@selector(soundStreamBeginInterruption:)]) {
        [self.delegate soundStreamBeginInterruption:self];
    }
}
- (void)endInterruption {
    if(self.bInterruptedWhileRunning) {
        self.bInterruptedWhileRunning = NO;
        [self start];
    }
    if([self.delegate respondsToSelector:@selector(soundStreamEndInterruption:)]) {
        [self.delegate soundStreamEndInterruption:self];
    }
}
// iOS 5- needs a delegate for Audio Session interruptions, iOS 6+ can use notifications
+ (BOOL) shouldUseAudioSessionNotifications {
    return [[[UIDevice currentDevice] systemVersion] floatValue] >= 6;
}
#pragma mark - Error Handling
- (BOOL)checkStatus:(OSStatus)status {
    if(status == noErr) {
        return YES;
    } else if([self.delegate respondsToSelector:@selector(soundStreamError:error:)]) {
        NSString * errorCode = [self stringForAudioUnitError:status];
        NSString * fullErrorString = [errorCode stringByAppendingFormat:@" (%i)", (int)status];
        [self.delegate soundStreamError:self error:fullErrorString];
    }
    return NO;
}
- (NSString *)stringForAudioUnitError:(OSStatus)status {
    if(status == kAudioUnitErr_InvalidProperty) {
        return @"kAudioUnitErr_InvalidProperty";
    } else if(status == kAudioUnitErr_InvalidParameter) {
        return @"kAudioUnitErr_InvalidParameter";
    } else if(status == kAudioUnitErr_InvalidElement) {
        return @"kAudioUnitErr_InvalidElement";
    } else if(status == kAudioUnitErr_NoConnection) {
        return @"kAudioUnitErr_NoConnection";
    } else if(status == kAudioUnitErr_FailedInitialization) {
        return @"kAudioUnitErr_FailedInitialization";
    } else if(status == kAudioUnitErr_TooManyFramesToProcess) {
        return @"kAudioUnitErr_TooManyFramesToProcess";
    } else if(status == kAudioUnitErr_InvalidFile) {
        return @"kAudioUnitErr_InvalidFile";
    } else if(status == kAudioUnitErr_FormatNotSupported) {
        return @"kAudioUnitErr_FormatNotSupported";
    } else if(status == kAudioUnitErr_Uninitialized) {
        return @"kAudioUnitErr_Uninitialized";
    } else if(status == kAudioUnitErr_InvalidScope) {
        return @"kAudioUnitErr_InvalidScope";
    } else if(status == kAudioUnitErr_PropertyNotWritable) {
        return @"kAudioUnitErr_PropertyNotWritable";
    } else if(status == kAudioUnitErr_CannotDoInCurrentContext) {
        return @"kAudioUnitErr_CannotDoInCurrentContext";
    } else if(status == kAudioUnitErr_InvalidPropertyValue) {
        return @"kAudioUnitErr_InvalidPropertyValue";
    } else if(status == kAudioUnitErr_PropertyNotInUse) {
        return @"kAudioUnitErr_PropertyNotInUse";
    } else if(status == kAudioUnitErr_Initialized) {
        return @"kAudioUnitErr_Initialized";
    } else if(status == kAudioUnitErr_InvalidOfflineRender) {
        return @"kAudioUnitErr_InvalidOfflineRender";
    } else if(status == kAudioUnitErr_Unauthorized) {
        return @"kAudioUnitErr_Unauthorized";
    } else {
        return @"Unknown";
    }
}
- (void) reportError:(NSError *)error {
    if(error && [self.delegate respondsToSelector:@selector(soundStreamError:error:)]) {
        [self.delegate soundStreamError:self error:[error localizedDescription]];
    }
}
@end