Last active
June 28, 2017 21:04
-
-
Save gwio/b523d8a5568adb1860950fc4acd23cad to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
//tempochange event and link settings view, I put it in my appdelegate | |
ABLLinkSetSessionTempoCallback( | |
stream.getLinkRef, onSessionTempoChanged, (__bridge void *)self); | |
_linkSettings = [ABLLinkSettingsViewController instance:stream.getLinkRef]; | |
ofxiOSDisableIdleTimer(); | |
return YES; | |
// Ableton Link session-tempo callback, registered via
// ABLLinkSetSessionTempoCallback in the app delegate.
// Forwards the new tempo to the openFrameworks app's BPM slider.
// `context` is the registering object (unused here).
static void onSessionTempoChanged(Float64 bpm, void* context) {
#pragma unused(context)
    NSLog(@"bpm change %f", bpm);
    // Cast once instead of twice — the original performed the same
    // dynamic_cast in both the condition and the call.
    ofApp* app = dynamic_cast<ofApp*>(ofGetAppPtr());
    if (app != NULL) {
        app->setBpmSliderVal(bpm);
    }
}
// Shows the Ableton Link settings view.
// The navigation controller is created lazily on first use and then
// only un-hidden on later calls. (The original branched on
// navController.view.hidden, which is NO both before creation and
// while visible — so every call while the view was showing re-created
// the controller and stacked a duplicate subview, leaking the previous
// one under MRC.)
- (void)showLinkSettings{
    if(navController == nil) {
        navController = [[UINavigationController alloc] initWithRootViewController:_linkSettings];
        _linkSettings.navigationItem.rightBarButtonItem =
            [[UIBarButtonItem alloc] initWithBarButtonSystemItem:UIBarButtonSystemItemDone
                                                          target:self
                                                          action:@selector(hideLinkSettings:)];
        navController.modalPresentationStyle = UIModalPresentationPopover;
        // NOTE(review): the popover configuration below has no visible
        // effect as long as the view is installed with addSubview:
        // rather than presented modally — kept for parity with the
        // original code. TODO confirm whether modal presentation was
        // intended.
        UIPopoverPresentationController *popC = _linkSettings.popoverPresentationController;
        popC.permittedArrowDirections = UIPopoverArrowDirectionAny;
        popC.sourceRect = [ofxiOSGetGLParentView() frame];
        // we recommend using a size of 320x400 for the display in a popover
        _linkSettings.preferredContentSize = CGSizeMake(320., 400.);
        popC.backgroundColor = [UIColor whiteColor];
        _linkSettings.view.backgroundColor = [UIColor whiteColor];
        [ofxiOSGetGLParentView() addSubview:navController.view];
    }
    navController.view.hidden = NO;
}
// Done-button action for the Link settings view. Hides the view
// without tearing it down so showLinkSettings can bring it back.
- (void)hideLinkSettings:(id)sender {
#pragma unused(sender)
    [navController view].hidden = YES;
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// | |
// SoundOutputStream.h | |
// Created by Lukasz Karluk on 13/06/13. | |
// http://julapy.com/blog | |
// | |
#pragma once | |
#import "SoundStream.h" | |
#import "ABLLink.h" | |
//#import "ABLLinkSettingsViewController.h" | |
/* | |
* Structure that stores engine-related data that can be changed from | |
* the main thread. | |
*/ | |
typedef struct {
    UInt32 outputLatency;    // Hardware output latency in HostTime ticks
    Float64 resetToBeatTime; // Beat to map near the start of the next buffer;
                             // INVALID_BEAT_TIME when no reset is pending
    Float64 proposeBpm;      // Tempo proposed by the main thread;
                             // INVALID_BPM when none is pending
    Float64 quantum;         // Link quantum (in beats) for beat/phase queries
    BOOL isPlaying;          // Transport state toggled from the main thread
} EngineData;
/* | |
* Structure that stores all data needed by the audio callback. | |
*/ | |
typedef struct {
    ABLLinkRef ablLink;          // Opaque Ableton Link instance (created by ABLLinkNew)
    // Shared between threads. Only write when engine not running.
    Float64 sampleRate;
    // Shared between threads. Only write when engine not running.
    // Conversion factor from seconds to mach host-time ticks.
    Float64 secondsToHostTime;
    // Shared between threads. Written by the main thread and only
    // read by the audio thread when doing so will not block.
    EngineData sharedEngineData;
    // Copy of sharedEngineData owned by audio thread.
    EngineData localEngineData;
    // Owned by audio thread
    UInt64 timeAtLastClick;
} LinkData;
@interface SoundOutputStream : SoundStream
// Setter proposes a new Link session tempo (see -setBpm: in the .m).
@property (nonatomic) Float64 bpm;
@property (readonly, nonatomic) Float64 beatTime;
@property (nonatomic) Float64 quantum;
@property (nonatomic) BOOL isPlaying; // Link transport state
@property (readonly, nonatomic) BOOL isLinkEnabled;
@property (readonly, nonatomic) ABLLinkRef linkRef;
@property LinkData linkData;
// Counters incremented by the audio render callback each time the Link
// phase wraps for the corresponding quantum (0.25 / 0.3333 / 1 / 0.5).
@property (nonatomic) UInt64 quarterBang;
@property (nonatomic) UInt64 tripleBang;
@property (nonatomic) UInt64 wholeBang;
@property (nonatomic) UInt64 halfBang;
@property (nonatomic) double quantumCount; // Latest beat value seen by the audio callback
// NOTE(review): the `get` prefix is non-idiomatic Cocoa naming; kept
// as-is because the render callback and app delegate use these selectors.
- (ABLLinkRef)getLinkRef;
- (const LinkData *)getLinkData;
- (const UInt64 *) getQuarterBangPtr;
- (const UInt64 *) getTripleBangPtr;
- (const UInt64 *) getWholeBangPtr;
- (const UInt64 *) getHalfBangPtr;
- (const double *) getQuantumCountPtr;
@end
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// | |
// SoundOutputStream.m | |
// Created by Lukasz Karluk on 13/06/13. | |
// http://julapy.com/blog | |
// | |
// Original code by, | |
// Memo Akten, http://www.memo.tv | |
// Marek Bareza http://mrkbrz.com/ | |
// Updated 2012 by Dan Wilcox <danomatika@gmail.com> | |
// | |
// references, | |
// http://www.cocoawithlove.com/2010/10/ios-tone-generator-introduction-to.html | |
// http://atastypixel.com/blog/using-remoteio-audio-unit/ | |
// http://www.stefanpopp.de/2011/capture-iphone-microphone/ | |
// | |
#import "SoundOutputStream.h" | |
#include <mach/mach_time.h> | |
#include <libkern/OSAtomic.h> | |
#include <AudioToolbox/AudioToolbox.h> | |
#include <AVFoundation/AVFoundation.h> | |
// Guards the EngineData handoff between the main and audio threads.
// NOTE(review): OSSpinLock is deprecated (priority-inversion prone);
// consider os_unfair_lock — TODO confirm deployment target allows it.
static OSSpinLock lock;
// Sentinels meaning "no pending request" for the signaling fields.
#define INVALID_BEAT_TIME DBL_MIN
#define INVALID_BPM DBL_MIN
//link structs | |
//---- | |
/*
 * Pull data from the main thread to the audio thread if lock can be
 * obtained. Otherwise, just use the local copy of the data.
 *
 * Runs on the audio thread and must never block, hence OSSpinLockTry.
 * The "signaling" members (resetToBeatTime, proposeBpm) are one-shot
 * requests: they are copied to `output` and immediately reset to their
 * INVALID_* sentinels so each request is acted on exactly once.
 */
static void pullEngineData(LinkData* linkData, EngineData* output) {
    // Always reset the signaling members to their default state
    output->resetToBeatTime = INVALID_BEAT_TIME;
    output->proposeBpm = INVALID_BPM;
    // Attempt to grab the lock guarding the shared engine data but
    // don't block if we can't get it.
    if (OSSpinLockTry(&lock)) {
        // Copy non-signaling members to the local thread cache
        linkData->localEngineData.outputLatency =
            linkData->sharedEngineData.outputLatency;
        linkData->localEngineData.quantum = linkData->sharedEngineData.quantum;
        linkData->localEngineData.isPlaying = linkData->sharedEngineData.isPlaying;
        // Copy signaling members directly to the output and reset
        output->resetToBeatTime = linkData->sharedEngineData.resetToBeatTime;
        linkData->sharedEngineData.resetToBeatTime = INVALID_BEAT_TIME;
        output->proposeBpm = linkData->sharedEngineData.proposeBpm;
        linkData->sharedEngineData.proposeBpm = INVALID_BPM;
        OSSpinLockUnlock(&lock);
    }
    // Copy from the thread local copy to the output. This happens
    // whether or not we were able to grab the lock.
    output->outputLatency = linkData->localEngineData.outputLatency;
    output->quantum = linkData->localEngineData.quantum;
    output->isPlaying = linkData->localEngineData.isPlaying;
}//----
/*
 * Walk the buffer's sample times and increment the beat counters
 * whenever the Link phase wraps for the corresponding quantum
 * (0.25 / 0.3333 / 1 / 0.5 beats). Despite the name, no audio is
 * written into `buffer` — only the counters and *quantumCount are
 * updated (the original's metronome tone constants were never used).
 *
 * Fixes: the original read `tripleLastClick`, `wholeLastClick` and
 * `halfLastClick` while uninitialized on every sample where the phase
 * did not wrap (undefined behavior), and detected wraps indirectly via
 * exact floating-point `== 0.0` comparisons. Counters now increment
 * directly on the wrap condition, which is what that arithmetic
 * evaluated to when it worked.
 *
 * NOTE(review): `timeAtLastClick` is passed by value, so its updates
 * never persisted across calls in the original either; the parameter
 * is kept for interface compatibility.
 */
static void renderMetronomeIntoBuffer(
    const ABLLinkTimelineRef timeline,
    const Float64 quantum,
    const UInt64 beginHostTime,
    const Float64 sampleRate,
    const Float64 secondsToHostTime,
    const UInt32 bufferSize,
    UInt64 timeAtLastClick,
    SInt16* buffer,
    UInt64* quarter,
    UInt64* triple,
    UInt64* whole,
    UInt64* half,
    double* quantumCount)
{
#pragma unused(timeAtLastClick)
#pragma unused(buffer)
    // The number of host ticks that elapse between samples
    const Float64 hostTicksPerSample = secondsToHostTime / sampleRate;
    for (UInt32 i = 0; i < bufferSize; ++i) {
        // Compute the host time for this sample and the previous one.
        const UInt64 hostTime = beginHostTime + llround(i * hostTicksPerSample);
        const UInt64 lastSampleHostTime = hostTime - llround(hostTicksPerSample);
        const double beat = ABLLinkBeatAtTime(timeline, hostTime, quantum);
        *quantumCount = beat;
        // Only count positive beat magnitudes. Negative beat
        // magnitudes are count-in beats.
        if (beat < 0.) {
            continue;
        }
        // A counter fires when the phase wraps around between the last
        // sample and the current one with respect to its quantum.
        if (ABLLinkPhaseAtTime(timeline, hostTime, 0.25) <
            ABLLinkPhaseAtTime(timeline, lastSampleHostTime, 0.25)) {
            *quarter += 1;
        }
        if (ABLLinkPhaseAtTime(timeline, hostTime, 0.3333) <
            ABLLinkPhaseAtTime(timeline, lastSampleHostTime, 0.3333)) {
            *triple += 1;
        }
        if (ABLLinkPhaseAtTime(timeline, hostTime, 1.) <
            ABLLinkPhaseAtTime(timeline, lastSampleHostTime, 1.)) {
            *whole += 1;
        }
        if (ABLLinkPhaseAtTime(timeline, hostTime, 0.5) <
            ABLLinkPhaseAtTime(timeline, lastSampleHostTime, 0.5)) {
            *half += 1;
        }
    }
}
//---- | |
// Real-time render callback. Order matters: capture the Link timeline,
// pull pending engine requests, apply timeline reset then tempo
// proposal, let the delegate write audio, update beat counters, and
// finally commit the timeline back to the Link session.
static OSStatus soundOutputStreamRenderCallback(void *inRefCon,
                                                AudioUnitRenderActionFlags *ioActionFlags,
                                                const AudioTimeStamp *inTimeStamp,
                                                UInt32 inBusNumber,
                                                UInt32 inNumberFrames,
                                                AudioBufferList *ioData) {
    // inRefCon is the SoundOutputStream installed in -start (plain C
    // cast; this file is compiled without ARC — see -dealloc).
    SoundOutputStream * stream = (SoundOutputStream *)inRefCon;
    AudioBuffer * audioBuffer = &ioData->mBuffers[0];
    // clearing the buffer before handing it off to the user
    // this saves us from horrible noises if the user chooses not to write anything
    memset(audioBuffer->mData, 0, audioBuffer->mDataByteSize);
    // Frames per channel, clamped to the stream's maximum buffer size.
    int bufferSize = (audioBuffer->mDataByteSize / sizeof(Float32)) / audioBuffer->mNumberChannels;
    bufferSize = MIN(bufferSize, MAX_BUFFER_SIZE / audioBuffer->mNumberChannels);
    //--------------- ableton link
    // Get a copy of the current link timeline.
    //LinkData linkData = stream.getLinkData;
    const ABLLinkTimelineRef timeline =
        ABLLinkCaptureAudioTimeline(stream.getLinkRef);
    // Get a copy of relevant engine parameters.
    EngineData engineData;
    pullEngineData(stream.getLinkData, &engineData);
    // Compensate for hardware output latency so beat times line up
    // with what the listener actually hears.
    const UInt64 hostTimeAtBufferBegin =
        inTimeStamp->mHostTime + engineData.outputLatency;
    // Handle a timeline reset
    if (engineData.resetToBeatTime != INVALID_BEAT_TIME) {
        // Reset the beat timeline so that the requested beat time
        // occurs near the beginning of this buffer. The requested beat
        // time may not occur exactly at the beginning of this buffer
        // due to quantization, but it is guaranteed to occur within a
        // quantum after the beginning of this buffer. The returned beat
        // time is the actual beat time mapped to the beginning of this
        // buffer, which therefore may be less than the requested beat
        // time by up to a quantum.
        ABLLinkRequestBeatAtTime(
            timeline, engineData.resetToBeatTime, hostTimeAtBufferBegin,
            engineData.quantum);
    }
    // Handle a tempo proposal
    if (engineData.proposeBpm != INVALID_BPM) {
        // Propose that the new tempo takes effect at the beginning of
        // this buffer.
        ABLLinkSetTempo(timeline, engineData.proposeBpm, hostTimeAtBufferBegin);
    }
    // Let the delegate (the openFrameworks app) fill the buffer.
    if([stream.delegate respondsToSelector:@selector(soundStreamRequested:output:bufferSize:numOfChannels:)]) {
        [stream.delegate soundStreamRequested:stream
                                       output:(float*)audioBuffer->mData
                                   bufferSize:bufferSize
                                numOfChannels:audioBuffer->mNumberChannels];
    }
    // When playing, update the beat counters.
    // NOTE(review): renderMetronomeIntoBuffer only bumps the counters;
    // it writes no audio into the buffer despite its name.
    if (engineData.isPlaying) {
        renderMetronomeIntoBuffer(
            timeline, engineData.quantum, hostTimeAtBufferBegin, stream.getLinkData->sampleRate,
            stream.getLinkData->secondsToHostTime, inNumberFrames, stream.getLinkData->timeAtLastClick,
            (SInt16*)ioData->mBuffers[0].mData, stream.getQuarterBangPtr, stream.getTripleBangPtr, stream.getWholeBangPtr, stream.getHalfBangPtr, stream.getQuantumCountPtr);
    }
    // Publish any timeline changes back to the Link session.
    ABLLinkCommitAudioTimeline(stream.getLinkData->ablLink, timeline);
    return noErr;
}
//---------------------------------------------------------------- | |
@interface SoundOutputStream() {
    // Backing store for all Ableton Link state; exposed to the audio
    // render callback via getLinkData / getLinkRef.
    LinkData _linkData;
}
@end
@implementation SoundOutputStream | |
// Designated initializer: defers to SoundStream and marks this
// instance as an output stream.
- (id)initWithNumOfChannels:(NSInteger)value0
             withSampleRate:(NSInteger)value1
             withBufferSize:(NSInteger)value2 {
    if((self = [super initWithNumOfChannels:value0
                             withSampleRate:value1
                             withBufferSize:value2])) {
        streamType = SoundStreamTypeOutput;
    }
    return self;
}
// Whether Link transport is playing.
// Read under the same spin lock the setter uses — the original read
// the shared flag unguarded while -setIsPlaying: writes it under the
// lock, a data race between the main and audio threads.
- (BOOL)isPlaying {
    OSSpinLockLock(&lock);
    const BOOL playing = _linkData.sharedEngineData.isPlaying;
    OSSpinLockUnlock(&lock);
    return playing;
}
// Raw-pointer accessors used by the audio render callback so it can
// read/update the counters without Objective-C messaging per sample.
// NOTE(review): the pointers are returned const, yet the render
// callback increments through them (see renderMetronomeIntoBuffer) —
// the const qualifier is misleading and is cast away at the call site.
- (const UInt64 *) getQuarterBangPtr{
    return &_quarterBang;
}
- (const UInt64 *) getWholeBangPtr{
    return &_wholeBang;
}
- (const UInt64 *) getHalfBangPtr{
    return &_halfBang;
}
- (const UInt64 *) getTripleBangPtr{
    return &_tripleBang;
}
- (const double *) getQuantumCountPtr{
    return &_quantumCount;
}
// Toggles Link transport under the shared-data lock. When playback
// starts, request that beat 0 be mapped near the start of the next
// audio buffer (consumed by pullEngineData on the audio thread).
- (void)setIsPlaying:(BOOL)isPlaying {
    OSSpinLockLock(&lock);
    EngineData *shared = &_linkData.sharedEngineData;
    shared->isPlaying = isPlaying;
    if (isPlaying) {
        shared->resetToBeatTime = 0;
    }
    OSSpinLockUnlock(&lock);
}
// Proposes a new session tempo; the audio thread picks it up on the
// next buffer via pullEngineData and ABLLinkSetTempo.
- (void)setBpm:(Float64)proposedBpm {
    OSSpinLockLock(&lock);
    _linkData.sharedEngineData.proposeBpm = proposedBpm;
    OSSpinLockUnlock(&lock);
}
// Sets up all Ableton Link state. `bpm` is the initial session tempo.
- (void)initLinkData:(Float64)bpm {
    mach_timebase_info_data_t timeInfo;
    mach_timebase_info(&timeInfo);
    // Reset all beat counters. (The original reset quarter/triple/
    // whole/quantumCount but missed _halfBang.)
    _quarterBang = 0;
    _tripleBang = 0;
    _wholeBang = 0;
    _halfBang = 0;
    _quantumCount = 0;
    lock = OS_SPINLOCK_INIT;
    // NOTE(review): calling this twice leaks the previous ABLLink
    // instance — TODO guard against re-entry or delete the old one.
    _linkData.ablLink = ABLLinkNew(bpm);
    _linkData.sampleRate = [[AVAudioSession sharedInstance] sampleRate];
    // Conversion factor from seconds to mach host-time ticks.
    _linkData.secondsToHostTime = (1.0e9 * timeInfo.denom) / (Float64)timeInfo.numer;
    _linkData.sharedEngineData.outputLatency =
        (UInt32)(_linkData.secondsToHostTime * [AVAudioSession sharedInstance].outputLatency);
    _linkData.sharedEngineData.resetToBeatTime = INVALID_BEAT_TIME;
    _linkData.sharedEngineData.proposeBpm = INVALID_BPM;
    _linkData.sharedEngineData.quantum = 1; // quantize to 1 beat
    _linkData.sharedEngineData.isPlaying = false;
    _linkData.localEngineData = _linkData.sharedEngineData;
    _linkData.timeAtLastClick = 0;
}
// The raw ABLLink instance (used by the app delegate to register the
// tempo callback and build the settings view controller).
- (ABLLinkRef)getLinkRef{
    return _linkData.ablLink;
}
// Read-only view of the Link state shared with the render callback.
- (const LinkData*)getLinkData{
    return &_linkData;
}
// Manual-reference-counting dealloc (note the explicit [super dealloc];
// this file is compiled without ARC). Stops the audio unit first so the
// render callback cannot fire against a dying object.
- (void)dealloc {
    [self stop];
    [super dealloc];
}
// Audio-route changes (headphones, speaker, …) alter the hardware
// output latency; recompute it in host-time ticks and hand it to the
// audio thread under the shared-data lock.
- (void)handleRouteChange:(NSNotification *)notification {
#pragma unused(notification)
    const Float64 latencySeconds = [AVAudioSession sharedInstance].outputLatency;
    const UInt32 latencyHostTicks =
        (UInt32)(_linkData.secondsToHostTime * latencySeconds);
    OSSpinLockLock(&lock);
    _linkData.sharedEngineData.outputLatency = latencyHostTicks;
    OSSpinLockUnlock(&lock);
}
// Starts the output stream: configures the audio session, builds the
// RemoteIO audio unit, installs the render callback, and starts
// playback with Ableton Link transport enabled.
- (void)start {
    [super start];
    if([self isStreaming] == YES) {
        return; // already running.
    }
    // Initialize Ableton Link state only when actually starting.
    // (Originally this ran BEFORE the isStreaming guard, so every
    // redundant -start call re-created the ABLLink instance — leaking
    // the previous one — and reset the beat counters.)
    [self initLinkData:120];
    [self configureAudioSession];
    // Track route changes so output latency stays correct.
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(handleRouteChange:)
                                                 name:AVAudioSessionRouteChangeNotification
                                               object:[AVAudioSession sharedInstance]];
    //---------------------------------------------------------- audio unit.
    // Configure the search parameters to find the default playback output unit
    // (called the kAudioUnitSubType_RemoteIO on iOS but
    // kAudioUnitSubType_DefaultOutput on Mac OS X)
    AudioComponentDescription desc = {
        .componentType = kAudioUnitType_Output,
        .componentSubType = kAudioUnitSubType_RemoteIO,
        .componentManufacturer = kAudioUnitManufacturer_Apple,
    };
    // get component and get audio units.
    AudioComponent inputComponent = AudioComponentFindNext(NULL, &desc);
    [self checkStatus:AudioComponentInstanceNew(inputComponent, &audioUnit)];
    //---------------------------------------------------------- enable io.
    // enable output out of AudioUnit.
    UInt32 on = 1;
    [self checkStatus:AudioUnitSetProperty(audioUnit,
                                           kAudioOutputUnitProperty_EnableIO,
                                           kAudioUnitScope_Output,
                                           kOutputBus,
                                           &on,
                                           sizeof(on))];
    //---------------------------------------------------------- format.
    // Describe format: interleaved native-endian Float32 PCM.
    AudioStreamBasicDescription audioFormat = {
        .mSampleRate = sampleRate,
        .mFormatID = kAudioFormatLinearPCM,
        .mFormatFlags = kAudioFormatFlagsNativeFloatPacked,
        .mFramesPerPacket = 1,
        .mChannelsPerFrame = numOfChannels,
        .mBytesPerFrame = sizeof(Float32) * numOfChannels,
        .mBytesPerPacket = sizeof(Float32) * numOfChannels,
        .mBitsPerChannel = sizeof(Float32) * 8
    };
    // Apply format
    [self checkStatus:AudioUnitSetProperty(audioUnit,
                                           kAudioUnitProperty_StreamFormat,
                                           kAudioUnitScope_Input,
                                           kOutputBus,
                                           &audioFormat,
                                           sizeof(AudioStreamBasicDescription))];
    //---------------------------------------------------------- render callback.
    AURenderCallbackStruct callback = {soundOutputStreamRenderCallback, self};
    [self checkStatus:AudioUnitSetProperty(audioUnit,
                                           kAudioUnitProperty_SetRenderCallback,
                                           kAudioUnitScope_Global,
                                           kOutputBus,
                                           &callback,
                                           sizeof(callback))];
    //---------------------------------------------------------- go!
    [self checkStatus:AudioUnitInitialize(audioUnit)];
    [self checkStatus:AudioOutputUnitStart(audioUnit)];
    [self setIsPlaying:TRUE];
}
// Stops and disposes the audio unit. Teardown order matters: stop
// rendering, uninitialize, then dispose the component instance.
- (void)stop {
    [super stop];
    if([self isStreaming] == NO) {
        return;
    }
    [self checkStatus:AudioOutputUnitStop(audioUnit)];
    [self checkStatus:AudioUnitUninitialize(audioUnit)];
    [self checkStatus:AudioComponentInstanceDispose(audioUnit)];
    audioUnit = nil;
}
@end |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment