@olenhad
Created June 6, 2017 10:41
Simple Audio Mixer
//
// LFAudioMixer.m
// Garena
//
// Created by Omer Iqbal on 8/3/17.
// Copyright © 2017 Garena. All rights reserved.
//
#import "LFAudioMixer.h"
#import <AVFoundation/AVFoundation.h>
@interface LFAudioMixer ()
@property (nonatomic, strong) NSMutableArray<AVAudioPCMBuffer *> *micBuffers;
@property (nonatomic, strong) NSMutableArray<AVAudioPCMBuffer *> *appBuffers;
@end
@implementation LFAudioMixer

- (instancetype)init {
    self = [super init];
    if (self) {
        _micBuffers = [NSMutableArray array];
        _appBuffers = [NSMutableArray array];
    }
    return self;
}

static inline SInt16 TPMixSamples(SInt16 a, SInt16 b) {
    // If both samples are negative, the mixed signal must have an amplitude between
    // the lesser of A and B and the minimum permissible negative amplitude.
    if (a < 0 && b < 0) {
        return ((int)a + (int)b) - (((int)a * (int)b) / INT16_MIN);
    }
    // If both samples are positive, the mixed signal must have an amplitude between
    // the greater of A and B and the maximum permissible positive amplitude.
    if (a > 0 && b > 0) {
        return ((int)a + (int)b) - (((int)a * (int)b) / INT16_MAX);
    }
    // If the samples are on opposite sides of the zero crossing, the mixed signal
    // should reflect that the samples cancel each other out somewhat.
    return a + b;
}
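
// A quick worked example of the formula above (not part of the original gist):
// mixing two loud positive samples a = 30000 and b = 20000 gives
//   30000 + 20000 - (30000 * 20000) / 32767 = 31689,
// which stays at least as loud as the louder input but never exceeds INT16_MAX,
// whereas a plain (a + b) = 50000 would overflow a signed 16-bit sample and wrap.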

// Mixes two mono, 16-bit PCM buffers sample by sample. The result uses the longer
// buffer's format and length; any tail past the shorter buffer is copied through
// unmixed. Assumes both buffers share the same Int16 format and sample rate.
- (AVAudioPCMBuffer *)mixPCMBuffer:(AVAudioPCMBuffer *)b1 withBuffer:(AVAudioPCMBuffer *)b2 {
    AVAudioPCMBuffer *smaller = b1.frameLength < b2.frameLength ? b1 : b2;
    AVAudioPCMBuffer *larger = b1.frameLength >= b2.frameLength ? b1 : b2;
    NSLog(@"b1: %@, b2: %@", b1, b2);

    AVAudioPCMBuffer *mixed = [[AVAudioPCMBuffer alloc] initWithPCMFormat:larger.format
                                                             frameCapacity:larger.frameLength];
    mixed.frameLength = larger.frameLength;

    const SInt16 *smallerSamples = smaller.int16ChannelData[0];
    const SInt16 *largerSamples = larger.int16ChannelData[0];
    SInt16 *mixedSamples = mixed.int16ChannelData[0];

    // Mix the overlapping region, then pass the remainder of the longer buffer through.
    for (AVAudioFrameCount i = 0; i < smaller.frameLength; i++) {
        mixedSamples[i] = TPMixSamples(smallerSamples[i], largerSamples[i]);
    }
    for (AVAudioFrameCount i = smaller.frameLength; i < larger.frameLength; i++) {
        mixedSamples[i] = largerSamples[i];
    }
    return mixed;
}

- (void)queueMicAudioBuffer:(AVAudioPCMBuffer *)buffer {
    [self.micBuffers addObject:buffer];
    //[self processQueue];
}

- (void)queueAppAudioBuffer:(AVAudioPCMBuffer *)buffer {
    [self.appBuffers addObject:buffer];
    //[self processQueue];
}

- (void)processQueue {
    // Drain the queues pairwise: each call mixes the oldest mic buffer with the
    // oldest app buffer and recurses until either queue is empty.
    if (self.micBuffers.count == 0 || self.appBuffers.count == 0) {
        return;
    }
    AVAudioPCMBuffer *firstMic = [self.micBuffers firstObject];
    AVAudioPCMBuffer *firstApp = [self.appBuffers firstObject];
    AVAudioPCMBuffer *mixed = [self mixPCMBuffer:firstMic withBuffer:firstApp];
    [self.delegate mixer:self didMixPCMBuffer:mixed];
    [self.micBuffers removeObjectAtIndex:0];
    [self.appBuffers removeObjectAtIndex:0];
    [self processQueue];
}

@end
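
A minimal usage sketch, not part of the original gist: it assumes LFAudioMixer.h declares an LFAudioMixerDelegate protocol with the -mixer:didMixPCMBuffer: callback used above, plus a delegate property, and that both capture paths already deliver mono 16-bit PCM buffers in the same format. Class and method names such as LFMixerClient and didCaptureMicBuffer: are illustrative only.

#import "LFAudioMixer.h"
#import <AVFoundation/AVFoundation.h>

@interface LFMixerClient : NSObject <LFAudioMixerDelegate>
@property (nonatomic, strong) LFAudioMixer *mixer;
@end

@implementation LFMixerClient

- (instancetype)init {
    self = [super init];
    if (self) {
        _mixer = [[LFAudioMixer alloc] init];
        _mixer.delegate = self;
    }
    return self;
}

// Fed by whatever captures the microphone (an audio tap, ReplayKit handler, etc.).
- (void)didCaptureMicBuffer:(AVAudioPCMBuffer *)buffer {
    [self.mixer queueMicAudioBuffer:buffer];
    [self.mixer processQueue];
}

// Fed by whatever captures the app's own audio.
- (void)didCaptureAppBuffer:(AVAudioPCMBuffer *)buffer {
    [self.mixer queueAppAudioBuffer:buffer];
    [self.mixer processQueue];
}

#pragma mark - LFAudioMixerDelegate

- (void)mixer:(LFAudioMixer *)mixer didMixPCMBuffer:(AVAudioPCMBuffer *)buffer {
    // Hand the mixed buffer to an encoder or file writer here.
}

@end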