@olokobayusuf
Last active December 16, 2019 01:35
An implementation of NatMic audio devices: a virtual device that produces audio data from a backing AudioSource or AudioListener, and a mixer device that combines audio from multiple devices into one stream.
/*
* NatMic
* Copyright (c) 2019 Yusuf Olokoba
*/
namespace NatMic.Recorders {

    using UnityEngine;
    using System;
    using System.Runtime.CompilerServices;

    public sealed class AudioStream : IAudioProcessor { // Declares IAudioProcessor, which it implements explicitly below

        #region --Client API--

        public readonly int Capacity;

        public int Length {
            [MethodImpl(MethodImplOptions.Synchronized)]
            get => Math.Max(writeIndex - readIndex, 0);
        }

        public AudioStream (int capacity = 1 << 16) => this.Capacity = capacity;

        public AudioStream (long baseTimestamp, int capacity = 1 << 16) : this(capacity) => this.baseTimestamp = baseTimestamp;

        [MethodImpl(MethodImplOptions.Synchronized)]
        public void Dispose () {
            buffer = null;
            readIndex = writeIndex = 0;
        }

        [MethodImpl(MethodImplOptions.Synchronized)]
        public void Read (float[] dst) {
            // A negative read index means the stream starts after the base timestamp, so pad with silence
            if (readIndex < 0) {
                ReadBackward(dst);
                return;
            }
            // Copy out of the circular buffer, wrapping around the end if necessary
            var bufferIndex = readIndex % Capacity;
            var remaining = Capacity - bufferIndex;
            var copyCount = Mathf.Min(dst.Length, remaining);
            var residualCount = dst.Length - copyCount;
            Array.Copy(buffer, bufferIndex, dst, 0, copyCount);
            Array.Copy(buffer, 0, dst, copyCount, residualCount);
            readIndex += dst.Length;
        }

        [MethodImpl(MethodImplOptions.Synchronized)]
        public void Seek (int offset) { // INCOMPLETE // Support negative offsets
            readIndex += offset;
        }
        #endregion


        #region --Operations--

        private readonly long baseTimestamp;
        private float[] buffer;
        private int writeIndex, readIndex;

        [MethodImpl(MethodImplOptions.Synchronized)]
        void IAudioProcessor.OnSampleBuffer (float[] sampleBuffer, int sampleRate, int channelCount, long timestamp) {
            // Override timestamp
            timestamp = VirtualDevice.CurrentTimestamp;
            // Create buffer
            if (buffer == null) {
                buffer = new float[Capacity];
                // Convert the time offset from the base timestamp into a sample offset and seek by it
                if (baseTimestamp != 0L) {
                    var timeDelta = (baseTimestamp - timestamp) / 1e+9;
                    var frameDelta = timeDelta * sampleRate;
                    var sampleDelta = frameDelta * channelCount;
                    Seek((int)sampleDelta);
                }
            }
            // Write into the circular buffer, wrapping around the end if necessary
            var bufferIndex = writeIndex % Capacity;
            var remaining = Capacity - bufferIndex;
            var copyCount = Mathf.Min(sampleBuffer.Length, remaining);
            var residualCount = sampleBuffer.Length - copyCount;
            Array.Copy(sampleBuffer, 0, buffer, bufferIndex, copyCount);
            Array.Copy(sampleBuffer, copyCount, buffer, 0, residualCount);
            writeIndex += sampleBuffer.Length;
        }

        private void ReadBackward (float[] dst) {
            // Fill entirely with silence while the read index is still negative
            if (dst.Length < -readIndex) {
                Array.Clear(dst, 0, dst.Length);
                readIndex += dst.Length;
            }
            // Otherwise pad the head with silence and read the remainder from the buffer
            else {
                var readCount = -readIndex;
                var surrogate = new float[dst.Length - readCount];
                readIndex += readCount;
                Read(surrogate);
                Array.Clear(dst, 0, readCount);
                Array.Copy(surrogate, 0, dst, readCount, surrogate.Length);
            }
        }
        #endregion
    }
}
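
For intuition, the wrap-around arithmetic that Read and OnSampleBuffer use above can be exercised in isolation. The following is a standalone sketch with no Unity or NatMic dependencies; the class name and the tiny capacity are invented for illustration only.

// Standalone sketch of AudioStream's circular-buffer arithmetic (illustrative, not part of NatMic)
using System;

class RingBufferSketch {

    static void Main () {
        const int capacity = 8; // AudioStream defaults to 1 << 16
        var buffer = new float[capacity];
        int writeIndex = 0;

        // Write two 6-sample chunks; the second write wraps past the end of the buffer
        foreach (var chunk in new[] { new float[] { 1, 2, 3, 4, 5, 6 }, new float[] { 7, 8, 9, 10, 11, 12 } }) {
            var bufferIndex = writeIndex % capacity;
            var copyCount = Math.Min(chunk.Length, capacity - bufferIndex);
            Array.Copy(chunk, 0, buffer, bufferIndex, copyCount);
            Array.Copy(chunk, copyCount, buffer, 0, chunk.Length - copyCount);
            writeIndex += chunk.Length;
        }

        // Read 6 samples starting at sample index 6; the read wraps around as well
        var readIndex = 6;
        var dst = new float[6];
        var readBufferIndex = readIndex % capacity;
        var readCopyCount = Math.Min(dst.Length, capacity - readBufferIndex);
        Array.Copy(buffer, readBufferIndex, dst, 0, readCopyCount);
        Array.Copy(buffer, 0, dst, readCopyCount, dst.Length - readCopyCount);

        Console.WriteLine(string.Join(", ", dst)); // 7, 8, 9, 10, 11, 12
    }
}

The baseTimestamp logic in OnSampleBuffer turns a time offset into a sample offset: for example, if a device delivers its first buffer 10 ms after the base timestamp at 44.1 kHz stereo, the seek offset is about -882 samples (0.01 s × 44100 × 2, negative because the base timestamp is earlier), and ReadBackward then pads the start of the stream with that much silence.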
/*
* NatMic
* Copyright (c) 2019 Yusuf Olokoba
*/
namespace NatMic {

    using UnityEngine;
    using System;
    using System.Linq; // Yikes
    using System.Threading;
    using Recorders;
    using Internal;

    /// <summary>
    /// Virtual device that mixes audio from multiple devices into one stream
    /// </summary>
    [Doc(@"MixerDevice")]
    public sealed class MixerDevice : IAudioDevice {

        #region --Client API--
        /// <summary>
        /// Source audio devices
        /// </summary>
        [Doc(@"MixerDeviceSources")]
        public readonly IAudioDevice[] Sources;

        /// <summary>
        /// Create a mixer device that mixes audio from multiple audio devices
        /// </summary>
        /// <param name="sources">Source audio devices to mix audio from</param>
        [Doc(@"MixerDeviceCtor")]
        public MixerDevice (params IAudioDevice[] sources) : base() => this.Sources = sources;
        #endregion


        #region --IAudioDevice--
        /// <summary>
        /// Is the device currently recording?
        /// </summary>
        [Doc(@"IsRecording")]
        public bool IsRecording {
            get => processor != null;
        }

        /// <summary>
        /// Start recording from the audio device.
        /// All source devices MUST support the requested sample rate and channel count.
        /// </summary>
        /// <param name="requestedSampleRate">Requested sample rate</param>
        /// <param name="requestedChannelCount">Requested channel count</param>
        /// <param name="processor">Delegate to receive audio sample buffers</param>
        [Doc(@"StartRecording", @"MixerDeviceStartRecordingDescription")]
        public bool StartRecording (int requestedSampleRate, int requestedChannelCount, IAudioProcessor processor) {
            this.sampleRate = requestedSampleRate;
            this.channelCount = requestedChannelCount;
            this.processor = processor;
            new Thread(MixerLoop).Start();
            return true;
        }

        /// <summary>
        /// Stop recording from the audio device
        /// </summary>
        [Doc(@"StopRecording")]
        public void StopRecording () => this.processor = null;
        #endregion


        #region --Operations--

        private volatile IAudioProcessor processor;
        private int sampleRate, channelCount;
        private const int BufferSize = 1024; // In frames
        private void MixerLoop () {
            // Start recording from every source device into a per-device audio stream
            var sleepTimeMs = (int)(1e+3 * BufferSize / sampleRate);
            var mixedBuffer = new float[BufferSize * channelCount];
            var sourceBuffer = new float[mixedBuffer.Length];
            var baseTimestamp = VirtualDevice.CurrentTimestamp;
            var sourceStreams = Sources.Select(device => {
                var stream = new AudioStream(baseTimestamp);
                device.StartRecording(sampleRate, channelCount, stream);
                return stream;
            }).ToArray();
            // Mix
            while (true) {
                Thread.Sleep(sleepTimeMs / 2);
                // Wait until every stream has a full buffer available (Length counts interleaved samples, not frames)
                if (!sourceStreams.All(stream => stream.Length >= mixedBuffer.Length))
                    continue;
                // Accumulate each source into the mixed buffer
                foreach (var stream in sourceStreams) {
                    stream.Read(sourceBuffer);
                    Mix(mixedBuffer, sourceBuffer, mixedBuffer);
                }
                // Forward the mixed buffer to the processor
                var localProcessor = processor;
                if (localProcessor == null)
                    break;
                try {
                    localProcessor.OnSampleBuffer(mixedBuffer, sampleRate, channelCount, AudioDevice.CurrentTimestamp);
                } catch (Exception ex) {
                    Debug.LogError("NatMic Error: MixerDevice processor raised exception: " + ex);
                }
                Array.Clear(mixedBuffer, 0, mixedBuffer.Length);
            }
            // Stop devices
            foreach (var device in Sources)
                device.StopRecording();
            foreach (var stream in sourceStreams)
                stream.Dispose();
        }

        public override string ToString () => "MixerDevice<" + string.Join(", ", Sources.Select(s => s.ToString()).ToArray()) + ">";

        private static void Mix (float[] srcA, float[] srcB, float[] dst) {
            // Element-wise two-signal mix: a + b - ab when both samples are positive, a + b + ab when both are negative, a + b otherwise
            for (int i = 0; i < srcA.Length; i++) {
                var sum = srcA[i] + srcB[i];
                var product = srcA[i] * srcB[i];
                var mult = product > 0 ? srcA[i] > 0 ? -1 : 1 : 0;
                dst[i] = sum + mult * product;
            }
        }
        #endregion
    }
}
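
For reference, the Mix helper above appears to be the common two-signal mixing formula: a + b - ab when both samples are positive, a + b + ab when both are negative, and a plain a + b when the signs differ. This keeps the mix of two in-range signals within [-1, 1]. A quick standalone check (illustrative only, not part of NatMic):

// Standalone check of the element-wise mixing formula used by MixerDevice.Mix (illustrative only)
using System;

class MixSketch {

    // Same formula as MixerDevice.Mix, applied to a single pair of samples
    static float Mix (float a, float b) {
        var product = a * b;
        var mult = product > 0 ? (a > 0 ? -1 : 1) : 0;
        return a + b + mult * product;
    }

    static void Main () {
        Console.WriteLine(Mix(0.6f, 0.7f));   // ≈  0.88 (a plain sum of 1.3 would clip)
        Console.WriteLine(Mix(-0.6f, -0.7f)); // ≈ -0.88
        Console.WriteLine(Mix(0.6f, -0.7f));  // ≈ -0.1 (opposite signs: plain sum)
    }
}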
/*
* NatMic
* Copyright (c) 2019 Yusuf Olokoba
*/
namespace NatMic {

    using UnityEngine;
    using System;
    using Stopwatch = System.Diagnostics.Stopwatch; // Aliased to avoid ambiguity between UnityEngine.Debug and System.Diagnostics.Debug
    using Internal;

    /// <summary>
    /// A virtual audio device backed by a Unity audio component
    /// </summary>
    [Doc(@"VirtualDevice")]
    public sealed class VirtualDevice : IAudioDevice, IDisposable {

        #region --Client API--
        /// <summary>
        /// Create a virtual audio device backed by an AudioSource
        /// </summary>
        /// <param name="audioSource">Backing AudioSource for this audio device</param>
        [Doc(@"VirtualDeviceCtorSource")]
        public VirtualDevice (AudioSource audioSource) : this(audioSource as Component) { }

        /// <summary>
        /// Create a virtual audio device backed by an AudioListener
        /// </summary>
        /// <param name="audioListener">Backing AudioListener for this audio device</param>
        [Doc(@"VirtualDeviceCtorListener")]
        public VirtualDevice (AudioListener audioListener) : this(audioListener as Component) { }

        /// <summary>
        /// Dispose the virtual device and release resources
        /// </summary>
        [Doc(@"VirtualDeviceDispose")]
        public void Dispose () => VirtualDeviceAttachment.Destroy(attachment);

        /// <summary>
        /// Current timestamp in nanoseconds
        /// </summary>
        public static long CurrentTimestamp { // Shared clock used by AudioStream and MixerDevice to timestamp sample buffers
            get => Stopwatch.GetTimestamp() * 100L; // Assumes 100ns Stopwatch ticks (10 MHz high-resolution timer)
        }
        #endregion


        #region --IAudioDevice--
        /// <summary>
        /// Is the device currently recording?
        /// </summary>
        [Doc(@"IsRecording")]
        public bool IsRecording {
            get => processor != null;
        }

        /// <summary>
        /// Start recording from the audio device
        /// </summary>
        /// <param name="requestedSampleRate">Requested sample rate</param>
        /// <param name="requestedChannelCount">Requested channel count</param>
        /// <param name="processor">Delegate to receive audio sample buffers</param>
        [Doc(@"StartRecording")]
        public bool StartRecording (int requestedSampleRate, int requestedChannelCount, IAudioProcessor processor) {
            this.processor = processor;
            return true;
        }

        /// <summary>
        /// Stop recording from the audio device
        /// </summary>
        [Doc(@"StopRecording")]
        public void StopRecording () => this.processor = null;
        #endregion


        #region --Operations--

        private readonly VirtualDeviceAttachment attachment;
        private readonly int sampleRate;
        private volatile IAudioProcessor processor;

        private VirtualDevice (Component component) {
            // Attach a helper MonoBehaviour to the component's game object so its audio can be tapped via OnAudioFilterRead
            this.attachment = component.gameObject.AddComponent<VirtualDeviceAttachment>();
            this.attachment.parent = this;
            this.sampleRate = AudioSettings.outputSampleRate;
        }

        private void OnSampleBuffer (float[] data, int channels) {
            // Invoked on the Unity audio thread
            if (IsRecording)
                try {
                    processor.OnSampleBuffer(data, sampleRate, channels, CurrentTimestamp);
                } catch (Exception ex) {
                    Debug.LogError("NatMic Error: VirtualDevice processor raised exception: " + ex);
                }
        }

        public override string ToString () => "VirtualDevice<" + attachment.gameObject + ">";

        private class VirtualDeviceAttachment : MonoBehaviour {
            public VirtualDevice parent;
            private void OnAudioFilterRead (float[] data, int channels) => parent.OnSampleBuffer(data, channels);
        }
        #endregion
    }
}
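
Taken together, a typical use of these devices might look like the sketch below. This is not from the gist: the MixerExample class is hypothetical, and it assumes the IAudioProcessor interface exposes the single OnSampleBuffer method with the signature used throughout the code above.

// Hypothetical usage sketch (not part of the gist): mix the scene's audio output into a recording pipeline
using UnityEngine;
using NatMic;

public class MixerExample : MonoBehaviour, IAudioProcessor {

    private VirtualDevice sceneAudio;
    private MixerDevice mixer;

    void Start () {
        // Tap everything the AudioListener hears, then mix it with any other IAudioDevice (e.g. a microphone device)
        sceneAudio = new VirtualDevice(FindObjectOfType<AudioListener>());
        mixer = new MixerDevice(sceneAudio /*, someOtherDevice */);
        mixer.StartRecording(AudioSettings.outputSampleRate, 2, this);
    }

    // Called from the mixer thread with interleaved sample buffers
    public void OnSampleBuffer (float[] sampleBuffer, int sampleRate, int channelCount, long timestamp) {
        // Forward the mixed audio to an encoder, file writer, or network stream here
    }

    void OnDestroy () {
        mixer.StopRecording();
        sceneAudio.Dispose();
    }
}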
@olokobayusuf (Author)
Note

These two audio devices were removed from the NatMic package because we were facing timing concerns with mixing that we couldn't get around. For more information, check this out.
