@VisualMelon
Created March 8, 2020 00:23
NCoreAudioTest
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFramework>netcoreapp3.1</TargetFramework>
    <AllowUnsafeBlocks>true</AllowUnsafeBlocks>
  </PropertyGroup>
  <ItemGroup>
    <PackageReference Include="NAudio" Version="1.10.0" />
    <PackageReference Include="OpenTK.NetStandard" Version="1.0.5.12" />
  </ItemGroup>
</Project>
Test application providing a basic (almost certainly non-conformant) implementation of `IWavePlayer` using OpenTK and OpenAL. Needs an appropriate copy of OpenAL for your platform (e.g. drop OpenAl.dll into the working directory on Windows, or install the library under Linux).
(Barely) tested on Windows 10 and Raspbian Linux (Raspberry Pi 4 Model B).
The WaveFormat mapping is surely lacking, but should cope with basic floating-point providers and mono/stereo 8/16-bit PCM.
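For orientation, here is a minimal usage sketch (untested) showing how the player might be fed from a typical NAudio source instead of the bundled sine generator; `AudioFileReader` is NAudio's stock file reader, the file name is only a placeholder, and this would stand in for the `Program.Main` below:

using System;
using NAudio.Wave;
using NCoreAudioTest;

class Demo
{
    static void Main()
    {
        using var ctx = new TkAudioContext();
        using var player = new TkWavePlayer(ctx, 4096);
        // Placeholder path; any file AudioFileReader understands (it decodes to 32-bit float) will do.
        using var reader = new AudioFileReader("test.wav");
        player.Init(reader);
        player.PlaybackStopped += (s, e) => Console.WriteLine("Playback stopped.");
        player.Play();
        Console.WriteLine("Playing; press any key to stop.");
        Console.ReadKey(true);
        player.Stop();
    }
}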
using NAudio.Wave;
using OpenTK;
using OpenTK.Audio.OpenAL;
using System;
using System.Threading.Tasks;
namespace NCoreAudioTest
{
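    // Simple mono sine-wave ISampleProvider used by the test harness below to exercise the player.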
    public class SineProvider : ISampleProvider
    {
        public SineProvider(WaveFormat waveFormat, double frequency, double amplitude, double duration)
        {
            if (waveFormat.Channels != 1)
                throw new ArgumentException("Must be a mono wave format.");
            Time = 0;
            WaveFormat = waveFormat;
            Frequency = frequency;
            Amplitude = amplitude;
            Duration = duration;
        }
        public WaveFormat WaveFormat { get; }
        public double Time { get; set; }
        public double Frequency { get; set; }
        public double Amplitude { get; set; }
        public double Duration { get; set; }
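        // Fills the buffer with sine samples; returns fewer samples than requested (eventually 0)
        // once Duration has elapsed, which is how an NAudio provider signals end-of-stream.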
        public int Read(float[] buffer, int offset, int count)
        {
            var dt = 1.0 / WaveFormat.SampleRate;
            for (int i = 0; i < count; i++)
            {
                if (Time >= Duration)
                {
                    Time = Duration;
                    return i;
                }
                buffer[offset + i] = (float)(Math.Sin(Frequency * Math.PI * 2.0 * Time) * Amplitude);
                Time += dt;
            }
            return count;
        }
    }
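    // Console test harness: plays a 5 second 440 Hz tone through the OpenAL-backed player.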
    class Program
    {
        static async Task Main(string[] args)
        {
            var sine = new SineProvider(WaveFormat.CreateIeeeFloatWaveFormat(44100, 1), 440, 0.5, 5);
            using var ctx = new TkAudioContext();
            using var tkout = new TkWavePlayer(ctx, 4096);
            tkout.Init(sine);
            tkout.Play();
            var tcs = new TaskCompletionSource<StoppedEventArgs>();
            tkout.PlaybackStopped += (s, sea) => tcs.SetResult(sea);
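            // Poll for completion and keyboard input: Space toggles pause/resume, S stops playback.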
            while (true)
            {
                await Task.Delay(1);
                if (tcs.Task.IsCompleted)
                    break;
                if (Console.KeyAvailable)
                {
                    var k = Console.ReadKey(true);
                    if (k.Key == ConsoleKey.Spacebar)
                    {
                        if (tkout.Paused)
                        {
                            tkout.Play();
                        }
                        else
                        {
                            tkout.Pause();
                        }
                    }
                    else if (k.Key == ConsoleKey.S)
                    {
                        tkout.Stop();
                    }
                }
            }
            await tcs.Task;
        }
    }
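    // Owns the OpenAL playback device and context for the lifetime of the application.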
    public class TkAudioContext : IDisposable
    {
        public IntPtr Device { get; private set; }
        public ContextHandle Context { get; private set; }
        public TkAudioContext()
        {
            Init();
        }
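        // Opens the default playback device (null device specifier) and makes its context current.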
        private unsafe void Init()
        {
            Device = Alc.OpenDevice(null);
            Context = Alc.CreateContext(Device, (int*)null);
            Alc.MakeContextCurrent(Context);
        }
        ~TkAudioContext()
        {
            Dispose(false);
        }
        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);
        }
        protected void Dispose(bool disposing)
        {
            if (Context != ContextHandle.Zero)
            {
                Alc.MakeContextCurrent(ContextHandle.Zero);
                Alc.DestroyContext(Context);
            }
            Context = ContextHandle.Zero;
            if (Device != IntPtr.Zero)
            {
                Alc.CloseDevice(Device);
            }
            Device = IntPtr.Zero;
        }
    }
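    // Minimal IWavePlayer that streams the wave provider through two OpenAL buffers,
    // refilling and re-queueing them alternately (simple double buffering).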
    public class TkWavePlayer : IWavePlayer, IDisposable
    {
        private float volume;
        public float Volume
        {
            get => volume;
            set
            {
                volume = value;
            }
        }
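        // NB: never updated; the Paused/Stopped flags below track state instead
        // (one of the non-conformances acknowledged in the description).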
        public PlaybackState PlaybackState { get; }
        public event EventHandler<StoppedEventArgs> PlaybackStopped;
        private TkAudioContext Context { get; }
        public int BufferSize { get; }
        private IWaveProvider WaveProvider;
        private int Source;
        private int NextBuffer;
        private int OtherBuffer;
        private byte[] Buffer;
        private Accumulator Accumulator;
        private System.Threading.ManualResetEventSlim Signaller;
        private System.Threading.CancellationTokenSource PlayerCanceller;
        private Task Player;
        public bool Paused { get; private set; } = false;
        public bool Stopped { get; private set; } = false;
        public TkWavePlayer(TkAudioContext context, int bufferSize)
        {
            Context = context;
            BufferSize = bufferSize;
        }
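        // Creates the OpenAL source and both buffers, and wraps the provider in an
        // Accumulator that refills Buffer one chunk at a time.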
        public unsafe void Init(IWaveProvider waveProvider)
        {
            WaveProvider = waveProvider;
            AL.GenSources(1, out Source);
            AL.GenBuffers(1, out NextBuffer);
            AL.GenBuffers(1, out OtherBuffer);
            Buffer = new byte[BufferSize];
            Accumulator = new Accumulator(waveProvider, Buffer);
            Signaller = new System.Threading.ManualResetEventSlim(false);
        }
        public void Pause()
        {
            if (Stopped)
                throw new InvalidOperationException("Stopped");
            Paused = true;
            PlayerCanceller?.Cancel();
            PlayerCanceller = null;
            AL.SourcePause(Source);
        }
        public void Play()
        {
            if (Stopped)
                throw new InvalidOperationException("Stopped");
            Paused = false;
            if (PlayerCanceller == null)
            {
                PlayerCanceller = new System.Threading.CancellationTokenSource();
                Player = PlayLoop(PlayerCanceller.Token).ContinueWith(PlayerStopped);
            }
        }
        private void PlayerStopped(Task t)
        {
            if (!Paused)
            {
                PlaybackStopped?.Invoke(this, new StoppedEventArgs(t?.Exception));
            }
        }
        public void Stop()
        {
            if (Stopped)
                throw new InvalidOperationException("Already stopped");
            Paused = false;
            if (PlayerCanceller != null)
            {
                PlayerCanceller?.Cancel();
                PlayerCanceller = null;
                AL.SourceStop(Source);
            }
            else
            {
                PlaybackStopped?.Invoke(this, new StoppedEventArgs());
            }
        }
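        // Core playback pump: keeps the source playing, waits while both buffers are queued
        // and unprocessed, then unqueues finished buffers, refills from the provider and
        // queues the next buffer, until the provider runs dry or playback is cancelled.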
        private async Task PlayLoop(System.Threading.CancellationToken ct)
        {
            AL.SourcePlay(Source);
            await Task.Yield();
            again:
            AL.GetSource(Source, ALGetSourcei.BuffersQueued, out int queued);
            AL.GetSource(Source, ALGetSourcei.BuffersProcessed, out int processed);
            AL.GetSource(Source, ALGetSourcei.SourceState, out int state);
            if ((ALSourceState)state != ALSourceState.Playing)
            {
                AL.SourcePlay(Source);
            }
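            // Both buffers are still queued and none have been played out yet, so back off briefly.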
            if (processed == 0 && queued == 2)
            {
                await Task.Delay(1);
                goto again;
            }
            if (processed > 0)
            {
                AL.SourceUnqueueBuffers(Source, processed);
            }
            var notFinished = await Accumulator.Accumulate(ct);
            Accumulator.Reset();
            if (!notFinished)
            {
                return;
            }
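            // Upload the freshly accumulated bytes, queue the buffer on the source, then swap
            // so the other buffer is filled on the next pass.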
            AL.BufferData(NextBuffer, TranslateFormat(WaveProvider.WaveFormat), Buffer, Buffer.Length, WaveProvider.WaveFormat.SampleRate);
            AL.SourceQueueBuffer(Source, NextBuffer);
            (NextBuffer, OtherBuffer) = (OtherBuffer, NextBuffer);
            goto again;
        }
        ~TkWavePlayer()
        {
            Dispose(false);
        }
        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);
        }
        protected void Dispose(bool disposing)
        {
            AL.DeleteSource(Source);
            AL.DeleteBuffer(NextBuffer);
            AL.DeleteBuffer(OtherBuffer);
        }
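        // Maps an NAudio WaveFormat onto the nearest OpenAL buffer format
        // (mono/stereo, 8- or 16-bit PCM, or 32-bit IEEE float).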
        public static ALFormat TranslateFormat(WaveFormat format)
        {
            if (format.Channels == 2)
            {
                if (format.BitsPerSample == 32)
                {
                    return ALFormat.StereoFloat32Ext;
                }
                else if (format.BitsPerSample == 16)
                {
                    return ALFormat.Stereo16;
                }
                else if (format.BitsPerSample == 8)
                {
                    return ALFormat.Stereo8;
                }
            }
            else if (format.Channels == 1)
            {
                if (format.BitsPerSample == 32)
                {
                    return ALFormat.MonoFloat32Ext;
                }
                else if (format.BitsPerSample == 16)
                {
                    return ALFormat.Mono16;
                }
                else if (format.BitsPerSample == 8)
                {
                    return ALFormat.Mono8;
                }
            }
            throw new FormatException("Cannot translate WaveFormat.");
        }
    }
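    // Repeatedly reads from the wave provider until the target buffer is completely full;
    // returns false once the provider reports end-of-stream (a zero-length read).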
    public class Accumulator
    {
        public Accumulator(IWaveProvider provider, byte[] buffer)
        {
            Provider = provider ?? throw new ArgumentNullException(nameof(provider));
            Buffer = buffer ?? throw new ArgumentNullException(nameof(buffer));
            Position = 0;
        }
        public IWaveProvider Provider { get; }
        public byte[] Buffer { get; }
        public int Position { get; private set; }
        private object Locker = new object();
        public async Task<bool> Accumulate(System.Threading.CancellationToken ct)
        {
            if (Position == Buffer.Length)
                return true;
            await Task.Yield();
            lock (Locker)
            {
                while (Position != Buffer.Length)
                {
                    if (ct.IsCancellationRequested)
                        throw new TaskCanceledException();
                    var read = Provider.Read(Buffer, Position, Buffer.Length - Position);
                    if (read == 0)
                        return false;
                    Position += read;
                }
                return true;
            }
        }
        public void Reset()
        {
            Position = 0;
        }
    }
}
dev-bre commented Sep 26, 2020

This is very interesting. Do you plan on providing the audio capture side as well?

@VisualMelon (Author) commented

@dev-bre I have no plans as such. If I happen to need audio input at some point, that's probably what I'd try, but unless I run into the need myself it isn't something I'm likely to invest any time in.
