@neilt6
Last active October 31, 2024 02:16
NAudio Driver for Xamarin.Android
using Android.Media;
using NAudio.Wave;
using System;
using System.Threading;
namespace NAudio.Wave
{
    /// <summary>
    /// Represents an Android wave player implemented using <see cref="AudioTrack"/>.
    /// </summary>
    public class AudioTrackOut : IWavePlayer
    {
        #region Fields
        IWaveProvider m_WaveProvider;
        AudioTrack m_AudioTrack;
        Thread m_PlaybackThread;
        float m_Volume;
        bool m_IsDisposed;
        #endregion
        #region Properties
        /// <summary>
        /// Gets the current playback state.
        /// </summary>
        public PlaybackState PlaybackState { get; private set; }
        /// <summary>
        /// Gets or sets the volume in % (0.0 to 1.0).
        /// </summary>
        public float Volume
        {
            get => m_Volume;
            set
            {
                m_Volume = (value < 0.0f) ? 0.0f : (value > 1.0f) ? 1.0f : value;
                m_AudioTrack?.SetVolume(m_Volume);
            }
        }
        /// <summary>
        /// Gets or sets the desired latency in milliseconds.
        /// </summary>
        public int DesiredLatency { get; set; }
        /// <summary>
        /// Gets or sets the number of buffers to use.
        /// </summary>
        public int NumberOfBuffers { get; set; }
        /// <summary>
        /// Gets or sets the usage.
        /// </summary>
        public AudioUsageKind Usage { get; set; }
        /// <summary>
        /// Gets or sets the content type.
        /// </summary>
        public AudioContentType ContentType { get; set; }
        /// <summary>
        /// Gets or sets the performance mode.
        /// </summary>
        public AudioTrackPerformanceMode PerformanceMode { get; set; }
        #endregion
        #region Events
        /// <summary>
        /// Occurs when the player has stopped.
        /// </summary>
        public event EventHandler<StoppedEventArgs> PlaybackStopped;
        #endregion
        #region Constructors
        /// <summary>
        /// Initializes a new instance of the <see cref="AudioTrackOut"/> class.
        /// </summary>
        public AudioTrackOut()
        {
            //Initialize the fields and properties
            m_WaveProvider = null;
            m_AudioTrack = null;
            m_PlaybackThread = null;
            m_Volume = 1.0f;
            m_IsDisposed = false;
            PlaybackState = PlaybackState.Stopped;
            DesiredLatency = 300;
            NumberOfBuffers = 2;
            Usage = AudioUsageKind.Media;
            ContentType = AudioContentType.Music;
            PerformanceMode = AudioTrackPerformanceMode.None;
        }
        /// <summary>
        /// Releases the unmanaged resources used by the current instance of the <see cref="AudioTrackOut"/> class.
        /// </summary>
        ~AudioTrackOut()
        {
            //Dispose of this object
            Dispose(false);
        }
        #endregion
        #region Public Methods
        /// <summary>
        /// Initializes the player with the specified wave provider.
        /// </summary>
        /// <param name="waveProvider">The wave provider to be played.</param>
        public void Init(IWaveProvider waveProvider)
        {
            //Make sure we haven't been disposed
            ThrowIfDisposed();
            //Check the player state
            if (m_WaveProvider != null)
            {
                throw new InvalidOperationException("This wave player instance has already been initialized");
            }
            //Initialize the wave provider
            Encoding encoding;
            if (waveProvider == null)
            {
                throw new ArgumentNullException(nameof(waveProvider));
            }
            else if (waveProvider.WaveFormat.Encoding == WaveFormatEncoding.Pcm || waveProvider.WaveFormat.Encoding == WaveFormatEncoding.IeeeFloat)
            {
                encoding = waveProvider.WaveFormat.BitsPerSample switch
                {
                    8 => Encoding.Pcm8bit,
                    16 => Encoding.Pcm16bit,
                    32 => Encoding.PcmFloat,
                    _ => throw new ArgumentException("Input wave provider must be 8-bit, 16-bit, or 32-bit", nameof(waveProvider))
                };
            }
            else
            {
                throw new ArgumentException("Input wave provider must be PCM or IEEE float", nameof(waveProvider));
            }
            m_WaveProvider = waveProvider;
            //Determine the channel mask
            ChannelOut channelMask = m_WaveProvider.WaveFormat.Channels switch
            {
                1 => ChannelOut.Mono,
                2 => ChannelOut.Stereo,
                _ => throw new ArgumentException("Input wave provider must be mono or stereo", nameof(waveProvider))
            };
            //Determine the buffer size
            int minBufferSize = AudioTrack.GetMinBufferSize(m_WaveProvider.WaveFormat.SampleRate, channelMask, encoding);
            int bufferSize = m_WaveProvider.WaveFormat.ConvertLatencyToByteSize(DesiredLatency);
            if (bufferSize < minBufferSize)
            {
                bufferSize = minBufferSize;
            }
            //Initialize the audio track
            m_AudioTrack = new AudioTrack.Builder()
                .SetAudioAttributes(new AudioAttributes.Builder()
                    .SetUsage(Usage)
                    .SetContentType(ContentType)
                    .Build())
                .SetAudioFormat(new AudioFormat.Builder()
                    .SetEncoding(encoding)
                    .SetSampleRate(m_WaveProvider.WaveFormat.SampleRate)
                    .SetChannelMask(channelMask)
                    .Build())
                .SetBufferSizeInBytes(bufferSize)
                .SetTransferMode(AudioTrackMode.Stream)
                .SetPerformanceMode(PerformanceMode)
                .Build();
            m_AudioTrack.SetVolume(Volume);
        }
        /// <summary>
        /// Starts the player.
        /// </summary>
        public void Play()
        {
            //Make sure we haven't been disposed
            ThrowIfDisposed();
            //Check the player state
            ThrowIfNotInitialized();
            if (PlaybackState == PlaybackState.Playing)
            {
                return;
            }
            //Start the wave player
            PlaybackState = PlaybackState.Playing;
            m_AudioTrack.Play();
            if (m_PlaybackThread == null || !m_PlaybackThread.IsAlive)
            {
                m_PlaybackThread = new Thread(PlaybackThread);
                m_PlaybackThread.Priority = ThreadPriority.Highest;
                m_PlaybackThread.Start();
            }
        }
        /// <summary>
        /// Pauses the player.
        /// </summary>
        public void Pause()
        {
            //Make sure we haven't been disposed
            ThrowIfDisposed();
            //Check the player state
            ThrowIfNotInitialized();
            if (PlaybackState == PlaybackState.Stopped || PlaybackState == PlaybackState.Paused)
            {
                return;
            }
            //Pause the wave player
            PlaybackState = PlaybackState.Paused;
            m_AudioTrack.Pause();
        }
        /// <summary>
        /// Stops the player.
        /// </summary>
        public void Stop()
        {
            //Make sure we haven't been disposed
            ThrowIfDisposed();
            //Check the player state
            ThrowIfNotInitialized();
            if (PlaybackState == PlaybackState.Stopped)
            {
                return;
            }
            //Stop the wave player
            PlaybackState = PlaybackState.Stopped;
            m_AudioTrack.Stop();
            m_PlaybackThread.Join();
        }
        /// <summary>
        /// Releases all resources used by the current instance of the <see cref="AudioTrackOut"/> class.
        /// </summary>
        public void Dispose()
        {
            //Dispose of this object
            Dispose(true);
            GC.SuppressFinalize(this);
        }
        #endregion
        #region Protected Methods
        /// <summary>
        /// Raises the <see cref="PlaybackStopped"/> event with the provided arguments.
        /// </summary>
        /// <param name="exception">An optional exception that occurred.</param>
        protected virtual void OnPlaybackStopped(Exception exception = null)
        {
            //Raise the playback stopped event
            PlaybackStopped?.Invoke(this, new StoppedEventArgs(exception));
        }
        /// <summary>
        /// Releases the unmanaged resources used by the <see cref="AudioTrackOut"/>, and optionally releases the managed resources.
        /// </summary>
        /// <param name="disposing"><c>true</c> to release both managed and unmanaged resources; <c>false</c> to release only unmanaged resources.</param>
        protected virtual void Dispose(bool disposing)
        {
            //Clean up any managed and unmanaged resources
            if (!m_IsDisposed)
            {
                if (disposing)
                {
                    if (PlaybackState != PlaybackState.Stopped)
                    {
                        Stop();
                    }
                    m_AudioTrack?.Release();
                    m_AudioTrack?.Dispose();
                }
                m_IsDisposed = true;
            }
        }
        #endregion
        #region Private Methods
        private void PlaybackThread()
        {
            //Run the playback logic
            Exception exception = null;
            try
            {
                PlaybackLogic();
            }
            catch (Exception e)
            {
                exception = e;
            }
            finally
            {
                PlaybackState = PlaybackState.Stopped;
                OnPlaybackStopped(exception);
            }
        }
        private void PlaybackLogic()
        {
            //Initialize the wave buffer
            int waveBufferSize = (m_AudioTrack.BufferSizeInFrames + NumberOfBuffers - 1) / NumberOfBuffers * m_WaveProvider.WaveFormat.BlockAlign;
            waveBufferSize = (waveBufferSize + 3) & ~3;
            WaveBuffer waveBuffer = new WaveBuffer(waveBufferSize);
            waveBuffer.ByteBufferCount = waveBufferSize;
            //Run the playback loop
            while (PlaybackState != PlaybackState.Stopped)
            {
                //Check the playback state
                if (PlaybackState != PlaybackState.Playing)
                {
                    Thread.Sleep(10);
                    continue;
                }
                //Fill the wave buffer with new samples
                int bytesRead = m_WaveProvider.Read(waveBuffer.ByteBuffer, 0, waveBuffer.ByteBufferCount);
                if (bytesRead > 0)
                {
                    //Clear the unused space in the wave buffer if necessary
                    if (bytesRead < waveBuffer.ByteBufferCount)
                    {
                        waveBuffer.ByteBufferCount = (bytesRead + 3) & ~3;
                        Array.Clear(waveBuffer.ByteBuffer, bytesRead, waveBuffer.ByteBufferCount - bytesRead);
                    }
                    //Write the wave buffer to the audio track
                    WriteBuffer(waveBuffer);
                }
                else
                {
                    //Stop the audio track
                    m_AudioTrack.Stop();
                    break;
                }
            }
            //Flush the audio track
            m_AudioTrack.Flush();
        }
        private void WriteBuffer(IWaveBuffer waveBuffer)
        {
            //Write the specified wave buffer to the audio track
            if (m_WaveProvider.WaveFormat.Encoding == WaveFormatEncoding.Pcm)
            {
                m_AudioTrack.Write(waveBuffer.ByteBuffer, 0, waveBuffer.ByteBufferCount);
            }
            else if (m_WaveProvider.WaveFormat.Encoding == WaveFormatEncoding.IeeeFloat)
            {
                //AudioTrack.Write doesn't appreciate WaveBuffer.FloatBuffer
                float[] floatBuffer = new float[waveBuffer.FloatBufferCount];
                for (int i = 0; i < waveBuffer.FloatBufferCount; i++)
                {
                    floatBuffer[i] = waveBuffer.FloatBuffer[i];
                }
                m_AudioTrack.Write(floatBuffer, 0, floatBuffer.Length, WriteMode.Blocking);
            }
        }
        private void ThrowIfNotInitialized()
        {
            //Throw an exception if this object has not been initialized
            if (m_WaveProvider == null)
            {
                throw new InvalidOperationException("This wave player instance has not been initialized");
            }
        }
        private void ThrowIfDisposed()
        {
            //Throw an exception if this object has been disposed
            if (m_IsDisposed)
            {
                throw new ObjectDisposedException(GetType().FullName);
            }
        }
        #endregion
    }
}
@JunielKatarn

This is wonderful. Thanks!

@LSXAxeller

Thank you for the wonderful work. However, may I ask for assistance? When I try to call the function Play(), I encounter an exception, and the application becomes inactive. Is there a specific way to implement it?

[monodroid-assembly] open_from_bundles: failed to load assembly NAudio.dll
Loaded assembly: /data/data/com.RI.NNPlayer/files/.__override__/NAudio.dll [External]
[monodroid-assembly] open_from_bundles: failed to load assembly NAudio.WinMM.dll
Loaded assembly: /data/data/com.RI.NNPlayer/files/.__override__/NAudio.WinMM.dll [External]
[monodroid-assembly] Shared library 'Msacm32.dll' not loaded, p/invoke 'acmFormatSuggest' may fail
[monodroid-assembly] Shared library 'Msacm32.dll' not loaded, p/invoke 'acmFormatSuggest' may fail

this is the package I installed <PackageReference Include="NAudio" Version="2.2.1" />

@neilt6
Author

neilt6 commented Mar 15, 2024

> Thank you for the wonderful work. However, may I ask for assistance? When I try to call the function Play(), I encounter an exception, and the application becomes inactive. Is there a specific way to implement it?
>
> [monodroid-assembly] open_from_bundles: failed to load assembly NAudio.dll
> Loaded assembly: /data/data/com.RI.NNPlayer/files/.__override__/NAudio.dll [External]
> [monodroid-assembly] open_from_bundles: failed to load assembly NAudio.WinMM.dll
> Loaded assembly: /data/data/com.RI.NNPlayer/files/.__override__/NAudio.WinMM.dll [External]
> [monodroid-assembly] Shared library 'Msacm32.dll' not loaded, p/invoke 'acmFormatSuggest' may fail
> [monodroid-assembly] Shared library 'Msacm32.dll' not loaded, p/invoke 'acmFormatSuggest' may fail
>
> this is the package I installed <PackageReference Include="NAudio" Version="2.2.1" />

You should be using the NAudio.Core NuGet to avoid pulling in Windows dependencies. Based on the exception, it’s trying to load NAudio.WinMM.dll which definitely won’t work on mobile.
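
For reference, a minimal playback sketch, assuming the AudioTrackOut class from this gist plus the NAudio.Core package ("sample.wav" and the latency value are just placeholders):

using NAudio.Wave;

// Minimal sketch: WaveFileReader ships with NAudio.Core; AudioTrackOut is the class in this gist.
var reader = new WaveFileReader("sample.wav");
var player = new AudioTrackOut { DesiredLatency = 300 };
player.Init(reader);
player.PlaybackStopped += (s, e) => reader.Dispose();
player.Play();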

@LSXAxeller

> Thank you for the wonderful work. However, may I ask for assistance? When I try to call the function Play(), I encounter an exception, and the application becomes inactive. Is there a specific way to implement it?
>
> [monodroid-assembly] open_from_bundles: failed to load assembly NAudio.dll
> Loaded assembly: /data/data/com.RI.NNPlayer/files/.__override__/NAudio.dll [External]
> [monodroid-assembly] open_from_bundles: failed to load assembly NAudio.WinMM.dll
> Loaded assembly: /data/data/com.RI.NNPlayer/files/.__override__/NAudio.WinMM.dll [External]
> [monodroid-assembly] Shared library 'Msacm32.dll' not loaded, p/invoke 'acmFormatSuggest' may fail
> [monodroid-assembly] Shared library 'Msacm32.dll' not loaded, p/invoke 'acmFormatSuggest' may fail
>
> this is the package I installed <PackageReference Include="NAudio" Version="2.2.1" />
>
> You should be using the NAudio.Core NuGet to avoid pulling in Windows dependencies. Based on the exception, it’s trying to load NAudio.WinMM.dll which definitely won’t work on mobile.

After removing NAudio and installing NAudio.Core, I encountered issues loading MP3 files on Android. I attempted to use NLayer to address this, but encountered the following error:

[monodroid-assembly] open_from_bundles: failed to load assembly NLayer.NAudioSupport.dll
[monodroid-assembly] open_from_bundles: failed to load assembly NLayer.dll
Loaded assembly: /data/user/0/com.bloodmoon.novelnodeplayer/files/.__override__/NLayer.NAudioSupport.dll [External]
Loaded assembly: /data/user/0/com.bloodmoon.novelnodeplayer/files/.__override__/NLayer.dll [External]
[AudioTrackExtImpl] AudioTrackExtImpl init
[AudioTrack] set() streamType -1, sampleRate 48000, format 0x5, channelMask 0x3, frameCount 14400, flags #0, notificationFrames 0, sessionId 0, transferType 3, uid -1, pid -1 cbf 1
[AudioTrack] set(): Building AudioTrack with attributes: usage=1 content=2 flags=0xa00 tags=[]
[AudioTrack] createTrack_l(375) on outputId(29) : 0x72679b9400, mCblk = 0x726addf000,  mLatency = 380, mAfLatency = 80, frameCount = 14400, mSampleRate = 48000, mFlags = 0x8, mReqFrameCount = 14400, mNotificationFramesAct = 7200
[ListServiceUtils] mListServiceUtils::init CallingPid 10260
[ListServiceUtils] mListServiceUtils::init this 0x71e214ac00
[AudioTrack] setVolume left 1.000 right 1.000 , callingPid 10260
[AudioTrack] start(375): prior state:STATE_STOPPED output 29 stream 3 session 3081
Thread started:  #8
[monodroid-assembly] open_from_bundles: failed to load assembly System.Runtime.Extensions.dll
Loaded assembly: /data/user/0/com.bloodmoon.novelnodeplayer/files/.__override__/System.Runtime.Extensions.dll [External]
[monodroid-assembly] open_from_bundles: failed to load assembly System.IO.dll
Loaded assembly: /data/user/0/com.bloodmoon.novelnodeplayer/files/.__override__/System.IO.dll [External]
[novelnodeplaye] Explicit concurrent copying GC freed 4126(267KB) AllocSpace objects, 2(120KB) LOS objects, 78% free, 3312KB/15MB, paused 22us,16us total 7.735ms
[ProfileInstaller] Installing profile for com.bloodmoon.novelnodeplayer
Thread started: <Thread Pool> #9

It seems NLayer is not functioning as expected. Can you suggest an alternative library to handle MP3 loading, since NAudio.Core only supports WAV files?
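
The wiring I attempted follows the usual NLayer.NAudioSupport pattern, roughly like this (the path is a placeholder):

using NAudio.Wave;
using NLayer.NAudioSupport;

// Rough sketch of the standard NLayer wiring: Mp3FileReaderBase (from NAudio.Core) takes a
// frame-decompressor factory, and NLayer.NAudioSupport supplies a fully managed Mp3FrameDecompressor.
var mp3Reader = new Mp3FileReaderBase("song.mp3", wf => new Mp3FrameDecompressor(wf));
var player = new AudioTrackOut();
player.Init(mp3Reader);
player.Play();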

@SineVector241

Hi. I have taken this and updated it for nullable-enabled projects, and fleshed it out to more closely follow the NAudio format (which includes removing the throw-on-dispose behavior).

using Android.Media;
using System;
using System.Threading;

namespace NAudio.Wave
{
    public class AndroidAudioPlayer : IWavePlayer
    {
        private readonly SynchronizationContext? _synchronizationContext;
        IWaveProvider? _waveProvider;
        AudioTrack? _audioTrack;
        float _volume;

        public PlaybackState PlaybackState { get; private set; }

        public float Volume
        {
            get => _volume;
            set
            {
                _volume = (value < 0.0f) ? 0.0f : (value > 1.0f) ? 1.0f : value;
                _audioTrack?.SetVolume(_volume);
            }
        }

        public int DesiredLatency { get; set; }

        public int NumberOfBuffers { get; set; }

        public AudioUsageKind Usage { get; set; }

        public AudioContentType ContentType { get; set; }

        public AudioTrackPerformanceMode PerformanceMode { get; set; }

        public WaveFormat OutputWaveFormat { get; set; }

        public event EventHandler<StoppedEventArgs>? PlaybackStopped;

        public AndroidAudioPlayer()
        {
            _synchronizationContext = SynchronizationContext.Current;

            _volume = 1.0f;
            PlaybackState = PlaybackState.Stopped;
            NumberOfBuffers = 2;
            DesiredLatency = 300;
            OutputWaveFormat = new WaveFormat();

            Usage = AudioUsageKind.Media;
            ContentType = AudioContentType.Music;
            PerformanceMode = AudioTrackPerformanceMode.None;
        }

        ~AndroidAudioPlayer()
        {
            //Dispose of this object
            Dispose(false);
        }

        public void Init(IWaveProvider waveProvider)
        {
            if (PlaybackState != PlaybackState.Stopped)
            {
                throw new InvalidOperationException("Can't re-initialize during playback");
            }
            if (_audioTrack != null)
            {
                ClosePlayer();
            }

            //Initialize the wave provider
            Encoding encoding;
            if (waveProvider.WaveFormat.Encoding == WaveFormatEncoding.Pcm || waveProvider.WaveFormat.Encoding == WaveFormatEncoding.IeeeFloat)
            {
                encoding = waveProvider.WaveFormat.BitsPerSample switch
                {
                    8 => Encoding.Pcm8bit,
                    16 => Encoding.Pcm16bit,
                    32 => Encoding.PcmFloat,
                    _ => throw new ArgumentException("Input wave provider must be 8-bit, 16-bit, or 32-bit", nameof(waveProvider))
                };
            }
            else
            {
                throw new ArgumentException("Input wave provider must be PCM or IEEE float", nameof(waveProvider));
            }
            _waveProvider = waveProvider;

            //Determine the channel mask
            ChannelOut channelMask = _waveProvider.WaveFormat.Channels switch
            {
                1 => ChannelOut.Mono,
                2 => ChannelOut.Stereo,
                _ => throw new ArgumentException("Input wave provider must be mono or stereo", nameof(waveProvider))
            };

            //Determine the buffer size
            int minBufferSize = AudioTrack.GetMinBufferSize(_waveProvider.WaveFormat.SampleRate, channelMask, encoding);
            int bufferSize = _waveProvider.WaveFormat.ConvertLatencyToByteSize(DesiredLatency);
            if (bufferSize < minBufferSize)
            {
                bufferSize = minBufferSize;
            }

            _audioTrack = new AudioTrack.Builder()
                .SetAudioAttributes(new AudioAttributes.Builder()
                    .SetUsage(Usage)!
                    .SetContentType(ContentType)!
                    .Build()!)
                .SetAudioFormat(new AudioFormat.Builder()
                    .SetEncoding(encoding)!
                    .SetSampleRate(_waveProvider.WaveFormat.SampleRate)!
                    .SetChannelMask(channelMask)!
                    .Build()!)
                .SetBufferSizeInBytes(bufferSize)
                .SetTransferMode(AudioTrackMode.Stream)
                .SetPerformanceMode(PerformanceMode)
                .Build();
            _audioTrack.SetVolume(Volume);
        }

        public void Play()
        {
            if (PlaybackState == PlaybackState.Playing)
            {
                return;
            }

            if (_waveProvider == null || _audioTrack == null)
            {
                throw new InvalidOperationException("Must call Init first");
            }

            //Start the wave player
            if (PlaybackState == PlaybackState.Stopped)
            {
                PlaybackState = PlaybackState.Playing;
                _audioTrack.Play();
                ThreadPool.QueueUserWorkItem(state => PlaybackThread(), null);
            }
            else if (PlaybackState == PlaybackState.Paused)
            {
                Resume();
            }
        }

        public void Pause()
        {
            if (_waveProvider == null || _audioTrack == null)
            {
                throw new InvalidOperationException("Must call Init first");
            }

            if (PlaybackState == PlaybackState.Stopped || PlaybackState == PlaybackState.Paused)
            {
                return;
            }

            //Pause the wave player
            PlaybackState = PlaybackState.Paused;
            _audioTrack.Pause();
        }

        public void Stop()
        {
            if (_waveProvider == null || _audioTrack == null)
            {
                throw new InvalidOperationException("Must call Init first");
            }

            if (PlaybackState == PlaybackState.Stopped)
            {
                return;
            }

            //Stop the wave player
            PlaybackState = PlaybackState.Stopped;
            _audioTrack.Stop();
        }

        public void Dispose()
        {
            //Dispose of this object
            Dispose(true);
            GC.SuppressFinalize(this);
        }

        private void Resume()
        {
            if (PlaybackState == PlaybackState.Paused)
            {
                _audioTrack?.Play();
                PlaybackState = PlaybackState.Playing;
            }
        }

        private void ClosePlayer()
        {
            if (_audioTrack != null)
            {
                _audioTrack.Stop();
                _audioTrack.Release();
                _audioTrack.Dispose();
                _audioTrack = null;
            }
        }

        private void PlaybackThread()
        {
            Exception? exception = null;
            try
            {
                PlaybackLogic();
            }
            catch (Exception e)
            {
                exception = e;
            }
            finally
            {
                PlaybackState = PlaybackState.Stopped;
                // we're exiting our background thread
                RaisePlaybackStoppedEvent(exception);
            }
        }

        private void PlaybackLogic()
        {
            if (_waveProvider == null || _audioTrack == null)
            {
                throw new InvalidOperationException("Must call Init first");
            }

            //Initialize the wave buffer
            int waveBufferSize = (_audioTrack.BufferSizeInFrames + NumberOfBuffers - 1) / NumberOfBuffers * _waveProvider.WaveFormat.BlockAlign;
            waveBufferSize = (waveBufferSize + 3) & ~3;
            WaveBuffer waveBuffer = new(waveBufferSize)
            {
                ByteBufferCount = waveBufferSize
            };

            //Run the playback loop
            while (PlaybackState != PlaybackState.Stopped)
            {
                //Check the playback state
                if (PlaybackState != PlaybackState.Playing)
                {
                    Thread.Sleep(10);
                    continue;
                }

                //Fill the wave buffer with new samples
                int bytesRead = _waveProvider.Read(waveBuffer.ByteBuffer, 0, waveBuffer.ByteBufferCount);
                if (bytesRead > 0)
                {
                    //Clear the unused space in the wave buffer if necessary
                    if (bytesRead < waveBuffer.ByteBufferCount)
                    {
                        waveBuffer.ByteBufferCount = (bytesRead + 3) & ~3;
                        Array.Clear(waveBuffer.ByteBuffer, bytesRead, waveBuffer.ByteBufferCount - bytesRead);
                    }

                    //Write the specified wave buffer to the audio track
                    if (_waveProvider.WaveFormat.Encoding == WaveFormatEncoding.Pcm)
                    {
                        _audioTrack.Write(waveBuffer.ByteBuffer, 0, waveBuffer.ByteBufferCount);
                    }
                    else if (_waveProvider.WaveFormat.Encoding == WaveFormatEncoding.IeeeFloat)
                    {
                        //AudioTrack.Write doesn't appreciate WaveBuffer.FloatBuffer
                        float[] floatBuffer = new float[waveBuffer.FloatBufferCount];
                        for (int i = 0; i < waveBuffer.FloatBufferCount; i++)
                        {
                            floatBuffer[i] = waveBuffer.FloatBuffer[i];
                        }
                        _audioTrack.Write(floatBuffer, 0, floatBuffer.Length, WriteMode.Blocking);
                    }
                }
                else
                {
                    //Stop the audio track
                    _audioTrack.Stop();
                    break;
                }
            }

            //Flush the audio track
            _audioTrack.Flush();
        }

        private void RaisePlaybackStoppedEvent(Exception? e)
        {
            var handler = PlaybackStopped;
            if (handler != null)
            {
                if (_synchronizationContext == null)
                {
                    handler(this, new StoppedEventArgs(e));
                }
                else
                {
                    _synchronizationContext.Post(state => handler(this, new StoppedEventArgs(e)), null);
                }
            }
        }

        protected virtual void Dispose(bool disposing)
        {
            if (disposing)
            {
                if (PlaybackState != PlaybackState.Stopped)
                {
                    Stop();
                }
                _audioTrack?.Release();
                _audioTrack?.Dispose();
            }
        }
    }
}
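
One note on the behavior above: because SynchronizationContext.Current is captured in the constructor, PlaybackStopped is posted back to the construction thread when a context exists. A minimal sketch of relying on that, assuming the player is created on the UI thread:

using System;
using NAudio.Wave;

var player = new AndroidAudioPlayer();
player.PlaybackStopped += (s, e) =>
{
    // Runs on the captured context (the UI thread here), so touching UI state is safe.
    if (e.Exception != null)
        Console.WriteLine($"Playback stopped with error: {e.Exception.Message}");
};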

@SineVector241

SineVector241 commented Oct 11, 2024

I've also made an Android recorder based on this and NAudio's WinMM implementation: https://gist.github.com/SineVector241/58152564e615066132b081e8e2d00645. I have yet to make one for iOS, Linux, and macOS.

@ShowTune

Hi! The UWP version of my app outputs audio fine, but the Android app has no sound... How do I use this NAudio driver for Xamarin.Android in a cross-platform project so the Android app can output audio with NAudio? Please help me with some advice, this is my first Android app... :)
