Skip to content

Instantly share code, notes, and snippets.

Embed
What would you like to do?
Audio impulse response in time domain.
using System;
using System.Threading;
using NAudio.Wave;
using NAudio.Wave.SampleProviders;
namespace GM.SP.Audio
{
public static class AudioEffect
{
    /// <summary>
    /// A very simple impulse response (convolution reverb) applied in the time domain.
    /// Note that it will be very slow: O(n*m) per channel, one worker thread per channel.
    /// </summary>
    /// <param name="audioFile">Path to the input audio file (.wav or .mp3, stereo).</param>
    /// <param name="impulseResponseFile">Path to the impulse response file (.wav or .mp3, stereo, same sample rate as the input).</param>
    /// <param name="outputFile">Path of the output file; must end with ".wav".</param>
    /// <param name="updateLeft">Progress callback in (0, 1] for the left channel; invoked from a worker thread.</param>
    /// <param name="updateRight">Progress callback in (0, 1] for the right channel; invoked from a worker thread.</param>
    /// <exception cref="ArgumentException">The output file is not a .wav file.</exception>
    /// <exception cref="NotSupportedException">An input file has an unsupported extension or bit depth.</exception>
    /// <exception cref="InvalidOperationException">The sample rates differ, or a file is not stereo.</exception>
    public static void ImpulseResponse(string audioFile, string impulseResponseFile, string outputFile, Action<double> updateLeft, Action<double> updateRight)
    {
        // Ordinal, case-insensitive extension check — avoids the ToLower() allocation
        // and culture-dependent casing surprises (e.g. Turkish 'I').
        if (!outputFile.EndsWith(".wav", StringComparison.OrdinalIgnoreCase))
            throw new ArgumentException("Output file type must be WAV.", nameof(outputFile));

        WaveStream audioWaveProvider = null;
        WaveStream impulseWaveProvider = null;
        WaveFileWriter writer = null;
        try
        {
            audioWaveProvider = OpenWaveStream(audioFile);
            impulseWaveProvider = OpenWaveStream(impulseResponseFile);

            // Both audio files must have the same sample rate — the convolution
            // below is sample-rate agnostic, so mismatched rates would silently
            // produce a pitched/stretched result.
            if (audioWaveProvider.WaveFormat.SampleRate != impulseWaveProvider.WaveFormat.SampleRate)
                throw new InvalidOperationException("Selected audio file and impulse response should have the same sample rate, but they don't.");
            // Both audio files must be stereo; the channel split below assumes interleaved L/R.
            if (audioWaveProvider.WaveFormat.Channels != 2)
                throw new InvalidOperationException("Only stereo audio files are supported.");
            if (impulseWaveProvider.WaveFormat.Channels != 2)
                throw new InvalidOperationException("Only stereo impulse responses are supported.");

            // Read every sample of both files into memory as normalized floats.
            float[] audioSampleArray = ReadAllSamples(audioWaveProvider, "audio file");
            float[] impulseSampleArray = ReadAllSamples(impulseWaveProvider, "impulse response file");

            // Unzip the interleaved stereo arrays into separate left/right channels.
            Tuple<float[], float[]> audioSamples = AudioUtility.Unzip(audioSampleArray);
            Tuple<float[], float[]> impulseSamples = AudioUtility.Unzip(impulseSampleArray);

            float[] resultLeft = null;
            float[] resultRight = null;
            // Convolve each channel on its own thread; the two are independent.
            Thread t1 = new Thread(new ThreadStart(delegate
            {
                resultLeft = Convolve(audioSamples.Item1, impulseSamples.Item1, updateLeft);
            }));
            Thread t2 = new Thread(new ThreadStart(delegate
            {
                resultRight = Convolve(audioSamples.Item2, impulseSamples.Item2, updateRight);
            }));
            t1.Start();
            t2.Start();
            t1.Join();
            t2.Join();

            // Re-interleave the channels and write the result.
            float[] result = AudioUtility.Zip(resultLeft, resultRight);
            // The result has to be saved with the same sample rate as both input files.
            WaveFormat format = new WaveFormat(audioWaveProvider.WaveFormat.SampleRate, audioWaveProvider.WaveFormat.Channels);
            writer = new WaveFileWriter(outputFile, format);
            writer.WriteSamples(result, 0, result.Length);
        }
        finally
        {
            audioWaveProvider?.Dispose();
            impulseWaveProvider?.Dispose();
            writer?.Dispose();
        }
    }

    /// <summary>
    /// Opens an audio file as a <see cref="WaveStream"/>, dispatching on extension.
    /// </summary>
    /// <exception cref="NotSupportedException">The extension is neither .mp3 nor .wav.</exception>
    private static WaveStream OpenWaveStream(string file)
    {
        if (file.EndsWith(".mp3", StringComparison.OrdinalIgnoreCase))
            return new Mp3FileReader(file);
        if (file.EndsWith(".wav", StringComparison.OrdinalIgnoreCase))
            return new WaveFileReader(file);
        throw new NotSupportedException("Unsupported format.");
    }

    /// <summary>
    /// Wraps a PCM wave stream in the sample provider matching its bit depth.
    /// </summary>
    /// <param name="waveStream">The PCM source stream.</param>
    /// <param name="sourceDescription">Human-readable source name used in error messages.</param>
    /// <exception cref="NotSupportedException">The bit depth is not 8/16/24/32.</exception>
    private static ISampleProvider CreateSampleProvider(WaveStream waveStream, string sourceDescription)
    {
        switch (waveStream.WaveFormat.BitsPerSample)
        {
            case 8:
                return new Pcm8BitToSampleProvider(waveStream);
            case 16:
                return new Pcm16BitToSampleProvider(waveStream);
            case 24:
                return new Pcm24BitToSampleProvider(waveStream);
            case 32:
                return new Pcm32BitToSampleProvider(waveStream);
            default:
                throw new NotSupportedException("Unsupported bits per sample in " + sourceDescription + ".");
        }
    }

    /// <summary>
    /// Reads the entire stream into a float sample array.
    /// </summary>
    private static float[] ReadAllSamples(WaveStream waveStream, string sourceDescription)
    {
        ISampleProvider provider = CreateSampleProvider(waveStream, sourceDescription);
        float[] samples = new float[waveStream.Length / (waveStream.WaveFormat.BitsPerSample / 8)];
        // ISampleProvider.Read may return fewer samples than requested,
        // so loop until the buffer is full or the stream is exhausted.
        int total = 0;
        int read;
        while (total < samples.Length && (read = provider.Read(samples, total, samples.Length - total)) > 0)
            total += read;
        return samples;
    }

    /// <summary>
    /// Direct (time-domain) linear convolution of <paramref name="input"/> with
    /// <paramref name="impulseResponse"/>, followed by peak normalization.
    /// Runs in O(input.Length * impulseResponse.Length) — very slow for long signals.
    /// </summary>
    /// <param name="input">Single-channel input samples.</param>
    /// <param name="impulseResponse">Single-channel impulse response samples.</param>
    /// <param name="update">Progress callback; receives values in (0, 1], with a final call of exactly 1.</param>
    /// <returns>The convolved, normalized samples; length is input.Length + impulseResponse.Length - 1 (empty if either input is empty).</returns>
    public static float[] Convolve(float[] input, float[] impulseResponse, Action<double> update)
    {
        // Degenerate inputs: full convolution of an empty sequence is empty.
        // (The original would have produced a negative/zero-length array here.)
        if (input.Length == 0 || impulseResponse.Length == 0)
        {
            update(1);
            return new float[0];
        }
        // Full linear convolution has N + M - 1 samples; the original N + M
        // allocation left a guaranteed-zero trailing sample.
        var output = new float[input.Length + impulseResponse.Length - 1];
        ulong totalIterations = (ulong)output.Length * (ulong)impulseResponse.Length;
        ulong current = 0;
        // Report progress roughly every percent. Clamp to at least 1: for tiny
        // inputs the rounded step would be 0 and `current % updateStep` would
        // throw DivideByZeroException.
        ulong updateStep = Math.Max(1UL, (ulong)(0.5 + totalIterations / 100.0));
        for (int t = 0; t < output.Length; t++)
        {
            for (int n = 0; n < impulseResponse.Length; n++)
            {
                if ((t >= n) && (t - n < input.Length))
                    output[t] += impulseResponse[n] * input[t - n];
                if (++current % updateStep == 0)
                    update(current / (double)totalIterations); // double: float loses precision for long files
            }
        }
        update(1);
        Normalize(output);
        return output;
    }

    /// <summary>
    /// Scales <paramref name="data"/> in place so its peak magnitude is 1,
    /// but only when the signal clips (peak &gt; 1); quieter signals are left untouched.
    /// </summary>
    /// <param name="data">Samples to normalize in place.</param>
    public static void Normalize(float[] data)
    {
        float max = 0;
        for (int n = 0; n < data.Length; n++)
            max = Math.Max(max, Math.Abs(data[n]));
        if (max > 1.0)
            for (int n = 0; n < data.Length; n++)
                data[n] /= max;
    }
}
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
You can’t perform that action at this time.