Skip to content

Instantly share code, notes, and snippets.

@tomrijnbeek
Last active September 12, 2015 20:12
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save tomrijnbeek/c9da892b461342c109a0 to your computer and use it in GitHub Desktop.
Play sound with OpenAL example
using System;
using System.Collections.Generic;
using System.IO;
using OpenTK.Audio;
using OpenTK.Audio.OpenAL;
namespace PlayALSoundExample
{
class Program
{
    // Size of each OpenAL buffer, in 16-bit samples.
    private const int BufferSize = 16384;

    /// <summary>
    /// Loads the wave file named by the first command-line argument and plays it
    /// through OpenAL, blocking until playback has finished.
    /// </summary>
    /// <param name="args">args[0] must be the path to an uncompressed 16-bit PCM wave file.</param>
    /// <exception cref="ArgumentException">No command-line argument was supplied.</exception>
    public static void Main(string[] args)
    {
        if (args.Length < 1)
            throw new ArgumentException("Program needs at least one parameter.", nameof(args));

        // The audio context owns the OpenAL device; dispose it even if playback throws.
        using (var context = new AudioContext())
        {
            IList<short[]> buffers;
            ALFormat alFormat;
            int sampleRate;

            // Parse the wave file into chunks of samples; the stream is disposed here
            // rather than leaked into GetData's BinaryReader ownership.
            using (var stream = File.OpenRead(args[0]))
                GetData(stream, out buffers, out alFormat, out sampleRate);

            // Upload each chunk of samples into its own OpenAL buffer.
            var bufferHandles = AL.GenBuffers(buffers.Count);
            for (int i = 0; i < buffers.Count; i++)
                AL.BufferData(bufferHandles[i], alFormat, buffers[i], buffers[i].Length * sizeof(short), sampleRate);

            // Queue all buffers on a single source and start playback.
            var sourceHandle = AL.GenSource();
            AL.SourceQueueBuffers(sourceHandle, bufferHandles.Length, bufferHandles);
            AL.SourcePlay(sourceHandle);

            // Poll until OpenAL reports the source has stopped playing.
            while (AL.GetSourceState(sourceHandle) != ALSourceState.Stopped)
                System.Threading.Thread.Sleep(100);

            // Release the OpenAL objects before tearing down the context.
            AL.DeleteSource(sourceHandle);
            AL.DeleteBuffers(bufferHandles);
        }
    }

    /// <summary>
    /// Parses an uncompressed PCM wave stream into fixed-size chunks of 16-bit samples.
    /// </summary>
    /// <param name="stream">Stream positioned at the start of a RIFF/WAVE file; consumed and closed.</param>
    /// <param name="buffers">Receives the sample data, split into chunks of at most <see cref="BufferSize"/> samples.</param>
    /// <param name="alFormat">Receives the OpenAL format matching the file's channel count and bit depth.</param>
    /// <param name="sampleRate">Receives the file's sample rate in Hz.</param>
    /// <exception cref="NotSupportedException">The stream is not a supported uncompressed 16-bit wave file.</exception>
    private static void GetData(Stream stream, out IList<short[]> buffers, out ALFormat alFormat, out int sampleRate)
    {
        using (var reader = new BinaryReader(stream))
        {
            // RIFF container header.
            if (new string(reader.ReadChars(4)) != "RIFF")
                throw new NotSupportedException("Specified stream is not a wave file.");
            reader.ReadInt32(); // riffChunkSize (unused)
            if (new string(reader.ReadChars(4)) != "WAVE")
                throw new NotSupportedException("Specified stream is not a wave file.");

            // "fmt " chunk: describes the sample format.
            if (new string(reader.ReadChars(4)) != "fmt ")
                throw new NotSupportedException("Specified wave file is not supported.");
            int formatChunkSize = reader.ReadInt32();
            reader.ReadInt16(); // audioFormat (assumed PCM; not validated here)
            int numChannels = reader.ReadInt16();
            sampleRate = reader.ReadInt32();
            reader.ReadInt32(); // byteRate (unused)
            reader.ReadInt16(); // blockAlign (unused)
            int bitsPerSample = reader.ReadInt16();
            // Skip any extension bytes beyond the 16-byte standard PCM format chunk.
            if (formatChunkSize > 16)
                reader.ReadBytes(formatChunkSize - 16);

            // "data" chunk: the raw samples.
            if (new string(reader.ReadChars(4)) != "data")
                throw new NotSupportedException("Only uncompressed wave files are supported.");
            int dataChunkSize = reader.ReadInt32();

            alFormat = GetSoundFormat(numChannels, bitsPerSample);

            // The chunking below reinterprets the payload as 16-bit samples, which is
            // only valid for 16-bit PCM; reject other bit depths instead of decoding garbage.
            if (bitsPerSample != 16)
                throw new NotSupportedException("Only 16-bit wave files are supported.");

            // Fix: read exactly the data chunk. The original read BaseStream.Length bytes,
            // which ignores the header already consumed and any chunks after "data".
            var data = reader.ReadBytes(dataChunkSize);

            buffers = new List<short[]>();
            int totalSamples = data.Length / 2;
            for (int offset = 0; offset < totalSamples; offset += BufferSize)
            {
                // Size the final chunk to the samples that remain, so no trailing
                // zero-samples (silence) get queued for playback.
                int count = Math.Min(BufferSize, totalSamples - offset);
                var buffer = new short[count];
                ConvertBuffer(data, buffer, count, offset * 2);
                buffers.Add(buffer);
            }
        }
    }

    /// <summary>Maps a channel count and bit depth onto the matching OpenAL buffer format.</summary>
    /// <exception cref="NotSupportedException">More than two channels were requested.</exception>
    private static ALFormat GetSoundFormat(int channels, int bits)
    {
        switch (channels)
        {
            case 1: return bits == 8 ? ALFormat.Mono8 : ALFormat.Mono16;
            case 2: return bits == 8 ? ALFormat.Stereo8 : ALFormat.Stereo16;
            default: throw new NotSupportedException("The specified sound format is not supported.");
        }
    }

    /// <summary>
    /// Copies <paramref name="length"/> 16-bit samples out of a byte array into a short array,
    /// starting <paramref name="inOffset"/> bytes into the input.
    /// NOTE(review): BitConverter uses the platform's endianness — this matches the WAV
    /// little-endian sample layout only on little-endian hosts (true for all current
    /// desktop targets).
    /// </summary>
    private static void ConvertBuffer(byte[] inBuffer, short[] outBuffer, int length, int inOffset = 0)
    {
        for (int i = 0; i < length; i++)
            outBuffer[i] = BitConverter.ToInt16(inBuffer, inOffset + 2 * i);
    }
}
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment