Skip to content

Instantly share code, notes, and snippets.

@dstd
Created April 15, 2015 17:12
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save dstd/6b6eaa30b9e1166b908c to your computer and use it in GitHub Desktop.
Save dstd/6b6eaa30b9e1166b908c to your computer and use it in GitHub Desktop.
With this source, MediaElement.Position returns correct position values during playback.
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Media;
namespace MediaTools
{
/// <summary>
/// A minimal <see cref="MediaStreamSource"/> that feeds raw PCM audio from a
/// canonical WAV stream (a RIFF header followed directly by a single "data"
/// chunk) to a MediaElement. Reports a duration and supports seeking, so
/// MediaElement.Position returns meaningful values during playback.
/// </summary>
/// <remarks>
/// The source takes ownership of the supplied stream and disposes it when the
/// media is closed. The WAV parser is deliberately simple: it assumes the
/// fixed chunk order RIFF/WAVE/fmt /data and does not skip unknown chunks.
/// </remarks>
public class SimpleRawMediaStreamSource : MediaStreamSource, IDisposable
{
private MediaStreamDescription _audioDesc;
// Presentation timestamp of the next sample, in 100-ns ticks.
private long _currentTimeStamp;
private WAVEFORMATEX _waveFormat;
private Dictionary<MediaSampleAttributeKeys, string> _emptySampleDict = new Dictionary<MediaSampleAttributeKeys, string>();
// Reusable PCM buffer holding roughly one second of audio.
private byte[] _decodingBuffer;
private Stream _stream;
// Byte offset of the first PCM sample (just past the WAV header); seek origin.
private long _rawStart;
/// <summary>
/// Wraps <paramref name="stream"/>; the stream must be seekable and is
/// disposed by this source when the media is closed.
/// </summary>
public SimpleRawMediaStreamSource(Stream stream)
{
_stream = stream;
_rawStart = 0;
}
// Reads a little-endian 16-bit integer from the stream; returns 0 at end of stream.
private short ReadInt16()
{
int a = _stream.ReadByte();
int b = _stream.ReadByte();
if (a == -1 || b == -1)
return 0;
return (short)((b << 8) | a);
}
// Reads a little-endian 32-bit integer from the stream; returns 0 at end of stream.
private int ReadInt32()
{
int a = _stream.ReadByte();
int b = _stream.ReadByte();
int c = _stream.ReadByte();
int d = _stream.ReadByte();
if (a == -1 || b == -1 || c == -1 || d == -1)
return 0;
return (d << 24) | (c << 16) | (b << 8) | a;
}
/// <summary>
/// Parses the WAV header, records where the PCM payload starts, and reports
/// a seekable audio stream whose duration is derived from the payload size.
/// </summary>
protected override void OpenMediaAsync()
{
Dictionary<MediaStreamAttributeKeys, string> streamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
Dictionary<MediaSourceAttributesKeys, string> sourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
List<MediaStreamDescription> availableStreams = new List<MediaStreamDescription>();
// Canonical WAV layout: RIFF <size> WAVE "fmt " <size> <WAVEFORMATEX fields> "data" <size> <pcm...>
_waveFormat = new WAVEFORMATEX();
ReadInt32(); // "RIFF"
ReadInt32(); // size of "RIFF" content
ReadInt32(); // "WAVE"
ReadInt32(); // "fmt "
ReadInt32(); // size of "fmt" content
_waveFormat.FormatTag = ReadInt16();
_waveFormat.Channels = ReadInt16();
_waveFormat.SamplesPerSec = ReadInt32();
_waveFormat.AvgBytesPerSec = ReadInt32();
_waveFormat.BlockAlign = ReadInt16();
_waveFormat.BitsPerSample = ReadInt16();
_waveFormat.Size = ReadInt16();
ReadInt32(); // "data"
ReadInt32(); // size of "data" content
_rawStart = _stream.Position;
// A malformed/truncated header yields AvgBytesPerSec == 0; fail cleanly
// instead of dividing by zero below.
if (_waveFormat.AvgBytesPerSec <= 0)
{
ErrorOccurred("Invalid or unsupported WAV header");
return;
}
streamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = _waveFormat.ToHexString(); // wfx
MediaStreamDescription msd = new MediaStreamDescription(MediaStreamType.Audio, streamAttributes);
_audioDesc = msd;
availableStreams.Add(_audioDesc);
sourceAttributes[MediaSourceAttributesKeys.CanSeek] = true.ToString();
// Duration in 100-ns ticks: payloadBytes / bytesPerSecond * 10^7.
var duration = (_stream.Length - _rawStart) * 10000000 / _waveFormat.AvgBytesPerSec;
sourceAttributes[MediaSourceAttributesKeys.Duration] = duration.ToString(System.Globalization.CultureInfo.InvariantCulture);
ReportOpenMediaCompleted(sourceAttributes, availableStreams);
}
/// <summary>Releases the underlying stream; safe to call more than once.</summary>
protected override void CloseMedia()
{
if (_stream != null)
{
_stream.Dispose();
_stream = null;
}
}
// Diagnostics are not supported by this source.
protected override void GetDiagnosticAsync(MediaStreamSourceDiagnosticKind diagnosticKind)
{
}
/// <summary>
/// Delivers the next ~1 second of PCM as a sample, or an empty (null-stream)
/// sample to signal end of stream once the payload is exhausted.
/// </summary>
protected override void GetSampleAsync(MediaStreamType mediaStreamType)
{
if (_stream.Position < _stream.Length)
{
// One second of audio per sample.
var bufferSize = _waveFormat.SamplesPerSec * _waveFormat.Channels * _waveFormat.BitsPerSample / 8;
if (_decodingBuffer == null || _decodingBuffer.Length != bufferSize)
_decodingBuffer = new byte[bufferSize];
// Stream.Read may return fewer bytes than requested (e.g. the final
// chunk); limit the sample to the bytes actually read so stale data
// from the reused buffer is never played.
int bytesRead = _stream.Read(_decodingBuffer, 0, _decodingBuffer.Length);
if (bytesRead <= 0)
{
// Nothing left despite Position < Length; end the stream rather
// than reporting a zero-length sample forever.
ReportGetSampleCompleted(new MediaStreamSample(_audioDesc, null, 0, 0, _currentTimeStamp, _emptySampleDict));
return;
}
using (var stream = new System.IO.MemoryStream(_decodingBuffer, 0, bytesRead))
{
MediaStreamSample ms = new MediaStreamSample(_audioDesc, stream, 0, stream.Length, _currentTimeStamp, _emptySampleDict);
_currentTimeStamp += _waveFormat.AudioDurationFromBufferSize((uint)stream.Length);
ReportGetSampleCompleted(ms);
}
}
else
{
// A null sample stream signals end-of-stream to the media pipeline.
MediaStreamSample ms = new MediaStreamSample(_audioDesc, null, 0, 0, _currentTimeStamp, _emptySampleDict);
ReportGetSampleCompleted(ms);
}
}
/// <summary>
/// Seeks to the byte offset corresponding to <paramref name="seekToTime"/>
/// (100-ns ticks) and realigns the running timestamp to the actual position.
/// </summary>
protected override void SeekAsync(long seekToTime)
{
try
{
var seekBytes = _waveFormat.BufferSizeFromAudioDuration(seekToTime);
_stream.Seek(_rawStart + seekBytes, SeekOrigin.Begin);
_currentTimeStamp = _waveFormat.AudioDurationFromBufferSize((uint)seekBytes);
ReportSeekCompleted(_currentTimeStamp);
}
catch (Exception)
{
// Best-effort seek: swallowing here leaves playback at the old
// position. NOTE(review): the pipeline may stall waiting for
// ReportSeekCompleted — consider reporting an error instead.
}
}
// Single audio stream only; nothing to switch.
protected override void SwitchMediaStreamAsync(MediaStreamDescription mediaStreamDescription)
{
}
/// <summary>Disposes the source by closing the underlying stream.</summary>
public void Dispose()
{
this.CloseMedia();
}
}
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment