@monyone
Last active June 13, 2022 06:27
Live MPEG-TS playback experiment (using Insertable Streams for MediaStreamTrack)
# ffmpeg.sh: re-encode the MPEG-TS arriving on stdin into a low-latency H.264/AAC TS on stdout
ffmpeg -f mpegts -i - -map 0:v -map 0:a \
  -c:v libx264 -tune zerolatency,fastdecode \
  -c:a aac \
  -f mpegts -fflags +nobuffer -flags +low_delay -max_delay 0 \
  -
// live.js: read the re-encoded TS from stdin and serve it to clients as a chunked HTTP stream
const { Writable } = require('stream');
const express = require('express');
const cors = require('cors');

const port = 3000;
const app = express();
app.use(cors());

// Dummy sink so stdin keeps flowing even while no client is connected.
process.stdin.pipe(new Writable({
  write(chunk, encoding, callback) { callback(); }
}));

// Each connected client gets the live TS piped straight from stdin.
app.get('/live', (req, res) => {
  process.stdin.pipe(res);
});

app.listen(port);

What is this?

A demo implementation of live MPEG-TS playback that uses the following APIs added in Chrome M94:

  • WebCodecs
  • Insertable Streams for MediaStreamTrack

Usage

Use a setup similar to mpegts.js live streaming (a max_delay of 0 is recommended for lip sync, and it is recommended not to multiplex data streams):
when the TS comes in, transcode it with options like those in ffmpeg.sh and return the resulting stream over HTTP as live.js does.
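For example, assuming some command writes the live TS to stdout (the plain cat of input.ts below is only a placeholder for the actual source), the two scripts above can be chained like this:

# input.ts stands in for whatever writes the live TS to stdout
cat input.ts | ./ffmpeg.sh | node live.js
# the re-encoded stream is then served at http://localhost:3000/live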

When the client fetches that stream, the TS is played back with latency on the same level as WebRTC.

How it works

  • Demuxing the TS and passing the PES packets to the decoders added by WebCodecs yields VideoFrame / AudioFrame objects
  • Passing those frames to the API added by Insertable Streams for MediaStreamTrack makes them play back (see the sketch below)
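Stripped of the worker plumbing used in the demo below, the handoff between the two APIs boils down to roughly this (a video-only sketch with error handling and the demuxer omitted, not the full implementation):

// Minimal sketch: WebCodecs decodes EncodedVideoChunks into VideoFrames,
// and a MediaStreamTrackGenerator renders whatever VideoFrames are written to it.
const generator = new MediaStreamTrackGenerator({ kind: 'video' });
const writer = generator.writable.getWriter();

const decoder = new VideoDecoder({
  output: (videoFrame) => writer.write(videoFrame),
  error: console.error,
});
decoder.configure({ codec: 'avc1.64001f' });

document.querySelector('video').srcObject = new MediaStream([generator]);
// Feed decoder.decode(encodedVideoChunk) with the demuxed access units (see worker-extractor.js below).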

Limitations

  • MediaStreamTrack does not synchronize tracks by timestamp, so lip sync has to be handled by the application (one possible approach is sketched after this list)
  • If the received chunks do not correspond to exactly one frame each, any smoothing also has to be implemented separately
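The demo writes every frame as soon as it is decoded. One possible (untested) direction for manual lip sync, which is not part of the original code, is to schedule writes against a shared wall clock derived from the first timestamp seen, for example:

// Hypothetical helper: release frames according to their timestamps instead of immediately.
// frame.timestamp is in microseconds (the extractor below sets it to pts / 90 * 1000).
let baseTimestamp = null; // timestamp of the first frame seen (µs)
let baseTime = null;      // performance.now() when that frame was seen (ms)

function scheduleWrite(writer, frame) {
  if (baseTimestamp === null) {
    baseTimestamp = frame.timestamp;
    baseTime = performance.now();
  }
  const due = baseTime + (frame.timestamp - baseTimestamp) / 1000; // µs -> ms
  setTimeout(() => writer.write(frame), Math.max(0, due - performance.now()));
}
// Would replace the direct writer.write() calls for both the video and the audio writer in the page script below.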
<!-- Main page: receives chunks from worker-extractor.js, forwards them to worker-decoder.js, and plays the decoded frames via MediaStreamTrackGenerator -->
<video id="video" autoplay muted controls></video>
<script>
(async () => {
  const fetcher = new Worker('worker-extractor.js');
  const decoder = new Worker('worker-decoder.js');

  const video = document.getElementById('video');
  const videoTrackGenerator = new MediaStreamTrackGenerator({ kind: 'video' });
  const audioTrackGenerator = new MediaStreamTrackGenerator({ kind: 'audio' });
  const videoTrackGeneratorWriter = videoTrackGenerator.writable.getWriter();
  const audioTrackGeneratorWriter = audioTrackGenerator.writable.getWriter();
  const mediaStream = new MediaStream();
  mediaStream.addTrack(videoTrackGenerator);
  mediaStream.addTrack(audioTrackGenerator);

  // The extractor worker emits EncodedVideoChunk / EncodedAudioChunk; forward them to the decoder worker.
  fetcher.onmessage = (e) => {
    if (e.data.type === 'video') {
      const encodedVideoChunk = e.data.encodedVideoChunk;
      decoder.postMessage({ type: 'video', encodedVideoChunk });
    } else if (e.data.type === 'audio') {
      const encodedAudioChunk = e.data.encodedAudioChunk;
      decoder.postMessage({ type: 'audio', encodedAudioChunk });
    }
  };

  // The decoder worker emits decoded frames; write them into the track generators for playback.
  decoder.onmessage = (e) => {
    if (e.data.type === 'video') {
      const videoFrame = e.data.videoFrame;
      videoTrackGeneratorWriter.write(videoFrame);
      videoFrame.close();
    } else if (e.data.type === 'audio') {
      const audioFrame = e.data.audioFrame;
      audioTrackGeneratorWriter.write(audioFrame);
      audioFrame.close();
    }
  };

  video.srcObject = mediaStream;
})();
</script>
// worker-decoder.js: decodes EncodedVideoChunk / EncodedAudioChunk into VideoFrame / AudioFrame with WebCodecs
(async () => {
  const videoDecoder = new VideoDecoder({
    output: (videoFrame) => {
      self.postMessage({ type: 'video', videoFrame });
      videoFrame.close();
    },
    error: () => {},
  });
  await videoDecoder.configure({
    codec: 'avc1.64001f',
    hardwareAcceleration: "prefer-hardware",
  });

  const audioDecoder = new AudioDecoder({
    output: (audioFrame) => {
      self.postMessage({ type: 'audio', audioFrame });
      audioFrame.close();
    },
    error: () => {},
  });
  await audioDecoder.configure({
    codec: 'mp4a.40.2',
    sampleRate: 48000,
    numberOfChannels: 2,
  });

  self.onmessage = (e) => {
    if (e.data.type === 'video') {
      const encodedVideoChunk = e.data.encodedVideoChunk;
      videoDecoder.decode(encodedVideoChunk);
    } else if (e.data.type === 'audio') {
      const encodedAudioChunk = e.data.encodedAudioChunk;
      audioDecoder.decode(encodedAudioChunk);
    }
  };
})();
// worker-extractor.js: fetches the live TS over HTTP, demuxes the hard-coded video (PID 0x100) and
// audio (PID 0x101) elementary streams into PES packets, and posts them to the page as
// EncodedVideoChunk / EncodedAudioChunk.
const liveUrl = 'URL of the live TS source (e.g. http://localhost:3000/live)';

(async () => {
  let firstIDR = false;              // drop video until the first IDR so decoding starts on a keyframe
  let remains = Uint8Array.from([]); // partial TS packet carried over to the next chunk
  let videoPES = Uint8Array.from([]);
  let audioPES = Uint8Array.from([]);

  const extract = new WritableStream({
    write(chunk) {
      chunk = Uint8Array.from([... remains, ... chunk]);
      for (let index = 0; index < chunk.length;) {
        // Keep an incomplete 188-byte packet for the next chunk; otherwise resync on the 0x47 sync byte.
        if (chunk[index] === 0x47 && index + 188 >= chunk.length) {
          remains = chunk.slice(index);
          break;
        } else if (chunk[index] !== 0x47) {
          index++;
          continue;
        }
        const pid = ((chunk[index + 1] & 0x1F) << 8) | chunk[index + 2];
        if (pid !== 0x100 && pid !== 0x101) {
          index += 188;
          continue;
        }
        const end = index + 188;
        let payload_start = index + 4;
        const payload_unit_start_indicator = (chunk[index + 1] & 0x40) >>> 6;
        const adaptation_field_control = (chunk[index + 3] & 0x30) >>> 4;
        if (adaptation_field_control === 0x02 || adaptation_field_control === 0x03) {
          payload_start += 1 + chunk[index + 4]; // skip the adaptation field
        }
        if (pid === 0x100) {
          // A new payload unit starts: flush the buffered video PES as an EncodedVideoChunk.
          if (payload_unit_start_indicator && videoPES.length > 0) {
            const PTS_DTS_flags = (videoPES[7] & 0xC0) >>> 6;
            const PES_header_data_length = videoPES[8];
            const payload_start_index = 6 + 3 + PES_header_data_length;
            const data = videoPES.slice(payload_start_index);
            let pts = 0;
            if (PTS_DTS_flags === 0x02 || PTS_DTS_flags === 0x03) {
              pts = (videoPES[9] & 0x0E) * 536870912 + // 1 << 29
                    (videoPES[10] & 0xFF) * 4194304 +  // 1 << 22
                    (videoPES[11] & 0xFE) * 16384 +    // 1 << 14
                    (videoPES[12] & 0xFF) * 128 +      // 1 << 7
                    (videoPES[13] & 0xFE) / 2;
            }
            // Scan the Annex B start codes for an IDR slice (NAL unit type 5).
            let hasIDR = false;
            for (let i = 0; i < data.length; i++) {
              if (i + 4 < data.length) {
                const uint32 =
                  (data[i + 0] << 24) |
                  (data[i + 1] << 16) |
                  (data[i + 2] << 8) |
                  (data[i + 3] << 0);
                if (uint32 === 1) {
                  const type = data[i + 4] & 0x1F;
                  if (type === 5) {
                    hasIDR = true;
                  }
                }
              }
              if (i + 3 < data.length) {
                const uint24 =
                  (data[i + 0] << 16) |
                  (data[i + 1] << 8) |
                  (data[i + 2] << 0);
                if (uint24 === 1) {
                  const type = data[i + 3] & 0x1F;
                  if (type === 5) {
                    hasIDR = true;
                  }
                }
              }
            }
            firstIDR ||= hasIDR;
            if (firstIDR) {
              const encodedVideoChunk = new EncodedVideoChunk({
                type: hasIDR ? 'key' : 'delta',
                timestamp: pts / 90 * 1000, // 90 kHz PTS -> microseconds
                data: data,
              });
              self.postMessage({ type: 'video', encodedVideoChunk });
            }
            videoPES = Uint8Array.from([]);
          } else if (!payload_unit_start_indicator && videoPES.length === 0) {
            index += 188;
            continue;
          }
          videoPES = Uint8Array.from([... videoPES, ... chunk.slice(payload_start, end)]);
          index += 188;
        } else if (pid === 0x101) {
          // A new payload unit starts: flush the buffered audio PES (ADTS AAC) as an EncodedAudioChunk.
          if (payload_unit_start_indicator && audioPES.length > 0) {
            const PTS_DTS_flags = (audioPES[7] & 0xC0) >>> 6;
            const PES_header_data_length = audioPES[8];
            const payload_start_index = 6 + 3 + PES_header_data_length;
            const data = audioPES.slice(payload_start_index);
            let pts = 0;
            if (PTS_DTS_flags === 0x02 || PTS_DTS_flags === 0x03) {
              pts = (audioPES[9] & 0x0E) * 536870912 + // 1 << 29
                    (audioPES[10] & 0xFF) * 4194304 +  // 1 << 22
                    (audioPES[11] & 0xFE) * 16384 +    // 1 << 14
                    (audioPES[12] & 0xFF) * 128 +      // 1 << 7
                    (audioPES[13] & 0xFE) / 2;
            }
            const encodedAudioChunk = new EncodedAudioChunk({
              type: 'key',
              timestamp: pts / 90 * 1000, // 90 kHz PTS -> microseconds
              data: data,
            });
            self.postMessage({ type: 'audio', encodedAudioChunk });
            audioPES = Uint8Array.from([]);
          } else if (!payload_unit_start_indicator && audioPES.length === 0) {
            index += 188;
            continue;
          }
          audioPES = Uint8Array.from([... audioPES, ... chunk.slice(payload_start, end)]);
          index += 188;
        }
      }
    }
  }, {});

  const response = await fetch(liveUrl);
  response.body.pipeTo(extract);
})();