// two local peer connections wired back-to-back (no signaling server)
var pc1 = new RTCPeerConnection(),
    pc2 = new RTCPeerConnection();
var addCandidate = (pc, can) => can && pc.addIceCandidate(can).catch(console.error);
pc1.onicecandidate = e => { addCandidate(pc2, e.candidate); };
pc2.onicecandidate = e => { addCandidate(pc1, e.candidate); };
pc1.oniceconnectionstatechange = e => console.log("pc1 iceConnState:", pc1.iceConnectionState);
pc2.oniceconnectionstatechange = e => console.log("pc2 iceConnState:", pc2.iceConnectionState);
// negotiated channels share an id, so no in-band channel negotiation is needed
var pc1dch = pc1.createDataChannel('dch', { negotiated: true, id: 1 });
var pc2dch = pc2.createDataChannel('dch', { negotiated: true, id: 1 });
pc2dch.binaryType = 'arraybuffer';
pc1dch.binaryType = 'arraybuffer';
pc1dch.onopen = e => { console.log("pc1dch open"); };
pc2dch.onopen = e => { console.log("pc2dch open"); };
pc1dch.onclose = e => { console.log("pc1dch close"); };
pc2dch.onclose = e => { console.log("pc2dch close"); };
pc2dch.onmessage = e => { console.log("pc2dch message: ", e); };
pc1dch.onmessage = e => { console.log("pc1dch message: ", e); };
function start() {
  pc1.createOffer()
    .then(d => pc1.setLocalDescription(d))
    .then(() => pc2.setRemoteDescription(pc1.localDescription))
    .then(() => pc2.createAnswer())
    .then(d => pc2.setLocalDescription(d))
    .then(() => pc1.setRemoteDescription(pc2.localDescription))
    .catch(console.error);
}
start();
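For a quick sanity check once both onopen handlers fire, a message can be sent across the negotiated channel in either direction; a minimal sketch, not part of the original gist:

// sketch: exercise the channel after it opens (assumes the pc1dch/pc2dch above)
pc1dch.addEventListener('open', () => {
  pc1dch.send('hello from pc1');                 // logged by pc2dch.onmessage
  pc1dch.send(new Uint8Array([1, 2, 3]).buffer); // binary arrives as an ArrayBuffer
});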
Believe it's option B:
a 10 second delay between each media packet the remote portion of the code receives
I want to use one camera to transfer the live stream to another device, and throughout the whole session it should play the video with an added delay on the playback, so you can watch something you did a moment afterwards.
It shouldn't just record 10 seconds of video, then stop and play the final blob afterwards... it needs to be continuous.
@jimmywarting At any given point during the live stream you can draw black frames for the effect of a delay, given the same code at the linked plnkr:
let raf;
let now = 0;
let then = 60 * 10; // ~10 s worth of frames at 60 fps
const draw = async () => {
  if (++now < then) {
    // not enough frames elapsed yet: paint black
    ctx.fillStyle = "black";
    ctx.fillRect(0, 0, width, height);
  } else {
    drawClock();
  }
  requestFrame();
  requestAnimationFrame(draw);
};
// https://github.com/w3c/mediacapture-fromelement/issues/76
const requestFrame = _ => canvasStream.requestFrame ? canvasStream.requestFrame() : videoTrack.requestFrame();
raf = requestAnimationFrame(draw);
setTimeout(() => console.log(now), 10000);
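The ctx, canvasStream, videoTrack, width, height and drawClock references here are the ones already defined at the linked plnkr (see the full createMediaStreamTracks function below); the snippet only replaces its draw loop.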
Another alternative would be to use 2 canvas elements at the local portion of the code: draw and store images of the live stream while streaming black frames. At 10 seconds, begin drawing the stored frames, which should render at the remote portion of the code 10 seconds behind the live images being stored.
@jimmywarting Substitute the code below for the createMediaStreamTracks function at the linked plnkr:
const createMediaStreamTracks = _ => {
  const canvas = document.createElement("canvas");
  canvas.id = "canvas";
  const span = document.createElement("span");
  span.textContent = canvas.id;
  canvas.width = width;
  canvas.height = height;
  document.body.appendChild(canvas);
  canvas.insertAdjacentElement("beforebegin", span);
  const ctx = canvas.getContext("2d");
  canvasStream = canvas.captureStream(0);
  const [videoTrack] = canvasStream.getVideoTracks();
  var radius = canvas.height / 2;
  ctx.translate(radius, radius);
  radius = radius * 0.90;
  function drawClock() {
    drawFace(ctx, radius);
    drawNumbers(ctx, radius);
    drawTime(ctx, radius);
  }
  function drawFace(ctx, radius) {
    var grad;
    ctx.beginPath();
    ctx.arc(0, 0, radius, 0, 2 * Math.PI);
    ctx.fillStyle = 'white';
    ctx.fill();
    grad = ctx.createRadialGradient(0, 0, radius * 0.95, 0, 0, radius * 1.05);
    grad.addColorStop(0, '#333');
    grad.addColorStop(0.5, 'white');
    grad.addColorStop(1, '#333');
    ctx.strokeStyle = grad;
    ctx.lineWidth = radius * 0.1;
    ctx.stroke();
    ctx.beginPath();
    ctx.arc(0, 0, radius * 0.1, 0, 2 * Math.PI);
    ctx.fillStyle = '#333';
    ctx.fill();
  }
  function drawNumbers(ctx, radius) {
    var ang;
    var num;
    ctx.font = radius * 0.15 + "px arial";
    ctx.textBaseline = "middle";
    ctx.textAlign = "center";
    for (num = 1; num < 13; num++) {
      ang = num * Math.PI / 6;
      ctx.rotate(ang);
      ctx.translate(0, -radius * 0.85);
      ctx.rotate(-ang);
      ctx.fillText(num.toString(), 0, 0);
      ctx.rotate(ang);
      ctx.translate(0, radius * 0.85);
      ctx.rotate(-ang);
    }
  }
  function drawTime(ctx, radius) {
    var now = new Date();
    var hour = now.getHours();
    var minute = now.getMinutes();
    var second = now.getSeconds();
    // hour
    hour = hour % 12;
    hour = (hour * Math.PI / 6) +
      (minute * Math.PI / (6 * 60)) +
      (second * Math.PI / (360 * 60));
    drawHand(ctx, hour, radius * 0.5, radius * 0.07);
    // minute
    minute = (minute * Math.PI / 30) + (second * Math.PI / (30 * 60));
    drawHand(ctx, minute, radius * 0.8, radius * 0.07);
    // second
    second = (second * Math.PI / 30);
    drawHand(ctx, second, radius * 0.9, radius * 0.02);
  }
  function drawHand(ctx, pos, length, width) {
    ctx.beginPath();
    ctx.lineWidth = width;
    ctx.lineCap = "round";
    ctx.moveTo(0, 0);
    ctx.rotate(pos);
    ctx.lineTo(0, -length);
    ctx.stroke();
    ctx.rotate(-pos);
  }
  // second canvas: draws black frames for ~10 seconds, then replays stored frames
  const delayStreamCanvas = document.createElement("canvas");
  delayStreamCanvas.width = width;
  delayStreamCanvas.height = height;
  const delayStreamContext = delayStreamCanvas.getContext("2d");
  const delayStream = delayStreamCanvas.captureStream(0);
  const [delayStreamTrack] = delayStream.getVideoTracks();
  let now = 0;
  let then = 60 * 10; // ~10 s worth of frames at 60 fps
  let raf;
  const delayed = []; // FIFO buffer of stored ImageBitmaps
  requestAnimationFrame(function drawDelay() {
    if (++now < then) {
      delayStreamContext.fillStyle = "black";
      delayStreamContext.fillRect(0, 0, width, height);
    } else {
      // stream stored images of the live stream
      delayStreamContext.drawImage(delayed.shift(), 0, 0);
    }
    requestFrame(delayStream);
    requestAnimationFrame(drawDelay);
  });
  const draw = async () => {
    // draw images
    drawClock();
    // store images
    delayed.push(await createImageBitmap(canvas));
    requestFrame(canvasStream);
    requestAnimationFrame(draw);
  };
  // https://github.com/w3c/mediacapture-fromelement/issues/76
  const requestFrame = stream => stream.requestFrame ? stream.requestFrame() : stream.getVideoTracks()[0].requestFrame();
  raf = requestAnimationFrame(draw);
  setTimeout(() => console.log(now), 10000);
  return {
    mediaStream: delayStream,
    videoTrack: delayStreamTrack,
    raf
  };
}
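A hypothetical way to consume the returned object and preview or send the delayed track; the video element and peer-connection wiring here are assumptions, not part of the plnkr:

// hypothetical usage of the returned delayed stream (names assumed)
const { mediaStream, videoTrack, raf } = createMediaStreamTracks();
document.querySelector("video").srcObject = mediaStream; // local preview of the delayed output
// pc.addTrack(videoTrack, mediaStream);                 // or send it over WebRTC
// cancelAnimationFrame(raf);                            // stop drawing when done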
@jimmywarting If the initial MediaStream is not derived from a canvas, the same approach can be employed by utilizing ImageCapture grabFrame() to store the current frame of a MediaStream as an ImageBitmap (see https://plnkr.co/edit/5bvp9xv0ciMYfVzG; https://github.com/guest271314/MediaFragmentRecorder/blob/imagecapture-audiocontext-readablestream-writablestream/MediaFragmentRecorder.html).
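A rough sketch of that variant, assuming a camera track from getUserMedia(); the function and variable names are illustrative, not from the plnkr, and ImageCapture is not available in all browsers:

// sketch: buffer live camera frames via ImageCapture.grabFrame(), replay them ~10 s later
async function delayedCameraStream() {
  const cameraStream = await navigator.mediaDevices.getUserMedia({ video: true });
  const [cameraTrack] = cameraStream.getVideoTracks();
  const imageCapture = new ImageCapture(cameraTrack);
  const { width = 640, height = 480 } = cameraTrack.getSettings();
  const delayCanvas = document.createElement("canvas");
  delayCanvas.width = width;
  delayCanvas.height = height;
  const delayCtx = delayCanvas.getContext("2d");
  const delayedStream = delayCanvas.captureStream(0);
  const [delayedTrack] = delayedStream.getVideoTracks();
  const delayed = []; // FIFO of ImageBitmaps
  let now = 0;
  const then = 60 * 10; // ~10 s of frames at 60 fps
  const draw = async () => {
    delayed.push(await imageCapture.grabFrame()); // store the current camera frame
    if (++now < then) {
      delayCtx.fillStyle = "black"; // nothing to replay yet
      delayCtx.fillRect(0, 0, width, height);
    } else {
      delayCtx.drawImage(delayed.shift(), 0, 0, width, height);
    }
    delayedTrack.requestFrame();
    requestAnimationFrame(draw);
  };
  requestAnimationFrame(draw);
  return delayedStream;
}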
Using one requestAnimationFrame:
const draw = async () => {
  drawClock();
  delayed.push(await createImageBitmap(canvas));
  if (++now < then) {
    delayStreamContext.fillStyle = "black";
    delayStreamContext.fillRect(0, 0, width, height);
  } else {
    delayStreamContext.drawImage(delayed.shift(), 0, 0);
  }
  requestFrame(canvasStream);
  requestFrame(delayStream);
  requestAnimationFrame(draw);
};
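Note that this merged version keeps the delayed buffer a roughly constant 600 frames deep: each callback stores one live frame and, once now reaches then, also consumes one, so playback stays about 10 seconds (at 60 fps) behind the live drawing.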
https://bugs.chromium.org/p/webrtc/issues/detail?id=10759#c12:
To answer your original question: It's not possible to set a playout delay of 10 seconds in WebRTC.
Hmm, thanks for investigating the possibility
@jimmywarting Can you clarify what the expected output is? Are you expecting
A. a 10 second delay before the remote portion of the code receives the first media packet? or
B. a 10 second delay between each media packet the remote portion of the code receives?