Skip to content

Instantly share code, notes, and snippets.

@Jamie0
Last active January 31, 2024 11:22
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save Jamie0/84e22d2164843a01564a6ee405be7ed8 to your computer and use it in GitHub Desktop.
Electron #41158 test case
<html>
<body>
<!-- Despite the "Video" ids, both sinks are <audio> elements (audio-only
     repro): localVideo previews the local mic stream, remoteVideo plays the
     WebAudio-processed remote stream built in gotRemoteStream(). -->
<audio id="localVideo" playsinline autoplay controls muted></audio>
<audio id="remoteVideo" playsinline autoplay controls></audio>
<div class="box">
<button id="startButton">Start</button>
<button id="callButton">Call</button>
<button id="hangupButton">Hang Up</button>
</div>
</body>
<script type="text/javascript">
// Shared state for the remote-audio graph (see gotRemoteStream):
// `context` is the AudioContext, `blinkWorkaroundAudio` the muted sink element.
let context, blinkWorkaroundAudio;
// Crude simulation of network jitter
// Delays every encoded chunk by `latency` plus a random amount up to
// `jitter` milliseconds. Per the Streams spec, transformer methods are
// invoked with the transformer object as `this`, so this.latency /
// this.jitter read the two fields below.
// NOTE(review): independent random setTimeout delays can re-enqueue chunks
// out of order, so this simulates reordering as well as jitter — presumably
// intentional for this test case.
const randomDelayTransformStream = new TransformStream({
latency: 10,
jitter: 1000,
transform (chunk, controller) {
setTimeout(
function () {
controller.enqueue(chunk)
},
this.latency + Math.random() * this.jitter
)
}
});
// Route this sender's encoded frames through the jitter-simulating
// transform before handing them back to the sender's writable side.
// Requires the pc to be created with encodedInsertableStreams: true.
function setupSenderTransform(sender) {
  const { readable, writable } = sender.createEncodedStreams();
  readable.pipeThrough(randomDelayTransformStream).pipeTo(writable);
}
// 'track' handler for pc2: builds a WebAudio graph
// (MediaStreamSource -> ChannelMerger -> MediaStreamDestination) and plays
// the destination stream in #remoteVideo. Guarded so it only runs once per
// call (a second 'track' event leaves the existing graph in place).
function gotRemoteStream(e) {
if (!remoteVideo.srcObject) {
// Tear down any AudioContext left over from a previous call.
if (context) context.close()
context = new AudioContext();
let source = context.createMediaStreamSource(e.streams[0])
// Pass-through node between source and destination; merges into 2 channels.
let dummyNode = context.createChannelMerger(2)
let destination = context.createMediaStreamDestination();
// Blink will mute the WebRTC MediaStream unless it's also attached to a (silent) sink element
// https://bugs.chromium.org/p/chromium/issues/detail?id=933677
blinkWorkaroundAudio = new Audio();
blinkWorkaroundAudio.muted = true;
blinkWorkaroundAudio.srcObject = e.streams[0];
blinkWorkaroundAudio.play();
source.connect(dummyNode)
dummyNode.connect(destination)
// Input-less gain node wired to the context's output; produces silence but
// keeps context.destination connected.
context.createGain().connect(context.destination)
console.log('pc2 received source', source, destination);
remoteVideo.srcObject = destination.stream;
}
}
// Adapted from https://webrtc.github.io/samples/src/content/peerconnection/pc1/
/*
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree.
*/
// NOTE(review): a 'use strict' directive is only effective as the FIRST
// statement of a script or function body; this far into the script it is a
// no-op (kept from the upstream sample).
'use strict';
const startButton = document.getElementById('startButton');
const callButton = document.getElementById('callButton');
const hangupButton = document.getElementById('hangupButton');
// Only Start is enabled until a local stream has been acquired.
callButton.disabled = true;
hangupButton.disabled = true;
startButton.addEventListener('click', start);
callButton.addEventListener('click', call);
hangupButton.addEventListener('click', hangup);
// Set in call(); consumed (and cleared) below to report setup time once.
let startTime;
// Despite the ids, both elements are <audio> sinks — audio-only test case.
const localVideo = document.getElementById('localVideo');
const remoteVideo = document.getElementById('remoteVideo');
remoteVideo.addEventListener('playing', () => {
if (startTime) {
const elapsedTime = window.performance.now() - startTime;
console.log('Setup time: ' + elapsedTime.toFixed(3) + 'ms');
startTime = null;
}
});
let localStream;
// pc1 = sending peer, pc2 = receiving peer; both live in this page and are
// signaled to each other directly (loopback).
let pc1;
let pc2;
// Audio-only offer (legacy constraint-style options).
const offerOptions = {
offerToReceiveAudio: 1,
offerToReceiveVideo: 0
};
// Human-readable label for a peer connection in log output.
function getName(pc) {
  if (pc === pc1) {
    return 'pc1';
  }
  return 'pc2';
}
// Loopback-signaling helper: returns the "other" end of a peer connection.
function getOtherPc(pc) {
  if (pc === pc1) {
    return pc2;
  }
  return pc1;
}
// Click handler for #startButton: requests a raw (unprocessed) microphone
// stream, previews it in #localVideo and enables the Call button.
// All browser audio processing is disabled so the test hears the wire signal.
async function start() {
  console.log('Requesting local stream');
  startButton.disabled = true;
  const audio = {
    noiseSuppression: false,
    autoGainControl: false,
    echoCancellation: false,
  };
  try {
    const stream = await navigator.mediaDevices.getUserMedia({ audio });
    console.log('Received local stream');
    localVideo.srcObject = stream;
    localStream = stream;
    callButton.disabled = false;
  } catch (e) {
    alert(`getUserMedia() error: ${e.name}`);
  }
}
// Click handler for #callButton: creates both RTCPeerConnections, wires
// loopback ICE/signaling between them, attaches the mic track to pc1 (with
// the random-delay transform on its sender) and starts the offer/answer
// exchange.
async function call() {
callButton.disabled = true;
hangupButton.disabled = false;
console.log('Starting call');
startTime = window.performance.now();
const videoTracks = localStream.getVideoTracks();
const audioTracks = localStream.getAudioTracks();
if (videoTracks.length > 0) {
console.log(`Using video device: ${videoTracks[0].label}`);
}
if (audioTracks.length > 0) {
console.log(`Using audio device: ${audioTracks[0].label}`);
}
const configuration = {
// NOTE(review): the standard field name is "urls"; "url" is a legacy
// Chromium-only spelling — confirm it still works on the tested version.
iceServers: [{"url":"stun:stun.l.google.com:19302"}]
};
console.log('RTCPeerConnection configuration:', configuration);
// Only pc1 enables encoded insertable streams, which is what allows
// setupSenderTransform() to call createEncodedStreams() on its senders.
pc1 = new RTCPeerConnection({...configuration, encodedInsertableStreams: true});
console.log('Created local peer connection object pc1');
pc1.addEventListener('icecandidate', e => onIceCandidate(pc1, e));
pc2 = new RTCPeerConnection(configuration);
console.log('Created remote peer connection object pc2');
pc2.addEventListener('icecandidate', e => onIceCandidate(pc2, e));
pc1.addEventListener('iceconnectionstatechange', e => onIceStateChange(pc1, e));
pc2.addEventListener('iceconnectionstatechange', e => onIceStateChange(pc2, e));
pc2.addEventListener('track', gotRemoteStream);
localStream.getTracks().forEach(track => pc1.addTrack(track, localStream));
console.log('Added local stream to pc1')
// Route every outgoing sender through the jitter-simulating transform.
pc1.getSenders().forEach(setupSenderTransform)
try {
console.log('pc1 createOffer start');
const offer = await pc1.createOffer(offerOptions);
await onCreateOfferSuccess(offer);
} catch (e) {
onCreateSessionDescriptionError(e);
}
}
// Logs createOffer/createAnswer failures.
function onCreateSessionDescriptionError(error) {
  const reason = error.toString();
  console.log(`Failed to create session description: ${reason}`);
}
// Applies pc1's offer locally, installs it as pc2's remote description,
// then asks pc2 to create the answer.
// @param {RTCSessionDescriptionInit} desc - the offer produced by pc1.createOffer().
async function onCreateOfferSuccess(desc) {
  console.log(`Offer from pc1\n${desc.sdp}`);
  console.log('pc1 setLocalDescription start');
  try {
    await pc1.setLocalDescription(desc);
    onSetLocalSuccess(pc1);
  } catch (e) {
    // BUG FIX: the caught error was previously dropped (called with no
    // argument), making the error logger itself throw on error.toString().
    onSetSessionDescriptionError(e);
  }
  console.log('pc2 setRemoteDescription start');
  try {
    await pc2.setRemoteDescription(desc);
    onSetRemoteSuccess(pc2);
  } catch (e) {
    // BUG FIX: forward the error (was previously dropped).
    onSetSessionDescriptionError(e);
  }
  console.log('pc2 createAnswer start');
  // Since the 'remote' side has no media stream we need
  // to pass in the right constraints in order for it to
  // accept the incoming offer of audio and video.
  try {
    const answer = await pc2.createAnswer();
    await onCreateAnswerSuccess(answer);
  } catch (e) {
    onCreateSessionDescriptionError(e);
  }
}
// Success callback for setLocalDescription on either peer.
function onSetLocalSuccess(pc) {
  const name = getName(pc);
  console.log(`${name} setLocalDescription complete`);
}
// Success callback for setRemoteDescription on either peer.
function onSetRemoteSuccess(pc) {
  const name = getName(pc);
  console.log(`${name} setRemoteDescription complete`);
}
// Logs setLocalDescription/setRemoteDescription failures.
// ROBUSTNESS FIX: some call sites invoke this without an argument, which
// used to crash on error.toString(); fall back to a generic message so the
// logger never throws.
function onSetSessionDescriptionError(error) {
  const reason = error ? error.toString() : 'unknown error';
  console.log(`Failed to set session description: ${reason}`);
}
// Applies pc2's answer locally, then completes the handshake by installing
// it as pc1's remote description.
// @param {RTCSessionDescriptionInit} desc - the answer produced by pc2.createAnswer().
async function onCreateAnswerSuccess(desc) {
  console.log(`Answer from pc2:\n${desc.sdp}`);
  // Each step: log a banner, apply the description, report success;
  // failures are funneled to the shared error logger.
  const steps = [
    ['pc2 setLocalDescription start', () => pc2.setLocalDescription(desc), () => onSetLocalSuccess(pc2)],
    ['pc1 setRemoteDescription start', () => pc1.setRemoteDescription(desc), () => onSetRemoteSuccess(pc1)],
  ];
  for (const [banner, apply, report] of steps) {
    console.log(banner);
    try {
      await apply();
      report();
    } catch (e) {
      onSetSessionDescriptionError(e);
    }
  }
}
// Relays an ICE candidate gathered by one pc straight to the other
// (loopback signaling), then logs the candidate text.
async function onIceCandidate(pc, event) {
  const peer = getOtherPc(pc);
  try {
    await peer.addIceCandidate(event.candidate);
    onAddIceCandidateSuccess(pc);
  } catch (e) {
    onAddIceCandidateError(pc, e);
  }
  const summary = event.candidate ? event.candidate.candidate : '(null)';
  console.log(`${getName(pc)} ICE candidate:\n${summary}`);
}
// Success callback for relaying an ICE candidate.
function onAddIceCandidateSuccess(pc) {
  const name = getName(pc);
  console.log(`${name} addIceCandidate success`);
}
// Failure callback for relaying an ICE candidate.
function onAddIceCandidateError(pc, error) {
  const name = getName(pc);
  console.log(`${name} failed to add ICE Candidate: ${error.toString()}`);
}
// Logs ICE connection-state transitions for either peer.
function onIceStateChange(pc, event) {
  if (!pc) {
    return;
  }
  console.log(`${getName(pc)} ICE state: ${pc.iceConnectionState}`);
  console.log('ICE state change event: ', event);
}
// Click handler for #hangupButton: closes both peer connections and resets
// the buttons so a new call can be placed.
function hangup() {
  console.log('Ending call');
  pc1.close();
  pc1 = null;
  pc2.close();
  pc2 = null;
  hangupButton.disabled = true;
  callButton.disabled = false;
}
</script>
</html>
const { app, session, protocol, systemPreferences, BrowserWindow } = require('electron')
const fs = require('fs'), mime = require('mime-types'), path = require('path');
// Creates the main BrowserWindow on the supplied session and loads the test
// page via the custom local:// scheme (registered in createFirstWindow).
// NOTE(review): some of these webPreferences (enableRemoteModule,
// nativeWindowOpen) are deprecated/removed in recent Electron — presumably
// kept to mirror the reporting app's configuration; confirm they still apply.
const createWindow = (session) => {
const webPreferences = {
sandbox: true,
plugins: true,
enableRemoteModule: true,
contextIsolation: false,
nativeWindowOpen: true,
backgroundThrottling: false,
session
}
const win = new BrowserWindow({
width: 800,
height: 600,
webPreferences
})
// Allow window.open() children and let them outlive their opener.
const windowOpenHandler = (data) => {
const webPreferences = {
contextIsolation: false,
nativeWindowOpen: true,
backgroundThrottling: false
};
return {
action: 'allow',
outlivesOpener: true,
overrideBrowserWindowOptions: {
webPreferences
},
};
};
// Stash the handler on every new webContents as well.
// NOTE(review): this registers a fresh app-level listener on each
// createWindow call; harmless here since it runs once per launch.
app.on('web-contents-created', (_, win) => {
win._windowOpenHandler = windowOpenHandler;
})
win.webContents.setWindowOpenHandler(windowOpenHandler);
win.loadURL('local://localhost/index.html')
}
// App bootstrap: asks for microphone access (macOS only — askForMediaAccess
// is undefined elsewhere), registers a local:// buffer protocol that serves
// files out of __dirname, auto-grants all renderer permission requests
// (so getUserMedia succeeds without prompts), then opens the test window.
// @param {unknown} id - passed by app.whenReady().then(); unused here.
async function createFirstWindow(id) {
  if (systemPreferences.askForMediaAccess) {
    await systemPreferences.askForMediaAccess("microphone");
  }
  let sess = session.fromPartition("persist:sess");
  sess.protocol.registerBufferProtocol("local", async (request, callback) => {
    let url = request.url.replace(/^local:\/\/localhost/, "");
    // NOTE(review): `url` is not sanitised, so a "../" path could escape
    // __dirname — acceptable only because this is a closed local test case.
    let localPath = path.join(__dirname, "./", url);
    // 404 unless the target exists and is a regular file.
    if ( !await
      fs.promises.lstat(localPath)
      .then(s => s.isFile())
      .catch(_ => false)
    ) {
      return callback({
        statusCode: 404,
        data: "Not found",
        mimeType: "text/html",
      });
    }
    callback({
      // BUG FIX: mime.lookup() returns `false` for unknown extensions;
      // fall back to a valid generic type instead of passing `false`.
      mimeType: mime.lookup(url) || "application/octet-stream",
      data: await fs.promises.readFile(localPath)
    });
  });
  // Grant every permission request unconditionally (test harness only).
  sess.setPermissionRequestHandler(
    (webContents, permission, callback, extra) => {
      callback(true);
    }
  );
  createWindow(sess);
}
// "local" must be declared privileged BEFORE app ready so it behaves like
// http(s): standard URL parsing, secure context (required for getUserMedia),
// plus fetch / service-worker / CORS / streaming support.
protocol.registerSchemesAsPrivileged([
{
scheme: "local",
privileges: {
standard: true,
secure: true,
allowServiceWorkers: true,
supportFetchAPI: true,
corsEnabled: true,
stream: true,
},
},
]);
app.whenReady().then(createFirstWindow)
{
"dependencies": {
"electron": "29.0.0-beta.3",
"mime-types": "^2.1.35"
}
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment