Skip to content

Instantly share code, notes, and snippets.

@kwindla
Created January 20, 2021 04:21
Show Gist options
  • Save kwindla/c4de50d99ad834d97b0bb68de4914617 to your computer and use it in GitHub Desktop.
Sample code for various instructor (active speaker) settings
<html>
<head>
<title>test settings for instructor/student use cases</title>
<!-- daily-js call object library (provides the DailyIframe global) -->
<script src="https://unpkg.com/@daily-co/daily-js"></script>
</head>
<body onload="main()">
<!-- controls that drive send/receive quality settings, plus live stats -->
<div id="local-controls">
<div>
Turn off small videos:
<input
type="checkbox"
id="small-vids-off"
onchange="updateSmallVidsDisplayState(this)"
/>
</div>
<div>
Pause sending video:
<input
type="checkbox"
id="pause-video-send"
checked="true"
onchange="updateVideoSendingState(this)"
/>
</div>
<div>
Send only two layers:
<input
type="checkbox"
id="send-two-layers"
onchange="updateVideoSendingQuality(this)"
/>
</div>
<!-- WORK IN PROGRESS
<div>
Prioritize network packets for active video:
<input
type="checkbox"
id="active-vid-network-priority"
onchange="updateNetworkPriority(this)"
/>
</div>
-->
<hr />
<!-- refreshed every 3 seconds by updateStatsDisplay() -->
<div>Video send bitrate: <span id="send-kbps">0</span></div>
<div>Video receive bitrate: <span id="recv-kbps">0</span></div>
<div>
Active video resolution:
<span id="active-res">[ click on a non-local video ]</span>
</div>
<hr />
</div>
<!-- local camera preview -->
<div id="local-video"></div>
<!-- one <video> element per remote participant -->
<div id="remote-videos"></div>
<script>
// Entry point, invoked from <body onload>. Acquires the camera, creates the
// Daily call object, joins the room, and starts the periodic stats display.
async function main() {
  // To keep autoplay logic happy, call getUserMedia() before anything else.
  // Alternatively we could require a UI click on the way into the call.
  await getLocalCam();

  // SET ROOM_URL BELOW TO A VALID ROOM
  const ROOM_URL = "ROOM URL HERE";

  // One shared set of simulcast encodings for everyone. This makes it easy
  // to switch a participant's role from student to teacher. You could also
  // set different simulcast layer settings when creating the call object, or
  // modify the layer settings on the fly during the call (APIs coming soon).
  const camSimulcastEncodings = [
    { maxBitrate: 120000, maxFramerate: 10, scaleResolutionDownBy: 4 },
    { maxBitrate: 300000, maxFramerate: 20, scaleResolutionDownBy: 2 },
    { maxBitrate: 1200000, maxFramerate: 30, scaleResolutionDownBy: 1 },
  ];

  window.callObject = DailyIframe.createCallObject({
    audioSource: false,
    videoSource: false,
    dailyConfig: {
      experimentalChromeVideoMuteLightOff: true,
      camSimulcastEncodings: camSimulcastEncodings,
    },
  });

  callObject.on("track-started", displayVideo);
  callObject.on("track-stopped", destroyVideo);

  await callObject.join({ url: ROOM_URL });
  await callObject.setNetworkTopology({ topology: "sfu" });

  setInterval(updateStatsDisplay, 3000);
}
// (Re-)acquire the local camera/mic. Keeps two copies of the stream: one for
// the local preview element and a clone to hand to Daily as input.
async function getLocalCam() {
  // Tear down any previous capture before re-acquiring.
  const displayStream = window.localCamForDisplay;
  if (displayStream) {
    destroyVideo(displayStream.getVideoTracks()[0]);
    for (const t of displayStream.getTracks()) {
      t.stop();
    }
  }
  const dailyStream = window.localCamForDaily;
  if (dailyStream) {
    for (const t of dailyStream.getTracks()) {
      t.stop();
    }
  }
  window.localCamForDisplay = await navigator.mediaDevices.getUserMedia({
    video: { height: 720 },
    audio: true,
  });
  window.localCamForDaily = window.localCamForDisplay.clone();
  displayLocalVideo(window.localCamForDisplay);
}
// Hand our cloned camera/mic tracks to Daily and unmute audio. Video is
// unmuted only when the "pause sending video" checkbox is unchecked.
async function setLocalTracksAndMaybeUnmute() {
  if (!callObject) {
    return;
  }
  const [videoTrack] = window.localCamForDaily.getVideoTracks();
  const [audioTrack] = window.localCamForDaily.getAudioTracks();
  await callObject.setInputDevicesAsync({
    videoSource: videoTrack,
    audioSource: audioTrack,
  });
  callObject.setLocalAudio(true);
  if (checkShouldSendVideo()) {
    callObject.setLocalVideo(true);
  }
}
// Create a muted <video> element for the local camera preview and add it to
// the #local-video container.
function displayLocalVideo(cam) {
  const container = document.getElementById("local-video");
  const el = document.createElement("video");
  el._local = true;
  container.appendChild(el);
  el.style.width = "100%";
  el.style.backgroundColor = "orange";
  el.srcObject = new MediaStream(cam);
  el.volume = 0; // never play our own audio back to ourselves
  el.play();
  doLayout();
}
// Handle a Daily "track-started" event: render a remote participant's video
// as a clickable tile, starting in low-quality (non-active-speaker) mode.
function displayVideo(evt) {
  const track = evt.track;
  const participant = evt.participant;
  // ignore local track and audio track events
  if (track.kind !== "video") {
    return;
  }
  if (participant.local) {
    return;
  }
  let videosDiv = document.getElementById("remote-videos");
  let videoEl = document.createElement("video");
  videoEl._activeSpeaker = false;
  videoEl._participant = participant;
  videoEl.onclick = toggleActiveSpeaker;
  // new tiles always start as non-active speakers: lowest simulcast layer
  setReceivePropertiesForNonActiveSpeaker(participant);
  videosDiv.appendChild(videoEl);
  videoEl.style.width = "100%";
  videoEl.style.backgroundColor = "orange";
  videoEl.srcObject = new MediaStream([track]);
  videoEl.play();
  doLayout();
  // pause this video if small videos are turned off
  if (document.getElementById("small-vids-off").checked) {
    const consumer = Object.values(rtcpeers.sfu.consumers).find(
      (c) =>
        c.appData.peerId === videoEl._participant.session_id &&
        c.appData.mediaTag === "cam-video"
    );
    // BUGFIX: the consumer may not exist yet at track-started time; don't
    // pass undefined through to pauseConsumer()
    if (consumer) {
      rtcpeers.sfu.pauseConsumer(consumer);
    }
  }
}
// Remove every <video> element whose stream contains the given video track.
// Call from a Daily "track-stopped" event handler, or by passing in a
// MediaStreamTrack directly.
function destroyVideo(evtOrTrack) {
  const track = evtOrTrack.track || evtOrTrack;
  // BUGFIX: getElementsByTagName returns a LIVE collection; removing
  // elements while iterating it can skip entries. Snapshot it first.
  const vids = Array.from(document.getElementsByTagName("video"));
  for (let vid of vids) {
    if (vid.srcObject && vid.srcObject.getVideoTracks()[0] === track) {
      vid.remove();
    }
  }
  doLayout();
}
// Return the <video> element currently flagged as the active speaker, or
// undefined when no tile has been promoted.
function getActiveSpeakerVideo() {
  for (const videoEl of Array.from(document.getElementsByTagName("video"))) {
    if (videoEl._activeSpeaker) {
      return videoEl;
    }
  }
  return undefined;
}
// WORK IN PROGRESS: network prioritization is disabled for now, so this
// always reports false. The second return is intentionally unreachable; it
// will read the (currently commented-out) checkbox once the feature ships.
function checkShouldPrioritizeActiveVideo() {
return false; // WORK IN PROGRESS
return document.getElementById("active-vid-network-priority").checked;
}
// True when the "pause sending video" checkbox is NOT checked.
function checkShouldSendVideo() {
  const paused = document.getElementById("pause-video-send").checked;
  return !paused;
}
// Highest simulcast layer index to send: 1 (i.e. two layers) when the
// "send only two layers" box is checked, otherwise 2 (all three layers).
function checkMaxSendingLayer() {
  if (document.getElementById("send-two-layers").checked) {
    return 1;
  }
  return 2;
}
// Request the lowest simulcast layer at normal priority for a participant
// that is not the active speaker. No-op for a missing participant.
async function setReceivePropertiesForNonActiveSpeaker(p) {
  if (!p) {
    return;
  }
  const receiveSettings = { layer: 0, priority: 1 };
  betaSetVideoReceiveQuality(p.session_id, receiveSettings);
}
// Request the highest simulcast layer for the active speaker; bump network
// priority only when the (work-in-progress) prioritization toggle is on.
async function setReceivePropertiesForActiveSpeaker(p) {
  if (!p) {
    return;
  }
  const priority = checkShouldPrioritizeActiveVideo() ? 200 : 1;
  betaSetVideoReceiveQuality(p.session_id, { layer: 2, priority: priority });
}
// Pause or resume the SFU consumer for every non-active-speaker tile,
// driven by the "turn off small videos" checkbox.
async function updateSmallVidsDisplayState(el) {
  const f = el.checked
    ? (c) => rtcpeers.sfu.pauseConsumer(c)
    : (c) => rtcpeers.sfu.resumeConsumer(c);
  const smallVids = Array.from(document.getElementsByTagName("video")).filter(
    (v) => !v._activeSpeaker
  );
  // BUGFIX: the original used an async forEach callback, whose awaits are
  // fire-and-forget (forEach ignores the returned promises and rejections
  // go unhandled). A for...of loop lets us genuinely await each call.
  for (const v of smallVids) {
    const consumer = Object.values(rtcpeers.sfu.consumers).find(
      (c) =>
        v._participant &&
        c.appData.peerId === v._participant.session_id &&
        c.appData.mediaTag === "cam-video"
    );
    // BUGFIX: skip tiles with no matching consumer instead of passing
    // undefined to pause/resume
    if (consumer) {
      await f(consumer);
    }
  }
}
// Re-apply receive properties to the current active speaker when the
// network-priority checkbox changes (checkbox is WORK IN PROGRESS).
async function updateNetworkPriority(el) {
  const activeVid = getActiveSpeakerVideo();
  if (!activeVid) {
    return;
  }
  setReceivePropertiesForActiveSpeaker(activeVid._participant);
}
// Toggle sending of the local camera, driven by the "pause sending video"
// checkbox. Tracks are attached to the call lazily, so the very first
// enable must also wire up the local input devices.
async function updateVideoSendingState(el) {
// no cam-video producer yet means video has never been turned on in this call
if (!window.rtcpeers.sfu.getProducerByTag("cam-video")) {
// first time turning on video
await setLocalTracksAndMaybeUnmute();
// HACKING -- if we're turning on video for the first
// time, let's make sure we set max spatial layer
// correctly. this won't be needed when we release new
// API methods that cache all device/rtp state internally.
setTimeout(updateVideoSendingQuality, 3000);
return;
}
callObject.setLocalVideo(checkShouldSendVideo());
}
// Cap the highest simulcast layer we send, per the "send only two layers"
// checkbox. No-op until the camera producer and its RTP sender exist.
async function updateVideoSendingQuality(el) {
  const producer = window.rtcpeers.sfu.getProducerByTag("cam-video");
  const ready = producer && producer.track && producer.rtpSender;
  if (!ready) {
    return;
  }
  producer.setMaxSpatialLayer(checkMaxSendingLayer());
}
// Click handler on remote video tiles: promote the clicked tile to active
// speaker (or demote it if it already is), updating receive quality on
// every affected participant and re-running layout.
function toggleActiveSpeaker(evt) {
  const vid = evt.target;
  if (vid._activeSpeaker) {
    vid._activeSpeaker = false;
    setReceivePropertiesForNonActiveSpeaker(vid._participant);
  } else {
    // demote any previously promoted tile first
    Array.from(document.getElementsByTagName("video"))
      .filter((v) => v._activeSpeaker)
      .forEach((v) => {
        v._activeSpeaker = false;
        // BUGFIX: demote the *previous* speaker's participant (v), not the
        // clicked tile's (the original passed vid._participant here, so the
        // old active speaker kept receiving the high-quality layer)
        setReceivePropertiesForNonActiveSpeaker(v._participant);
      });
    vid._activeSpeaker = true;
    setReceivePropertiesForActiveSpeaker(vid._participant);
  }
  doLayout();
}
// Width in CSS pixels for the small (non-active-speaker) tiles.
const SMALL_VIDEO_WIDTH = 220;
// Lay out the video grid: small inline tiles for everyone except the
// active speaker, which (if any) gets a full-width block of its own.
function doLayout() {
  console.log("laying out");
  const vids = Array.from(document.getElementsByTagName("video"));
  // local preview
  const localVid = vids.find((v) => v._local);
  if (localVid) {
    // BUGFIX: style.width must be a CSS length string; assigning a bare
    // number is silently ignored by the browser, so the tiles never shrank
    localVid.style.width = SMALL_VIDEO_WIDTH + "px";
    localVid.style.margin = "5px";
  }
  // non-activeSpeakers
  vids
    .filter((v) => !v._activeSpeaker)
    .forEach((v) => {
      v.style.width = SMALL_VIDEO_WIDTH + "px"; // BUGFIX: unit required
      v.style.margin = "5px";
      v.style.display = "inline";
    });
  // activeSpeaker
  const activeSpeaker = getActiveSpeakerVideo();
  if (activeSpeaker) {
    activeSpeaker.style.display = "block";
    activeSpeaker.style.width = "100%";
  }
}
// Periodic (every 3s) refresh of the bitrate readouts and the
// active-speaker resolution display.
async function updateStatsDisplay() {
  const networkStats = await callObject.getNetworkStats();
  const latest = networkStats.stats.latest;
  const toKbps = (bps) => Math.round(bps / 1000);
  document.getElementById("send-kbps").innerText = toKbps(
    latest.videoSendBitsPerSecond
  );
  document.getElementById("recv-kbps").innerText = toKbps(
    latest.videoRecvBitsPerSecond
  );
  const resEl = document.getElementById("active-res");
  const activeSpeaker = getActiveSpeakerVideo();
  if (activeSpeaker) {
    resEl.innerText =
      activeSpeaker.videoWidth + " x " + activeSpeaker.videoHeight;
  } else {
    resEl.innerText = "[ click on a non-local video ]";
  }
}
// Approximation of a new API method we are testing: set the simulcast layer
// (and, in Daily dev/staging environments only, the network priority) we
// want to receive for a given participant's camera video. The `priority`
// argument is otherwise ignored for now.
async function betaSetVideoReceiveQuality(
  participantId,
  { layer, priority } = {}
) {
  console.log(
    "beta receive quality function",
    participantId,
    layer,
    priority
  );
  const sfu = rtcpeers.sfu;
  if (!sfu) {
    console.log("sfu not set up yet");
    return;
  }
  try {
    const consumerKey = participantId + "/cam-video";
    const consumer = sfu.consumers[consumerKey];
    if (!consumer) {
      console.log("no consumer found for participant", participantId);
      return;
    }
    sfu.setConsumerLayer(consumer, layer, priority);
  } catch (e) {
    console.error(e);
  }
}
</script>
</body>
</html>
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment