Electron Fiddle Gist
by @pdesantis (last active June 5, 2019)

To use:

  • Load the gist in Electron Fiddle
  • Run it
  • Open the DevTools console and observe the log messages
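
The fiddle is the standard Electron quick-start (index.html plus main.js) with a custom renderer.js: it opens the default microphone, routes the stream through an AudioWorklet processor that posts raw PCM samples back to the main thread, and logs the number of samples received each second, which should approximate the effective sample rate.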
index.html

<!DOCTYPE html>
<html>
  <head>
    <meta charset="UTF-8">
    <title>Hello World!</title>
  </head>
  <body>
    <h1>Hello World!</h1>
    <!-- All of the Node.js APIs are available in this renderer process. -->
    We are using Node.js <script>document.write(process.versions.node)</script>,
    Chromium <script>document.write(process.versions.chrome)</script>,
    and Electron <script>document.write(process.versions.electron)</script>.
    <script>
      // You can also require other files to run in this process
      require('./renderer.js')
    </script>
  </body>
</html>
main.js

// Modules to control application life and create native browser window
const {app, BrowserWindow} = require('electron')

// Keep a global reference of the window object; if you don't, the window will
// be closed automatically when the JavaScript object is garbage collected.
let mainWindow

function createWindow () {
  // Create the browser window.
  mainWindow = new BrowserWindow({
    width: 800,
    height: 600,
    webPreferences: {
      nodeIntegration: true
    }
  })

  // and load the index.html of the app.
  mainWindow.loadFile('index.html')

  // Open the DevTools.
  // mainWindow.webContents.openDevTools()

  // Emitted when the window is closed.
  mainWindow.on('closed', function () {
    // Dereference the window object. Usually you would store windows
    // in an array if your app supports multiple windows; this is the time
    // when you should delete the corresponding element.
    mainWindow = null
  })
}

// This method will be called when Electron has finished
// initialization and is ready to create browser windows.
// Some APIs can only be used after this event occurs.
app.on('ready', createWindow)

// Quit when all windows are closed.
app.on('window-all-closed', function () {
  // On macOS it is common for applications and their menu bar
  // to stay active until the user quits explicitly with Cmd + Q.
  if (process.platform !== 'darwin') {
    app.quit()
  }
})

app.on('activate', function () {
  // On macOS it's common to re-create a window in the app when the
  // dock icon is clicked and there are no other windows open.
  if (mainWindow === null) {
    createWindow()
  }
})

// In this file you can include the rest of your app's specific main process
// code. You can also put them in separate files and require them here.
renderer.js

// This file is required by the index.html file and will
// be executed in the renderer process for that window.
// All of the Node.js APIs are available in this process.
const workletString = `
class PcmAudioWorkletProcessorWorklet extends AudioWorkletProcessor {
  process(inputs, outputs, parameters) {
    if (inputs.length === 0) {
      throw new Error('PcmAudioWorkletProcessorWorklet has no inputs');
    }
    const input = inputs[0];
    if (input.length === 0) {
      // This input has no channels, so stop processing
      return false;
    }
    const firstChannelSamples = input[0];
    // Send the first channel's samples to the main thread, transferring
    // ownership of the underlying buffer to avoid a copy
    this.port.postMessage(firstChannelSamples, [firstChannelSamples.buffer]);
    return true;
  }
}
registerProcessor('pcm-audio-worklet-processor', PcmAudioWorkletProcessorWorklet);
`;
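
// Each process() call covers one render quantum, which the Web Audio spec
// fixes at 128 sample-frames, so the main thread receives roughly
// sampleRate / 128 messages per second. Listing `firstChannelSamples.buffer`
// in the transfer list moves the ArrayBuffer across threads without copying;
// the worklet-side array is detached afterwards, which appears safe here
// because the engine supplies usable buffers on subsequent calls.

// A consumer that needs contiguous PCM could collect the transferred chunks
// and concatenate them. This helper is a minimal sketch, not part of the
// original fiddle:
function concatChunks(chunks) {
  // Total sample count across all chunks
  const total = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
  const out = new Float32Array(total);
  let offset = 0;
  for (const chunk of chunks) {
    out.set(chunk, offset);
    offset += chunk.length;
  }
  return out;
}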
async function listAudioInputs() {
  const devices = await navigator.mediaDevices.enumerateDevices();
  return devices.filter((device) => device.kind === 'audioinput');
}
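
// Chromium exposes the system default input as a device whose deviceId is the
// literal string 'default'; getDefaultInput() below relies on that convention.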
async function getDefaultInput(fallbackToFirstInput = true) {
  const audioInputs = await listAudioInputs();
  const defaultDevice = audioInputs.find((device) => device.deviceId === 'default');
  if (defaultDevice) {
    return defaultDevice;
  }
  return fallbackToFirstInput && audioInputs.length > 0 ? audioInputs[0] : undefined;
}
async function getAudioStream(device) {
  try {
    const constraints = {
      audio: {
        deviceId: device.deviceId,
      },
    };
    // Await inside the try block so a rejected promise is actually caught here
    return await navigator.mediaDevices.getUserMedia(constraints);
  } catch (e) {
    console.error('get-microphone-stream failed', e);
    throw e;
  }
}
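
// The recording pipeline wires the graph:
//   microphone MediaStream -> MediaStreamAudioSourceNode
//     -> AudioWorkletNode ('pcm-audio-worklet-processor')
//     -> MediaStreamAudioDestinationNode
// Connecting the worklet node to a destination gives it a downstream sink so
// the graph keeps pulling it; the PCM samples themselves arrive over the
// worklet node's MessagePort.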
async function createRecordingPipeline(device) {
  const stream = await getAudioStream(device);
  const audioTracks = stream.getAudioTracks();
  if (audioTracks.length === 0) {
    throw new Error('Microphone stream has no audio tracks');
  }
  const sampleRate = audioTracks[0].getSettings().sampleRate;
  if (sampleRate === undefined) {
    throw new Error('Audio input has undefined sample rate');
  }
  const context = new AudioContext({ sampleRate, latencyHint: 'interactive' });
  if (context.sampleRate !== sampleRate) {
    console.warn(
      `AudioContext.sampleRate (${context.sampleRate}) differs from requested sampleRate (${sampleRate})`,
    );
  }
  console.log(`requested sample rate: ${sampleRate}`);
  console.log(`audio context sample rate: ${context.sampleRate}`);

  const sourceNode = context.createMediaStreamSource(stream);

  // Load the worklet module from an in-memory blob URL
  const blob = new Blob([workletString], { type: 'text/javascript' });
  const workletUrl = URL.createObjectURL(blob);
  await context.audioWorklet.addModule(workletUrl);
  // The blob URL is no longer needed once the module has loaded
  URL.revokeObjectURL(workletUrl);

  const workletNode = new AudioWorkletNode(context, 'pcm-audio-worklet-processor');
  sourceNode.connect(workletNode);
  const destinationNode = context.createMediaStreamDestination();
  workletNode.connect(destinationNode);

  // Count the samples received from the worklet and log the effective
  // sample rate roughly once per second
  let samplesReceived = 0;
  let lastLogAt = performance.now();
  workletNode.port.onmessage = (event) => {
    const data = event.data;
    samplesReceived += data.length;
    const now = performance.now();
    if (now > lastLogAt + 1000) {
      console.log(`${samplesReceived} samples in the last second (effective sample rate)`);
      console.log(`samples per message: ${data.length}`);
      lastLogAt = now;
      samplesReceived = 0;
    }
  };
}
async function goGoGo() {
  const device = await getDefaultInput();
  if (!device) {
    throw new Error('No audio input devices found');
  }
  await createRecordingPipeline(device);
}

goGoGo().catch((e) => console.error(e));
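
// Expected output: the console logs the requested and actual sample rates
// once, then roughly once per second a pair of lines with the number of
// samples received (about 48000/sec for a 48 kHz input) and the per-message
// chunk size (128 samples, one render quantum).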