-
-
Save shivrajsa/b4e4467a21420e74638b650a3c4b3bac to your computer and use it in GitHub Desktop.
MP3 record using createMediaStreamDestination
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
<p>Encoding a pure sine wave to an Opus file </p>
<button>Record</button>
<audio controls></audio>
<script>
// Toggle recording on the first click, finish and retire the button on the second.
var recordButton = document.querySelector("button");
var recording = false;
recordButton.addEventListener("click", function (event) {
  if (recording) {
    // Second click: finish the capture and disable further interaction.
    MIDI.stopRecording();
    event.target.disabled = true;
  } else {
    // First click: begin capturing and relabel the button.
    MIDI.startRecording();
    event.target.innerHTML = "Stop recording";
    recording = true;
  }
});
</script>
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
/*
----------------------------------------------------------
Web Audio API - OGG or MPEG Soundbank
----------------------------------------------------------
http://webaudio.github.io/web-audio-api/
----------------------------------------------------------
I have modified plugin.webaudio.js to support MP3 recording, with a button
that starts and stops the capture.
Follow these four variables in the gist to understand the code:
var recordchunks=[];
var recorderdest = null;
var mediaRecorder = null;
var isrecord = 1;
There may be a better way to do this; I am not an expert.
Also refer to midi.html for the start and stop button.
You can start recording only while the browser is producing sound.
*/
(function(root) { 'use strict';

window.AudioContext && (function() {

	var audioContext = null; // new AudioContext();
	var useStreamingBuffer = false; // !!audioContext.createMediaElementSource;
	var midi = root.WebAudio = {api: 'webaudio'};
	var ctx ; // audio context, assigned in midi.setContext()
	var sources = {};       // currently-sounding nodes, keyed by channelId + '' + noteId
	var effects = {};       // tuna.js effect nodes, keyed by effect type
	var masterVolume = 127; // MIDI-style master volume, 0-127
	var audioBuffers = {};  // decoded samples, keyed by instrumentId + '' + noteId
	var detuneCents=0;      // global detune (cents) applied to every new note
	// --- recording state added on top of the stock plugin.webaudio.js ---
	var recordchunks=[];       // Blob chunks delivered by the MediaRecorder
	var recorderdest = null;   // MediaStreamAudioDestinationNode fed by playing notes (see noteOn)
	var mediaRecorder = null;  // MediaRecorder wrapping recorderdest.stream; created lazily in noteOn
	var isrecord = 1;          // 1 while recording is enabled, 0 after stopRecording
	///
	midi.audioBuffers = audioBuffers;
	midi.send = function(data, delay) { };            // no-op: raw MIDI messages are not supported here
	midi.setController = function(channelId, type, value, delay) { }; // no-op: controllers unsupported
midi.setVolume = function(channelId, volume, delay) { | |
if (delay) { | |
setTimeout(function() { | |
masterVolume = volume; | |
}, delay * 1000); | |
} else { | |
masterVolume = volume; | |
} | |
}; | |
midi.programChange = function(channelId, program, delay) { | |
// if (delay) { | |
// return setTimeout(function() { | |
// var channel = root.channels[channelId]; | |
// channel.instrument = program; | |
// }, delay); | |
// } else { | |
var channel = root.channels[channelId]; | |
channel.instrument = program; | |
// } | |
}; | |
midi.pitchBend = function(channelId, program, delay) { | |
// if (delay) { | |
// setTimeout(function() { | |
// var channel = root.channels[channelId]; | |
// channel.pitchBend = program; | |
// }, delay); | |
// } else { | |
var channel = root.channels[channelId]; | |
channel.pitchBend = program; | |
// } | |
}; | |
midi.startRecording = function() | |
{ | |
isrecord = 1; | |
mediaRecorder.start(); | |
mediaRecorder.ondataavailable = function(evt) { | |
// push each chunk (blobs) in an array | |
recordchunks.push(evt.data); | |
}; | |
}; | |
midi.stopRecording = function() | |
{ | |
isrecord = 0; | |
mediaRecorder.stop(); | |
mediaRecorder.onstop = function(evt) { | |
// Make blob out of our blobs, and open it. | |
var blob = new Blob(recordchunks, { 'type' : 'audio/mp3; codecs=opus' }); | |
document.querySelector("audio").src = URL.createObjectURL(blob); | |
}; | |
}; | |
midi.setDetune = function(cents) | |
{ | |
detuneCents = cents; | |
}; | |
midi.noteOn = function(channelId, noteId, velocity, delay) { | |
delay = delay || 0; | |
/// check whether the note exists | |
var channel = root.channels[channelId]; | |
var instrument = channel.instrument; | |
var bufferId = instrument + '' + noteId; | |
var buffer = audioBuffers[bufferId]; | |
if (!buffer) { | |
// console.log(MIDI.GM.byId[instrument].id, instrument, channelId); | |
return; | |
} | |
/// convert relative delay to absolute delay | |
if (delay < ctx.currentTime) { | |
delay += ctx.currentTime; | |
} | |
/// create audio buffer | |
if (useStreamingBuffer) { | |
var source = ctx.createMediaElementSource(buffer); | |
} else { // XMLHTTP buffer | |
var source = ctx.createBufferSource(); | |
source.buffer = buffer; | |
} | |
/// add effects to buffer | |
if (effects) { | |
var chain = source; | |
for (var key in effects) { | |
chain.connect(effects[key].input); | |
chain = effects[key]; | |
} | |
} | |
/// add gain + pitchShift | |
var gain = (velocity / 127) * (masterVolume / 127) * 2 - 1; | |
source.connect(ctx.destination); | |
source.detune.value = detuneCents; | |
source.playbackRate.value = 1; // pitch shift | |
source.gainNode = ctx.createGain(); // gain | |
source.gainNode.connect(ctx.destination); | |
source.gainNode.gain.value = Math.min(1.0, Math.max(-1.0, gain)); | |
source.connect(source.gainNode); | |
if(!recorderdest) | |
{ | |
recorderdest = ctx.createMediaStreamDestination(); | |
if(!mediaRecorder) | |
{ | |
mediaRecorder = new MediaRecorder(recorderdest.stream); | |
//midi.startRecording() //if you want to automate startRecording. | |
} | |
} | |
else if(isrecord = 1) | |
{ | |
source.connect(recorderdest); | |
} | |
/// | |
if (useStreamingBuffer) { | |
if (delay) { | |
return setTimeout(function() { | |
buffer.currentTime = 0; | |
buffer.play() | |
}, delay * 1000); | |
} else { | |
buffer.currentTime = 0; | |
buffer.play() | |
} | |
} else { | |
source.start(delay || 0); | |
} | |
/// | |
sources[channelId + '' + noteId] = source; | |
/// | |
return source; | |
}; | |
midi.noteOff = function(channelId, noteId, delay) { | |
delay = delay || 0; | |
/// check whether the note exists | |
var channel = root.channels[channelId]; | |
var instrument = channel.instrument; | |
var bufferId = instrument + '' + noteId; | |
var buffer = audioBuffers[bufferId]; | |
if (buffer) { | |
if (delay < ctx.currentTime) { | |
delay += ctx.currentTime; | |
} | |
/// | |
var source = sources[channelId + '' + noteId]; | |
if (source) { | |
if (source.gainNode) { | |
// @Miranet: 'the values of 0.2 and 0.3 could of course be used as | |
// a 'release' parameter for ADSR like time settings.' | |
// add { 'metadata': { release: 0.3 } } to soundfont files | |
var gain = source.gainNode.gain; | |
gain.linearRampToValueAtTime(gain.value, delay); | |
gain.linearRampToValueAtTime(-1.0, delay + 0.3); | |
} | |
/// | |
if (useStreamingBuffer) { | |
if (delay) { | |
setTimeout(function() { | |
buffer.pause(); | |
}, delay * 1000); | |
} else { | |
buffer.pause(); | |
} | |
} else { | |
if (source.noteOff) { | |
source.noteOff(delay + 0.5); | |
} else { | |
source.stop(delay + 0.5); | |
} | |
} | |
/// | |
delete sources[channelId + '' + noteId]; | |
/// | |
return source; | |
} | |
} | |
}; | |
midi.chordOn = function(channel, chord, velocity, delay) { | |
var res = {}; | |
for (var n = 0, note, len = chord.length; n < len; n++) { | |
res[note = chord[n]] = midi.noteOn(channel, note, velocity, delay); | |
} | |
return res; | |
}; | |
midi.chordOff = function(channel, chord, delay) { | |
var res = {}; | |
for (var n = 0, note, len = chord.length; n < len; n++) { | |
res[note = chord[n]] = midi.noteOff(channel, note, delay); | |
} | |
return res; | |
}; | |
midi.stopAllNotes = function() { | |
for (var sid in sources) { | |
var delay = 0; | |
if (delay < ctx.currentTime) { | |
delay += ctx.currentTime; | |
} | |
var source = sources[sid]; | |
source.gain.linearRampToValueAtTime(1, delay); | |
source.gain.linearRampToValueAtTime(0, delay + 0.3); | |
if (source.noteOff) { // old api | |
source.noteOff(delay + 0.3); | |
} else { // new api | |
source.stop(delay + 0.3); | |
} | |
delete sources[sid]; | |
} | |
}; | |
midi.setEffects = function(list) { | |
if (ctx.tunajs) { | |
for (var n = 0; n < list.length; n ++) { | |
var data = list[n]; | |
var effect = new ctx.tunajs[data.type](data); | |
effect.connect(ctx.destination); | |
effects[data.type] = effect; | |
} | |
} else { | |
return console.log('Effects module not installed.'); | |
} | |
}; | |
midi.connect = function(opts) { | |
root.setDefaultPlugin(midi); | |
midi.setContext(ctx || createAudioContext(), opts.onsuccess); | |
}; | |
midi.getContext = function() { | |
return ctx; | |
}; | |
	/**
	 * Adopt an AudioContext and load every sample of every not-yet-loaded
	 * soundfont, invoking `onload` once after all pending downloads finish.
	 * NOTE(review): `onprogress` and `onerror` are accepted but never used
	 * in this version.
	 */
	midi.setContext = function(newCtx, onload, onprogress, onerror) {
		ctx = newCtx;
		/// tuna.js effects module - https://github.com/Dinahmoe/tuna
		if (typeof Tuna !== 'undefined' && !ctx.tunajs) {
			ctx.tunajs = new Tuna(ctx);
		}
		/// collect the note names (e.g. "A0".."C8") to request per instrument
		var urls = [];
		var notes = root.keyToNote;
		for (var key in notes) urls.push(key);
		/// fire onload exactly once, after every instrument's counter hits 0
		var waitForEnd = function(instrument) {
			for (var key in bufferPending) { // has pending items
				if (bufferPending[key]) return;
			}
			///
			if (onload) { // run onload once
				onload();
				onload = null;
			}
		};
		/// request + decode one sample; decrements the pending counter on success
		var requestAudio = function(soundfont, instrumentId, index, key) {
			var url = soundfont[key];
			if (url) {
				bufferPending[instrumentId] ++;
				loadAudio(url, function(buffer) {
					buffer.id = key;
					var noteId = root.keyToNote[key];
					audioBuffers[instrumentId + '' + noteId] = buffer;
					///
					if (-- bufferPending[instrumentId] === 0) {
						// NOTE(review): 87 looks like a hard-coded (88-key - 1)
						// piano range; `percent` is computed but unused.
						var percent = index / 87;
						soundfont.isLoaded = true;
						// NOTE(review): `instrument` here is the enclosing loop
						// variable (its last value by the time this async callback
						// runs), but waitForEnd ignores its argument, so this is
						// harmless.
						waitForEnd(instrument);
					}
				}, function(err) {
					// NOTE(review): errors are swallowed; a failed download leaves
					// the pending counter nonzero and onload never fires.
				});
			}
		};
		/// per-instrument count of samples still downloading
		var bufferPending = {};
		for (var instrument in root.Soundfont) {
			var soundfont = root.Soundfont[instrument];
			if (soundfont.isLoaded) {
				continue;
			}
			///
			var synth = root.GM.byName[instrument];
			var instrumentId = synth.number;
			///
			bufferPending[instrumentId] = 0;
			///
			for (var index = 0; index < urls.length; index++) {
				var key = urls[index];
				requestAudio(soundfont, instrumentId, index, key);
			}
		}
		/// handle the everything-already-loaded case (no callbacks will fire)
		setTimeout(waitForEnd, 1);
	};
/* Load audio file: streaming | base64 | arraybuffer | |
---------------------------------------------------------------------- */ | |
function loadAudio(url, onload, onerror) { | |
if (useStreamingBuffer) { | |
var audio = new Audio(); | |
audio.src = url; | |
audio.controls = false; | |
audio.autoplay = false; | |
audio.preload = false; | |
audio.addEventListener('canplay', function() { | |
onload && onload(audio); | |
}); | |
audio.addEventListener('error', function(err) { | |
onerror && onerror(err); | |
}); | |
document.body.appendChild(audio); | |
} else if (url.indexOf('data:audio') === 0) { // Base64 string | |
var base64 = url.split(',')[1]; | |
var buffer = Base64Binary.decodeArrayBuffer(base64); | |
ctx.decodeAudioData(buffer, onload, onerror); | |
} else { // XMLHTTP buffer | |
var request = new XMLHttpRequest(); | |
request.open('GET', url, true); | |
request.responseType = 'arraybuffer'; | |
request.onload = function() { | |
ctx.decodeAudioData(request.response, onload, onerror); | |
}; | |
request.send(); | |
} | |
}; | |
	/** Create a Web Audio context, falling back to the webkit-prefixed
	 * constructor for older Safari/Chrome. */
	function createAudioContext() {
		return new (window.AudioContext || window.webkitAudioContext)();
	};
})(); // end of the window.AudioContext-guarded module
})(MIDI);
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment