Skip to content

Instantly share code, notes, and snippets.

@dmoosocool
Created March 26, 2021 08:13
Show Gist options
  • Save dmoosocool/0984eabe2e769dace6b2fcee66790440 to your computer and use it in GitHub Desktop.
Save dmoosocool/0984eabe2e769dace6b2fcee66790440 to your computer and use it in GitHub Desktop.
A camera/video recorder that uses WebRTC (getUserMedia) to capture a stream, MediaRecorder to record it, and FFmpeg (ffmpeg.wasm) to merge a TTS audio track into the recorded video.
import { EventEmitter } from 'events'
import AudioRecorder from 'audio-recorder-polyfill'
import { createFFmpeg, FFmpeg, fetchFile } from '@ffmpeg/ffmpeg';
import autoSound from '@ASSETS/sounds/tts.mp3';
/** Recorder lifecycle states emitted with the 'recordChange' event. */
export enum RecorderStatus {
  /** Initialization finished (camera stream acquired). */
  Inited = 1,
  /** Ready to record (preview is playing). */
  PrevRecording = 2,
  /** Recording in progress. */
  Recording = 3,
  /** Recording finished. */
  Recorded = 4,
  /** Session ended. */
  End = 5,
}
// Constraint fragment produced by useFrontCamera(): either a concrete
// deviceId (when exactly one camera exists) or a facingMode selector
// (when several cameras exist). Spread into MediaStreamConstraints.video.
// NOTE(review): relies on an ambient `W3C` constraint-type namespace
// (e.g. from @types/webrtc) — confirm it is available in this project.
type TUseFrontCameraCallback = {
  deviceId?: W3C.ConstrainString;
  facingMode?: W3C.ConstrainString
}
/**
 * Whether the current browser can record the given MIME type.
 * When MediaRecorder exists but lacks isTypeSupported (older engines),
 * fall back to assuming only mp4 containers are recordable.
 */
function supportsRecording(mimeType: string) {
  if (!window.MediaRecorder) return false;
  return MediaRecorder.isTypeSupported
    ? MediaRecorder.isTypeSupported(mimeType)
    : mimeType.startsWith("audio/mp4") || mimeType.startsWith("video/mp4");
}
/**
 * Camera/video recorder built on getUserMedia + MediaRecorder, with
 * post-processing through ffmpeg.wasm (mixing a bundled TTS audio track
 * into the recording). Emits 'recordChange' events carrying a
 * RecorderStatus value as the recording lifecycle advances.
 */
export default class CustomerRecorder extends EventEmitter{
  /**
   * @param debug enable console logging via log()
   * @param previewVideoElementId DOM id of the <video> used for live preview
   * @param autoStopRecordTime seconds after which recording stops automatically (0 = never)
   */
  constructor(
    debug:boolean = false,
    previewVideoElementId: string = '__PREVIEW_VIDEO__',
    autoStopRecordTime: number = 0
  ) {
    super();
    this.debug = debug;
    this.previewVideoElementId = previewVideoElementId;
    this.autoStopRecordTime = autoStopRecordTime;
    // Install the getUserMedia polyfill by default.
    this.getUserMediaPolyfill();
    // Install the MediaRecorder polyfill by default.
    this.recorderPolyfill();
  }
  // Whether debug mode is on (controls log()).
  private debug: boolean = true;
  // Media stream obtained via getUserMedia.
  private stream?: MediaStream;
  // Chunks of recorded data collected from ondataavailable.
  private recorderBlobs: Blob[] = [];
  // Active MediaRecorder instance, set by recordingVideo().
  private recorder?: MediaRecorder;
  // First supported MIME type found when recording starts.
  private mimeType: string = '';
  // Auto-stop delay in seconds (0 disables auto stop).
  private autoStopRecordTime: number = 0;
  // DOM id of the preview <video> element.
  public previewVideoElementId: string = ''
  // Media constraints requested from the browser.
  public constraints: MediaStreamConstraints = {
    audio: { echoCancellation: { exact: true } },
    video: {
      // Frame rate: min 10, ideal 20, max 40 fps.
      frameRate: { min: 10, ideal: 20, max: 40 },
      // Width: min 480, ideal 1024, max 2048 px.
      width: { min: 480, ideal: 1024, max: 2048 },
    }
  }
  // Log only when debug mode is enabled.
  public log(str: string): void {
    this.debug && console.log(str)
  }
  // Substitute the audio-recorder polyfill when the browser has no MediaRecorder.
  private recorderPolyfill() {
    if( window.MediaRecorder === undefined ) {
      window.MediaRecorder = AudioRecorder;
    }
  }
  // Polyfill for getUserMedia: promisifies the legacy callback-based,
  // vendor-prefixed getUserMedia variants so every browser is called
  // through the same navigator.mediaDevices.getUserMedia interface.
  private getUserMediaPolyfill() {
    // Promisify the legacy callback-style getUserMedia.
    const promisifiedGetUserMedia = (constraints: MediaStreamConstraints):Promise<MediaStream> => {
      const getUserMedia: NavigatorGetUserMedia | undefined = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
      if(getUserMedia === undefined) {
        return Promise.reject(new Error('您的浏览器不支持getUserMedia.'));
      } else {
        return new Promise((resolve, reject) => {
          getUserMedia.call(navigator, constraints, resolve, reject);
        })
      }
    }
    // If navigator.mediaDevices is unsupported, define it as an empty object
    // so the next defineProperty has somewhere to attach.
    if( navigator.mediaDevices === undefined) {
      Object.defineProperty(navigator, 'mediaDevices', {
        value: {}
      });
    }
    // If navigator.mediaDevices.getUserMedia is unsupported, install the
    // promisified fallback in its place.
    if( navigator.mediaDevices.getUserMedia === undefined) {
      Object.defineProperty(navigator.mediaDevices, 'getUserMedia', {
        value: promisifiedGetUserMedia
      });
    }
  }
  // Select the front camera: returns a constraint fragment identifying it.
  private async useFrontCamera(): Promise<TUseFrontCameraCallback> {
    let result:TUseFrontCameraCallback = {};
    if(navigator.mediaDevices.enumerateDevices !== undefined) {
      const devices = await navigator.mediaDevices.enumerateDevices();
      const videoDevices = devices.filter( device => device.kind === 'videoinput');
      switch(videoDevices.length) {
        // No video input device: no camera available (logged, not thrown).
        case 0:
          // throw new Error('没有摄像头设备可用,');
          console.error('没有摄像头可用');
          break;
        // Exactly one video input: address it directly by deviceId.
        case 1:
          result.deviceId = videoDevices[0].deviceId;
          break;
        // More than one: request the user-facing camera via facingMode.
        default:
          result.facingMode = { exact: 'user' };
          break;
      }
    }
    return result;
  }
  // Request camera access and store the resulting stream.
  // Emits 'recordChange' with RecorderStatus.Inited on success.
  public async getCamera() {
    const frontOption = await this.useFrontCamera();
    Object.assign(this.constraints.video, frontOption);
    try {
      const stream = await navigator.mediaDevices.getUserMedia(this.constraints);
      this.stream = stream;
      // NOTE(review): no-op floating promise — looks like a leftover; confirm removable.
      Promise.resolve()
      this.emit('recordChange', RecorderStatus.Inited);
    } catch(e) {
      // NOTE(review): catch only rethrows, so the try/catch adds nothing here.
      throw e;
    }
  }
  // After camera permission is granted, attach the MediaStream to the
  // preview <video> element and start (muted, inline) playback.
  // Emits 'recordChange' with RecorderStatus.PrevRecording.
  public async previewVideo(): Promise<void> {
    const el: any = document.getElementById(this.previewVideoElementId);
    return new Promise( async(resolve, reject) => {
      if(!el) {
        // NOTE(review): reject() does not stop execution — the code below
        // still dereferences the null element; confirm an early return is intended.
        reject(`Preview video element is not found. element id :${this.previewVideoElementId}`)
      }
      if(!this.stream) {
        // NOTE(review): same issue — no return after reject().
        reject(`Preview video error: can not found mediastream, please run getCamera function first.`)
      }
      // Play inline (required for autoplay on iOS Safari).
      el.playsInline = true;
      // Hide playback controls.
      el.controls = false;
      // Mute so autoplay is permitted.
      el.muted = true;
      // Volume to zero.
      el.volume = 0;
      // Attach the camera stream as the playback source.
      el.srcObject = this.stream;
      try{
        el.play().then().catch((e:any)=>console.log('preViewVideo play() error',e));
        this.log('Preview video is played.');
        this.emit('recordChange', RecorderStatus.PrevRecording);
        resolve();
      }catch(e){
        reject(e);
        throw new Error(e);
      }
    })
  }
  /**
   * Mix the bundled TTS audio track into the recorded video using ffmpeg.wasm.
   * @param recorderBlobs chunks collected by the MediaRecorder
   * @returns an object URL pointing at the merged mp4 blob
   */
  public async mergeAudioAndVideo(recorderBlobs: Blob[]): Promise<string> {
    if (recorderBlobs.length === 0) {
      return Promise.reject('Recorder Blob is undefinded.')
    }
    const superBuffer = new Blob(recorderBlobs, {'type':'video/webm'});
    const AudioData = await fetchFile(autoSound);
    const VideoData = await fetchFile(superBuffer);
    const ffmpeg = createFFmpeg({ log: false });
    await ffmpeg.load();
    await ffmpeg.FS('writeFile', 'audio.mp3', AudioData);
    await ffmpeg.FS('writeFile', 'video.webm', VideoData);
    // 1. Convert the TTS mp3 to opus (48 kHz, stereo, 256 kbps).
    // ffmpeg -i pc.wav -ar 48000 -ac 2 -acodec libopus -ab 256k man.opus
    await ffmpeg.run('-i', 'audio.mp3', '-ar', '48000', '-ac', '2', '-acodec', 'libopus', '-ab', '256k', 'audio.opus');
    // 2. Extract the audio track from the recorded webm (stream copy).
    await ffmpeg.run('-i', 'video.webm', '-map', '0:a', '-c', 'copy', 'record.opus');
    // 3. Extract the video track from the recorded webm (stream copy).
    // NOTE(review): output is named .mp4 but -vcodec copy keeps the webm's
    // codec unchanged — confirm downstream players accept the result.
    await ffmpeg.run('-i', 'video.webm', '-map', '0:v', '-vcodec', 'copy', 'record.mp4');
    // 4. Mix the recorded audio with the TTS audio (amix, length of first input).
    await ffmpeg.run('-i', 'record.opus', '-i', 'audio.opus', '-filter_complex', 'amix=inputs=2:duration=first:dropout_transition=2', 'merge.opus');
    // 5. Mux the video and the mixed audio into a single mp4.
    await ffmpeg.run('-y', '-i', 'record.mp4', '-i', 'merge.opus', '-map', '0:0', '-map', '1:0', '-c', 'copy', 'output.mp4');
    const data = ffmpeg.FS('readFile', 'output.mp4');
    return URL.createObjectURL(new Blob([data], { type: 'video/mp4' }));
  }
  // Fetch the bundled TTS sound over XHR and play it through the Web Audio
  // API. Resolves once playback has finished; rejects on decode failure.
  public getAndPlayAutoSound(): Promise<void> {
    return new Promise((resolve, reject) => {
      const context = new (window.AudioContext || window.webkitAudioContext)();
      // XHR helper delivering the raw ArrayBuffer response via callback.
      const fetchAudio = (url: string, cb: Function | undefined) => {
        const request = new XMLHttpRequest();
        request.open('GET', url, true);
        request.responseType = 'arraybuffer';
        request.onload = () => { cb && cb(request) };
        request.send()
      }
      fetchAudio(autoSound, (req: XMLHttpRequest) => {
        const audioData = req.response;
        context.decodeAudioData(audioData, (buffer: AudioBuffer) => {
          const source = context.createBufferSource();
          source.buffer = buffer;
          source.connect(context.destination);
          // Resolve only after playback ends, so callers can await completion.
          source.addEventListener('ended', function () {
            source.disconnect();
            resolve();
          })
          source.start();
          // source.onended && source.onended(()=> {})
        }, (error: DOMException) => {
          console.log('Error decoding buffer: ' + error.message);
          reject(error);
        })
      })
    })
  }
  // Start recording the captured stream. Picks the first supported mimeType,
  // collects data chunks into recorderBlobs, and optionally auto-stops after
  // autoStopRecordTime seconds.
  // NOTE(review): when autoStopRecordTime is 0 this promise never resolves —
  // stopping is then only possible via stopRecord(); confirm intended.
  public async recordingVideo(): Promise<void> {
    return new Promise( (resolve, reject) => {
      if(!this.stream) {
        reject('Record video error: can not found mediastream, please run getCamera function first.')
      }else{
        const recorder = new MediaRecorder(this.stream);
        // Candidate containers/codecs, most preferred first.
        const mimeTypes = ['video/webm;codecs=vp9,opus', 'video/webm;codecs=vp8,opus', 'video/webm', 'video/ogg', 'video/mp4'];
        for( let mimeType of mimeTypes) {
          if(supportsRecording(mimeType)){
            this.mimeType = mimeType;
            break;
          }
        }
        // mimeType is read-only on MediaRecorder, so override it via defineProperty.
        if(this.mimeType){
          Object.defineProperty(recorder, 'mimeType', {value: this.mimeType})
        }
        console.log('支持的mimeType为:' + this.mimeType);
        recorder.ondataavailable = (e) => {
          console.log('录制是视频流信息:', e.data);
          // console.log(e.data);
          // Keep only non-empty chunks.
          if( e.data && e.data.size > 0) {
            this.recorderBlobs.push(e.data);
          }
        }
        // 1280 ms timeslice between dataavailable events.
        // NOTE(review): 1280 reads like a resolution, not a timeslice — confirm.
        recorder.start(1280);
        this.emit('recordChange', RecorderStatus.Recording);
        if (this.autoStopRecordTime > 0) {
          const timer = setTimeout( () => {
            recorder.stop();
            //this.mergeAudioAndVideo(this.recorderBlobs).then((videoUrl: string) => {
            this.emit('recordChange', RecorderStatus.Recorded, this.recorderBlobs);
            resolve();
            //})
            clearTimeout(timer)
          }, this.autoStopRecordTime * 1000)
        }
        this.recorder = recorder;
      }
    })
  }
  // Stop every track of the stream and detach it from the preview element.
  public closeCamera(): Promise<void> {
    return new Promise((resolve, reject) => {
      console.log('正在关闭摄像头');
      if(this.stream) {
        console.log('获取到的视频流成功.')
        console.log(this.stream.getTracks());
        this.stream.getTracks().forEach(track => {
          track.stop();
        })
        // NOTE(review): no null check — crashes if the preview element was
        // removed from the DOM before closeCamera() runs; confirm acceptable.
        const previewVideo: any = document.getElementById(this.previewVideoElementId);
        previewVideo.srcObject = null;
        previewVideo.load();
        resolve()
      }else {
        reject('camera is not found.');
      }
    })
  }
  // Stop the active MediaRecorder, if any.
  public stopRecord(): Promise<void> {
    return new Promise( (resolve, reject) => {
      if(!this.recorder) {
        reject('stopRecord error: can not found recorder.')
      }else{
        this.recorder.stop();
        resolve()
      }
    })
  }
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment