remotion advanced video renderer
@olee · Created April 17, 2021 00:49

export type GlslVarType =
    'bool' |
    'int' |
    'uint' |
    'float' |
    'double' |
    'vec2' |
    'vec3' |
    'vec4' |
    'bvec2' |
    'bvec3' |
    'bvec4' |
    'ivec2' |
    'ivec3' |
    'ivec4' |
    'uvec2' |
    'uvec3' |
    'uvec4' |
    'dvec2' |
    'dvec3' |
    'dvec4' |
    'mat2' |
    'mat3' |
    'mat4';
/**
 * Sets a uniform at the given location, dispatching to the matching gl.uniform* call
 * for the GLSL type. Booleans map to uniform1i; double/dvec types fall back to the
 * float setters, since WebGL 1 has no fp64 uniforms.
 */
export function setUniform(gl: WebGLRenderingContext, loc: WebGLUniformLocation | null, type: GlslVarType, value: any) {
    if (!loc) {
        return;
    }
    switch (type) {
        case 'bool':
            gl.uniform1i(loc, value ? 1 : 0);
            break;
        case 'int':
        case 'uint':
            gl.uniform1i(loc, value);
            break;
        case 'float':
        case 'double':
            gl.uniform1f(loc, value);
            break;
        case 'vec2':
        case 'dvec2':
            gl.uniform2fv(loc, value);
            break;
        case 'vec3':
        case 'dvec3':
            gl.uniform3fv(loc, value);
            break;
        case 'vec4':
        case 'dvec4':
            gl.uniform4fv(loc, value);
            break;
        case 'bvec2':
        case 'ivec2':
        case 'uvec2':
            gl.uniform2iv(loc, value);
            break;
        case 'bvec3':
        case 'ivec3':
        case 'uvec3':
            gl.uniform3iv(loc, value);
            break;
        case 'bvec4':
        case 'ivec4':
        case 'uvec4':
            gl.uniform4iv(loc, value);
            break;
        case 'mat2':
            gl.uniformMatrix2fv(loc, false, value);
            break;
        case 'mat3':
            gl.uniformMatrix3fv(loc, false, value);
            break;
        case 'mat4':
            gl.uniformMatrix4fv(loc, false, value);
            break;
        default:
            throw new Error(`Unsupported type ${type}`);
    }
}
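
// Example usage (a sketch, not part of the gist): how setUniform is typically called
// after a program has been linked. The `program` parameter and the uniform names
// 'u_opacity', 'u_offset' and 'u_flipY' are hypothetical placeholders.
export function exampleSetUniforms(gl: WebGLRenderingContext, program: WebGLProgram) {
    gl.useProgram(program);
    setUniform(gl, gl.getUniformLocation(program, 'u_opacity'), 'float', 0.75);
    setUniform(gl, gl.getUniformLocation(program, 'u_offset'), 'vec2', [16, 9]);
    setUniform(gl, gl.getUniformLocation(program, 'u_flipY'), 'bool', true);
}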
import React from 'react';
import { AbsoluteFill, RemotionVideoProps, useCurrentFrame, Video } from 'remotion';
import WebGlVideoProcessor, { ShaderVariable } from './WebGlVideoProcessor';
export { VAR_UV, VAR_XY, ShaderVariable, swirlShader, VideoShaderOptions, createFragmentShader } from './WebGlVideoProcessor';
export interface VideoProcessorProps extends RemotionVideoProps {
    fragShader?: string;
    vertShader?: string;
    variables?: ShaderVariable[];
}
export default function VideoProcessor(props: VideoProcessorProps) {
    const {
        style,
        src,
        variables,
        fragShader,
        vertShader,
        ...rest
    } = props;
    const frame = useCurrentFrame();
    const wrapperRef = React.useRef<HTMLDivElement>(null);
    const canvasRef = React.useRef<HTMLCanvasElement>(null);
    const [processor, setProcessor] = React.useState<WebGlVideoProcessor | undefined>(undefined);
    React.useEffect(() => {
        const canvas = canvasRef.current!;
        const video = wrapperRef.current!.firstChild as HTMLVideoElement;
        const _processor = new WebGlVideoProcessor(canvas, video);
        setProcessor(_processor);
        _processor.start();
        return () => {
            _processor.stop();
        };
    }, []);
    // useMemo (rather than useEffect) runs during render, so the shader is swapped
    // synchronously before the frame/variable effects below fire.
    React.useMemo(() => processor?.setShader(fragShader, vertShader), [processor, fragShader, vertShader]);
    React.useEffect(() => processor?.setFrame(frame), [processor, frame]);
    React.useEffect(() => processor?.videoChanged(), [processor, src]);
    React.useEffect(() => processor?.setVariables(variables), [processor, variables]);
    return (
        <div ref={wrapperRef} style={{ position: 'relative', ...style }}>
            {/* The actual video element stays hidden; the processed output is drawn onto the canvas. */}
            <Video {...rest} src={src} style={{ display: 'none' }} />
            <AbsoluteFill>
                <canvas ref={canvasRef} style={style} />
            </AbsoluteFill>
        </div>
    );
}
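
/*
 * Example composition (a sketch, not part of the gist). It assumes the component above
 * lives in './VideoProcessor' (the gist does not show the file name) and uses the
 * re-exported swirlShader with shader variables animated from the current frame:
 *
 *   import React from 'react';
 *   import { useCurrentFrame } from 'remotion';
 *   import VideoProcessor, { swirlShader } from './VideoProcessor';
 *
 *   export const SwirledVideo: React.FC = () => {
 *       const frame = useCurrentFrame();
 *       return (
 *           <VideoProcessor
 *               src="https://example.com/video.mp4" // placeholder URL
 *               fragShader={swirlShader}
 *               variables={[
 *                   { name: 'radius', type: 'float', value: 300 },
 *                   { name: 'angle', type: 'float', value: Math.sin(frame * 0.05) },
 *                   { name: 'offset', type: 'vec2', value: [0, 0] },
 *               ]}
 *           />
 *       );
 *   };
 */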
import { continueRender, delayRender } from 'remotion';
import { GlslVarType, setUniform } from './glUtils';
export interface VideoShaderOptions {
    variables?: {
        type: GlslVarType;
        name: string;
    }[];
}
export const ATTR_UV = 'a_uv';
export const ATTR_POS = 'a_pos';
export const VAR_UV = 'uv';
export const VAR_XY = 'xy';
const defaultVertexShader = `
precision highp float;
attribute vec4 ${ATTR_POS};
attribute vec2 ${ATTR_UV};
varying vec2 ${VAR_UV};
varying vec2 ${VAR_XY};
uniform vec2 size;
void main(void) {
    gl_Position = ${ATTR_POS};
    ${VAR_UV} = ${ATTR_UV};
    ${VAR_XY} = vec2(
        (${ATTR_POS}.x * 0.5 + 0.5) * float(size.x),
        (0.5 - ${ATTR_POS}.y * 0.5) * float(size.y)
    );
}`;
export function createFragmentShader(content: string, options: VideoShaderOptions = {}) {
    const { variables } = options;
    return `
precision highp float;
varying vec2 ${VAR_UV};
varying vec2 ${VAR_XY};
uniform vec2 size;
uniform int frame;
uniform float t;
uniform sampler2D u_tex;
${!variables ? '' : variables.map(v => `uniform ${v.type} ${v.name};`).join('\n')}
vec2 xyToUv(in vec2 _xy) {
    return vec2(_xy.x, _xy.y) / size;
}
void main(void) {
    ${content}
}`;
}
export const defaultShader = createFragmentShader(`gl_FragColor = vec4(texture2D(u_tex, ${VAR_UV}).rgb, 1);`);
// Example for animation using frame uniform variable inside shader
// float r = radius * (sin(float(frame) * 0.02) * 0.5 + 0.5);
export const swirlShader = createFragmentShader(`
    vec2 cUV = ${VAR_XY} - size / 2. - offset;
    float dist = length(cUV);
    if (dist < radius)
    {
        float percent = (radius - dist) / radius;
        float theta = percent * percent * angle * 8.0;
        float s = sin(theta);
        float c = cos(theta);
        cUV = vec2(dot(cUV, vec2(c, -s)), dot(cUV, vec2(s, c)));
    }
    cUV = xyToUv(cUV + offset + size / 2.); // / vec2(width, height) + vec2(0.5, 0.5);
    gl_FragColor = vec4(texture2D(u_tex, cUV).rgb, 0.5);`,
    {
        variables: [
            { type: 'float', name: 'radius' },
            { type: 'float', name: 'angle' },
            { type: 'vec2', name: 'offset' },
        ],
    }
);
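
// Example shader (a sketch, not part of the gist): a grayscale effect built with
// createFragmentShader. The 'strength' uniform is an illustrative name only; its value
// would be supplied via setVariables() or the component's `variables` prop.
export const grayscaleShader = createFragmentShader(`
    vec3 color = texture2D(u_tex, ${VAR_UV}).rgb;
    float gray = dot(color, vec3(0.299, 0.587, 0.114));
    gl_FragColor = vec4(mix(color, vec3(gray), strength), 1.0);`,
    {
        variables: [
            { type: 'float', name: 'strength' },
        ],
    }
);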
export interface ShaderVariable {
    name: string;
    type: GlslVarType;
    value: any;
}
export default class WebGlVideoProcessor {
    public readonly gl: WebGLRenderingContext;
    private _stop?: () => void;
    private _texture: WebGLTexture;
    private _program: WebGLProgram;
    private _vertexShader: WebGLShader;
    private _fragmentShader: WebGLShader;
    private _verticesBuffer: WebGLBuffer;
    private _uvBuffer: WebGLBuffer;
    private _delayRenderHandle?: number;
    private _videoLoaded = false;
    private _variables: ShaderVariable[] = [];
    private _frame: number = 0;

    constructor(
        public readonly canvas: HTMLCanvasElement,
        public readonly video: HTMLVideoElement,
        fragmentShader = defaultShader,
        vertexShader = defaultVertexShader
    ) {
        const gl = canvas.getContext('webgl', { preserveDrawingBuffer: true });
        if (!gl) {
            throw new Error('Error initializing WebGl context');
        }
        this.gl = gl;
        // Initialize resources
        this._texture = this._createTexture();
        // Full-screen quad positions (x, y, z) for a 4-vertex triangle strip
        this._verticesBuffer = this._createBuffer([
            1.0, 1.0, 0.0,
            -1.0, 1.0, 0.0,
            //
            1.0, -1.0, 0.0,
            -1.0, -1.0, 0.0
        ]);
        this._uvBuffer = this._createBuffer([
            1.0, 0.0,
            0.0, 0.0,
            1.0, 1.0,
            0.0, 1.0
        ]);
        this._vertexShader = this._createShader(vertexShader, gl.VERTEX_SHADER);
        this._fragmentShader = this._createShader(fragmentShader, gl.FRAGMENT_SHADER);
        this._program = this._createProgram();
        this.videoChanged();
    }
    private delayRender() {
        if (!this._delayRenderHandle && this.isRunning) {
            this._delayRenderHandle = delayRender();
        }
    }

    private continueRender() {
        if (this._delayRenderHandle) {
            continueRender(this._delayRenderHandle);
            this._delayRenderHandle = undefined;
        }
    }

    private _createBuffer(data: number[]) {
        const gl = this.gl;
        const buffer = gl.createBuffer();
        if (!buffer) {
            throw new Error('Error creating vertex buffer');
        }
        gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
        gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(data), gl.STATIC_DRAW);
        return buffer;
    }

    private _createTexture() {
        const gl = this.gl;
        const texture = gl.createTexture();
        if (!texture) {
            throw new Error('Error creating texture');
        }
        gl.bindTexture(gl.TEXTURE_2D, texture);
        // Fill with yellow pixel
        gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, 1, 1, 0, gl.RGB, gl.UNSIGNED_BYTE, new Uint8Array([255, 255, 0]));
        // Set texture parameters
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
        return texture;
    }

    private _createShader(source: string, type: number) {
        const gl = this.gl;
        const shader = gl.createShader(type);
        if (!shader) {
            throw new Error('Error creating shader');
        }
        gl.shaderSource(shader, source);
        gl.compileShader(shader);
        if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
            console.debug('Shader source:', source.split('\n').map((l, idx) => `${idx + 1}: ${l}`).join('\n'));
            throw new Error('Error compiling shader: ' + gl.getShaderInfoLog(shader));
        }
        return shader;
    }
    private _createProgram() {
        if (!this._vertexShader || !this._fragmentShader) {
            throw new Error('Shaders must be compiled before creating the program');
        }
        const gl = this.gl;
        const program = gl.createProgram();
        if (!program) {
            throw new Error('Error creating program');
        }
        gl.attachShader(program, this._vertexShader);
        gl.attachShader(program, this._fragmentShader);
        // Link program
        gl.linkProgram(program);
        if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
            throw new Error('Error linking shader program: ' + gl.getProgramInfoLog(program));
        }
        // Set program parameters
        const posAttribute = gl.getAttribLocation(program, ATTR_POS);
        gl.bindBuffer(gl.ARRAY_BUFFER, this._verticesBuffer);
        gl.enableVertexAttribArray(posAttribute);
        gl.vertexAttribPointer(posAttribute, 3, gl.FLOAT, false, 0, 0);
        const uvAttribute = gl.getAttribLocation(program, ATTR_UV);
        gl.bindBuffer(gl.ARRAY_BUFFER, this._uvBuffer);
        gl.enableVertexAttribArray(uvAttribute);
        gl.vertexAttribPointer(uvAttribute, 2, gl.FLOAT, false, 0, 0);
        // Use new program
        gl.useProgram(program);
        return program;
    }

    public get isRunning() {
        return Boolean(this._stop);
    }
    public start() {
        let stopped = false;
        this._stop = () => { stopped = true; };
        let lastTime = -1;
        const update = () => {
            if (stopped) {
                return;
            }
            if (this._videoLoaded) {
                // Check if we actually have to update the frame
                if (lastTime !== this.video.currentTime) {
                    lastTime = this.video.currentTime;
                    this.render();
                }
                // Continue render
                this.continueRender();
            }
            window.requestAnimationFrame(update);
        };
        window.requestAnimationFrame(update);
    }

    private render() {
        const gl = this.gl;
        const video = this.video;
        // Update video texture
        gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, video);
        // Update variables
        for (const v of this._variables) {
            setUniform(gl, gl.getUniformLocation(this._program, v.name), v.type, v.value);
        }
        setUniform(gl, gl.getUniformLocation(this._program, 'frame'), 'int', this._frame);
        setUniform(gl, gl.getUniformLocation(this._program, 'size'), 'vec2', [video.videoWidth, video.videoHeight]);
        setUniform(gl, gl.getUniformLocation(this._program, 't'), 'float', video.currentTime);
        gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
    }

    public stop() {
        this._stop?.();
        this.continueRender();
    }
    public videoChanged() {
        const gl = this.gl;
        const canvas = this.canvas;
        const video = this.video;
        this._videoLoaded = false;
        const delayRenderHandle = delayRender();
        video.addEventListener('loadeddata', () => {
            // Set canvas size
            canvas.width = video.videoWidth;
            canvas.height = video.videoHeight;
            gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
            continueRender(delayRenderHandle);
            this._videoLoaded = true;
        }, { once: true });
    }

    public setFrame(frame: number) {
        this.delayRender();
        this._frame = frame;
    }

    public setShader(fragmentShader = defaultShader, vertexShader = defaultVertexShader) {
        this._vertexShader = this._createShader(vertexShader, this.gl.VERTEX_SHADER);
        this._fragmentShader = this._createShader(fragmentShader, this.gl.FRAGMENT_SHADER);
        this._program = this._createProgram();
    }

    public setVariables(variables: ShaderVariable[] | undefined) {
        this._variables = variables || [];
    }
}
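
// Example usage (a sketch, not part of the gist): driving WebGlVideoProcessor directly
// with existing <canvas> and <video> elements instead of the React wrapper. The variable
// values are placeholders, and this assumes a Remotion environment, since the class
// calls delayRender()/continueRender() internally.
export function exampleAttachProcessor(canvas: HTMLCanvasElement, video: HTMLVideoElement) {
    const processor = new WebGlVideoProcessor(canvas, video, swirlShader);
    processor.setVariables([
        { name: 'radius', type: 'float', value: 250 },
        { name: 'angle', type: 'float', value: 0.8 },
        { name: 'offset', type: 'vec2', value: [0, 0] },
    ]);
    processor.start();
    // Return a cleanup callback that stops the render loop.
    return () => processor.stop();
}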