<!DOCTYPE html>
<html>
<head>
<title>Complex Transformation WebGL Demo</title>
<!-- gist by @porst17, created June 1, 2023 -->
</head>
<body>
<canvas id="backward" width="640" height="480"></canvas><br />
<video id="input" width="640" height="480"></video><br />
<canvas id="forward" width="640" height="480"></canvas><br />
<script>
const importShaderCodeLibrary = `
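// Small complex-arithmetic library: a complex number x + iy is stored as a
// vec2(x, y), so .x is the real part and .y the imaginary part.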
vec2 cAdd(in vec2 a, in vec2 b) {
return a + b;
}
vec2 cSub(in vec2 a, in vec2 b) {
return a - b;
}
vec2 cMul(in vec2 a, in vec2 b) {
return vec2(a.x * b.x - a.y * b.y, a.x * b.y + a.y * b.x);
}
vec2 cDiv(in vec2 a, in vec2 b) {
vec2 temp;
temp.x = ((a.x * b.x) + (a.y * b.y))/(b.x*b.x + b.y*b.y);
temp.y = ((a.y * b.x) - (a.x * b.y))/(b.x*b.x + b.y*b.y);
return temp;
}
vec2 cExp(in vec2 z) {
return vec2(exp(z.x)*cos(z.y), exp(z.x)*sin(z.y));
}
float cSinh(in float x) {
return (exp(x) - exp(-x)) / 2.0;
}
float cCosh(in float x) {
return (exp(x) + exp(-x)) / 2.0;
}
vec2 cSin(in vec2 z) {
return vec2(sin(z.x)*cCosh(z.y), cos(z.x)*cSinh(z.y));
}
vec2 cCos(in vec2 z) {
return vec2(cos(z.x)*cCosh(z.y), -sin(z.x)*cSinh(z.y));
}
vec2 cTan(in vec2 z) {
return cDiv(cSin(z), cCos(z));
}
vec2 cConjugate(in vec2 z) {
return vec2(z.x, -z.y);
}
vec2 cLog(in vec2 z) {
float r = sqrt(z.x*z.x + z.y*z.y);
float theta = atan(z.y, z.x);
return vec2(log(r), theta);
}
vec2 cPow(in vec2 z, in vec2 w) {
float r = sqrt(z.x*z.x + z.y*z.y);
float theta = atan(z.y, z.x);
return cExp(cAdd(log(r) * w, theta * vec2(-w.y, w.x))); // z^w = exp(w * log(z))
}
vec2 quadrupled(vec2 z) {
vec2 s = cMul(z, z);
return cMul(s, s);
}
vec2 squared(vec2 z) {
return cMul(z, z);
}
vec2 rotate(vec2 z, float theta) {
return cMul(z, vec2(cos(theta), sin(theta)));
}
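// f is the transformation applied by both passes below; here it rotates the
// plane by pi / 2, but any of the functions above (e.g. squared, cExp, cTan)
// could be substituted.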
vec2 f(vec2 z) {
return rotate(z, 3.14159265 / 2.0);
}
`;
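// Backward (inverse) mapping: render a full-screen quad and, for every output
// pixel z, evaluate f(z) in the fragment shader and sample the video texture
// there, i.e. the image is pulled back through f.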
const backwardVertexShaderCode = `
attribute vec3 coordinates;
varying vec2 z;
${importShaderCodeLibrary}
void main(void) {
gl_Position = vec4(coordinates, 1.0);
z = coordinates.xy * vec2(1.0, -1.0);
}
`;
const backwardFragmentShaderCode = `
precision highp float;
varying vec2 z;
uniform sampler2D uSampler;
${importShaderCodeLibrary}
void main() {
vec2 center = vec2(0.5, 0.5);
vec2 fz = f(z);
gl_FragColor = texture2D(uSampler, mod(0.5 * fz + center, 1.0));
}
`;
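// Forward mapping: a dense triangle grid covering the video is deformed in the
// vertex shader by moving each vertex z to f(z); the fragment shader then
// samples the texture at the original, undeformed coordinate z.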
const forwardVertexShaderCode = `
attribute vec3 coordinates;
varying vec2 z;
${importShaderCodeLibrary}
void main(void) {
z = 2.0 * (coordinates.xy - vec2(0.5, 0.5));
vec2 fz = f(z);
gl_Position = vec4( fz, 0.0, 1.0);
}
`;
const forwardFragmentShaderCode = `
precision highp float;
varying vec2 z;
uniform sampler2D uSampler;
${importShaderCodeLibrary}
void main() {
vec2 center = vec2(0.5, 0.5);
gl_FragColor = texture2D(uSampler, mod(0.5 * z + center, 1.0));
}
`;
function initTexture(gl) {
const texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
// Because video frames might take a moment until they are
// ready, put a single pixel in the texture so we can
// use it immediately.
const level = 0;
const internalFormat = gl.RGBA;
const width = 1;
const height = 1;
const border = 0;
const srcFormat = gl.RGBA;
const srcType = gl.UNSIGNED_BYTE;
const pixel = new Uint8Array([0, 0, 255, 255]); // opaque blue
gl.texImage2D(
gl.TEXTURE_2D,
level,
internalFormat,
width,
height,
border,
srcFormat,
srcType,
pixel
);
// Turn off mips and set wrapping to clamp to edge so it
// will work regardless of the dimensions of the video.
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
return texture;
}
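// Re-upload the current video frame into the texture; called once per
// animation frame for each WebGL context.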
function updateTexture(gl, texture, video) {
const level = 0;
const internalFormat = gl.RGBA;
const srcFormat = gl.RGBA;
const srcType = gl.UNSIGNED_BYTE;
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(
gl.TEXTURE_2D,
level,
internalFormat,
srcFormat,
srcType,
video
);
}
function setupGL(gl, vertices, indices, vs, fs) {
/*========== Defining and storing the geometry =========*/
// Create an empty buffer object to store vertex buffer
let vertex_buffer = gl.createBuffer();
// Bind appropriate array buffer to it
gl.bindBuffer(gl.ARRAY_BUFFER, vertex_buffer);
// Pass the vertex data to the buffer
gl.bufferData(
gl.ARRAY_BUFFER,
new Float32Array(vertices),
gl.STATIC_DRAW
);
// Unbind the buffer
gl.bindBuffer(gl.ARRAY_BUFFER, null);
// Create an empty buffer object to store Index buffer
let Index_Buffer = gl.createBuffer();
// Bind appropriate array buffer to it
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, Index_Buffer);
// Pass the vertex data to the buffer
gl.bufferData(
gl.ELEMENT_ARRAY_BUFFER,
new Uint16Array(indices),
gl.STATIC_DRAW
);
// Unbind the buffer
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null);
/*====================== Shaders =======================*/
// Create a vertex shader object
let vertShader = gl.createShader(gl.VERTEX_SHADER);
// Attach vertex shader source code
gl.shaderSource(vertShader, vs);
// Compile the vertex shader
gl.compileShader(vertShader);
console.log("Vertex Shader Log:", gl.getShaderInfoLog(vertShader));
// Create fragment shader object
let fragShader = gl.createShader(gl.FRAGMENT_SHADER);
// Attach fragment shader source code
gl.shaderSource(fragShader, fs);
// Compile the fragment shader
gl.compileShader(fragShader);
console.log("Fragment Shader Log:", gl.getShaderInfoLog(fragShader));
// Create a shader program object to
// store the combined shader program
let shaderProgram = gl.createProgram();
// Attach a vertex shader
gl.attachShader(shaderProgram, vertShader);
// Attach a fragment shader
gl.attachShader(shaderProgram, fragShader);
// Link both shaders into the program
gl.linkProgram(shaderProgram);
// Create a texture
const texture = initTexture(gl);
// Tell WebGL we want to affect texture unit 0
gl.activeTexture(gl.TEXTURE0);
// Bind the texture to texture unit 0
gl.bindTexture(gl.TEXTURE_2D, texture);
// Use the combined shader program object
gl.useProgram(shaderProgram);
// Tell the shader we bound the texture to texture unit 0
gl.uniform1i(gl.getUniformLocation(shaderProgram, "uSampler"), 0);
/* ======= Associating shaders to buffer objects =======*/
// Bind vertex buffer object
gl.bindBuffer(gl.ARRAY_BUFFER, vertex_buffer);
// Bind index buffer object
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, Index_Buffer);
// Get the attribute location
let coord = gl.getAttribLocation(shaderProgram, "coordinates");
// Point an attribute to the currently bound VBO
gl.vertexAttribPointer(coord, 3, gl.FLOAT, false, 0, 0);
// Enable the attribute
gl.enableVertexAttribArray(coord);
// Disable the depth test
gl.disable(gl.DEPTH_TEST);
/*============= Drawing the Quad ================*/
// Clear the canvas
gl.clearColor(0.5, 0.5, 0.5, 0.9);
// Clear the color buffer bit
gl.clear(gl.COLOR_BUFFER_BIT);
return { texture, numIndices: indices.length };
}
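// A single full-screen quad in clip space ([-1, 1] x [-1, 1]), built from two
// triangles, used for the backward (per-pixel) pass.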
function createBackwardTriangles() {
const vertices = [
-1.0, 1.0, 0.0, -1.0, -1.0, 0.0, 1.0, -1.0, 0.0, 1.0, 1.0, 0.0,
];
const indices = [3, 2, 1, 3, 1, 0];
return { vertices, indices };
}
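// A (gridSize + 1) x (gridSize + 1) vertex grid over [0, 1] x [0, 1]; the
// forward vertex shader displaces these vertices by f.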
function createForwardTriangles() {
const gridSize = 128;
const vertices = [];
const indices = [];
for (let i = 0; i <= gridSize; i++) {
const y = i / gridSize;
for (let j = 0; j <= gridSize; j++) {
const x = j / gridSize;
vertices.push(x, y, 0.0);
}
}
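// Two triangles per grid cell; with gridSize = 128 there are 129 * 129 = 16641
// vertices, which still fits into the 16-bit indices used by drawElements.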
for (let i = 0; i < gridSize; i++) {
for (let j = 0; j < gridSize; j++) {
indices.push(
(i + 0) * (gridSize + 1) + (j + 0),
(i + 0) * (gridSize + 1) + (j + 1),
(i + 1) * (gridSize + 1) + (j + 0),
(i + 0) * (gridSize + 1) + (j + 1),
(i + 1) * (gridSize + 1) + (j + 0),
(i + 1) * (gridSize + 1) + (j + 1)
);
}
}
return { vertices, indices };
}
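// main() sets up two independent WebGL contexts (one per canvas) that share
// the webcam video as their texture source, then starts the render loop.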
function main() {
const backwardCanvas = document.querySelector("#backward");
const backwardGLContext = backwardCanvas.getContext("webgl");
// If we don't have a GL context, give up now
if (!backwardGLContext) {
alert(
"Unable to initialize WebGL. Your browser or machine may not support it."
);
return;
}
const { vertices: backwardVertices, indices: backwardIndices } =
createBackwardTriangles();
const { texture: backwardTexture, numIndices: backwardNumIndices } =
setupGL(
backwardGLContext,
backwardVertices,
backwardIndices,
backwardVertexShaderCode.trim(),
backwardFragmentShaderCode.trim()
);
const forwardCanvas = document.querySelector("#forward");
const forwardGLContext = forwardCanvas.getContext("webgl");
// If we don't have a GL context, give up now
if (!forwardGLContext) {
alert(
"Unable to initialize WebGL. Your browser or machine may not support it."
);
return;
}
const { vertices: forwardVertices, indices: forwardIndices } =
createForwardTriangles();
const { texture: forwardTexture, numIndices: forwardNumIndices } =
setupGL(
forwardGLContext,
forwardVertices,
forwardIndices,
forwardVertexShaderCode.trim(),
forwardFragmentShaderCode.trim()
);
const inputVideoElement = document.querySelector("#input");
let copyVideo = false;
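// Render loop: once video frames are flowing (copyVideo), upload the current
// frame into both textures, then redraw both canvases every animation frame.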
function loop() {
if (copyVideo) {
updateTexture(
backwardGLContext,
backwardTexture,
inputVideoElement
);
updateTexture(forwardGLContext, forwardTexture, inputVideoElement);
}
// Set the viewport
backwardGLContext.viewport(
0,
0,
backwardGLContext.canvas.width,
backwardGLContext.canvas.height
);
// Draw the triangles
backwardGLContext.drawElements(
backwardGLContext.TRIANGLES,
backwardNumIndices,
backwardGLContext.UNSIGNED_SHORT,
0
);
// Set the viewport
forwardGLContext.viewport(
0,
0,
forwardGLContext.canvas.width,
forwardGLContext.canvas.height
);
// Draw the triangles
forwardGLContext.drawElements(
forwardGLContext.TRIANGLES,
forwardNumIndices,
forwardGLContext.UNSIGNED_SHORT,
0
);
requestAnimationFrame(loop);
}
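// Request the webcam stream and wait for both "playing" and "timeupdate"
// before copying frames, so the textures are only updated once real video
// data is available.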
if (navigator.mediaDevices.getUserMedia) {
navigator.mediaDevices
.getUserMedia({ video: true })
.then(function (stream) {
let playing = false;
let timeupdate = false;
const video = inputVideoElement;
video.addEventListener(
"playing",
() => {
playing = true;
checkReady();
},
true
);
video.addEventListener(
"timeupdate",
() => {
timeupdate = true;
checkReady();
},
true
);
function checkReady() {
if (playing && timeupdate) {
copyVideo = true;
}
}
inputVideoElement.srcObject = stream;
inputVideoElement.play();
loop();
})
.catch(function (error) {
console.log("Something went wrong!", error);
});
}
}
main();
</script>
</body>
</html>