Chunithm Chart Perspective Tool for YouTube
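Usage note (summarizing the script below): install it as a userscript (e.g. in Tampermonkey), open a YouTube watch page, and press Ctrl+Alt+Shift+C to toggle a WebGL overlay that re-projects the video onto a flat plane. The camera parameters (rotX, rotY, rotZ, distX, distY, distZ) are exposed on window so they can be adjusted from the browser console. The gl-matrix library is pulled in via the @require directive.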
// ==UserScript==
// @name Chunithm Chart Perspective Tool for YouTube
// @namespace https://fang.lu/
// @version 1.1
// @description Reverse 3D perspective back to a flat falling chart.
// @author ilufang
// @match https://www.youtube.com/watch?*
// @grant none
// @require https://cdnjs.cloudflare.com/ajax/libs/gl-matrix/2.8.1/gl-matrix-min.js
// @downloadURL https://gist.githubusercontent.com/ilufang/1651c4e4af2ba122f09407886e55d826/raw/yt-chunithm-perspective.user.js
// ==/UserScript==
/*
* Adapted from MDN WebGL Tutorial.
* https://github.com/mdn/webgl-examples/tree/gh-pages/tutorial/sample8
*/
(function() {
// Vertex shader program
const vsSource = `
attribute vec4 aVertexPosition;
attribute vec2 aTextureCoord;
uniform mat4 uModelViewMatrix;
uniform mat4 uProjectionMatrix;
varying highp vec2 vTextureCoord;
void main(void) {
gl_Position = uProjectionMatrix * uModelViewMatrix * aVertexPosition;
vTextureCoord = aTextureCoord;
}
`;
// Fragment shader program
const fsSource = `
varying highp vec2 vTextureCoord;
uniform sampler2D uSampler;
void main(void) {
highp vec4 texelColor = texture2D(uSampler, vTextureCoord);
gl_FragColor = texelColor;
}
`;
//
// Entry point
//
var video, container, canvas;
function chuni() {
video = document.querySelector("video");
container = video.parentElement;
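// Create an overlay canvas that mirrors the video element's size and
// position. The video itself is only made transparent (not removed),
// so playback continues and its frames stay available as a texture source.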
canvas = document.createElement("canvas");
canvas.width = video.videoWidth;
canvas.height = video.videoHeight;
canvas.style.position = "absolute";
canvas.style.width = video.style.width;
canvas.style.height = video.style.height;
canvas.style.left = video.style.left;
canvas.style.top = video.style.top;
canvas.style.zIndex = "9";
video.style.zIndex = "15";
video.style.opacity = "0";
container.appendChild(canvas);
const gl = canvas.getContext("webgl");
// If we don't have a GL context, give up now
if (!gl) {
alert('Unable to initialize WebGL. Your browser or machine may not support it.');
return;
}
// Initialize the shader program. The shaders here do no lighting;
// they simply map the video texture onto the quad.
const shaderProgram = initShaderProgram(gl, vsSource, fsSource);
// Collect all the info needed to use the shader program:
// look up the attribute locations for aVertexPosition and
// aTextureCoord, and look up the uniform locations.
const programInfo = {
program: shaderProgram,
attribLocations: {
vertexPosition: gl.getAttribLocation(shaderProgram, 'aVertexPosition'),
textureCoord: gl.getAttribLocation(shaderProgram, 'aTextureCoord'),
},
uniformLocations: {
projectionMatrix: gl.getUniformLocation(shaderProgram, 'uProjectionMatrix'),
modelViewMatrix: gl.getUniformLocation(shaderProgram, 'uModelViewMatrix'),
uSampler: gl.getUniformLocation(shaderProgram, 'uSampler'),
},
};
// Here's where we call the routine that builds all the
// objects we'll be drawing.
const buffers = initBuffers(gl);
const texture = initTexture(gl);
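// The texture starts as a 1x1 placeholder (see initTexture) and is
// overwritten with the current video frame on every animation frame.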
// Draw the scene repeatedly
function render() {
updateTexture(gl, texture, video);
drawScene(gl, programInfo, buffers, texture);
requestAnimationFrame(render);
}
requestAnimationFrame(render);
}
var initialized = false;
var enabled = false;
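// Ctrl+Alt+Shift+C toggles the flattened overlay. The WebGL pipeline is
// set up lazily, the first time the shortcut is pressed.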
document.body.addEventListener("keydown", e => {
if (e.ctrlKey && e.altKey && e.shiftKey && e.code === "KeyC") {
if (!initialized) {
chuni();
initialized = true;
}
enabled = !enabled;
if (enabled) {
canvas.style.display = "";
video.style.opacity = "0";
} else {
canvas.style.display = "none";
video.style.opacity = "";
}
}
});
//
// initBuffers
//
// Initialize the buffers we'll need. For this script we only draw a
// single object: a textured quad that the video frame is mapped onto.
//
function initBuffers(gl) {
// Create a buffer for the quad's vertex positions.
const positionBuffer = gl.createBuffer();
// Select the positionBuffer as the one to apply buffer
// operations to from here out.
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
// Now create the quad's corner positions. The quad spans 19.2 x 10.8
// units, a 16:9 rectangle matching the video frame's aspect ratio.
const positions = [
-9.6, -5.4, 1.0,
9.6, -5.4, 1.0,
9.6, 5.4, 1.0,
-9.6, 5.4, 1.0,
];
// Now pass the list of positions into WebGL to build the
// shape. We do this by creating a Float32Array from the
// JavaScript array, then use it to fill the current buffer.
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(positions), gl.STATIC_DRAW);
// Now set up the texture coordinates for the quad.
const textureCoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, textureCoordBuffer);
const textureCoordinates = [
0.0, 0.0,
1.0, 0.0,
1.0, 1.0,
0.0, 1.0,
];
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(textureCoordinates),
gl.STATIC_DRAW);
// Build the element array buffer; this specifies the indices
// into the vertex arrays for the quad's vertices.
const indexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, indexBuffer);
// This array defines the quad as two triangles, using indices
// into the vertex array to specify each triangle's corners.
const indices = [
0, 1, 2, 0, 2, 3, // front
];
// Now send the element array to GL
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER,
new Uint16Array(indices), gl.STATIC_DRAW);
return {
position: positionBuffer,
textureCoord: textureCoordBuffer,
indices: indexBuffer,
};
}
//
// Initialize a texture.
//
function initTexture(gl) {
const texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
// Because video frames may not be available immediately,
// start with a single blue pixel in the texture so the quad
// can be drawn right away; updateTexture replaces it each frame.
const level = 0;
const internalFormat = gl.RGBA;
const width = 1;
const height = 1;
const border = 0;
const srcFormat = gl.RGBA;
const srcType = gl.UNSIGNED_BYTE;
const pixel = new Uint8Array([0, 0, 255, 255]); // opaque blue
gl.texImage2D(gl.TEXTURE_2D, level, internalFormat,
width, height, border, srcFormat, srcType,
pixel);
// Turn off mips and set wrapping to clamp to edge so it
// will work regardless of the dimensions of the video.
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
return texture;
}
//
// Copy the current video frame into the texture. texImage2D accepts an
// HTMLVideoElement source and uploads whichever frame is currently displayed.
//
function updateTexture(gl, texture, video) {
const level = 0;
const internalFormat = gl.RGBA;
const srcFormat = gl.RGBA;
const srcType = gl.UNSIGNED_BYTE;
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, level, internalFormat,
srcFormat, srcType, video);
}
function isPowerOf2(value) {
return (value & (value - 1)) == 0;
}
window.rotX = Math.PI*(1+120/360);
window.rotY = 0;
window.rotZ = 0;
window.distX = 0;
window.distY = 1.8;
window.distZ = 4.9;
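// Because these are plain window properties, they can be tweaked live from
// the devtools console to match a particular recording's camera angle,
// e.g. (hypothetical values, adjust by eye):
//   rotX = Math.PI * (1 + 115/360); distZ = 5.2;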
//
// Draw the scene.
//
function drawScene(gl, programInfo, buffers, texture) {
gl.clearColor(0.0, 0.0, 0.0, 1.0); // Clear to black, fully opaque
gl.clearDepth(1.0); // Clear everything
gl.enable(gl.DEPTH_TEST); // Enable depth testing
gl.depthFunc(gl.LEQUAL); // Near things obscure far things
// Clear the canvas before we start drawing on it.
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
// Create a perspective matrix, a special matrix that is
// used to simulate the distortion of perspective in a camera.
// Our field of view is 45 degrees, with a width/height
// ratio that matches the display size of the canvas
// and we only want to see objects between 0.1 units
// and 100 units away from the camera.
const fieldOfView = 45 * Math.PI / 180; // in radians
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const zNear = 0.1;
const zFar = 100.0;
const projectionMatrix = mat4.create();
// note: glmatrix.js always has the first argument
// as the destination to receive the result.
mat4.perspective(projectionMatrix,
fieldOfView,
aspect,
zNear,
zFar);
// Set the drawing position to the "identity" point, which is
// the center of the scene.
const modelViewMatrix = mat4.create();
// Now move the drawing position a bit to where we want to
// start drawing the square.
mat4.translate(modelViewMatrix, // destination matrix
modelViewMatrix, // matrix to translate
[-distX, -distY, -distZ]); // amount to translate
if (rotX)
mat4.rotate(modelViewMatrix, // destination matrix
modelViewMatrix, // matrix to rotate
rotX, // amount to rotate in radians
[1, 0, 0]); // axis to rotate around (X)
if (rotY)
mat4.rotate(modelViewMatrix, // destination matrix
modelViewMatrix, // matrix to rotate
rotY, // amount to rotate in radians
[0, 1, 0]); // axis to rotate around (Y)
if (rotZ)
mat4.rotate(modelViewMatrix, // destination matrix
modelViewMatrix, // matrix to rotate
rotZ, // amount to rotate in radians
[0, 0, 1]); // axis to rotate around (Z)
// Tell WebGL how to pull out the positions from the position
// buffer into the vertexPosition attribute
{
const numComponents = 3;
const type = gl.FLOAT;
const normalize = false;
const stride = 0;
const offset = 0;
gl.bindBuffer(gl.ARRAY_BUFFER, buffers.position);
gl.vertexAttribPointer(
programInfo.attribLocations.vertexPosition,
numComponents,
type,
normalize,
stride,
offset);
gl.enableVertexAttribArray(
programInfo.attribLocations.vertexPosition);
}
// Tell WebGL how to pull out the texture coordinates from
// the texture coordinate buffer into the textureCoord attribute.
{
const numComponents = 2;
const type = gl.FLOAT;
const normalize = false;
const stride = 0;
const offset = 0;
gl.bindBuffer(gl.ARRAY_BUFFER, buffers.textureCoord);
gl.vertexAttribPointer(
programInfo.attribLocations.textureCoord,
numComponents,
type,
normalize,
stride,
offset);
gl.enableVertexAttribArray(
programInfo.attribLocations.textureCoord);
}
// Tell WebGL which indices to use to index the vertices
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, buffers.indices);
// Tell WebGL to use our program when drawing
gl.useProgram(programInfo.program);
// Set the shader uniforms
gl.uniformMatrix4fv(
programInfo.uniformLocations.projectionMatrix,
false,
projectionMatrix);
gl.uniformMatrix4fv(
programInfo.uniformLocations.modelViewMatrix,
false,
modelViewMatrix);
// Specify the texture to map onto the faces.
// Tell WebGL we want to affect texture unit 0
gl.activeTexture(gl.TEXTURE0);
// Bind the texture to texture unit 0
gl.bindTexture(gl.TEXTURE_2D, texture);
// Tell the shader we bound the texture to texture unit 0
gl.uniform1i(programInfo.uniformLocations.uSampler, 0);
{
const vertexCount = 6;
const type = gl.UNSIGNED_SHORT;
const offset = 0;
gl.drawElements(gl.TRIANGLES, vertexCount, type, offset);
}
}
//
// Initialize a shader program, so WebGL knows how to draw our data
//
function initShaderProgram(gl, vsSource, fsSource) {
const vertexShader = loadShader(gl, gl.VERTEX_SHADER, vsSource);
const fragmentShader = loadShader(gl, gl.FRAGMENT_SHADER, fsSource);
// Create the shader program
const shaderProgram = gl.createProgram();
gl.attachShader(shaderProgram, vertexShader);
gl.attachShader(shaderProgram, fragmentShader);
gl.linkProgram(shaderProgram);
// If creating the shader program failed, alert
if (!gl.getProgramParameter(shaderProgram, gl.LINK_STATUS)) {
alert('Unable to initialize the shader program: ' + gl.getProgramInfoLog(shaderProgram));
return null;
}
return shaderProgram;
}
//
// creates a shader of the given type, uploads the source and
// compiles it.
//
function loadShader(gl, type, source) {
const shader = gl.createShader(type);
// Send the source to the shader object
gl.shaderSource(shader, source);
// Compile the shader program
gl.compileShader(shader);
// See if it compiled successfully
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
alert('An error occurred compiling the shaders: ' + gl.getShaderInfoLog(shader));
gl.deleteShader(shader);
return null;
}
return shader;
}
})();