Last active
February 8, 2022 16:54
-
-
Save micahscopes/e44d7ec3b293148bfa3e to your computer and use it in GitHub Desktop.
A/V hybrid synth (routes pixels from a WebGL to WebAudio)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
<!DOCTYPE html> | |
<html><head> | |
<meta charset="utf-8">
<title>reaction diffusion space ship (a/v synth)</title> | |
<script id="shader-vs" type="x-shader/x-vertex"> | |
// Pass-through vertex shader shared by all four programs: forwards the
// clip-space quad position and hands the texture coordinate to the fragment stage.
attribute vec3 aPos;
attribute vec2 aTexCoord;
varying vec2 pixel;
void main(void) {
gl_Position = vec4(aPos, 1.);
pixel = aTexCoord;
}
</script> | |
<script id="shader-fs-blur-horizontal" type="x-shader/x-fragment"> | |
#ifdef GL_ES
precision highp float;
#endif
// Horizontal pass of a 9-tap separable Gaussian blur.
// Weights sum to 0.98, hence the renormalisation at the end.
// Adapted from http://www.gamerendering.com/2008/10/11/gaussian-blur-filter-shader/
uniform sampler2D src_tex;
varying vec2 pixel;
uniform vec2 pixelSize;

// One weighted sample, `offset` texels to the left/right of the current pixel.
vec4 tap(float offset, float weight) {
    return texture2D(src_tex, vec2(pixel.x + offset * pixelSize.x, pixel.y)) * weight;
}

void main(void) {
    vec4 acc = vec4(0.0);
    acc += tap(-4.0, 0.05);
    acc += tap(-3.0, 0.09);
    acc += tap(-2.0, 0.12);
    acc += tap(-1.0, 0.15);
    acc += tap( 0.0, 0.16);
    acc += tap( 1.0, 0.15);
    acc += tap( 2.0, 0.12);
    acc += tap( 3.0, 0.09);
    acc += tap( 4.0, 0.05);
    gl_FragColor.xyz = acc.xyz / 0.98; // renormalise (kernel weights sum to 0.98)
    gl_FragColor.a = 1.;
}
</script> | |
<script id="shader-fs-blur-vertical" type="x-shader/x-fragment"> | |
#ifdef GL_ES
precision highp float;
#endif
// Vertical pass of a 9-tap separable Gaussian blur (pairs with the horizontal pass).
// Weights sum to 0.98, hence the renormalisation at the end.
// Adapted from http://www.gamerendering.com/2008/10/11/gaussian-blur-filter-shader/
uniform sampler2D src_tex;
varying vec2 pixel;
uniform vec2 pixelSize;

// One weighted sample, `offset` texels above/below the current pixel.
vec4 tap(float offset, float weight) {
    return texture2D(src_tex, vec2(pixel.x, pixel.y + offset * pixelSize.y)) * weight;
}

void main(void) {
    vec4 acc = vec4(0.0);
    acc += tap(-4.0, 0.05);
    acc += tap(-3.0, 0.09);
    acc += tap(-2.0, 0.12);
    acc += tap(-1.0, 0.15);
    acc += tap( 0.0, 0.16);
    acc += tap( 1.0, 0.15);
    acc += tap( 2.0, 0.12);
    acc += tap( 3.0, 0.09);
    acc += tap( 4.0, 0.05);
    gl_FragColor.xyz = acc.xyz / 0.98; // renormalise (kernel weights sum to 0.98)
    gl_FragColor.a = 1.;
}
</script> | |
<script id="shader-fs-advance" type="x-shader/x-fragment"> | |
#ifdef GL_ES
precision highp float;
#endif
// Simulation-step ("advance") shader: samples the previous frame at a position
// zoomed toward the mouse, injects per-pixel noise, and applies a
// reaction-diffusion-style term using the blurred copy of the previous frame.
uniform sampler2D sampler_prev;
uniform sampler2D sampler_prev_n;
uniform sampler2D sampler_blur;
uniform sampler2D sampler_noise;
uniform sampler2D sampler_noise_n;
varying vec2 pixel;
uniform vec2 pixelSize;
uniform vec4 rnd;
uniform vec2 mouse;
uniform float zoomRate;
uniform float rateRD;
uniform float time;
void main(void) {
// Sample position pulled toward the mouse focus; zoomRate < 1 zooms in each frame.
vec2 zoom_in = mouse + (pixel-mouse)*zoomRate;
// Per-pixel random values; rnd.xy scrolls the noise texture to a fresh spot every frame.
vec4 rand_noise = texture2D(sampler_noise, zoom_in + vec2(rnd.x, rnd.y));
zoom_in += (rand_noise.yz-0.5)*pixelSize*1.*1.; // error-diffusion
// Previous frame plus a small noise injection.
gl_FragColor = texture2D(sampler_prev, zoom_in) + (rand_noise-0.5)*0.12;
// (blur - prev) approximates a Laplacian; scaling by rateRD drives the pattern formation.
gl_FragColor -= (texture2D(sampler_blur, zoom_in) - texture2D(sampler_prev, zoom_in))*rateRD; // reaction-diffusion
gl_FragColor.a = 1.;
}
</script> | |
<script id="shader-fs-composite" type="x-shader/x-fragment"> | |
#ifdef GL_ES
precision highp float;
#endif
// Composite (display) shader: shows the current simulation buffer unchanged,
// forcing opaque alpha. The extra samplers/uniforms are kept so setUniforms()
// can treat this program exactly like the advance program.
uniform sampler2D sampler_prev;
uniform sampler2D sampler_prev_n;
uniform sampler2D sampler_blur;
uniform sampler2D sampler_noise;
uniform sampler2D sampler_noise_n;
varying vec2 pixel;
uniform vec2 pixelSize;
uniform vec4 rnd;
uniform vec2 mouse;
uniform float time;
void main(void) {
    vec4 color = texture2D(sampler_prev, pixel);
    color.a = 1.;
    gl_FragColor = color;
}
</script> | |
<script type="text/javascript"> | |
function getShader(gl, id) {
    // Compile the GLSL source stored in the <script> element with the given id.
    // Returns the compiled WebGLShader, or null if the element is missing or its
    // type attribute is not one of the two recognised shader types.
    var shaderScript = document.getElementById(id);
    if (!shaderScript)
        return null; // guard: a typo'd id no longer throws on .firstChild
    // textContent concatenates all descendant text nodes — equivalent to the
    // manual nodeType==3 child walk this replaces.
    var str = shaderScript.textContent;
    var shader;
    if (shaderScript.type == "x-shader/x-fragment")
        shader = gl.createShader(gl.FRAGMENT_SHADER);
    else if (shaderScript.type == "x-shader/x-vertex")
        shader = gl.createShader(gl.VERTEX_SHADER);
    else
        return null;
    gl.shaderSource(shader, str);
    gl.compileShader(shader);
    if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS))
        alert(gl.getShaderInfoLog(shader));
    return shader;
}
// Cross-browser requestAnimationFrame shim.
// Bug fix: the original only checked the webkit/moz prefixed names and skipped
// the standard window.requestAnimationFrame. The native function is bound to
// window because calling a detached rAF reference throws in some browsers.
requestAnimFrame = (function() {
    var native = window.requestAnimationFrame || window.webkitRequestAnimationFrame || window.mozRequestAnimationFrame;
    if (native)
        return native.bind(window);
    return function(callback, element) {
        setTimeout(callback, 1000 / 60); // ~60fps fallback
    };
})();
// ---- WebGL objects ----
var gl;
var prog_advance; // simulation-step program
var prog_composite; // screen-output program
var prog_blur_horizontal;
var prog_blur_vertical;
var FBO_main; // renders into texture_main_l
var FBO_main2; // renders into texture_main2_l (ping-pong partner)
var FBO_helper; // intermediate target between the two blur passes
var FBO_blur; // renders into texture_blur
var FBO_noise;
var texture_main_l; // main, linear
var texture_main_n; // main, nearest (accurate pixel access on the same buffer)
var texture_main2_l; // main double buffer, linear
var texture_main2_n; // main double buffer, nearest (accurate pixel access on the same buffer)
var texture_helper; // to be used when a buffer for multi-pass shader programs is needed (2-pass Gaussian blur)
var texture_blur; // blur result
var texture_noise_n; // noise pixel accurate
var texture_noise_l; // noise interpolated pixel access
// ---- Animation / interaction state ----
var drawWebGLonAudioBufferFill = true; // when true, draw() runs inside the audio callback (Tab toggles)
var freeze = false; // spacebar pauses the simulation
var zoomRate = 0.99; // < 1 zooms in toward the mouse each frame ('a'/'z' ramp it)
var rateRD = -0.5; // reaction-diffusion strength ('s'/'x' ramp it)
var delay = 3; // ms between frames in the self-driven animation loop
var it = 1; // ping-pong flag, flipped between +1/-1 every frame by draw()
var frames = 0; // frame counter for the FPS readout
var time;
var mouseXFac = 0.5; // normalized zoom-focus coordinates (window mousemove)
var mouseYFac = 0.5;
var canvasMouseX = 0.5; // origin of the readPixels window used for audio (canvas mousemove)
var canvasMouseY = 0.5;
var audioBufferWidth = 512;
var audioBufferHeight = 8; var animation;
var timer; // interval id for the FPS readout
var sizeX = 512; // simulation texture dimensions
var sizeY = 512;
var accelerating = false; // 'a' held: zoomRate decreasing
var deccelerating = false; // 'z' held: zoomRate increasing
var acceleratingRD = false; // 's' held: rateRD increasing
var decceleratingRD = false; // 'x' held: rateRD decreasing
function load() {
// One-time setup, called from <body onload>: canvas + WebGL context, input
// handlers, the four shader programs, the full-screen quad, all textures and
// framebuffers, initial uniforms — then start the animation loop and audio.
clearInterval(timer);
var c = document.getElementById("c");
c.height = window.innerHeight-175;
c.width = c.height; // square canvas
try {
gl = c.getContext("experimental-webgl", { depth : false, preserveDrawingBuffer: true });
} catch (e) {
}
if (!gl) {
alert("Your browser does not support WebGL");
return;
}
// Window-wide mouse position sets the zoom focus (offset so the page center
// lands near texture coordinate 0.5).
window.onmousemove = function(evt) {
mouseXFac = evt.clientX / window.innerWidth -0.25;
mouseYFac = - evt.clientY / window.innerHeight + 0.75;
/*sampleOffset = Math.abs(evt.clientY / window.innerHeight * 10 - 5);*/
};
// Mouse over the canvas positions the pixel window the audio code reads back.
c.onmousemove = function(evt) {
canvasMouseX = Math.min(evt.layerX, c.width-audioBufferWidth);
canvasMouseY = c.height - evt.layerY; // flip Y: GL origin is bottom-left
}
// Keyboard: Tab=9 toggles audio-driven drawing, space=32 freezes,
// q=81/w=87 reset the rates, a=65/z=90 ramp zoomRate, s=83/x=88 ramp rateRD.
window.onkeydown = function(evt) {
var k = evt ? evt.which : window.event.keyCode;
if (k == 9)
toggleDrawWebGLonAudioBufferFill();
if (k == 32) { freeze = !freeze; }
if (k == 81) { zoomRate = 1; }
if (k == 87) { rateRD = 0; }
if (k == 65) { accelerating = true; }
if (k == 90) { deccelerating = true; }
if (k == 83) { acceleratingRD = true; }
if (k == 88) { decceleratingRD = true; }
}
window.onkeyup = function(evt) {
var k = evt ? evt.which : window.event.keyCode;
if (k == 65) { accelerating = false; }
if (k == 90) { deccelerating = false; }
if (k == 83) { acceleratingRD = false; }
if (k == 88) { decceleratingRD = false; }
}
// Compile and link the four programs; all share the pass-through vertex shader.
prog_advance = gl.createProgram();
gl.attachShader(prog_advance, getShader(gl, "shader-vs"));
gl.attachShader(prog_advance, getShader(gl, "shader-fs-advance"));
gl.linkProgram(prog_advance);
prog_composite = gl.createProgram();
gl.attachShader(prog_composite, getShader(gl, "shader-vs"));
gl.attachShader(prog_composite, getShader(gl, "shader-fs-composite"));
gl.linkProgram(prog_composite);
prog_blur_horizontal = gl.createProgram();
gl.attachShader(prog_blur_horizontal, getShader(gl, "shader-vs"));
gl.attachShader(prog_blur_horizontal, getShader(gl, "shader-fs-blur-horizontal"));
gl.linkProgram(prog_blur_horizontal);
prog_blur_vertical = gl.createProgram();
gl.attachShader(prog_blur_vertical, getShader(gl, "shader-vs"));
gl.attachShader(prog_blur_vertical, getShader(gl, "shader-fs-blur-vertical"));
gl.linkProgram(prog_blur_vertical);
// Full-screen quad: positions then texture coords packed into one buffer.
var posBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, posBuffer);
var vertices = new Float32Array([ -1, -1, 0, 1, -1, 0, -1, 1, 0, 1, 1, 0 ]);
var aPosLoc = gl.getAttribLocation(prog_advance, "aPos");
gl.enableVertexAttribArray(aPosLoc);
var aTexLoc = gl.getAttribLocation(prog_advance, "aTexCoord");
gl.enableVertexAttribArray(aTexLoc);
var texCoords = new Float32Array([ 0, 0, 1, 0, 0, 1, 1, 1 ]);
var texCoordOffset = vertices.byteLength;
gl.bufferData(gl.ARRAY_BUFFER, texCoordOffset + texCoords.byteLength, gl.STATIC_DRAW);
gl.bufferSubData(gl.ARRAY_BUFFER, 0, vertices);
gl.bufferSubData(gl.ARRAY_BUFFER, texCoordOffset, texCoords);
// NOTE(review): gl.FALSE does not exist in WebGL; it evaluates to undefined,
// which coerces to false, so this works — the literal false would be correct.
gl.vertexAttribPointer(aPosLoc, 3, gl.FLOAT, gl.FALSE, 0, 0);
gl.vertexAttribPointer(aTexLoc, 2, gl.FLOAT, gl.FALSE, 0, texCoordOffset);
// Initial pixel data: random RGB noise, and an all-black opaque buffer.
var noisepixels = [];
var pixels = [];
for ( var i = 0; i < sizeX; i++) {
for ( var j = 0; j < sizeY; j++) {
noisepixels.push(Math.random() * 255, Math.random() * 255, Math.random() * 255, 255);
pixels.push(0, 0, 0, 255);
}
}
/*
* if (Math.random() > density) pixels.push(0, 0, 0, 0); else pixels.push(255, 0, 0, 0);
*/
// Main simulation buffer: same bytes uploaded twice, once LINEAR, once NEAREST.
var rawData = new Uint8Array(noisepixels);
texture_main_l = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture_main_l);
gl.pixelStorei(gl.UNPACK_ALIGNMENT, 1);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, sizeX, sizeY, 0, gl.RGBA, gl.UNSIGNED_BYTE, rawData);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
texture_main_n = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture_main_n);
gl.pixelStorei(gl.UNPACK_ALIGNMENT, 1);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, sizeX, sizeY, 0, gl.RGBA, gl.UNSIGNED_BYTE, rawData);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
// NOTE(review): the next line is duplicated in the original; the second
// assignment merely reallocates the same bytes and is harmless.
rawData = new Uint8Array(noisepixels);
rawData = new Uint8Array(noisepixels);
// Ping-pong partner buffer, again in LINEAR and NEAREST variants.
texture_main2_l = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture_main2_l);
gl.pixelStorei(gl.UNPACK_ALIGNMENT, 1);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, sizeX, sizeY, 0, gl.RGBA, gl.UNSIGNED_BYTE, rawData);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
texture_main2_n = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture_main2_n);
gl.pixelStorei(gl.UNPACK_ALIGNMENT, 1);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, sizeX, sizeY, 0, gl.RGBA, gl.UNSIGNED_BYTE, rawData);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
// Black scratch texture for the intermediate blur pass.
rawData = new Uint8Array(pixels);
texture_helper = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture_helper);
gl.pixelStorei(gl.UNPACK_ALIGNMENT, 1);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, sizeX, sizeY, 0, gl.RGBA, gl.UNSIGNED_BYTE, rawData);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
// Black texture that will hold the finished blur result.
rawData = new Uint8Array(pixels);
texture_blur = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture_blur);
gl.pixelStorei(gl.UNPACK_ALIGNMENT, 1);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, sizeX, sizeY, 0, gl.RGBA, gl.UNSIGNED_BYTE, rawData);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
// Static noise texture in LINEAR and NEAREST variants (sampled by the advance shader).
rawData = new Uint8Array(noisepixels);
texture_noise_l = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture_noise_l);
gl.pixelStorei(gl.UNPACK_ALIGNMENT, 1);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, sizeX, sizeY, 0, gl.RGBA, gl.UNSIGNED_BYTE, rawData);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
texture_noise_n = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture_noise_n);
gl.pixelStorei(gl.UNPACK_ALIGNMENT, 1);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, sizeX, sizeY, 0, gl.RGBA, gl.UNSIGNED_BYTE, rawData);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
// gl.uniform1i(gl.getUniformLocation(prog, "uTexSamp"), 0);
// One framebuffer per render-target texture.
FBO_main = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, FBO_main);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture_main_l, 0);
FBO_main2 = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, FBO_main2);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture_main2_l, 0);
FBO_helper = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, FBO_helper);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture_helper, 0);
FBO_blur = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, FBO_blur);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture_blur, 0);
FBO_noise = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, FBO_noise);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture_noise_l, 0);
// Initial uniforms for each program.
gl.useProgram(prog_advance);
setUniforms(prog_advance);
gl.useProgram(prog_blur_horizontal);
gl.uniform2f(gl.getUniformLocation(prog_blur_horizontal, "pixelSize"), 1. / sizeX, 1. / sizeY);
gl.uniform1i(gl.getUniformLocation(prog_blur_horizontal, "src_tex"), 0);
gl.useProgram(prog_blur_vertical);
gl.uniform2f(gl.getUniformLocation(prog_blur_vertical, "pixelSize"), 1. / sizeX, 1. / sizeY);
gl.uniform1i(gl.getUniformLocation(prog_blur_vertical, "src_tex"), 0);
gl.useProgram(prog_composite);
setUniforms(prog_composite);
// Texture units 2-4 hold the blur and noise textures for the whole run;
// units 0-1 are rebound each frame by draw().
gl.activeTexture(gl.TEXTURE2);
gl.bindTexture(gl.TEXTURE_2D, texture_blur);
gl.activeTexture(gl.TEXTURE3);
gl.bindTexture(gl.TEXTURE_2D, texture_noise_l);
gl.activeTexture(gl.TEXTURE4);
gl.bindTexture(gl.TEXTURE_2D, texture_noise_n);
calculateBlurTexture();
// FPS readout every 500 ms, then kick off rendering and audio.
timer = setInterval(fr, 500);
time = new Date().getTime();
animation = "animate";
anim();
startAudio();
}
function setUniforms(program) {
    // Push the per-frame uniform set shared by the advance and composite programs.
    var loc = function(name) {
        return gl.getUniformLocation(program, name);
    };
    gl.uniform2f(loc("pixelSize"), 1. / sizeX, 1. / sizeY);
    gl.uniform4f(loc("rnd"), Math.random(), Math.random(), Math.random(), Math.random());
    gl.uniform1f(loc("time"), time);
    gl.uniform1f(loc("zoomRate"), zoomRate);
    gl.uniform1f(loc("rateRD"), rateRD);
    gl.uniform2f(loc("mouse"), mouseXFac, mouseYFac);
    // Samplers use a fixed texture-unit assignment (bound via gl.activeTexture elsewhere).
    gl.uniform1i(loc("sampler_prev"), 0);
    gl.uniform1i(loc("sampler_prev_n"), 1);
    gl.uniform1i(loc("sampler_blur"), 2);
    gl.uniform1i(loc("sampler_noise"), 3);
    gl.uniform1i(loc("sampler_noise_n"), 4);
}
function calculateBlurTexture() {
    // Two-pass separable Gaussian blur of the most recently written simulation
    // buffer into texture_blur (via FBO_blur).
    // Pass 1: horizontal blur into the helper FBO.
    gl.viewport(0, 0, sizeX, sizeY);
    gl.useProgram(prog_blur_horizontal);
    gl.activeTexture(gl.TEXTURE0);
    // `it` flips sign every frame and selects which ping-pong buffer was written
    // last. (The original if/else branches differed only in the bound texture.)
    gl.bindTexture(gl.TEXTURE_2D, it < 0 ? texture_main2_l : texture_main_l);
    gl.bindFramebuffer(gl.FRAMEBUFFER, FBO_helper);
    gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
    gl.flush();
    // Pass 2: vertical blur of the helper texture into the blur FBO.
    gl.viewport(0, 0, sizeX, sizeY);
    gl.useProgram(prog_blur_vertical);
    gl.activeTexture(gl.TEXTURE0);
    gl.bindTexture(gl.TEXTURE_2D, texture_helper);
    gl.bindFramebuffer(gl.FRAMEBUFFER, FBO_blur);
    gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
    gl.flush();
    // Fix: the original issued this vertical draw a second time; it read the
    // same unchanged helper texture and wrote identical pixels, so the
    // redundant draw + flush have been removed.
}
function draw() {
    // One frame: advance the double-buffered simulation, rebuild the blur
    // texture, then composite the fresh buffer to the canvas.
    if (freeze) return;
    // Held keys ramp the zoom and reaction-diffusion rates (see load()'s key handlers).
    if (accelerating) zoomRate -= 0.001;
    if (deccelerating) zoomRate += 0.001;
    if (acceleratingRD) rateRD += 0.03;
    if (decceleratingRD) rateRD -= 0.03;
    // Advance pass: read one main buffer (units 0/1), write the other (ping-pong via `it`).
    gl.viewport(0, 0, sizeX, sizeY);
    gl.useProgram(prog_advance);
    setUniforms(prog_advance);
    if (it > 0) {
        gl.activeTexture(gl.TEXTURE0);
        gl.bindTexture(gl.TEXTURE_2D, texture_main_l); // interpolated input
        gl.activeTexture(gl.TEXTURE1);
        gl.bindTexture(gl.TEXTURE_2D, texture_main_n); // "nearest" input
        gl.bindFramebuffer(gl.FRAMEBUFFER, FBO_main2); // write to buffer
    } else {
        gl.activeTexture(gl.TEXTURE0);
        gl.bindTexture(gl.TEXTURE_2D, texture_main2_l); // interpolated
        gl.activeTexture(gl.TEXTURE1);
        gl.bindTexture(gl.TEXTURE_2D, texture_main2_n); // "nearest"
        gl.bindFramebuffer(gl.FRAMEBUFFER, FBO_main); // write to buffer
    }
    gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
    gl.flush();
    calculateBlurTexture();
    // Composite pass: draw the freshly written buffer to the default framebuffer.
    // Bug fix: use the actual drawing-buffer size instead of a hard-coded
    // 1024x1024 viewport, so the output exactly fills the canvas.
    gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight);
    gl.useProgram(prog_composite);
    setUniforms(prog_composite);
    if (it < 0) {
        gl.activeTexture(gl.TEXTURE0);
        gl.bindTexture(gl.TEXTURE_2D, texture_main_l);
        gl.activeTexture(gl.TEXTURE1);
        gl.bindTexture(gl.TEXTURE_2D, texture_main_n);
    } else {
        gl.activeTexture(gl.TEXTURE0);
        gl.bindTexture(gl.TEXTURE_2D, texture_main2_l);
        gl.activeTexture(gl.TEXTURE1);
        gl.bindTexture(gl.TEXTURE_2D, texture_main2_n);
    }
    gl.bindFramebuffer(gl.FRAMEBUFFER, null);
    gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
    gl.flush();
    it = -it; // flip the ping-pong buffers
    frames++;
}
function anim() {
    // Self-driven render loop, used only while audio-callback drawing is off
    // (Tab toggles between the two modes).
    if (!drawWebGLonAudioBufferFill) {
        draw();
        switch (animation) {
        case "animate":
            // Fix: pass a real function to setTimeout instead of an eval'd
            // string ("requestAnimFrame(anim)") — same scheduling, no implicit eval.
            setTimeout(function() { requestAnimFrame(anim); }, delay);
            break;
        case "reset":
            load();
            break;
        }
    }
}
function toggleDrawWebGLonAudioBufferFill() {
    // Flip between audio-callback-driven drawing and the standalone anim() loop.
    drawWebGLonAudioBufferFill = !drawWebGLonAudioBufferFill;
    if (!drawWebGLonAudioBufferFill)
        anim(); // restart the self-driven loop now that audio no longer draws
}
function setDelay(v) { | |
delay = parseInt(v); | |
} | |
function fr() {
    // FPS reporter, run on a 500 ms interval: frames counted since the last call.
    var ti = new Date().getTime();
    var fps = Math.round(1000 * frames / (ti - time));
    // Bug fix: #framerate is a <div>, which has no .value property — assigning
    // it displayed nothing. Write textContent so the number actually shows.
    document.getElementById("framerate").textContent = fps;
    frames = 0;
    time = ti;
}
</script> | |
<script> | |
function spiralIndex(w, h) {
    // Return row-major pixel indices (width w) visiting a w x h rectangle in an
    // inward clockwise spiral starting at the top-left corner.
    var a = [];
    var dh = [0, h]; // remaining [top, bottom) row bounds
    var dw = [0, w]; // remaining [left, right) column bounds
    function ddw() {
        return dw[1] - dw[0];
    }
    function ddh() {
        return dh[1] - dh[0];
    }
    // Fix: use the logical short-circuit && instead of bitwise & on booleans
    // (same truth table here, but & forced integer coercion on every iteration).
    while (ddw() > 0 && ddh() > 0) {
        if (ddw() > 0) {
            // top edge, left -> right
            for (var i = dw[0]; i < dw[1]; i++) {
                a.push(i + dh[0] * w);
            }
            dh[0] += 1;
        }
        dw[1] -= 1;
        if (ddh() > 0) {
            // right edge, top -> bottom
            for (var i = dh[0]; i < dh[1]; i++) {
                a.push(i * w + dw[1]);
            }
        }
        dh[1] -= 1;
        if (ddw() > 0) {
            // bottom edge, right -> left
            for (var i = dw[1]; i > dw[0]; i--) {
                a.push(i + (dh[1]) * w - 1);
            }
        }
        if (ddh() > 0) {
            // left edge, bottom -> top
            for (var i = dh[1]; i > dh[0]; i--) {
                a.push((i * w - dw[1] - 1));
            }
            dw[0] += 1;
        }
    }
    return a;
}
// ---- Audio graph: three oscillators (R, G, B) frequency-modulated by pixels ----
var audioBufferSize = 1024;
var width = 32; // readback window is width x height = audioBufferSize pixels
var height = audioBufferSize/width;
var bufferSpiralIndex = spiralIndex(width,height); // spiral traversal order of that window
var pixels = new Uint8Array(4 * audioBufferSize); // RGBA bytes filled by gl.readPixels
var p = new Int32Array(pixels.buffer); // same bytes viewed as one 32-bit word per pixel
window.AudioContext = window.AudioContext || window.webkitAudioContext;
var context = new AudioContext();
// High-shelf filter shared by all three channels before the destination.
var filter = context.createBiquadFilter();
filter.type = 'highshelf';
filter.frequency.value = 5200;
filter.Q.value = 1;
filter.connect(context.destination);
// Red channel: 20 Hz sine through its own gain stage.
// Fix: the original called red.connect(redGain) twice; duplicate connections
// between the same output/input are ignored per the Web Audio spec, so the
// redundant calls (here and for green) have been removed.
var redGain = context.createGain();
redGain.gain.value = 0.3;
var red = context.createOscillator();
red.connect(redGain);
redGain.connect(filter);
red.frequency.value = 20;
red.type = 'sine';
red.start(0);
// Green channel: 30 Hz sine.
var greenGain = context.createGain();
greenGain.gain.value = 0.3;
var green = context.createOscillator();
green.connect(greenGain);
greenGain.connect(filter);
green.frequency.value = 30;
green.type = 'sine';
green.start(0);
// Blue channel: 50 Hz sine.
var blueGain = context.createGain();
blueGain.gain.value = 0.3;
var blue = context.createOscillator();
blue.connect(blueGain);
blueGain.connect(filter);
blue.frequency.value = 50;
blue.type = 'sine';
blue.start(0);
function startAudio(){
// Wire three ScriptProcessorNodes whose outputs frequency-modulate the R/G/B
// oscillators. The red processor also drives the WebGL draw and the pixel
// readback; green/blue reuse the same pixel buffer.
// Create a ScriptProcessorNode with a bufferSize of audioBufferSize and a single output channel
var redModulator = context.createScriptProcessor(audioBufferSize, 0, 1);
// Give the node a function to process audio events
fillbuffer = function(audioProcessingEvent) {
// Advance the simulation in lock-step with the audio callback (Tab toggles this).
if(drawWebGLonAudioBufferFill) draw();
// Grab a width x height block of canvas pixels at the mouse position.
gl.readPixels(canvasMouseX,canvasMouseY,width,height,gl.RGBA, gl.UNSIGNED_BYTE,pixels);
// The output buffer contains the samples that will be modified and played
var outputBuffer = audioProcessingEvent.outputBuffer;
// Loop through the output channels (in this case there is only one)
for (var channel = 0; channel < outputBuffer.numberOfChannels; channel++) {
var outData = outputBuffer.getChannelData(channel);
// Loop through the audioBufferSize samples
for (var i = 0; i < outputBuffer.length; i++) {
// NOTE(review): `sample` is an implicit global — presumably intended as a local.
sample = bufferSpiralIndex[i];
// Low byte of the 32-bit pixel word = red channel on little-endian hosts.
var r = (p[sample] >> (0)) & 0xff;
// NOTE(review): pow of a 0..255 value yields large numbers; these samples feed
// red.frequency (FM), not the speakers directly — confirm the intended scale.
outData[i] = Math.pow(r,1+0.05*audioBufferSize/4096+Math.random()*0.1);
}
}
// Alternate the spiral's direction on every buffer.
bufferSpiralIndex = bufferSpiralIndex.reverse();
}
redModulator.onaudioprocess = fillbuffer;
var greenModulator = context.createScriptProcessor(audioBufferSize, 0, 1);
updateGreen = function(evt) {
var outputBuffer = evt.outputBuffer;
for (var channel = 0; channel < outputBuffer.numberOfChannels; channel++) {
var outData = outputBuffer.getChannelData(channel);
// Loop through the audioBufferSize samples
for (var i = 0; i < outputBuffer.length; i++) {
sample = bufferSpiralIndex[i];
// NOTE(review): no-op — a typed array's .buffer property is read-only;
// p already views pixels.buffer.
p.buffer = pixels.buffer;
// Second byte of the pixel word = green channel on little-endian hosts.
var g = (p[sample] >> (8)) & 0xff;
outData[i] = Math.pow(g,1+0.1*audioBufferSize/4096+Math.random()*0.1);
}
}
}
greenModulator.onaudioprocess = updateGreen;
var blueModulator = context.createScriptProcessor(audioBufferSize, 0, 1);
updateBlue = function(evt) {
var outputBuffer = evt.outputBuffer;
for (var channel = 0; channel < outputBuffer.numberOfChannels; channel++) {
var outData = outputBuffer.getChannelData(channel);
// Loop through the audioBufferSize samples
for (var i = 0; i < outputBuffer.length; i++) {
sample = bufferSpiralIndex[i];
// NOTE(review): no-op assignment, as in updateGreen above.
p.buffer = pixels.buffer;
// Third byte of the pixel word = blue channel on little-endian hosts.
var b = (p[sample] >> (16)) & 0xff;
outData[i] = Math.pow(b,1+0.2*audioBufferSize/4096+Math.random()*0.1)+b;
}
}
}
blueModulator.onaudioprocess = updateBlue;
// Connect the node to the context to start playing the sound
// (each processor's output modulates its oscillator's frequency AudioParam).
redModulator.connect(red.frequency);
greenModulator.connect(green.frequency);
blueModulator.connect(blue.frequency);
//redModulator.connect(redGain.gain);
//greenModulator.connect(greenGain.gain);
//blueModulator.connect(blueGain.gain);
}
</script> | |
</head> | |
<body onload="load()" style="text-align:center; background: black; font-family: sans; color:white; font-size: 0.6em;"> | |
<style>a{color:#aaa} body{cursor:all-scroll}</style> | |
<div style="width:800px; margin: 0 auto; text-align: left;"> | |
<h2>reaction diffusion space ship</h2> | |
<p>Welcome to virtual reaction diffusion space. It is filled with many strange space places! Please be careful if you have epilepsy.</p>
<p>There is a small spiral of pixels on the horizon. These are sent to an audio buffer and used to modulate three tones, R, G and B, so that you can hear the strange space pattern creature-things you may discover.</p>
<p>Controls: Hit the spacebar for a total freeze! Use the mouse to set focus. 'q', 'a', 'z' for zoom rate. 'w', 's', 'x' for reaction diffusion control.</p>
<p>Example: Hold down 'a' and 's' to initiate warp speed. When it gets to be too much hit 'q' and 'w'.</p>
</div> | |
<div id='framerate'></div> | |
<canvas id="c"></canvas> | |
<p>created by @micahphones. <a href="http://gist.github.com/timbresmith/e44d7ec3b293148bfa3e">click here for github gist.</a> original reaction-diffusion WebGL <a href="http://www.chromeexperiments.com/detail/reaction-diffusion/">shader demo</a> by <a href="https://twitter.com/flexi23">@Flexi23</a></p> | |
</body></html> |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment