// @run
// True on phones/tablets (including iPadOS, which reports itself as
// "MacIntel" with multitouch). Used to pick the CPU drawing fallback.
const isMobile = (() => {
  // Guard so this module can also be loaded outside a browser (tests/SSR).
  if (typeof navigator === 'undefined') return false;
  return /Android|webOS|iPhone|iPad|iPod|BlackBerry|IEMobile|Opera Mini/i.test(navigator.userAgent)
    || (navigator.platform === 'MacIntel' && navigator.maxTouchPoints > 1);
})();

// Shared pass-through vertex shader: forwards UVs and applies the standard MVP transform.
const vertexShader = `varying vec2 vUv;void main() { vUv = uv; gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);}`;

// Inline SVG icons for the small per-canvas control buttons.
const resetSvg = `<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="1" stroke-linecap="round" stroke-linejoin="round"><path stroke="none" d="M0 0h24v24H0z" fill="none"/><path d="M20 11a8.1 8.1 0 0 0 -15.5 -2m-.5 -4v4h4" /><path d="M4 13a8.1 8.1 0 0 0 15.5 2m.5 4v-4h-4" /></svg>`;
const clearSvg = `<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="1" stroke-linecap="round" stroke-linejoin="round"><path stroke="none" d="M0 0h24v24H0z" fill="none"/><path d="M19 20h-10.5l-4.21 -4.3a1 1 0 0 1 0 -1.41l10 -10a1 1 0 0 1 1.41 0l5 5a1 1 0 0 1 0 1.41l-9.2 9.3" /><path d="M18 13.3l-6.3 -6.3" /></svg>`;
const sunMoonSvg = `<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"><path stroke="none" d="M0 0h24v24H0z" fill="none"/><path d="M9.173 14.83a4 4 0 1 1 5.657 -5.657" /><path d="M11.294 12.707l.174 .247a7.5 7.5 0 0 0 8.845 2.492a9 9 0 0 1 -14.671 2.914" /><path d="M3 12h1" /><path d="M12 3v1" /><path d="M5.6 5.6l.7 .7" /><path d="M3 21l18 -18" /></svg>`;

// Parse "#rrggbb" (leading '#' optional) into {r, g, b} 0-255, or null if malformed.
function hexToRgb(hex) {
  const result = /^#?([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})$/i.exec(hex);
  return result ? {
    r: parseInt(result[1], 16),
    g: parseInt(result[2], 16),
    b: parseInt(result[3], 16)
  } : null;
}

// Format r/g/b (0-255 each) as "#rrggbb". The 1<<24 trick guarantees 6 hex digits.
function rgbToHex(r, g, b) {
  return "#" + ((1 << 24) + (r << 16) + (g << 8) + b).toString(16).slice(1);
}

// This is the html plumbing / structure / controls for little canvases.
// NOTE: the name is misspelled ("intializeCanvas") in the original and is
// kept as-is because call sites elsewhere in the post use this exact name.
function intializeCanvas({
  id, canvas, onSetColor, startDrawing, onMouseMove, stopDrawing, clear, reset, toggleSun
}) {
  // Optional buttons only render when a handler was supplied.
  const clearDom = clear ? `<button id="${id}-clear" class="iconButton">${clearSvg}</button>` : "";
  const resetDom = reset ? `<button id="${id}-reset" class="iconButton">${resetSvg}</button>` : "";
  const sunMoonDom = toggleSun ? `<button id="${id}-sun" class="iconButton">${sunMoonSvg}</button>` : "";

  document.querySelector(`#${id}`).innerHTML = ` <div style="display: flex; gap: 20px;"> <div id="${id}-canvas-container"></div> <div style="display: flex; flex-direction: column; justify-content: space-between;"> <div id="${id}-color-picker" style="display: flex; flex-direction: column"> <input type="color" id="${id}-color-input" value="#eb6b6f" style="display: none" > <button id="${id}-color-3" class="color" style="background-color: #fff6d3; color: black"></button> <button id="${id}-color-2" class="color" style="background-color: #f9a875; color: black"></button> <button id="${id}-color-1" class="color" style="background-color: #eb6b6f;"></button> <button id="${id}-color-0" class="color" style="background-color: #7c3f58;"></button> <button id="${id}-color-4" class="color" style="background-color: black; color: white; outline: solid 1px white;"></button> </div> <div style="display: flex; flex-direction: column; gap: 2px">${sunMoonDom}${clearDom}${resetDom} </div> </div></div>`;

  const colorInput = document.getElementById(`${id}-color-input`);

  // Keep the hidden <input type="color"> in sync and notify the caller.
  function setColor(r, g, b) {
    colorInput.value = rgbToHex(r, g, b);
    onSetColor({ r, g, b });
  }

  function setHex(hex) {
    const rgb = hexToRgb(hex);
    setColor(rgb.r, rgb.g, rgb.b);
  }

  function updateColor(event) {
    const hex = event.target.value;
    setHex(hex);
  }

  colorInput.addEventListener('input', updateColor);

  // Palette swatches: index i maps to button `${id}-color-${i}`.
  ["#7c3f58", "#eb6b6f", "#f9a875", "#fff6d3", "#000000"].forEach((color, i) => {
    document.getElementById(`${id}-color-${i}`).addEventListener('click', () => setHex(color));
  });

  const container = document.querySelector(`#${id}-canvas-container`);
  container.appendChild(canvas);

  // Mirror every mouse event with its touch counterpart for mobile support.
  canvas.addEventListener('touchstart', startDrawing);
  canvas.addEventListener('mousedown', startDrawing);
  canvas.addEventListener('mousemove', onMouseMove);
  canvas.addEventListener('touchmove', onMouseMove);
  canvas.addEventListener('mouseup', stopDrawing);
  canvas.addEventListener('touchend', stopDrawing);
  canvas.addEventListener('touchcancel', stopDrawing);
  canvas.addEventListener('mouseleave', stopDrawing);

  if (clear) {
    document.querySelector(`#${id}-clear`).addEventListener("click", () => { clear(); });
  }
  if (reset) {
    document.querySelector(`#${id}-reset`).addEventListener("click", () => { reset(); });
  }
  if (toggleSun) {
    document.querySelector(`#${id}-sun`).addEventListener("click", () => { toggleSun(); });
  }

  return { container, setHex };
}

// This is the JS side that connects our canvas to three.js, and adds drawing on mobile.
// Also deals with interaction (mouse / touch) logic.
class PaintableCanvas {
  constructor({ width, height, initialColor = 'transparent', radius = 6, friction = 0.1 }) {
    [this.canvas, this.context] = this.createCanvas(width, height, initialColor);
    this.texture = new THREE.CanvasTexture(this.canvas);
    this.setupTexture(this.texture);

    this.isDrawing = false;
    this.lastPoint = null;
    this.currentPoint = null;
    this.mouseMoved = false;
    this.currentColor = { r: 255, g: 255, b: 255 };

    this.RADIUS = radius;      // brush radius in pixels
    this.FRICTION = friction;  // easing factor for the brush smoothing
    this.width = width;
    this.height = height;
    this.initialColor = initialColor;

    // CPU fallback keeps its own pixel buffer (see drawLine / setPixel).
    if (this.useFallbackCanvas()) {
      this.currentImageData = new ImageData(this.canvas.width, this.canvas.height);
    }

    this.onUpdateTextures = () => {};
    // Injected later by the owning surface (GPU or CPU variant).
    this.drawSmoothLine = (from, to) => {
      throw new Error("Missing implementation");
    };
  }

  useFallbackCanvas() {
    return isMobile;
  }

  // Mobile breaks in all kinds of ways.
  // Drawing on cpu fixes most of the issues.
  drawSmoothLineFallback(from, to) {
    this.drawLine(from, to, this.currentColor, this.context);
    this.updateTexture();
  }

  // Rasterize a thick line on the CPU by stamping circles along a
  // Bresenham walk from `from` to `to`, then blitting the buffer back.
  drawLine(from, to, color, context) {
    const radius = this.RADIUS;

    // Ensure we're within canvas boundaries.
    const left = 0;
    const top = 0;

    const imageData = this.currentImageData;

    // Bresenham's line algorithm.
    let x0 = Math.round(from.x - left);
    let y0 = Math.round(from.y - top);
    const x1 = Math.round(to.x - left);
    const y1 = Math.round(to.y - top);

    const dx = Math.abs(x1 - x0);
    const dy = Math.abs(y1 - y0);
    const sx = (x0 < x1) ? 1 : -1;
    const sy = (y0 < y1) ? 1 : -1;
    let err = dx - dy;

    while (true) {
      // Draw the pixel and its surrounding pixels.
      this.drawCircle(x0, y0, color, radius);
      if (x0 === x1 && y0 === y1) break;
      const e2 = 2 * err;
      if (e2 > -dy) { err -= dy; x0 += sx; }
      if (e2 < dx) { err += dx; y0 += sy; }
    }

    // Put the modified image data back to the canvas.
    context.putImageData(imageData, left, top);
  }

  // Stamp a filled circle of `radius` centered at (x0, y0), clipped to the canvas.
  drawCircle(x0, y0, color, radius) {
    for (let ry = -radius; ry <= radius; ry++) {
      for (let rx = -radius; rx <= radius; rx++) {
        if (rx * rx + ry * ry <= radius * radius) {
          const x = x0 + rx;
          const y = y0 + ry;
          if (x >= 0 && x < this.width && y >= 0 && y < this.height) {
            this.setPixel(x, y, color);
          }
        }
      }
    }
  }

  // Write one opaque RGBA pixel into the CPU pixel buffer.
  setPixel(x, y, color) {
    const index = (y * this.width + x) * 4;
    this.currentImageData.data[index] = color.r;     // Red
    this.currentImageData.data[index + 1] = color.g; // Green
    this.currentImageData.data[index + 2] = color.b; // Blue
    this.currentImageData.data[index + 3] = 255.0;   // Alpha
  }

  createCanvas(width, height, initialColor) {
    const canvas = document.createElement('canvas');
    canvas.width = width;
    canvas.height = height;
    const context = canvas.getContext('2d');
    context.fillStyle = initialColor;
    context.fillRect(0, 0, canvas.width, canvas.height);
    return [canvas, context];
  }

  // Crisp (non-interpolated), clamped float texture for the drawing surface.
  // (The original chained these with the comma operator; use statements.)
  setupTexture(texture) {
    texture.minFilter = THREE.NearestFilter;
    texture.magFilter = THREE.NearestFilter;
    texture.format = THREE.RGBAFormat;
    texture.type = THREE.FloatType;
    texture.wrapS = THREE.ClampToEdgeWrapping;
    texture.wrapT = THREE.ClampToEdgeWrapping;
  }

  updateTexture() {
    this.texture.needsUpdate = true;
    this.onUpdateTextures();
  }

  startDrawing(e) {
    this.isDrawing = true;
    this.currentMousePosition = this.lastPoint = this.currentPoint = this.getMousePos(e);
    try {
      this.onMouseMove(e);
    } catch (err) {
      console.error(err);
    }
    this.mouseMoved = false;
  }

  // Returns true if a stroke was in progress. A click without movement
  // still paints a single dot.
  stopDrawing(e) {
    const wasDrawing = this.isDrawing;
    if (!wasDrawing) {
      return false;
    }
    if (!this.mouseMoved) {
      this.drawSmoothLine(this.currentPoint, this.currentPoint);
    } else {
      try {
        this.onMouseMove(e);
      } catch (err) {
        console.error(err);
      }
    }
    this.isDrawing = false;
    this.mouseMoved = false;
    return true;
  }

  onMouseMove(event) {
    if (!this.isDrawing) return false;
    this.mouseMoved = true;
    this.currentMousePosition = this.getMousePos(event);
    this.doDraw();
    return true;
  }

  // Ease the brush toward the cursor instead of jumping straight to it,
  // which makes strokes feel smoother / paintier.
  doDraw() {
    const newPoint = this.currentMousePosition;

    // Some smoothing...
    const dist = this.distance(this.currentPoint, newPoint);

    if (dist > 0) {
      const dir = {
        x: (newPoint.x - this.currentPoint.x) / dist,
        y: (newPoint.y - this.currentPoint.y) / dist
      };
      const len = Math.max(dist - this.RADIUS, 0);
      const ease = 1 - Math.pow(this.FRICTION, 1 / 60 * 10);
      this.currentPoint = {
        x: this.currentPoint.x + dir.x * len * ease,
        y: this.currentPoint.y + dir.y * len * ease
      };
    } else {
      this.currentPoint = newPoint;
    }

    this.drawSmoothLine(this.lastPoint, this.currentPoint);
    this.lastPoint = this.currentPoint;
  }

  // I'll be honest - not sure why I can't just use `clientX` and `clientY`.
  // Must have made a weird mistake somewhere.
  getMousePos(e) {
    e.preventDefault();
    if (e.touches) {
      return {
        x: e.touches[0].clientX - e.touches[0].target.offsetLeft + window.scrollX,
        y: e.touches[0].clientY - e.touches[0].target.offsetTop + window.scrollY
      };
    }
    return {
      x: e.clientX - e.target.offsetLeft + window.scrollX,
      y: e.clientY - e.target.offsetTop + window.scrollY
    };
  }

  distance(p1, p2) {
    return Math.sqrt(Math.pow(p2.x - p1.x, 2) + Math.pow(p2.y - p1.y, 2));
  }

  setColor(r, g, b) {
    this.currentColor = { r, g, b };
  }

  // Wipe both the 2d canvas and the CPU pixel buffer, then flag the texture dirty.
  clear() {
    this.context.clearRect(0, 0, this.canvas.width, this.canvas.height);
    this.currentImageData = new ImageData(this.canvas.width, this.canvas.height);
    this.updateTexture();
  }
}

// Build the standard full-screen-quad three.js setup used by every demo:
// an ortho camera, a 2x2 plane with a ShaderMaterial, and a ping-pong pair
// of float render targets. Returns everything the caller needs to render.
function threeJSInit(width, height, materialProperties, renderer = null, renderTargetOverrides = {}) {
  const scene = new THREE.Scene();
  const camera = new THREE.OrthographicCamera(-1, 1, 1, -1, 0, 1);
  if (!renderer) {
    renderer = new THREE.WebGLRenderer({ antialiasing: false });
  }
  renderer.setSize(width, height);

  const renderTargetA = new THREE.WebGLRenderTarget(width, height, {
    minFilter: THREE.NearestFilter,
    magFilter: THREE.NearestFilter,
    format: THREE.RGBAFormat,
    type: THREE.FloatType,
    wrapS: THREE.ClampToEdgeWrapping,
    wrapT: THREE.ClampToEdgeWrapping,
    ...renderTargetOverrides,
  });
  const renderTargetB = renderTargetA.clone();

  const geometry = new THREE.PlaneGeometry(2, 2);
  const material = new THREE.ShaderMaterial({
    depthTest: false,
    depthWrite: false,
    ...materialProperties,
  });
  // `plane` was an accidental implicit global in the original; declare it locally.
  const plane = new THREE.Mesh(geometry, material);
  scene.add(plane);

  return {
    plane,
    canvas: renderer.domElement,
    render: () => { renderer.render(scene, camera); },
    renderTargets: [renderTargetA, renderTargetB],
    renderer
  };
}
// @run
// Let's instrument the post with this so we can disable animations while editing.
const disableAnimation = false;
// Draw animations very fast, with a huge loss in accuracy (for testing).
const instantMode = false;

// Frame scheduler used by all the demos. When animations are disabled (or
// requestAnimationFrame doesn't exist, e.g. outside a browser) the callback
// runs synchronously instead.
const getFrame = (disableAnimation || typeof requestAnimationFrame === 'undefined')
  ? (fn) => { fn(); }
  : requestAnimationFrame;

// Rays cast per pixel in the naive raymarcher below.
let rayCount = 32.0;
I'll be using three.js for this post and everything we discuss is written from scratch and entirely contained in the post / html. I wrote this with mdxish, so you can read this post in markdown as well, where all executed code is broken into grokable javascript markdown codeblocks. This post supports desktop and mobile.
Global illumination is about how light interacts with surfaces. When you turn a light on in your room at night, it's not a small glowing orb. It spreads to the walls, the floor, and the ceiling, breathing life into the room. Desks, doors, and objects cast shadows, softening as they stretch away.
Simulating these interactions can make for some beautiful scenes - as we're more closely mimicking real life. Think Pixar, beautiful blender renders, and hyper-realistic voxel scenes. But historically, it required powerful hardware, time, and/or aesthetic compromises - like noise. There are some fantastic tutorials on various techniques, such as ray tracing (video), which can be used to simulate realistic lighting- but, not in real-time.
Over the last six months or so (at time of writing), a group of folks have been working hard on a new technique that enables real-time global illumination on consumer hardware, without the standard compromises. It's called Radiance Cascades. A fast, noiseless approach to global illumination.
And that's what we will (next post) be building, but doing so requires a foundation of code and conceptual understanding. So we're going to build up our repertoire and knowledge and first build "naive" global illumination. Next time, we'll effectively get the same end result, but much higher resolution and much more efficient.
Let's get started!
A drawable surface
If we're going to be messing around with lights and shadows, it's incredibly useful to be able to quickly and easily draw on the screen. We'll use it to test things out, understand limitations and issues, and build intuition around the algorithms and techniques we're going to use. If you are anxious to get to the global illumination part of this post, feel free to jump straight to raymarching.
In general, to get this kind of interface, we just need to place some pixels on the screen in a group and interpolate from the previously drawn point, in case the brush moved within the last frame and is still drawing.
On the GPU side, we can do what we want using an SDF line segment.
As this is our first shader in the post - this is a fragment shader. The code in it executes individually on every pixel. To draw a line (or any shape) you describe it in terms of distance away from the pixel currently being processed by the GPU. p here is the position of the pixel in coordinate space, meaning, if it's vec2(20.0, 20.0) it's intuitively 20 pixels towards height, and 20 pixels towards width. This is unlike uv - more on that in a sec.
What we need to do to represent a line with width (in this case, radius) is describe the distance from our pixel position, to the nearest point on the line, and if that's less than half the width (or radius here) set the pixel to be the chosen color.
// Draw a line shape!
// Returns the *squared* distance from pixel position `p` to the closest
// point on the segment [from, to]. Compare against radius * radius so the
// caller never needs a sqrt.
float sdfLineSquared(vec2 p, vec2 from, vec2 to) {
  vec2 toStart = p - from;
  vec2 line = to - from;
  float lineLengthSquared = dot(line, line);
  // Project onto the segment, clamped so we never pass its endpoints.
  float t = clamp(dot(toStart, line) / lineLengthSquared, 0.0, 1.0);
  vec2 closestVector = toStart - line * t;
  return dot(closestVector, closestVector);
}
And I know, you're probably saying - but there are no calls to distance or length - what gives? This doesn't look like the examples. But a neat trick is that we can avoid a sqrt, if we use dot and keep everything we're comparing squared. To clarify, distance(a, b) == length(a - b) == sqrt(dot(a - b, a - b)). So if we're comparing to radius - we can instead just pass in radius * radius and not need sqrt. (sqrt is expensive).
At this point, we can draw our line. You can also check out how I did the CPU / mobile version of drawing in the source code.
void main() {
  vec4 current = texture(inputTexture, vUv);
  // If we aren't actively drawing (or on mobile) no-op!
  if (drawing) {
    // Convert 0-1 UVs into pixel coordinates to match `from` / `to`.
    vec2 coord = vUv * resolution;
    if (sdfLineSquared(coord, from, to) <= radiusSquared) {
      current = vec4(color, 1.0);
    }
  }
  gl_FragColor = current;
}
Any time you see uv (or vUv), it is the position of the current pixel, but on a scale of 0-1 in both dimensions. So vec2(0.5, 0.5) is always the center of the texture. You can pass in the size of the texture as a uniform if you want to be able to convert to pixel-space, which we do here.
ThreeJS uses vUv. I believe that first v is for varying keyword.
If you want to add some extra polish you can make it feel a bit smoother / paintier by adding easing. Check the source to see how I did it! Spoiler: I cheated and did it on the CPU.
Also, if you're wondering what that note is... For mobile, I ran into all kinds of issues with enabling transparency and drawing with the GPU for this step. Let me know if you manage to nail it. I ejected and drew directly to the canvas on the CPU side, which is slow at higher resolutions, but works for the canvas sizes we're using. But, everything after this step is GPU-driven.
Whichever engine you're using, be it a game engine, webgl, p5.js, three.js, webgpu, (and most others), there will be a way to say, "draw this rgb value at this location", and then it's just expanding that to a radius, from one point to another - or using SDFs as shown above.
And after skimming over some details that aren't the focus of this post, we've got our drawable surface!
// @run
// Base class for every interactive demo surface in the post: owns the
// PaintableCanvas, the DOM controls, and lazy loading via IntersectionObserver.
class BaseSurface {
  constructor({ id, width, height, radius = 5 }) {
    // Create PaintableCanvas instances.
    this.createSurface(width, height, radius);
    this.width = width;
    this.height = height;
    this.id = id;
    this.initialized = false;
    this.initialize();
  }

  createSurface(width, height, radius) {
    this.surface = new PaintableCanvas({ width, height, radius });
  }

  initialize() {
    // Child class should fill this out.
  }

  load() {
    // Child class should fill this out.
  }

  clear() {
    // Child class should fill this out.
  }

  renderPass() {
    // Child class should fill this out.
  }

  // Clear the surface and replay the demo stroke. Returns the promise so
  // callers *may* await completion (previously the promise was discarded).
  reset() {
    this.clear();
    this.setHex("#fff6d3");
    return new Promise((resolve) => {
      getFrame(() => this.draw(0.0, null, resolve));
    });
  }

  // Animated demo stroke: advances `t` one step per frame along a wavy
  // parametric path until t reaches 10, then resolves.
  draw(t, last, resolve) {
    if (t >= 10.0) {
      resolve();
      return;
    }
    const angle = (t * 0.05) * Math.PI * 2;
    const { x, y } = {
      x: 100 + 100 * Math.sin(angle + 0.25) * Math.cos(angle * 0.15),
      y: 50 + 100 * Math.sin(angle * 0.7)
    };
    last ??= { x, y };
    this.surface.drawSmoothLine(last, { x, y });
    last = { x, y };
    const step = instantMode ? 5.0 : 0.2;
    getFrame(() => this.draw(t + step, last, resolve));
  }

  // Wire up the DOM (palette, buttons, canvas container) for this surface.
  buildCanvas() {
    return intializeCanvas({
      id: this.id,
      canvas: this.canvas,
      onSetColor: ({ r, g, b }) => {
        this.surface.currentColor = { r, g, b };
        // Shader color uniform expects 0-1 channels.
        this.plane.material.uniforms.color.value = new THREE.Color(
          this.surface.currentColor.r / 255.0,
          this.surface.currentColor.g / 255.0,
          this.surface.currentColor.b / 255.0
        );
      },
      startDrawing: (e) => this.surface.startDrawing(e),
      onMouseMove: (e) => this.surface.onMouseMove(e),
      stopDrawing: (e) => this.surface.stopDrawing(e),
      clear: () => this.clear(),
      reset: () => this.reset(),
      ...this.canvasModifications()
    });
  }

  canvasModifications() {
    return {};
  }

  // Defer the (expensive) initial render until the demo scrolls into view.
  observe() {
    const observer = new IntersectionObserver((entries) => {
      if (entries[0].isIntersecting === true) {
        this.load();
        // disconnect() takes no arguments (the original passed the container).
        observer.disconnect();
      }
    });
    observer.observe(this.container);
  }

  initThreeJS({ uniforms, fragmentShader, renderTargetOverrides }) {
    // Note: threeJSInit only declares five parameters; the trailing
    // `this.surface` argument is ignored (kept for call compatibility).
    return threeJSInit(this.width, this.height, {
      uniforms,
      fragmentShader,
      vertexShader,
      transparent: !this.surface.useFallbackCanvas(),
    }, this.renderer, renderTargetOverrides ?? {}, this.surface);
  }
}

// The basic drawable surface: paints strokes either on the GPU (SDF line
// shader) or, on mobile, via the CPU fallback in PaintableCanvas.
class Drawing extends BaseSurface {
  initializeSmoothSurface() {
    const props = this.initThreeJS({
      uniforms: {
        inputTexture: { value: this.surface.texture },
        color: { value: new THREE.Color(1, 1, 1) },
        from: { value: new THREE.Vector2(0, 0) },
        to: { value: new THREE.Vector2(0, 0) },
        radiusSquared: { value: Math.pow(this.surface.RADIUS, 2.0) },
        resolution: { value: new THREE.Vector2(this.width, this.height) },
        drawing: { value: false },
      },
      fragmentShader: `
uniform sampler2D inputTexture;
uniform vec3 color;
uniform vec2 from;
uniform vec2 to;
uniform float radiusSquared;
uniform vec2 resolution;
uniform bool drawing;
varying vec2 vUv;

float sdfLineSquared(vec2 p, vec2 from, vec2 to) {
  vec2 toStart = p - from;
  vec2 line = to - from;
  float lineLengthSquared = dot(line, line);
  float t = clamp(dot(toStart, line) / lineLengthSquared, 0.0, 1.0);
  vec2 closestVector = toStart - line * t;
  return dot(closestVector, closestVector);
}

void main() {
  vec4 current = texture(inputTexture, vUv);
  if (drawing) {
    vec2 coord = vUv * resolution;
    if (sdfLineSquared(coord, from, to) <= radiusSquared) {
      current = vec4(color, 1.0);
    }
  }
  gl_FragColor = current;
}`,
    });

    if (this.surface.useFallbackCanvas()) {
      this.surface.drawSmoothLine = (from, to) => {
        this.surface.drawSmoothLineFallback(from, to);
      };
      this.surface.onUpdateTextures = () => {
        this.renderPass();
      };
    } else {
      this.surface.drawSmoothLine = (from, to) => {
        props.plane.material.uniforms.drawing.value = true;
        // Canvas y grows downward; texture v grows upward - flip y.
        props.plane.material.uniforms.from.value = { ...from, y: this.height - from.y };
        props.plane.material.uniforms.to.value = { ...to, y: this.height - to.y };
        this.renderPass();
        props.plane.material.uniforms.drawing.value = false;
      };
    }
    return props;
  }

  clear() {
    if (this.surface.useFallbackCanvas()) {
      this.surface.clear();
      return;
    }
    if (this.initialized) {
      this.renderTargets.forEach((target) => {
        this.renderer.setRenderTarget(target);
        this.renderer.clearColor();
      });
    }
    this.renderer.setRenderTarget(null);
    this.renderer.clearColor();
  }

  initialize() {
    const {
      plane, canvas, render, renderer, renderTargets
    } = this.initializeSmoothSurface();
    this.canvas = canvas;
    this.plane = plane;
    this.render = render;
    this.renderer = renderer;
    this.renderTargets = renderTargets;
    const { container, setHex } = this.buildCanvas();
    this.container = container;
    this.setHex = setHex;
    this.renderIndex = 0;
    this.innerInitialize();
    this.observe();
  }

  innerInitialize() {}

  load() {
    this.reset();
    this.initialized = true;
  }

  // Ping-pong: read from the current target, render into the other, swap.
  // Returns the texture holding the latest drawing.
  drawPass() {
    if (this.surface.useFallbackCanvas()) {
      return this.surface.texture;
    } else {
      this.plane.material.uniforms.inputTexture.value = this.renderTargets[this.renderIndex].texture;
      this.renderIndex = 1 - this.renderIndex;
      this.renderer.setRenderTarget(this.renderTargets[this.renderIndex]);
      this.render();
      return this.renderTargets[this.renderIndex].texture;
    }
  }

  renderPass() {
    this.drawPass();
    this.renderer.setRenderTarget(null);
    this.render();
  }
}

const simpleSurface = new Drawing({ id: "simpleSurface", width: 300, height: 300 });
If you're interested in the plumbing, inspect this node in the browser, or go to this section in the markdown! Right above this dom is where the code that runs, lives.
Raymarching
At its core, global illumination is going through a scene, looking around at nearby lights, and adjusting how bright that area is based on what we see. But how you do that makes all the difference.
So we're going to start with a naive approach, but the core logic ends up being nearly identical to dramatically more efficient approaches, so we're not wasting time building it. It's called raymarching. Link contains spoilers!
Remember when ten seconds ago I said global illumination is going through a scene and looking around at nearby lights to determine your "radiance"? Let's try it out by telling the GPU (for every pixel) to walk in a handful of directions for a larger handful of steps and to average the colors of anything it hits - and to stop walking if it hits something.
Before we proceed, let's define a quick bounds check function we'll use.
Ok, let's walk through what we just said above, but with glsl.
vec4 raymarch() {
So first we query our drawn texture. If it's something we drew, there's no need to process this pixel, as it can't receive light. If we were doing subsurface scattering, or reflective or transparent materials, we would need to process them, as they would interact with light.
Now we know we're only dealing with pixels that can receive light. We'll declare two variables. oneOverRayCount which we'll use to take the average of all light we see. And tauOverRayCount which tells us how much of an angle there is between our rays - which are equally spaced in a circle around the pixel.
We'll also get a random number between 0-1 based on our pixel location. We'll use it to slightly offset the angle of each ray (the random offset is seeded based on our position), so that rays don't all line up across pixels. That means no repeating patterns!
If Radiance Cascades didn't exist, I'd probably bring up blue noise, but the goal is not using noise at all - so any old noise is fine for our purposes.
Finally, we have some radiance that we'll add to, if we hit something.
float oneOverRayCount = 1.0 / float(rayCount);
float tauOverRayCount = TAU * oneOverRayCount;
// Distinct random value for every pixel
float noise = rand(vUv);

vec4 radiance = vec4(0.0);
Now we get to start firing rays!
for (int i = 0; i < rayCount; i++) {
For every ray, calculate the direction to head, and start walking.
Since we know our current position is empty, we can take our first step.
float angle = tauOverRayCount * (float(i) + noise);
vec2 rayDirectionUv = vec2(cos(angle), -sin(angle)) / size;

// Our current position, plus one step.
vec2 sampleUv = vUv + rayDirectionUv;

for (int step = 0; step < maxSteps; step++) {
Now, for every step we take, if we walk out of bounds (or hit maxSteps), we're done.
if (outOfBounds(sampleUv)) break;
If we hit something, collect the radiance, and stop walking. Otherwise, take another step.
If we hit an end condition, return our collected radiance, averaged over all directions we walked / rays we fired!
}
}
return radiance * oneOverRayCount;
}
And that's it!
Now we can tweak the code a bit (see source to see how) in order to provide controls over it. This way we can build some intuition around why we did what we did.
Enable Noise
Checkout what happens when we turn off the noise!
Accumulate Radiance
Checkout what happens when we don't accumulate radiance to see the rays traveling.
Max Raymarch Steps
Don't forget you can draw.
// @run
// Controls shared by the naive raymarching demo.
const raymarchSlider = document.querySelector("#raymarch-steps-slider");
const showNoiseCheckbox = document.querySelector("#noise-raymarch");
const accumRadianceCheckbox = document.querySelector("#accumulate-radiance");

// Naive per-pixel global illumination: for every pixel, march `rayCount`
// rays step-by-step and average the radiance of whatever they hit.
class NaiveRaymarchGi extends Drawing {
  innerInitialize() {
    const { plane: giPlane, render: giRender, renderTargets: giRenderTargets } = this.initThreeJS({
      uniforms: {
        sceneTexture: { value: this.surface.texture },
        rayCount: { value: 8 },
        // Slider values are strings; the shader uniform is an int.
        maxSteps: { value: parseInt(raymarchSlider.value, 10) },
        showNoise: { value: showNoiseCheckbox.checked },
        accumRadiance: { value: accumRadianceCheckbox.checked },
        size: { value: new THREE.Vector2(this.width, this.height) },
      },
      fragmentShader: `
uniform sampler2D sceneTexture;
uniform int rayCount;
uniform int maxSteps;
uniform bool showNoise;
uniform bool accumRadiance;
uniform vec2 size;
in vec2 vUv;

const float PI = 3.14159265;
const float TAU = 2.0 * PI;

float rand(vec2 co) {
  return fract(sin(dot(co.xy, vec2(12.9898, 78.233))) * 43758.5453);
}

vec4 raymarch() {
  vec4 light = texture(sceneTexture, vUv);
  // Drawn (opaque) pixels emit light directly; nothing to gather.
  if (light.a > 0.1) {
    return light;
  }

  float oneOverRayCount = 1.0 / float(rayCount);
  float tauOverRayCount = TAU * oneOverRayCount;

  // Different noise every pixel
  float noise = showNoise ? rand(vUv) : 0.1;

  vec4 radiance = vec4(0.0);

  // Shoot rays in "rayCount" directions, equally spaced, with some randomness.
  for (int i = 0; i < rayCount; i++) {
    float angle = tauOverRayCount * (float(i) + noise);
    vec2 rayDirectionUv = vec2(cos(angle), -sin(angle)) / size;

    // When not accumulating, only the final step runs (visualizes ray tips).
    int initialStep = accumRadiance ? 0 : max(0, maxSteps - 1);
    for (int step = initialStep; step < maxSteps; step++) {
      // Go the direction we're traveling (with noise)
      vec2 sampleUv = vUv + rayDirectionUv * float(step);
      if (sampleUv.x < 0.0 || sampleUv.x > 1.0 || sampleUv.y < 0.0 || sampleUv.y > 1.0) {
        break;
      }
      vec4 sampleLight = texture(sceneTexture, sampleUv);
      if (sampleLight.a > 0.5) {
        radiance += sampleLight;
        break;
      }
    }
  }

  // Average radiance
  return radiance * oneOverRayCount;
}

void main() {
  // Bring up all the values to have an alpha of 1.0.
  gl_FragColor = vec4(raymarch().rgb, 1.0);
}`,
    });
    this.giPlane = giPlane;
    this.giRender = giRender;
    this.giRenderTargets = giRenderTargets;
  }

  // Run the GI shader over the drawn scene, straight to the screen.
  raymarchPass(inputTexture) {
    this.giPlane.material.uniforms.sceneTexture.value = inputTexture;
    this.renderer.setRenderTarget(null);
    this.giRender();
  }

  clear() {
    if (this.initialized) {
      this.giRenderTargets.forEach((target) => {
        this.renderer.setRenderTarget(target);
        this.renderer.clearColor();
      });
    }
    super.clear();
  }

  renderPass() {
    const out = this.drawPass();
    this.raymarchPass(out);
  }

  load() {
    super.load();
    raymarchSlider.addEventListener("input", () => {
      // Parse: range inputs report strings, the uniform is an int.
      this.giPlane.material.uniforms.maxSteps.value = parseInt(raymarchSlider.value, 10);
      this.renderPass();
    });
    showNoiseCheckbox.addEventListener("input", () => {
      this.giPlane.material.uniforms.showNoise.value = showNoiseCheckbox.checked;
      this.renderPass();
    });
    accumRadianceCheckbox.addEventListener("input", () => {
      this.giPlane.material.uniforms.accumRadiance.value = accumRadianceCheckbox.checked;
      this.renderPass();
    });
    getFrame(() => this.reset());
  }

  // Demo stroke: a wavy orange light, then a straight black occluder.
  // NOTE: parameter order here is (last, t, ...) unlike BaseSurface.draw(t, last, ...).
  draw(last, t, isShadow, resolve) {
    if (t >= 10.0) {
      resolve();
      return;
    }
    const angle = (t * 0.05) * Math.PI * 2;
    const { x, y } = isShadow
      ? { x: 90 + 12 * t, y: 200 + 1 * t }
      : {
          x: 100 + 100 * Math.sin(angle + 0.25) * Math.cos(angle * 0.15),
          y: 50 + 100 * Math.sin(angle * 0.7)
        };
    last ??= { x, y };
    this.surface.drawSmoothLine(last, { x, y });
    last = { x, y };
    const step = instantMode ? 5.0 : (isShadow ? 0.5 : 0.3);
    getFrame(() => this.draw(last, t + step, isShadow, resolve));
  }

  // Draw the light stroke, then the shadow stroke, then restore the palette.
  reset() {
    this.clear();
    let last = undefined;
    return new Promise((resolve) => {
      this.setHex("#f9a875");
      getFrame(() => this.draw(last, 0, false, resolve));
    }).then(() => new Promise((resolve) => {
      last = undefined;
      getFrame(() => {
        this.setHex("#000000");
        getFrame(() => this.draw(last, 0, true, resolve));
      });
    })).then(() => {
      this.renderPass();
      getFrame(() => this.setHex("#fff6d3"));
    });
  }
}

const raymarchSurface = new NaiveRaymarchGi({ id: "naive-raymarch", width: 300, height: 300 });
We've done it! Looks pretty similar to the final result at the top.
And we could increase the rays, replicate those features (grain and sun) and call it a day, but it's actually wildly inefficient. Maybe it doesn't even run well on whatever device you're using, maybe it does. But we're only casting 8 rays per pixel and light can only spread 256 pixels away - also that canvas is absolutely tiny and can't be made much bigger and run smoothly, even on more powerful machines.
If we take a moment to think about the core logic of our raymarching, we're doing a lot of repeated work. Think about all the overlapping steps we're taking - hint they are mostly overlapping. So caching right? Well, most of what we're doing is in parallel per-pixel, so the values of other pixels aren't available yet. So we're going to have to come up with another approach.
Remember those distance fields from earlier? Where we were representing shapes in terms of their distance away from any given pixel? Well, if we had a way to tell how far away the nearest filled pixel was, from any given pixel, we'd know the maximum safe distance we could jump - in any direction. After we jumped, we could reassess, and again know the maximum safe distance we could jump. This would save a ton of computation during our raymarching process as we're getting to completely ignore large swathes of our surface.
Now, you might be thinking, alright - so we need a map that shows the nearest filled pixel for every pixel - what are we going to do, shoot rays in all directions and take steps in every direction to find the first one? That can't be much more efficient than our current raymarching process! And you'd be right.
But maybe we can get more creative.
Jump Flood Algorithm
The loose idea here is, if we get the pixel coordinates of every filled pixel in our input / surface texture, and spread them around in the right way, we'll end up with a texture where every pixel from our original image is still just its uv, but all the other pixels are the nearest uv from the original image. If we manage to do that - all we need to do is calculate the distance of the original and newly smeared pixel locations, at each pixel, and we've got a distance field we can use.
For the "smearing" bit - we can just hierarchically repeat our vUv transformed seed texture, on top of itself a handful of times.
So first, let's make that seed texture, which you can see by going to "1 pass" in the slider. We just multiply the alpha of our drawing texture by the uv map. You can see the uv map by moving the slider to "No passes". You can also see this as the result of a fragment shader by setting the pixel color to be vec4(vUv.x, vUv.y, 0.0, 1.0) - which is frequently used for debugging shaders.
The Jump Flood Algorithm, JFA, denotes we should use log(N) passes, where N is the largest dimension of our texture. This makes sense given the branching nature of what we're doing.
And then we can execute our JFA. We're using the "ping-pong" technique of having two render targets and swapping between them in order to accumulate the final JFA texture we want. So render, swap, render, swap, etc. passes times. We can't just use the same texture / render target as this is all happening in parallel, so you'd be modifying pixels that hadn't been handled yet elsewhere, causing inconsistencies.
So what about the actual shader? Well, we need to keep track of the nearest filled pixel to us and how far away it is. (I used -2.0 to make sure it was completely out of frame) and a big number for distance.
Then, we just look all around us, offset by uOffset over resolution (we could totally precalculate that and pass it in) and if it's a filled pixel that's closer to us than what we've found so far, great! And note that we can't break early, because a pixel is "closer" based on the color it represents, not based on how far we've traveled, or something.
for (float y = -1.0; y <= 1.0; y += 1.0) {
  for (float x = -1.0; x <= 1.0; x += 1.0) {
    vec2 sampleUV = vUv + vec2(x, y) * uOffset / resolution;
Quick bounds check, then we can get the distance based on the red and green pixel values currently stored there, compared with our own. I could have used distance but chose to use dot because why use the extra sqrt? We're only comparing relative distances.
And we're done - set ourselves as the uv of the nearest filled pixel, ready to be compared to in the next pass (if applicable).
}
}
gl_FragColor = nearestSeed;
Let's check out what this looks like in practice!
No Passes
9 Passes
// @run
const jfaSlider = document.querySelector("#jfa-slider");
jfaSlider.value = window.mdxishState.jfaSlider ?? 5;

// Jump Flood Algorithm demo canvas: seeds a UV texture from the drawing's
// alpha, then ping-pongs between two render targets for log2(max dimension)
// passes to spread the nearest-filled-pixel UV across the whole texture.
class JFA extends Drawing {
  innerInitialize() {
    // JFA needs log2(N) passes, where N is the largest texture dimension.
    this.passes = Math.ceil(Math.log2(Math.max(this.width, this.height)));

    // Seed pass: write (u, v) * alpha so filled pixels carry their own UV
    // and empty pixels are (0, 0).
    const { plane: seedPlane, render: seedRender, renderTargets: seedRenderTargets } = this.initThreeJS({
      uniforms: {
        surfaceTexture: { value: this.surface.texture },
      },
      fragmentShader: `
        uniform sampler2D surfaceTexture;
        in vec2 vUv;

        void main() {
          float alpha = texture(surfaceTexture, vUv).a;
          gl_FragColor = vec4(vUv * alpha, 0.0, 1.0);
        }`,
    });

    // JFA pass: each pixel samples its 8 neighbors (and itself) at a
    // decreasing power-of-two offset and keeps the nearest seed UV found.
    const { plane: jfaPlane, render: jfaRender, renderTargets: jfaRenderTargets } = this.initThreeJS({
      uniforms: {
        inputTexture: { value: this.surface.texture },
        oneOverSize: { value: new THREE.Vector2(1.0 / this.width, 1.0 / this.height) },
        uOffset: { value: Math.pow(2, this.passes - 1) },
        skip: { value: true },
      },
      fragmentShader: `
        uniform vec2 oneOverSize;
        uniform sampler2D inputTexture;
        uniform float uOffset;
        uniform bool skip;
        in vec2 vUv;

        void main() {
          if (skip) {
            gl_FragColor = vec4(vUv, 0.0, 1.0);
          } else {
            // -2.0 keeps the "no seed yet" sentinel fully out of UV space.
            vec4 nearestSeed = vec4(-2.0);
            float nearestDist = 999999.9;

            for (float y = -1.0; y <= 1.0; y += 1.0) {
              for (float x = -1.0; x <= 1.0; x += 1.0) {
                vec2 sampleUV = vUv + vec2(x, y) * uOffset * oneOverSize;

                // Check if the sample is within bounds
                if (sampleUV.x < 0.0 || sampleUV.x > 1.0 || sampleUV.y < 0.0 || sampleUV.y > 1.0) { continue; }

                vec4 sampleValue = texture(inputTexture, sampleUV);
                vec2 sampleSeed = sampleValue.xy;

                if (sampleSeed.x != 0.0 || sampleSeed.y != 0.0) {
                  // dot(diff, diff) avoids the sqrt; only relative distances matter.
                  vec2 diff = sampleSeed - vUv;
                  float dist = dot(diff, diff);
                  if (dist < nearestDist) {
                    nearestDist = dist;
                    nearestSeed = sampleValue;
                  }
                }
              }
            }

            gl_FragColor = nearestSeed;
          }
        }`,
    });

    this.seedPlane = seedPlane;
    this.seedRender = seedRender;
    this.seedRenderTargets = seedRenderTargets;
    this.jfaPlane = jfaPlane;
    this.jfaRender = jfaRender;
    this.jfaRenderTargets = jfaRenderTargets;
  }

  // Render the seed texture from the current drawing and return it.
  seedPass(inputTexture) {
    this.seedPlane.material.uniforms.surfaceTexture.value = inputTexture;
    this.renderer.setRenderTarget(this.seedRenderTargets[0]);
    this.seedRender();
    return this.seedRenderTargets[0].texture;
  }

  // Number of JFA passes to run — driven by the UI slider here so the
  // article can show intermediate stages.
  jfaPassesCount() {
    return parseInt(jfaSlider.value, 10);
  }

  // Run the JFA, ping-ponging between two render targets; returns the
  // final accumulated texture.
  jfaPass(inputTexture) {
    let currentInput = inputTexture;
    let [renderA, renderB] = this.jfaRenderTargets;
    let currentOutput = renderA;
    this.jfaPlane.material.uniforms.skip.value = true;
    let passes = this.jfaPassesCount();

    // When passes === 0, still run one "skip" pass that just shows the UV map.
    for (let i = 0; i < passes || (passes === 0 && i === 0); i++) {
      this.jfaPlane.material.uniforms.skip.value = passes === 0;
      this.jfaPlane.material.uniforms.inputTexture.value = currentInput;
      // This intentionally uses `this.passes` which is the true value
      // in order to properly show stages using the JFA slider.
      this.jfaPlane.material.uniforms.uOffset.value = Math.pow(2, this.passes - i - 1);

      this.renderer.setRenderTarget(currentOutput);
      this.jfaRender();

      currentInput = currentOutput.texture;
      currentOutput = (currentOutput === renderA) ? renderB : renderA;
    }

    return currentInput;
  }

  // Animate an example stroke (a curve, or a straight "shadow" line) onto
  // the surface, one segment per frame, resolving when t reaches 10.
  draw(last, t, isShadow, resolve) {
    if (t >= 10.0) {
      resolve();
      return;
    }

    const angle = (t * 0.05) * Math.PI * 2;
    const { x, y } = isShadow
      ? { x: 90 + 12 * t, y: 200 + 1 * t }
      : {
          x: 100 + 100 * Math.sin(angle + 0.25) * Math.cos(angle * 0.15),
          y: 50 + 100 * Math.sin(angle * 0.7),
        };

    last ??= { x, y };
    this.surface.drawSmoothLine(last, { x, y });
    last = { x, y };

    const step = instantMode ? 5.0 : (isShadow ? 0.5 : 0.3);
    getFrame(() => this.draw(last, t + step, isShadow, resolve));
  }

  clear() {
    if (this.initialized) {
      this.seedRenderTargets.concat(this.jfaRenderTargets).forEach((target) => {
        this.renderer.setRenderTarget(target);
        this.renderer.clearColor();
      });
    }
    super.clear();
  }

  load() {
    super.load();
    jfaSlider.addEventListener("input", () => {
      this.renderPass();
      // Save the value
      window.mdxishState.jfaSlider = jfaSlider.value;
    });
    getFrame(() => this.reset());
  }

  // Full pipeline for this canvas: draw -> seed -> JFA -> screen.
  renderPass() {
    let out = this.drawPass();
    out = this.seedPass(out);
    out = this.jfaPass(out);
    this.renderer.setRenderTarget(null);
    this.jfaRender();
  }

  // Clear, then replay the two example strokes (orange curve, black line)
  // before restoring the default brush color.
  reset() {
    this.clear();
    let last = undefined;
    return new Promise((resolve) => {
      this.setHex("#f9a875");
      getFrame(() => this.draw(last, 0, false, resolve));
    }).then(() => new Promise((resolve) => {
      last = undefined;
      getFrame(() => {
        this.setHex("#000000");
        getFrame(() => this.draw(last, 0, true, resolve));
      });
    })).then(() => {
      this.renderPass();
      getFrame(() => this.setHex("#fff6d3"));
    });
  }
}

const jfa = new JFA({ id: "jfa", width: 300, height: 300 });
I'm also showing "No Passes" here, which is the UV map of all pixels, without the drawing.
If you haven't been experimenting... you can draw regardless of the settings, and it will appropriately render what would have been rendered if you had drawn on a canvas and that was passed to the GPU. This is how all these canvases are instrumented.
So... we're pretty much there on our precomputed nearest-filled-pixel distance lookup. We just need to do a quick pass with a shader to convert the texture.
Distance Field
We need to take our JFA texture - which is the uv of our filled pixels smeared about hierarchically, and just measure the distance between the stored rg (red, green) value and the uv itself!
So let's go ahead and do it.
vec2 nearestSeed = texture(jfaTexture, vUv).xy;
// Clamp by the size of our texture (1.0 in uv space).
float distance = clamp(distance(vUv, nearestSeed), 0.0, 1.0);
// Normalize and visualize the distance
gl_FragColor = vec4(vec3(distance), 1.0);
We also clamped it to make sure things that are further than 1 uv away are still just treated as 1.
But that's really it- and now we can dramatically improve the runtime of our raymarching with our fancy jump distance lookup.
Erase / Drag around
// @runclassDistanceFieldextendsJFA{jfaPassesCount(){returnthis.passes;}innerInitialize(){super.innerInitialize();const{plane: dfPlane,render: dfRender,renderTargets: dfRenderTargets}=this.initThreeJS({uniforms: {jfaTexture: {value: this.surface.texture},size: {value: newTHREE.Vector2(this.width,this.height)},},fragmentShader: ` uniform sampler2D jfaTexture; uniform vec2 size; in vec2 vUv; void main() { vec2 nearestSeed = texture(jfaTexture, vUv).xy; // Clamp by the size of our texture (1.0 in uv space). float distance = clamp(distance(vUv, nearestSeed), 0.0, 1.0); // Normalize and visualize the distance gl_FragColor = vec4(vec3(distance), 1.0); }`,});this.dfPlane=dfPlane;this.dfRender=dfRender;this.dfRenderTargets=dfRenderTargets;}load(){this.reset();this.initialized=true;}clear(){if(this.initialized){this.dfRenderTargets.forEach((target)=>{this.renderer.setRenderTarget(target);this.renderer.clearColor();});}super.clear();}dfPass(inputTexture){this.renderer.setRenderTarget(this.dfRenderTargets[0]);this.dfPlane.material.uniforms.jfaTexture.value=inputTexture;this.dfRender();returnthis.dfRenderTargets[0].texture;}renderPass(){letout=this.drawPass();out=this.seedPass(out);out=this.jfaPass(out);out=this.dfPass(out);this.renderer.setRenderTarget(null);this.dfRender();}}constdistanceField=newDistanceField({id: "distance-field-canvas",width: 300,height: 300});
Let's update our raymarching logic to use our new distance field!
Naive Global Illumination
Yes, this is still naive global illumination, but much better than what we started with!
We're doing almost exactly the same thing, but maxSteps can be way smaller - like 32 now, and definitely goes to the edge of the screen. And this new maxSteps effectively controls quality and scales with the size of the canvas. Our previous method is a set distance (like 256 steps) so needs to be increased with larger canvases, and is clearly more expensive.
// We tested uv already (we know we aren't an object), so skip step 0.
for (int step = 1; step < maxSteps; step++) {
// How far away is the nearest object?
float dist = texture(distanceTexture, sampleUv).r;
// Go the direction we're traveling (with noise)
sampleUv += rayDirection * dist;
if (outOfBounds(sampleUv)) break;
// We hit something! (EPS = small number, like 0.001)
if (dist < EPS) {
// Collect the radiance
radDelta += texture(sceneTexture, sampleUv);
break;
}
}
And that's all we need to change to get our naive global illumination implementation!
Playground
Here are a bunch of controls to play with what we've built, and some extra features.
// @run
// Naive global illumination canvas: raymarches the scene using the
// distance field, with optional noise, grain, temporal accumulation,
// and a sun/sky contribution controlled by UI toggles and sliders.
class GI extends DistanceField {
  innerInitialize() {
    super.innerInitialize();

    this.toggle = document.querySelector("#noise-toggle");
    this.grainToggle = document.querySelector("#grain-toggle");
    this.temporalToggle = document.querySelector("#temporal-toggle");
    this.sunToggle = document.querySelector("#sun-toggle");
    this.sunAngleSlider = document.querySelector("#sun-angle-slider");
    this.maxStepsSlider = document.querySelector("#max-steps-slider");

    this.showNoise = true;
    this.showGrain = true;
    this.useTemporalAccum = false;
    this.enableSun = true;
    this.activelyDrawing = false;
    // How many frames to accumulate temporally before stopping.
    this.accumAmt = 10.0;
    // Slider values are strings; the shader uniform is an int.
    this.maxSteps = parseInt(this.maxStepsSlider.value, 10);

    const { plane: giPlane, render: giRender, renderTargets: giRenderTargets } = this.initThreeJS({
      uniforms: {
        sceneTexture: { value: this.surface.texture },
        distanceTexture: { value: null },
        lastFrameTexture: { value: null },
        oneOverSize: { value: new THREE.Vector2(1.0 / this.width, 1.0 / this.height) },
        rayCount: { value: rayCount },
        showNoise: { value: this.showNoise },
        showGrain: { value: this.showGrain },
        useTemporalAccum: { value: this.useTemporalAccum },
        enableSun: { value: this.enableSun },
        time: { value: 0.0 },
        // We're using TAU - 2.0 (radians) here b/c it feels like a reasonable spot in the sky
        sunAngle: { value: parseFloat(this.sunAngleSlider.value) },
        maxSteps: { value: this.maxSteps },
      },
      fragmentShader: `
        uniform int rayCount;
        uniform float time;
        uniform float sunAngle;
        uniform bool showNoise;
        uniform bool showGrain;
        uniform bool useTemporalAccum;
        uniform bool enableSun;
        uniform vec2 oneOverSize;
        uniform int maxSteps;

        uniform sampler2D sceneTexture;
        uniform sampler2D lastFrameTexture;
        uniform sampler2D distanceTexture;

        in vec2 vUv;

        const float PI = 3.14159265;
        const float TAU = 2.0 * PI;
        const float ONE_OVER_TAU = 1.0 / TAU;
        const float PAD_ANGLE = 0.01;
        const float EPS = 0.001f;

        const vec3 skyColor = vec3(0.02, 0.08, 0.2);
        const vec3 sunColor = vec3(0.95, 0.95, 0.9);

        const float goldenAngle = PI * 0.7639320225;

        // Popular rand function
        float rand(vec2 co) {
          return fract(sin(dot(co.xy ,vec2(12.9898,78.233))) * 43758.5453);
        }

        vec3 sunAndSky(float rayAngle) {
          // Get the sun / ray relative angle
          float angleToSun = mod(rayAngle - sunAngle, TAU);

          // Sun falloff based on the angle
          float sunIntensity = smoothstep(1.0, 0.0, angleToSun);

          // And that's our sky radiance
          return sunColor * sunIntensity + skyColor;
        }

        bool outOfBounds(vec2 uv) {
          return uv.x < 0.0 || uv.x > 1.0 || uv.y < 0.0 || uv.y > 1.0;
        }

        void main() {
          vec2 uv = vUv;

          vec4 light = texture(sceneTexture, uv);
          vec4 radiance = vec4(0.0);

          float oneOverRayCount = 1.0 / float(rayCount);
          float angleStepSize = TAU * oneOverRayCount;

          float coef = useTemporalAccum ? time : 0.0;
          float offset = showNoise ? rand(uv + coef) : 0.0;
          float rayAngleStepSize = showGrain ? angleStepSize + offset * TAU : angleStepSize;

          // Not light source or occluder
          if (light.a < 0.1) {
            // Shoot rays in "rayCount" directions, equally spaced, with some randomness.
            for (int i = 0; i < rayCount; i++) {
              float angle = rayAngleStepSize * (float(i) + offset) + sunAngle;
              vec2 rayDirection = vec2(cos(angle), -sin(angle));

              vec2 sampleUv = uv;
              vec4 radDelta = vec4(0.0);
              bool hitSurface = false;

              // We tested uv already (we know we aren't an object), so skip step 0.
              for (int step = 1; step < maxSteps; step++) {
                // How far away is the nearest object?
                float dist = texture(distanceTexture, sampleUv).r;

                // Go the direction we're traveling (with noise)
                sampleUv += rayDirection * dist;

                if (outOfBounds(sampleUv)) break;

                if (dist < EPS) {
                  vec4 sampleColor = texture(sceneTexture, sampleUv);
                  radDelta += sampleColor;
                  hitSurface = true;
                  break;
                }
              }

              // If we didn't find an object, add some sky + sun color
              if (!hitSurface && enableSun) {
                radDelta += vec4(sunAndSky(angle), 1.0);
              }

              // Accumulate total radiance
              radiance += radDelta;
            }
          } else if (length(light.rgb) >= 0.1) {
            radiance = light;
          }

          // Bring up all the values to have an alpha of 1.0.
          vec4 finalRadiance = vec4(max(light, radiance * oneOverRayCount).rgb, 1.0);
          if (useTemporalAccum && time > 0.0) {
            vec4 prevRadiance = texture(lastFrameTexture, vUv);
            gl_FragColor = mix(finalRadiance, prevRadiance, 0.9);
          } else {
            gl_FragColor = finalRadiance;
          }
        }`,
    });

    this.lastFrame = null;
    // `prev` is just 0 or 1 — picks which ping-pong target to render into.
    this.prev = 0;
    this.drawingExample = false;

    this.giPlane = giPlane;
    this.giRender = giRender;
    this.giRenderTargets = giRenderTargets;
  }

  // Run the GI shader. When temporal accumulation is active (and nothing
  // is being drawn), also mix with the previous frame via ping-pong targets.
  giPass(distanceFieldTexture) {
    this.giPlane.material.uniforms.distanceTexture.value = distanceFieldTexture;
    this.giPlane.material.uniforms.sceneTexture.value = this.surface.texture;

    if (this.useTemporalAccum && !this.surface.isDrawing && !this.drawingExample) {
      this.giPlane.material.uniforms.lastFrameTexture.value = this.lastFrame ?? this.surface.texture;
      const target = this.prev ? this.giRenderTargets[0] : this.giRenderTargets[1];
      this.prev = (this.prev + 1) % 2;
      this.renderer.setRenderTarget(target);
      this.giRender();
      this.lastFrame = target.texture;
      this.giPlane.material.uniforms.time.value += 1.0;
    } else {
      this.giPlane.material.uniforms.time.value = 0.0;
      this.lastFrame = null;
    }

    this.renderer.setRenderTarget(null);
    this.giRender();

    return this.lastFrame;
  }

  renderPass() {
    const isDone = this.giPlane.material.uniforms.time.value >= this.accumAmt;
    if (isDone || this.surface.isDrawing || this.drawingExample) {
      this.giPlane.material.uniforms.time.value = 0;
    }

    let drawPassTexture = this.drawPass();
    let out = this.seedPass(drawPassTexture);
    out = this.jfaPass(out);
    out = this.dfPass(out);
    this.renderer.setRenderTarget(null);
    this.surface.texture = drawPassTexture;
    out = this.giPass(out);
  }

  // Keep rendering frames until accumulation finishes or temporal
  // accumulation is disabled / interrupted by drawing.
  animate() {
    const isDone = this.giPlane.material.uniforms.time.value >= this.accumAmt;
    this.renderPass();
    if (isDone || this.surface.isDrawing || this.drawingExample || !this.useTemporalAccum) {
      return;
    }
    getFrame(() => this.animate());
  }

  toggleSun() {
    this.sunToggle.checked = !this.sunToggle.checked;
    this.enableSun = !this.enableSun;
    this.giPlane.material.uniforms.enableSun.value = this.enableSun;
    this.animate();
  }

  clear() {
    this.lastFrame = null;
    if (this.initialized) {
      this.giRenderTargets.forEach((target) => {
        this.renderer.setRenderTarget(target);
        this.renderer.clearColor();
      });
    }
    super.clear();
  }

  reset() {
    this.drawingExample = true;
    return super.reset().then(() => {
      this.drawingExample = false;
      this.animate();
    });
  }

  // Hooks wired into the canvas: reset accumulation whenever the user draws.
  canvasModifications() {
    return {
      startDrawing: (e) => {
        if (this.drawingExample) {
          return;
        }
        this.lastFrame = null;
        this.giPlane.material.uniforms.time.value = 0.0;
        this.surface.startDrawing(e);
      },
      onMouseMove: (e) => {
        if (this.surface.onMouseMove(e)) {
          this.giPlane.material.uniforms.time.value = 0.0;
        }
      },
      stopDrawing: (e) => {
        if (this.surface.stopDrawing(e)) {
          this.giPlane.material.uniforms.time.value = 0;
          this.animate();
        }
      },
      ...(this.id === "final" ? { toggleSun: () => this.toggleSun() } : {}),
    };
  }

  stopSliding() {
    this.drawingExample = false;
    this.animate();
  }

  // Wire up all the UI controls once the example strokes have finished.
  loadAfterReset() {
    this.initialized = true;

    this.toggle.addEventListener("input", () => {
      this.showNoise = this.toggle.checked;
      this.giPlane.material.uniforms.showNoise.value = this.showNoise;
      this.animate();
    });

    this.grainToggle.addEventListener("input", () => {
      this.showGrain = this.grainToggle.checked;
      this.giPlane.material.uniforms.showGrain.value = this.showGrain;
      this.animate();
    });

    this.temporalToggle.addEventListener("input", () => {
      this.useTemporalAccum = this.temporalToggle.checked;
      this.giPlane.material.uniforms.useTemporalAccum.value = this.useTemporalAccum;
      this.animate();
    });

    this.sunToggle.addEventListener("input", () => {
      this.giPlane.material.uniforms.time.value = 0;
      this.enableSun = this.sunToggle.checked;
      this.giPlane.material.uniforms.enableSun.value = this.enableSun;
      this.animate();
    });

    // While either slider is being dragged, treat it like example drawing
    // so temporal accumulation pauses.
    Object.entries({
      "mousedown": () => { this.drawingExample = true; },
      "touchstart": () => { this.drawingExample = true; },
      "touchend": () => { this.stopSliding(); },
      "touchcancel": () => { this.stopSliding(); },
      "mouseup": () => { this.stopSliding(); },
    }).forEach(([event, fn]) => {
      this.sunAngleSlider.addEventListener(event, fn);
      this.maxStepsSlider.addEventListener(event, fn);
    });

    this.sunAngleSlider.addEventListener("input", () => {
      this.giPlane.material.uniforms.sunAngle.value = parseFloat(this.sunAngleSlider.value);
      this.renderPass();
      window.mdxishState.sunAngleSlider = this.sunAngleSlider.value;
    });

    this.maxStepsSlider.addEventListener("input", () => {
      this.giPlane.material.uniforms.maxSteps.value = parseInt(this.maxStepsSlider.value, 10);
      this.renderPass();
      // BUG FIX: was `this.maxSteps.value` — `this.maxSteps` is a number,
      // so `.value` was undefined. Persist the slider's current value.
      window.mdxishState.maxSteps = this.maxStepsSlider.value;
    });
  }

  load() {
    this.reset().then(() => {
      this.loadAfterReset();
    });
  }
}

const gi = new GI({ id: "gi", width: 300, height: 400 });

let finalWidth = 300;
let giFinal = new GI({ id: "final", width: finalWidth, height: 400 });

if (!isMobile) {
  let performanceMode = true;
  const perfDiv = document.querySelector("#performance-issues");
  perfDiv.textContent = "Want a bigger canvas?";
  perfDiv.addEventListener("click", () => {
    // BUG FIX: the DOM property is `innerHTML`, not `innerHtml`.
    document.querySelector("#final").innerHTML = "";
    performanceMode = !performanceMode;
    finalWidth = performanceMode ? 300 : document.querySelector("#content").clientWidth - 64;
    perfDiv.textContent = performanceMode ? "Want a bigger canvas?" : "Performance issues?";
    giFinal = new GI({ id: "final", width: finalWidth, height: 400 });
  });
}
Bonus Material
Oh, you're still here? Cool. Let's peel back the demo above then!
Each control has a "How?" link which jumps to the explanation, and there's a "Jump back to playground" at the end of each section. No need to read in order!
Make it look like we're outside
So if we want to make it look like we're outside, we need to record when we hit a surface, because if we didn't, we want to add extra radiance from the sun / sky.
if (dist < EPS) {
radDelta += texture(sceneTexture, sampleUv);
// Record that we hit something!
hitSurface = true;
break;
}
It's important we record that we hit something because if we didn't, then we want to add some sun/sky look and feel (radiance). And then we can just take a literal slice of the sky, and cast rays from that direction
if (!hitSurface) {
radDelta += vec4(sunAndSky(angle), 1.0);
}
So, let's define the key radiance calculation.
First, choose some colors which represent what color and how much of an impact the "sky color" and "sun color" will have on the final render. The "sky color" will always be the same, while the "sun color" will be applied based on its angle. I chose to make the sun matter a lot more, be near white, and for the sky to be much dimmer with a blue hue. But these can be whatever you want!
And finally, we apply the intensity to the sun color and add the sky color.
return sunColor * sunIntensity + skyColor;
That's it! Every time I've come across "sky radiance" in the wild, it's always some crazy set of equations I barely understand, and/or "copied from <so and so's> shadertoy". Maybe I'm way over simplifying things, but, this approach is what I landed on when thinking it through myself.
So I was playing with noise, and happened on pixels "catching the light" by adding our same noise to how much we increase the angle by each iteration, adding some noisy overlap. So certain pixels get double the radiance.
And in our core ray loop, where i is our ray index, I added the sunAngle to it, so the light would catch (overlap would happen) in the same direction as the sun.
This is pretty straightforward, but there's a lot you can do with it to customize it. I didn't mention it earlier because Radiance Cascades doesn't require temporal accumulation, which is basically a hack to deal with noise.
All we need to do to get temporal accumulation is, add a time component to our noise seed, so it varies over time, and mix with the last frame as an additional texture (ping-pong buffer).
Anywhere you use rand and want it to get smoothed, just add time.
rand(uv + time)
And when you return the final radiance, just mix it with the previous frame.
On the CPU side, just update the loop to include / store the previous texture, and add a stopping condition. If the time reaches, say, 10 frames, stop mixing. prev is just 0 or 1 - initialized to 0 somewhere.
I also opted to pause temporal accumulation while drawing. It impacts performance as it needs to set a new render target each frame and adds a texture lookup.
@DragonAxe yup. Definitely correct. I'll update here and in the post. It looks like a reasonable output currently due to the wrapping nature of radians. It's so easy to make these small errors. This is only in the very first simple raymarch canvas.
@DragonAxe yup. Definitely correct. I'll update here and in the post. It looks like a reasonable output currently due to the wrapping nature of radians. It's so easy to make these small errors. This is only in the very first simple raymarch canvas.