@greggman
Last active July 3, 2024 02:18
WebGPU Cubemap (check mips)
@import url(https://webgpufundamentals.org/webgpu/resources/webgpu-lesson.css);
html, body {
margin: 0; /* remove the default margin */
height: 100%; /* make the html,body fill the page */
}
canvas {
display: block; /* make the canvas act like a block */
width: 100%; /* make the canvas fill its container */
height: 100%;
}
<canvas></canvas>
// WebGPU SkyBox
// from https://webgpufundamentals.org/webgpu/webgpu-skybox.html
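// Modified so each mip level of the cubemap is filled with a distinct solid color,
// which makes it easy to see which mip level is being sampled.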
import GUI from 'https://webgpufundamentals.org/3rdparty/muigui-0.x.module.js';
// see https://webgpufundamentals.org/webgpu/lessons/webgpu-utils.html#wgpu-matrix
import {mat4} from 'https://webgpufundamentals.org/3rdparty/wgpu-matrix.module.js';
async function main() {
const adapter = await navigator.gpu?.requestAdapter();
const device = await adapter?.requestDevice();
if (!device) {
fail('need a browser that supports WebGPU');
return;
}
// Get a WebGPU context from the canvas and configure it
const canvas = document.querySelector('canvas');
const context = canvas.getContext('webgpu');
const presentationFormat = navigator.gpu.getPreferredCanvasFormat();
context.configure({
device,
format: presentationFormat,
alphaMode: 'premultiplied',
});
const module = device.createShaderModule({
code: `
struct Uniforms {
viewDirectionProjectionInverse: mat4x4f,
};
struct VSOutput {
@builtin(position) position: vec4f,
@location(0) pos: vec4f,
};
@group(0) @binding(0) var<uniform> uni: Uniforms;
@group(0) @binding(1) var ourSampler: sampler;
@group(0) @binding(2) var ourTexture: texture_cube<f32>;
@vertex fn vs(@builtin(vertex_index) vNdx: u32) -> VSOutput {
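// a single triangle large enough to cover all of clip space,
// so no vertex buffer is needed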
let pos = array(
vec2f(-1, 3),
vec2f(-1,-1),
vec2f( 3,-1),
);
var vsOut: VSOutput;
vsOut.position = vec4f(pos[vNdx], 1, 1);
vsOut.pos = vsOut.position;
return vsOut;
}
@fragment fn fs(vsOut: VSOutput) -> @location(0) vec4f {
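// undo the view-direction-projection transform to turn the clip-space
// position back into a direction, then sample the cube map with that
// direction (x is negated before sampling)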
let t = uni.viewDirectionProjectionInverse * vsOut.pos;
return textureSample(ourTexture, ourSampler, normalize(t.xyz / t.w) * vec3f(-1, 1, 1));
}
`,
});
const pipeline = device.createRenderPipeline({
label: 'no attributes',
layout: 'auto',
vertex: {
module,
},
fragment: {
module,
targets: [{ format: presentationFormat }],
},
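// the skybox is drawn at z = 1 (the far plane) so use 'less-equal'
// so its fragments pass the depth test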
depthStencil: {
depthWriteEnabled: true,
depthCompare: 'less-equal',
format: 'depth24plus',
},
});
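// number of mip levels needed: floor(log2(largest dimension)) + 1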
const numMipLevels = (...sizes) => {
const maxSize = Math.max(...sizes);
return 1 + Math.log2(maxSize) | 0;
};
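// copy each face's chain of mip level images into the matching
// array layer and mip level of the texture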
function copySourcesToTexture(device, texture, sources, {flipY} = {}) {
sources.forEach((source, layer) => {
source.forEach((mipSource, mipLevel) => {
device.queue.copyExternalImageToTexture(
{ source: mipSource, flipY, },
{ texture, origin: [0, 0, layer], mipLevel },
{ width: mipSource.width, height: mipSource.height },
);
})
});
//if (texture.mipLevelCount > 1) {
// generateMips(device, texture);
//}
}
function createTextureFromSources(device, sources, options = {}) {
// Each source is an array of mip level images. Assume all the sources are the
// same size so just use the first source's level 0 for width and height.
const source = sources[0][0];
const texture = device.createTexture({
format: 'rgba8unorm',
mipLevelCount: options.mips ? numMipLevels(source.width, source.height) : 1,
size: [source.width, source.height, sources.length],
usage: GPUTextureUsage.TEXTURE_BINDING |
GPUTextureUsage.COPY_DST |
GPUTextureUsage.RENDER_ATTACHMENT,
});
copySourcesToTexture(device, texture, sources, options);
return texture;
}
const hsl = (h, s, l) => `hsl(${h * 360 | 0}, ${s * 100}%, ${l * 100 | 0}%)`;
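// make one ImageData per mip level, down to 1x1, each with a different hue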
const createCheckedMipmap = (size) => {
const ctx = document.createElement('canvas').getContext('2d', {willReadFrequently: true});
const levels = [];
while (size >= 1) {
levels.push({size, color: hsl(levels.length / 2.7, 1, 0.5)});
size /= 2;
}
return levels.map(({size, color}, i) => {
ctx.canvas.width = size;
ctx.canvas.height = size;
// fill the entire level with a single distinct color so it's easy
// to see which mip level is being sampled
ctx.fillStyle = color;
ctx.fillRect(0, 0, size, size);
return ctx.getImageData(0, 0, size, size);
});
};
const texture = device.createTexture({
size: [512, 512, 6],
mipLevelCount: numMipLevels(512),
format: 'rgba8unorm',
usage: GPUTextureUsage.TEXTURE_BINDING | GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.COPY_DST,
});
const mips = createCheckedMipmap(texture.width);
// use the same mip chain for all 6 faces of the cube map
copySourcesToTexture(device, texture, [mips, mips, mips, mips, mips, mips]);
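// trilinear filtering: linear within a mip level and linear blending between levels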
const sampler = device.createSampler({
magFilter: 'linear',
minFilter: 'linear',
mipmapFilter: 'linear',
});
// viewDirectionProjectionInverse
const uniformBufferSize = (16) * 4;
const uniformBuffer = device.createBuffer({
label: 'uniforms',
size: uniformBufferSize,
usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
});
const uniformValues = new Float32Array(uniformBufferSize / 4);
// offsets to the various uniform values in float32 indices
const kViewDirectionProjectionInverseOffset = 0;
const viewDirectionProjectionInverseValue = uniformValues.subarray(
kViewDirectionProjectionInverseOffset,
kViewDirectionProjectionInverseOffset + 16);
const bindGroup = device.createBindGroup({
label: 'bind group for object',
layout: pipeline.getBindGroupLayout(0),
entries: [
{ binding: 0, resource: { buffer: uniformBuffer }},
{ binding: 1, resource: sampler },
{ binding: 2, resource: texture.createView({dimension: 'cube'}) },
],
});
const renderPassDescriptor = {
label: 'our basic canvas renderPass',
colorAttachments: [
{
// view: <- to be filled out when we render
loadOp: 'clear',
storeOp: 'store',
},
],
depthStencilAttachment: {
// view: <- to be filled out when we render
depthClearValue: 1.0,
depthLoadOp: 'clear',
depthStoreOp: 'store',
},
};
let depthTexture;
const settings = {
fieldOfView: 155 * Math.PI / 180,
rotation: 0,
};
const gui = new GUI();
gui.onChange(render);
gui.add(settings, 'fieldOfView', {min: 1, max: 179, converters: GUI.converters.radToDeg});
gui.add(settings, 'rotation', {min: -180, max: 180, converters: GUI.converters.radToDeg});
function render() {
// Get the current texture from the canvas context and
// set it as the texture to render to.
const canvasTexture = context.getCurrentTexture();
renderPassDescriptor.colorAttachments[0].view = canvasTexture.createView();
// If we don't have a depth texture OR if its size is different
// from the canvasTexture, make a new depth texture
if (!depthTexture ||
depthTexture.width !== canvasTexture.width ||
depthTexture.height !== canvasTexture.height) {
if (depthTexture) {
depthTexture.destroy();
}
depthTexture = device.createTexture({
size: [canvasTexture.width, canvasTexture.height],
format: 'depth24plus',
usage: GPUTextureUsage.RENDER_ATTACHMENT,
});
}
renderPassDescriptor.depthStencilAttachment.view = depthTexture.createView();
const encoder = device.createCommandEncoder();
const pass = encoder.beginRenderPass(renderPassDescriptor);
pass.setPipeline(pipeline);
const aspect = canvas.clientWidth / canvas.clientHeight;
const projection = mat4.perspective(
settings.fieldOfView,
aspect,
0.1, // zNear
10, // zFar
);
// Camera going in circle from origin looking at origin
const r = settings.rotation;
const cameraPosition = [Math.cos(r), 0, Math.sin(r)];
const view = mat4.lookAt(
cameraPosition,
[0, 0, 0], // target
[0, 1, 0], // up
);
// We only care about direction so remove the translation
view[12] = 0;
view[13] = 0;
view[14] = 0;
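// the shader needs the inverse so it can turn clip-space positions
// back into view directions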
const viewProjection = mat4.multiply(projection, view);
mat4.inverse(viewProjection, viewDirectionProjectionInverseValue);
// upload the uniform values to the uniform buffer
device.queue.writeBuffer(uniformBuffer, 0, uniformValues);
pass.setBindGroup(0, bindGroup);
pass.draw(3);
pass.end();
const commandBuffer = encoder.finish();
device.queue.submit([commandBuffer]);
}
render();
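// keep the canvas drawing buffer size in sync with its displayed size,
// clamped to the device's maximum texture dimension, and re-render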
const observer = new ResizeObserver(entries => {
for (const entry of entries) {
const canvas = entry.target;
const width = entry.contentBoxSize[0].inlineSize;
const height = entry.contentBoxSize[0].blockSize;
canvas.width = Math.max(1, Math.min(width, device.limits.maxTextureDimension2D));
canvas.height = Math.max(1, Math.min(height, device.limits.maxTextureDimension2D));
render();
}
});
observer.observe(canvas);
}
function fail(msg) {
alert(msg);
}
main();
{"name":"WebGPU Cubemap (check mips)","settings":{},"filenames":["index.html","index.css","index.js"]}