Skip to content

Instantly share code, notes, and snippets.

@greggman
Last active May 8, 2023 07:18
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save greggman/11a0b15eb7076cf7939c30b5c6d544c3 to your computer and use it in GitHub Desktop.
WebGPU Cube - Pulling Vertices
/* Make the page and the canvas fill the viewport with no margins/scrollbars. */
html, body { margin: 0; height: 100% }
canvas { width: 100%; height: 100%; display: block; }
/* Full-screen, centered error banner. Hidden by default via an inline style
   in the HTML; shown by fail() in the script when WebGPU is unavailable. */
#fail {
position: fixed;
left: 0;
top: 0;
width: 100%;
height: 100%;
display: flex;
justify-content: center;
align-items: center;
background: red;
color: white;
font-weight: bold;
font-family: monospace;
font-size: 16pt;
text-align: center;
}
<canvas></canvas>
<!-- Full-screen error overlay; hidden until fail() fills .content and un-hides it. -->
<div id="fail" style="display: none">
<div class="content"></div>
</div>
// WebGPU Cube - Pulling Vertices
// Based on the webgpufundamentals.org webgpu-cube sample, modified to "pull"
// vertex data out of a storage buffer instead of using vertex attributes.
/* global GPUBufferUsage */
/* global GPUTextureUsage */
import {vec3, mat4} from 'https://webgpufundamentals.org/3rdparty/wgpu-matrix.module.js';
async function main() {
// Feature-detect WebGPU, reporting through the #fail overlay instead of throwing.
const gpu = navigator.gpu;
if (!gpu) {
fail('this browser does not support webgpu');
return;
}
const adapter = await gpu.requestAdapter();
if (!adapter) {
fail('this browser appears to support WebGPU but it\'s disabled');
return;
}
const device = await adapter.requestDevice();
const canvas = document.querySelector('canvas');
const context = canvas.getContext('webgpu');
// NOTE(review): getPreferredCanvasFormat() takes no arguments in the current
// WebGPU spec; the adapter argument here is ignored.
const presentationFormat = gpu.getPreferredCanvasFormat(adapter);
const presentationSize = [300, 150]; // default canvas size
context.configure({
alphaMode: "opaque",
format: presentationFormat,
device,
});
// Bundle of everything resizeToDisplaySize() needs in order to (re)create
// the size-dependent textures whenever the canvas display size changes.
const canvasInfo = {
canvas,
context,
presentationSize,
presentationFormat,
// these are filled out in resizeToDisplaySize
renderTarget: undefined,
renderTargetView: undefined,
depthTexture: undefined,
depthTextureView: undefined,
sampleCount: 4, // can be 1 or 4
};
// WGSL for both stages. The vertex stage declares no vertex-buffer inputs;
// it indexes the `vertData` storage array with vertex_index instead
// ("vertex pulling"). MyVSInput uses three vec4s — 12 floats / 48 bytes per
// vertex — which matches how the JS below packs the geometry.
const code = `
struct VSUniforms {
worldViewProjection: mat4x4<f32>,
worldInverseTranspose: mat4x4<f32>,
};
@group(0) @binding(0) var<uniform> vsUniforms: VSUniforms;
struct MyVSInput {
position: vec4<f32>,
normal: vec4<f32>,
texcoord: vec4<f32>,
};
struct MyVSOutput {
@builtin(position) position: vec4<f32>,
@location(0) normal: vec3<f32>,
@location(1) texcoord: vec2<f32>,
};
@group(0) @binding(1) var<storage, read> vertData : array<MyVSInput>;
@vertex
fn myVSMain(
@builtin(vertex_index) my_index: u32,
) -> MyVSOutput {
var vsOut: MyVSOutput;
var v = vertData[my_index];
vsOut.position = vsUniforms.worldViewProjection * v.position;
vsOut.normal = (vsUniforms.worldInverseTranspose * v.normal).xyz;
vsOut.texcoord = v.texcoord.xy;
return vsOut;
}
struct FSUniforms {
lightDirection: vec3<f32>,
};
@group(0) @binding(2) var<uniform> fsUniforms: FSUniforms;
@group(0) @binding(3) var diffuseSampler: sampler;
@group(0) @binding(4) var diffuseTexture: texture_2d<f32>;
@fragment
fn myFSMain(v: MyVSOutput) -> @location(0) vec4<f32> {
var diffuseColor = textureSample(diffuseTexture, diffuseSampler, v.texcoord);
var a_normal = normalize(v.normal);
var l = dot(a_normal, fsUniforms.lightDirection) * 0.5 + 0.5;
return vec4<f32>(diffuseColor.rgb * l, diffuseColor.a);
}
`;
// NOTE(review): createShaderModule() is synchronous; the await is harmless
// but unnecessary.
const shaderModule = await device.createShaderModule({code});
const vUniformBufferSize = 2 * 16 * 4; // 2 mat4s * 16 floats per mat * 4 bytes per float
const fUniformBufferSize = 3 * 4 + 4; // 1 vec3 * 3 floats per vec3 * 4 bytes per float (+ pad)
const vsUniformBuffer = device.createBuffer({
size: vUniformBufferSize,
usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
});
const fsUniformBuffer = device.createBuffer({
size: fUniformBufferSize,
usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
});
// CPU-side staging for the uniform data. The subarrays are views aliasing
// vsUniformValues / fsUniformValues, so writing a matrix or the light
// direction fills the upload arrays in place.
const vsUniformValues = new Float32Array(2 * 16); // 2 mat4s
const worldViewProjection = vsUniformValues.subarray(0, 16);
const worldInverseTranspose = vsUniformValues.subarray(16, 32);
const fsUniformValues = new Float32Array(3); // 1 vec3
const lightDirection = fsUniformValues.subarray(0, 3);
function createBuffer(device, data, usage) {
const buffer = device.createBuffer({
size: data.byteLength,
usage,
mappedAtCreation: true,
});
const dst = new data.constructor(buffer.getMappedRange());
dst.set(data);
buffer.unmap();
return buffer;
}
// Cube geometry: 24 unique vertices (4 per face so each face gets its own
// flat normal and uvs), referenced by 36 indices (6 faces * 2 triangles).
const positions = [1, 1, -1, 1, 1, 1, 1, -1, 1, 1, -1, -1, -1, 1, 1, -1, 1, -1, -1, -1, -1, -1, -1, 1, -1, 1, 1, 1, 1, 1, 1, 1, -1, -1, 1, -1, -1, -1, -1, 1, -1, -1, 1, -1, 1, -1, -1, 1, 1, 1, 1, -1, 1, 1, -1, -1, 1, 1, -1, 1, -1, 1, -1, 1, 1, -1, 1, -1, -1, -1, -1, -1];
const normals = [1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0, -1];
const texcoords = [1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1];
const indices = new Uint16Array([0, 1, 2, 0, 2, 3, 4, 5, 6, 4, 6, 7, 8, 9, 10, 8, 10, 11, 12, 13, 14, 12, 14, 15, 16, 17, 18, 16, 18, 19, 20, 21, 22, 20, 22, 23]);
// De-index the geometry into one storage-buffer record per drawn vertex.
// Each record is 12 floats, matching MyVSInput's three vec4s:
// position.xyz + w=1, normal.xyz + w=0, texcoord.xy + two floats of padding.
const vertData = [];
for (let n = 0; n < indices.length; ++n) {
const i = indices[n];
vertData.push(
...positions.slice(i * 3, (i + 1) * 3), 1,
...normals.slice(i * 3, (i + 1) * 3), 0,
...texcoords.slice(i * 2, (i + 1) * 2), 0, 0,
)
}
// STORAGE (not VERTEX) usage: the shader reads this buffer through binding 1.
const vertexBuffer = createBuffer(device, new Float32Array(vertData), GPUBufferUsage.STORAGE);
// Tiny 2x2 texture, one pastel texel per quadrant.
const tex = device.createTexture({
size: [2, 2, 1],
format: 'rgba8unorm',
usage:
GPUTextureUsage.TEXTURE_BINDING |
GPUTextureUsage.COPY_DST,
});
device.queue.writeTexture(
{ texture: tex },
new Uint8Array([
255, 255, 128, 255,
128, 255, 255, 255,
255, 128, 255, 255,
255, 128, 128, 255,
]),
{ bytesPerRow: 8, rowsPerImage: 2 }, // 2 texels * 4 bytes (rgba8unorm) per row
{ width: 2, height: 2 },
);
const sampler = device.createSampler({
magFilter: 'nearest',
minFilter: 'nearest',
});
// 'auto' layout: the bind group layout is inferred from the WGSL declarations.
const pipeline = device.createRenderPipeline({
layout: 'auto',
vertex: {
module: shaderModule,
entryPoint: 'myVSMain',
},
fragment: {
module: shaderModule,
entryPoint: 'myFSMain',
targets: [
{format: presentationFormat},
],
},
primitive: {
topology: 'triangle-list',
cullMode: 'back',
},
depthStencil: {
depthWriteEnabled: true,
depthCompare: 'less',
format: 'depth24plus',
},
// Spread trick: only include the multisample state when MSAA is enabled.
...(canvasInfo.sampleCount > 1 && {
multisample: {
count: canvasInfo.sampleCount,
},
}),
});
const bindGroup = device.createBindGroup({
layout: pipeline.getBindGroupLayout(0),
entries: [
{ binding: 0, resource: { buffer: vsUniformBuffer } },
{ binding: 1, resource: { buffer: vertexBuffer } },
{ binding: 2, resource: { buffer: fsUniformBuffer } },
{ binding: 3, resource: sampler },
{ binding: 4, resource: tex.createView() },
],
});
// Reused every frame; the view/resolveTarget attachments are filled in by
// render() once the per-frame textures are known.
const renderPassDescriptor = {
colorAttachments: [
{
// view: undefined, // Assigned later
// resolveTarget: undefined, // Assigned Later
clearValue: { r: 0.5, g: 0.5, b: 0.5, a: 1.0 },
loadOp: 'clear',
storeOp: 'store',
},
],
depthStencilAttachment: {
// view: undefined, // Assigned later
depthClearValue: 1.0,
depthLoadOp: 'clear',
depthStoreOp: 'store',
},
};
function resizeToDisplaySize(device, canvasInfo) {
const {
canvas,
context,
renderTarget,
presentationSize,
presentationFormat,
depthTexture,
sampleCount,
} = canvasInfo;
const width = Math.min(device.limits.maxTextureDimension2D, canvas.clientWidth);
const height = Math.min(device.limits.maxTextureDimension2D, canvas.clientHeight);
const needResize = !canvasInfo.renderTarget ||
width !== presentationSize[0] ||
height !== presentationSize[1];
if (needResize) {
if (renderTarget) {
renderTarget.destroy();
}
if (depthTexture) {
depthTexture.destroy();
}
canvas.width = width;
canvas.height = height;
presentationSize[0] = width;
presentationSize[1] = height;
if (sampleCount > 1) {
const newRenderTarget = device.createTexture({
size: presentationSize,
format: presentationFormat,
sampleCount,
usage: GPUTextureUsage.RENDER_ATTACHMENT,
});
canvasInfo.renderTarget = newRenderTarget;
canvasInfo.renderTargetView = newRenderTarget.createView();
}
const newDepthTexture = device.createTexture({
size: presentationSize,
format: 'depth24plus',
sampleCount,
usage: GPUTextureUsage.RENDER_ATTACHMENT,
});
canvasInfo.depthTexture = newDepthTexture;
canvasInfo.depthTextureView = newDepthTexture.createView();
}
return needResize;
}
function render(time) {
time *= 0.001;
resizeToDisplaySize(device, canvasInfo);
const projection = mat4.perspective(30 * Math.PI / 180, canvas.clientWidth / canvas.clientHeight, 0.5, 10);
const eye = [1, 4, -6];
const target = [0, 0, 0];
const up = [0, 1, 0];
const view = mat4.lookAt(eye, target, up);
const viewProjection = mat4.multiply(projection, view);
const world = mat4.rotationY(time);
mat4.transpose(mat4.inverse(world), worldInverseTranspose);
mat4.multiply(viewProjection, world, worldViewProjection);
vec3.normalize([1, 8, -10], lightDirection);
device.queue.writeBuffer(
vsUniformBuffer,
0,
vsUniformValues.buffer,
vsUniformValues.byteOffset,
vsUniformValues.byteLength,
);
device.queue.writeBuffer(
fsUniformBuffer,
0,
fsUniformValues.buffer,
fsUniformValues.byteOffset,
fsUniformValues.byteLength,
);
if (canvasInfo.sampleCount === 1) {
const colorTexture = context.getCurrentTexture();
renderPassDescriptor.colorAttachments[0].view = colorTexture.createView();
} else {
renderPassDescriptor.colorAttachments[0].view = canvasInfo.renderTargetView;
renderPassDescriptor.colorAttachments[0].resolveTarget = context.getCurrentTexture().createView();
}
renderPassDescriptor.depthStencilAttachment.view = canvasInfo.depthTextureView;
const commandEncoder = device.createCommandEncoder();
const passEncoder = commandEncoder.beginRenderPass(renderPassDescriptor);
passEncoder.setPipeline(pipeline);
passEncoder.setBindGroup(0, bindGroup);
passEncoder.draw(vertData.length / 8);
passEncoder.end();
device.queue.submit([commandEncoder.finish()]);
requestAnimationFrame(render);
}
requestAnimationFrame(render);
}
function fail(msg) {
const elem = document.querySelector('#fail');
const contentElem = elem.querySelector('.content');
elem.style.display = '';
contentElem.textContent = msg;
}
// Kick everything off.
main();
{"name":"WebGPU Cube - Pulling Vertices","settings":{},"filenames":["index.html","index.css","index.js"]}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment