WebGPU supports drawing points. We do this by setting the primitive topology to 'point-list' in a render pipeline.
Let’s create a simple example with random points, starting with ideas presented in the article on vertex buffers.
First, a simple vertex shader and fragment shader. To keep it simple we’ll just use clip space coordinates for positions and hard-code the color yellow in our fragment shader.
struct Vertex {
  @location(0) position: vec2f,
};

struct VSOutput {
  @builtin(position) position: vec4f,
};

@vertex fn vs(
    vert: Vertex,
) -> VSOutput {
  var vsOut: VSOutput;
  vsOut.position = vec4f(vert.position, 0, 1);
  return vsOut;
}

@fragment fn fs(vsOut: VSOutput) -> @location(0) vec4f {
  return vec4f(1, 1, 0, 1);  // yellow
}
Then, when we create a pipeline, we set the topology to 'point-list'
  const pipeline = device.createRenderPipeline({
    label: '1 pixel points',
    layout: 'auto',
    vertex: {
      module,
      entryPoint: 'vs',
      buffers: [
        {
          arrayStride: 2 * 4, // 2 floats, 4 bytes each
          attributes: [
            {shaderLocation: 0, offset: 0, format: 'float32x2'},  // position
          ],
        },
      ],
    },
    fragment: {
      module,
      entryPoint: 'fs',
      targets: [{ format: presentationFormat }],
    },
+    primitive: {
+      topology: 'point-list',
+    },
  });
Let’s fill a vertex buffer with some random clip space points
  const rand = (min, max) => min + Math.random() * (max - min);

  const kNumPoints = 100;
  const vertexData = new Float32Array(kNumPoints * 2);
  for (let i = 0; i < kNumPoints; ++i) {
    const offset = i * 2;
    vertexData[offset + 0] = rand(-1, 1);
    vertexData[offset + 1] = rand(-1, 1);
  }

  const vertexBuffer = device.createBuffer({
    label: 'vertex buffer vertices',
    size: vertexData.byteLength,
    usage: GPUBufferUsage.VERTEX | GPUBufferUsage.COPY_DST,
  });
  device.queue.writeBuffer(vertexBuffer, 0, vertexData);
And then draw
  const encoder = device.createCommandEncoder();
  const pass = encoder.beginRenderPass(renderPassDescriptor);
  pass.setPipeline(pipeline);
  pass.setVertexBuffer(0, vertexBuffer);
  pass.draw(kNumPoints);
  pass.end();
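The snippets here stop at pass.end(). As in the previous articles, we still need to finish the encoder and submit the command buffer to actually see anything. A minimal sketch, assuming the same encoder and device as above:

  // finish encoding and submit the work to the GPU
  const commandBuffer = encoder.finish();
  device.queue.submit([commandBuffer]);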
And with that we get 100 random yellow points
Unfortunately they are all only 1 pixel in size. A single pixel is the only point size WebGPU supports. If we want something larger we need to do it ourselves. Fortunately it’s easy to do: we’ll just make a quad and use instancing.
Let’s add a quad and a size attribute to our vertex shader. Let’s also add a uniform to pass in the size of the texture we are drawing to, so we can convert a size in pixels into clip space units.
struct Vertex {
  @location(0) position: vec2f,
+  @location(1) size: f32,
};

+struct Uniforms {
+  resolution: vec2f,
+};

struct VSOutput {
  @builtin(position) position: vec4f,
};

+@group(0) @binding(0) var<uniform> uni: Uniforms;

@vertex fn vs(
    vert: Vertex,
+    @builtin(vertex_index) vNdx: u32,
) -> VSOutput {
+  let points = array(
+    vec2f(-1, -1),
+    vec2f( 1, -1),
+    vec2f(-1,  1),
+    vec2f(-1,  1),
+    vec2f( 1, -1),
+    vec2f( 1,  1),
+  );
  var vsOut: VSOutput;
+  let pos = points[vNdx];
-  vsOut.position = vec4f(vert.position, 0, 1);
+  vsOut.position = vec4f(vert.position + pos * vert.size / uni.resolution, 0, 1);
  return vsOut;
}

@fragment fn fs(vsOut: VSOutput) -> @location(0) vec4f {
  return vec4f(1, 1, 0, 1);  // yellow
}
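To see why dividing by the resolution gives us a size in pixels: clip space is always 2 units across and 2 units tall no matter how big the texture is, so 1 clip space unit corresponds to resolution / 2 pixels. Here is a quick check of that math in JavaScript, using a hypothetical 400x300 canvas and a 32 pixel point:

  // hypothetical numbers, just to check the math
  const resolution = [400, 300];  // canvas size in pixels
  const size = 32;                // desired point size in pixels

  // pos goes from -1 to 1 so the quad is 2 * size / resolution clip space units across
  const clipWidth  = 2 * size / resolution[0];  // 0.16
  const clipHeight = 2 * size / resolution[1];  // 0.2133...

  // 1 clip space unit = resolution / 2 pixels, so the quad ends up size pixels on each side
  console.log(clipWidth  * resolution[0] / 2);  // 32
  console.log(clipHeight * resolution[1] / 2);  // 32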
In JavaScript we need to add an attribute for the size of each point, we need to make the attributes advance per instance by setting stepMode: 'instance', and we can remove the topology setting since we want the default 'triangle-list'
  const pipeline = device.createRenderPipeline({
    label: 'sizeable points',
    layout: 'auto',
    vertex: {
      module,
      entryPoint: 'vs',
      buffers: [
        {
-          arrayStride: 2 * 4, // 2 floats, 4 bytes each
+          arrayStride: (2 + 1) * 4, // 3 floats, 4 bytes each
+          stepMode: 'instance',
          attributes: [
            {shaderLocation: 0, offset: 0, format: 'float32x2'},  // position
+            {shaderLocation: 1, offset: 8, format: 'float32'},  // size
          ],
        },
      ],
    },
    fragment: {
      module,
      entryPoint: 'fs',
      targets: [{ format: presentationFormat }],
    },
-    primitive: {
-      topology: 'point-list',
-    },
  });
Let’s add a random size per point to our vertex data
  const kNumPoints = 100;
-  const vertexData = new Float32Array(kNumPoints * 2);
+  const vertexData = new Float32Array(kNumPoints * 3);
  for (let i = 0; i < kNumPoints; ++i) {
-    const offset = i * 2;
+    const offset = i * 3;
    vertexData[offset + 0] = rand(-1, 1);
    vertexData[offset + 1] = rand(-1, 1);
+    vertexData[offset + 2] = rand(1, 32);
  }
We need a uniform buffer so we can pass in the resolution
  const uniformValues = new Float32Array(2);
  const uniformBuffer = device.createBuffer({
    size: uniformValues.byteLength,
    usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
  });
  const kResolutionOffset = 0;
  const resolutionValue = uniformValues.subarray(
      kResolutionOffset, kResolutionOffset + 2);
And we need a bind group to bind the uniform buffer
  const bindGroup = device.createBindGroup({
    layout: pipeline.getBindGroupLayout(0),
    entries: [
      { binding: 0, resource: { buffer: uniformBuffer }},
    ],
  });
Then at render time we can update the uniform buffer with the current resolution.
    // Get the current texture from the canvas context and
    // set it as the texture to render to.
    const canvasTexture = context.getCurrentTexture();
    renderPassDescriptor.colorAttachments[0].view =
        canvasTexture.createView();

+    // Update the resolution in the uniform buffer
+    resolutionValue.set([canvasTexture.width, canvasTexture.height]);
+    device.queue.writeBuffer(uniformBuffer, 0, uniformValues);
Then we set our bind group and render an instance per point, drawing 6 vertices (one quad) for each instance
    const encoder = device.createCommandEncoder();
    const pass = encoder.beginRenderPass(renderPassDescriptor);
    pass.setPipeline(pipeline);
    pass.setVertexBuffer(0, vertexBuffer);
+    pass.setBindGroup(0, bindGroup);
-    pass.draw(kNumPoints);
+    pass.draw(6, kNumPoints);
    pass.end();
And now we have sizable points
What if we wanted to texture our points? We just need to pass in texture coordinates from the vertex shader to the fragment shader.
struct Vertex {
  @location(0) position: vec2f,
  @location(1) size: f32,
};

struct Uniforms {
  resolution: vec2f,
};

struct VSOutput {
  @builtin(position) position: vec4f,
+  @location(0) texcoord: vec2f,
};

@group(0) @binding(0) var<uniform> uni: Uniforms;

@vertex fn vs(
    vert: Vertex,
    @builtin(vertex_index) vNdx: u32,
) -> VSOutput {
  let points = array(
    vec2f(-1, -1),
    vec2f( 1, -1),
    vec2f(-1,  1),
    vec2f(-1,  1),
    vec2f( 1, -1),
    vec2f( 1,  1),
  );
  var vsOut: VSOutput;
  let pos = points[vNdx];
  vsOut.position = vec4f(vert.position + pos * vert.size / uni.resolution, 0, 1);
+  vsOut.texcoord = pos * 0.5 + 0.5;
  return vsOut;
}
And of course use a texture in the fragment shader
+@group(0) @binding(1) var s: sampler;
+@group(0) @binding(2) var t: texture_2d<f32>;

@fragment fn fs(vsOut: VSOutput) -> @location(0) vec4f {
-  return vec4f(1, 1, 0, 1);  // yellow
+  return textureSample(t, s, vsOut.texcoord);
}
We’ll create a simple texture using a canvas like we covered in the article on importing textures.
  const ctx = new OffscreenCanvas(32, 32).getContext('2d');
  ctx.font = '27px sans-serif';
  ctx.textAlign = 'center';
  ctx.textBaseline = 'middle';
  ctx.fillText('🥑', 16, 16);

  const texture = device.createTexture({
    size: [32, 32],
    format: 'rgba8unorm',
    usage: GPUTextureUsage.TEXTURE_BINDING |
           GPUTextureUsage.COPY_DST |
           GPUTextureUsage.RENDER_ATTACHMENT,
  });
  device.queue.copyExternalImageToTexture(
    { source: ctx.canvas, flipY: true },
    { texture, premultipliedAlpha: true },
    [32, 32],
  );
We also need a sampler, and we need to add both the sampler and the texture view to our bind group
  const sampler = device.createSampler({
    minFilter: 'linear',
    magFilter: 'linear',
  });

  const bindGroup = device.createBindGroup({
    layout: pipeline.getBindGroupLayout(0),
    entries: [
      { binding: 0, resource: { buffer: uniformBuffer }},
+      { binding: 1, resource: sampler },
+      { binding: 2, resource: texture.createView() },
    ],
  });
Let’s also turn on blending so we get transparency
  const pipeline = device.createRenderPipeline({
    label: 'sizeable points with texture',
    layout: 'auto',
    vertex: {
      module,
      entryPoint: 'vs',
      buffers: [
        {
          arrayStride: (2 + 1) * 4, // 3 floats, 4 bytes each
          stepMode: 'instance',
          attributes: [
            {shaderLocation: 0, offset: 0, format: 'float32x2'},  // position
            {shaderLocation: 1, offset: 8, format: 'float32'},  // size
          ],
        },
      ],
    },
    fragment: {
      module,
      entryPoint: 'fs',
-      targets: [{ format: presentationFormat }],
+      targets: [
+        {
+          format: presentationFormat,
+          blend: {
+            color: {
+              srcFactor: 'one',
+              dstFactor: 'one-minus-src-alpha',
+              operation: 'add',
+            },
+            alpha: {
+              srcFactor: 'one',
+              dstFactor: 'one-minus-src-alpha',
+              operation: 'add',
+            },
+          },
+        },
+      ],
    },
  });
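These blend settings, together with premultipliedAlpha: true when we copied the canvas into the texture, implement premultiplied alpha blending: result = src + dst * (1 - srcAlpha). A quick sanity check of the two extremes in JavaScript, using hypothetical colors:

  // premultiplied alpha blending as configured above: src * 1 + dst * (1 - srcAlpha)
  const blend = (src, dst) => src.map((s, i) => s + dst[i] * (1 - src[3]));

  console.log(blend([0, 0, 0, 0], [0, 0, 1, 1]));  // [0, 0, 1, 1] a transparent texel keeps the background
  console.log(blend([1, 1, 0, 1], [0, 0, 1, 1]));  // [1, 1, 0, 1] an opaque texel replaces it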
And now we have textured points
And we could keep going. How about a rotation per point? We can use the math we covered in the article on matrix math.
struct Vertex {
  @location(0) position: vec2f,
  @location(1) size: f32,
+  @location(2) rotation: f32,
};

struct Uniforms {
  resolution: vec2f,
};

struct VSOutput {
  @builtin(position) position: vec4f,
  @location(0) texcoord: vec2f,
};

@group(0) @binding(0) var<uniform> uni: Uniforms;

@vertex fn vs(
    vert: Vertex,
    @builtin(vertex_index) vNdx: u32,
) -> VSOutput {
  let points = array(
    vec2f(-1, -1),
    vec2f( 1, -1),
    vec2f(-1,  1),
    vec2f(-1,  1),
    vec2f( 1, -1),
    vec2f( 1,  1),
  );
  var vsOut: VSOutput;
  let pos = points[vNdx];
+  let c = cos(vert.rotation);
+  let s = sin(vert.rotation);
+  let rot = mat2x2f(
+     c, s,
+    -s, c,
+  );
-  vsOut.position = vec4f(vert.position + pos * vert.size / uni.resolution, 0, 1);
+  vsOut.position = vec4f(vert.position + rot * pos * vert.size / uni.resolution, 0, 1);
  vsOut.texcoord = pos * 0.5 + 0.5;
  return vsOut;
}
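mat2x2f takes its arguments in column order, so the columns above are (c, s) and (-s, c), the usual 2D rotation matrix. A quick check of what rot * pos works out to, written in JavaScript with a hypothetical 90 degree rotation:

  // rot * pos = column0 * pos.x + column1 * pos.y
  const rotate = (rotation, [x, y]) => {
    const c = Math.cos(rotation);
    const s = Math.sin(rotation);
    return [c * x - s * y, s * x + c * y];
  };

  console.log(rotate(Math.PI / 2, [1, 0]));  // ≈ [0, 1], +x rotates toward +y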
We need to add the rotation attribute to our pipeline
  const pipeline = device.createRenderPipeline({
    label: 'sizeable rotatable points with texture',
    layout: 'auto',
    vertex: {
      module,
      entryPoint: 'vs',
      buffers: [
        {
-          arrayStride: (2 + 1) * 4, // 3 floats, 4 bytes each
+          arrayStride: (2 + 1 + 1) * 4, // 4 floats, 4 bytes each
          stepMode: 'instance',
          attributes: [
            {shaderLocation: 0, offset: 0, format: 'float32x2'},  // position
            {shaderLocation: 1, offset: 8, format: 'float32'},  // size
+            {shaderLocation: 2, offset: 12, format: 'float32'},  // rotation
          ],
        },
      ],
    },
    ...
We need to add rotation to our vertex data
  const kNumPoints = 100;
-  const vertexData = new Float32Array(kNumPoints * 3);
+  const vertexData = new Float32Array(kNumPoints * 4);
  for (let i = 0; i < kNumPoints; ++i) {
-    const offset = i * 3;
+    const offset = i * 4;
    vertexData[offset + 0] = rand(-1, 1);
    vertexData[offset + 1] = rand(-1, 1);
*    vertexData[offset + 2] = rand(10, 64);
+    vertexData[offset + 3] = rand(0, Math.PI * 2);
  }
Let’s also change the texture from 🥑 to 👉
-  ctx.fillText('🥑', 16, 16);
+  ctx.fillText('👉', 16, 16);
Hopefully this gives you some ideas.