<meta name="viewport" content="width=1000">
<title>WebGPU Cube</title>
<script src="scripts/gl-matrix-min.js"></script>
<link rel="stylesheet" href="css/style.css"/>
font-family: system-ui;
background-color: rgb(13, 77, 153);
<h1>Textured Cube</h1>
This demo uploads a PNG image as texture data and uses it on the faces of a cube.
<canvas width="1200" height="1200"></canvas>
<h2>WebGPU not available</h2>
Make sure you are on a system with WebGPU enabled. In
Safari, first make sure the Develop menu is visible (Preferences →
Advanced), then enable Develop → Experimental Features → WebGPU.
document.body.className = 'error';
const positionAttributeNum = 0;
const texCoordsAttributeNum = 1;
const transformBindingNum = 0;
const textureBindingNum = 1;
const samplerBindingNum = 2;
const bindGroupIndex = 0;
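// These indices are interpolated into the WHLSL source below and reused in the
// pipeline and bind group descriptors, so the JavaScript setup and the shader
// stay in sync: attribute(n) matches shaderLocation n, and register(b/t/s n)
// matches the corresponding binding number.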
float4 position : SV_Position;
float2 texCoords : attribute(${texCoordsAttributeNum});
vertex FragmentData vertex_main(
float4 position : attribute(${positionAttributeNum}),
float2 texCoords : attribute(${texCoordsAttributeNum}),
constant float4x4[] modelViewProjectionMatrix : register(b${transformBindingNum}))
out.position = mul(modelViewProjectionMatrix[0], position);
out.texCoords = texCoords;
fragment float4 fragment_main(
float2 texCoords : attribute(${texCoordsAttributeNum}),
Texture2D<float4> faceTexture : register(t${textureBindingNum}),
sampler faceSampler : register(s${samplerBindingNum})) : SV_Target 0
return Sample(faceTexture, faceSampler, texCoords);
let device, swapChain, verticesBuffer, bindGroupLayout, pipeline, renderPassDescriptor, queue, textureViewBinding, samplerBinding;
let projectionMatrix = mat4.create();
const texCoordsOffset = 4 * 4;
const vertexSize = 4 * 6;
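// Each vertex is a float4 position (16 bytes) followed by a float2 texCoord
// (8 bytes): texCoords start at byte offset 16 and the stride is 24 bytes.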
const verticesArray = new Float32Array([
// float4 position, float2 texCoords
async function init() {
const adapter = await navigator.gpu.requestAdapter();
device = await adapter.requestDevice();
const canvas = document.querySelector('canvas');
const aspect = Math.abs(canvas.width / canvas.height);
mat4.perspective(projectionMatrix, (2 * Math.PI) / 5, aspect, 1, 100.0);
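// 72° vertical field of view (2π/5 radians), near plane at 1, far plane at 100.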
const context = canvas.getContext('gpu');
const swapChainDescriptor = {
swapChain = context.configureSwapChain(swapChainDescriptor);
// The shader source is WHLSL; the isWHLSL flag tells WebKit's WebGPU prototype to compile it as such.
const shaderModuleDescriptor = { code: shader, isWHLSL: true };
const shaderModule = device.createShaderModule(shaderModuleDescriptor);
const verticesBufferDescriptor = {
size: verticesArray.byteLength,
usage: GPUBufferUsage.VERTEX | GPUBufferUsage.TRANSFER_DST
let verticesArrayBuffer;
[verticesBuffer, verticesArrayBuffer] = device.createBufferMapped(verticesBufferDescriptor);
const verticesWriteArray = new Float32Array(verticesArrayBuffer);
verticesWriteArray.set(verticesArray);
verticesBuffer.unmap();
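// createBufferMapped returns the GPUBuffer together with a CPU-visible
// ArrayBuffer; the vertex data is written through a typed-array view and
// unmap() hands the contents over to the GPU.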
const positionAttributeDescriptor = {
shaderLocation: positionAttributeNum, // attribute(0) in the vertex shader.
const texCoordsAttributeDescriptor = {
shaderLocation: texCoordsAttributeNum,
offset: texCoordsOffset,
const vertexBufferDescriptor = {
attributeSet: [positionAttributeDescriptor, texCoordsAttributeDescriptor],
const vertexInputDescriptor = { vertexBuffers: [vertexBufferDescriptor] };
// Load texture image
const image = new Image();
const imageLoadPromise = new Promise(resolve => {
image.onload = () => resolve();
image.src = "resources/safari-alpha.png";
await imageLoadPromise;
const textureSize = {
height: image.height,
const textureDescriptor = {
format: "rgba8unorm",
usage: GPUTextureUsage.TRANSFER_DST | GPUTextureUsage.SAMPLED
const texture = device.createTexture(textureDescriptor);
const canvas2d = document.createElement('canvas');
canvas2d.width = image.width;
canvas2d.height = image.height;
const context2d = canvas2d.getContext('2d');
context2d.drawImage(image, 0, 0);
const imageData = context2d.getImageData(0, 0, image.width, image.height);
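// The 2D canvas round trip above yields the decoded PNG as tightly packed
// RGBA bytes (4 per pixel), ready to be copied into the rgba8unorm texture.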
const textureDataBufferDescriptor = {
size: imageData.data.length,
usage: GPUBufferUsage.TRANSFER_SRC
const [textureDataBuffer, textureArrayBuffer] = device.createBufferMapped(textureDataBufferDescriptor);
const textureWriteArray = new Uint8Array(textureArrayBuffer);
textureWriteArray.set(imageData.data);
textureDataBuffer.unmap();
const dataCopyView = {
buffer: textureDataBuffer,
rowPitch: image.width * 4,
const textureCopyView = {
origin: { x: 0, y: 0, z: 0 }
const blitCommandEncoder = device.createCommandEncoder();
blitCommandEncoder.copyBufferToTexture(dataCopyView, textureCopyView, textureSize);
queue = device.getQueue();
queue.submit([blitCommandEncoder.finish()]);
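// Submitting the blit encoder performs the buffer-to-texture copy ahead of the
// render command buffers submitted later on the same queue, so the texture is
// populated before the first frame samples it.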
// Bind group layout bindings
const transformBufferBindGroupLayoutBinding = {
binding: transformBindingNum, // register(b0) in the vertex shader.
visibility: GPUShaderStageBit.VERTEX,
type: "uniform-buffer"
const textureBindGroupLayoutBinding = {
binding: textureBindingNum,
visibility: GPUShaderStageBit.FRAGMENT,
type: "sampled-texture"
textureViewBinding = {
binding: textureBindingNum,
resource: texture.createDefaultView()
const samplerBindGroupLayoutBinding = {
binding: samplerBindingNum,
visibility: GPUShaderStageBit.FRAGMENT,
binding: samplerBindingNum,
resource: device.createSampler({})
const bindGroupLayoutDescriptor = {
bindings: [transformBufferBindGroupLayoutBinding, textureBindGroupLayoutBinding, samplerBindGroupLayoutBinding]
bindGroupLayout = device.createBindGroupLayout(bindGroupLayoutDescriptor);
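// The layout declares one uniform buffer visible to the vertex stage and a
// sampled texture plus sampler visible to the fragment stage. The texture and
// sampler bindings are created once here and reused in every per-frame bind
// group; only the transform buffer binding changes.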
const depthStateDescriptor = {
depthWriteEnabled: true,
const pipelineLayoutDescriptor = { bindGroupLayouts: [bindGroupLayout] };
const pipelineLayout = device.createPipelineLayout(pipelineLayoutDescriptor);
const vertexStageDescriptor = {
module: shaderModule,
entryPoint: "vertex_main"
const fragmentStageDescriptor = {
module: shaderModule,
entryPoint: "fragment_main"
format: "bgra8unorm",
srcFactor: "src-alpha",
dstFactor: "one-minus-src-alpha",
srcFactor: "src-alpha",
dstFactor: "one-minus-src-alpha",
writeMask: GPUColorWriteBits.ALL
const pipelineDescriptor = {
layout: pipelineLayout,
vertexStage: vertexStageDescriptor,
fragmentStage: fragmentStageDescriptor,
primitiveTopology: "triangle-list",
colorStates: [colorState],
depthStencilState: depthStateDescriptor,
vertexInput: vertexInputDescriptor
pipeline = device.createRenderPipeline(pipelineDescriptor);
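// The pipeline ties together the WHLSL entry points, the interleaved vertex
// layout, alpha blending against a bgra8unorm target, and depth writes.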
let colorAttachment = {
// attachment is acquired in the render loop.
clearColor: { r: 0.05, g: 0.3, b: 0.6, a: 1.0 } // GPUColor
// Depth stencil texture
height: canvas.height,
const depthTextureDescriptor = {
format: "depth32float-stencil8",
usage: GPUTextureUsage.OUTPUT_ATTACHMENT
const depthTexture = device.createTexture(depthTextureDescriptor);
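// The depth texture is sized to the canvas and is only ever used as an output
// attachment of the render pass.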
// GPURenderPassDepthStencilAttachmentDescriptor
const depthAttachment = {
attachment: depthTexture.createDefaultView(),
depthLoadOp: "clear",
depthStoreOp: "store",
renderPassDescriptor = {
colorAttachments: [colorAttachment],
depthStencilAttachment: depthAttachment
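// The color attachment's texture view is filled in per frame from the swap
// chain (see drawCommands); the depth attachment is created once and cleared
// at the start of every pass.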
/* Transform Buffers and Bindings */
const transformSize = 4 * 16;
const transformBufferDescriptor = {
usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.MAP_WRITE
let mappedGroups = [];
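// mappedGroups is a pool of transform buffers and their bind groups. Each
// frame takes a buffer that is currently mapped for writing (creating a new
// one if none is free), writes the latest transform into it, and once the GPU
// has finished with it mapWriteAsync() returns it to the pool. This avoids
// overwriting a uniform buffer the GPU may still be reading.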
if (mappedGroups.length === 0) {
const [buffer, arrayBuffer] = device.createBufferMapped(transformBufferDescriptor);
const group = device.createBindGroup(createBindGroupDescriptor(buffer, textureViewBinding, samplerBinding));
let mappedGroup = { buffer: buffer, arrayBuffer: arrayBuffer, bindGroup: group };
drawCommands(mappedGroup);
drawCommands(mappedGroups.shift());
function createBindGroupDescriptor(transformBuffer, textureViewBinding, samplerBinding) {
const transformBufferBinding = {
buffer: transformBuffer,
const transformBufferBindGroupBinding = {
binding: transformBindingNum,
resource: transformBufferBinding
layout: bindGroupLayout,
bindings: [transformBufferBindGroupBinding, textureViewBinding, samplerBinding]
function drawCommands(mappedGroup) {
updateTransformArray(new Float32Array(mappedGroup.arrayBuffer));
mappedGroup.buffer.unmap();
const commandEncoder = device.createCommandEncoder();
renderPassDescriptor.colorAttachments[0].attachment = swapChain.getCurrentTexture().createDefaultView();
const passEncoder = commandEncoder.beginRenderPass(renderPassDescriptor);
// Encode drawing commands.
passEncoder.setPipeline(pipeline);
passEncoder.setVertexBuffers(0, [verticesBuffer], [0]);
passEncoder.setBindGroup(bindGroupIndex, mappedGroup.bindGroup);
passEncoder.draw(36, 1, 0, 0);
passEncoder.endPass();
queue.submit([commandEncoder.finish()]);
// Ready the current buffer for update after the GPU is done with it.
mappedGroup.buffer.mapWriteAsync().then((arrayBuffer) => {
mappedGroup.arrayBuffer = arrayBuffer;
mappedGroups.push(mappedGroup);
requestAnimationFrame(render);
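// updateTransformArray writes a fresh model-view-projection matrix into the
// mapped transform buffer: translate back 5 units, rotate by a fixed angle
// around a time-varying axis, then apply the perspective projection.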
function updateTransformArray(array) {
let viewMatrix = mat4.create();
mat4.translate(viewMatrix, viewMatrix, vec3.fromValues(0, 0, -5));
let now = Date.now() / 1000;
mat4.rotate(viewMatrix, viewMatrix, 1, vec3.fromValues(Math.sin(now), 1, 1));
let modelViewProjectionMatrix = mat4.create();
mat4.multiply(modelViewProjectionMatrix, projectionMatrix, viewMatrix);
mat4.copy(array, modelViewProjectionMatrix);
window.addEventListener("load", init);