Add WebGPU extension functional sample (#1121)

* Add WebGPU extension functional sample

* Fix lint issues

* Add warning support

* Update functional-samples/sample.webgpu/manifest.json

Co-authored-by: Joe Medley <jmedley@google.com>

* Update functional-samples/sample.webgpu/README.md

Co-authored-by: Joe Medley <jmedley@google.com>

---------

Co-authored-by: Joe Medley <jmedley@google.com>
This commit is contained in:
François Beaufort
2024-03-09 06:59:48 +01:00
committed by GitHub
parent bc9adaf8cb
commit 73e61f7f67
3 changed files with 73 additions and 0 deletions

View File

@@ -0,0 +1,17 @@
# WebGPU extension sample
This sample demonstrates how to use the [WebGPU API](https://webgpu.dev/) to generate a red triangle using an extension service worker.
> [!WARNING]
> WebGPU support in service workers is available in Chrome 123 when the "Experimental Web Platform Features" flag is enabled.
## Overview
In this sample, clicking the action button opens a red triangle image in a new tab.
## Running this extension
1. Clone this repository.
2. Load this directory in Chrome as an [unpacked extension](https://developer.chrome.com/docs/extensions/mv3/getstarted/development-basics/#load-unpacked).
3. Pin the extension from the extension menu.
4. Click the extension's action icon to open the red triangle in a new tab.

View File

@@ -0,0 +1,12 @@
{
"manifest_version": 3,
"name": "WebGPU Extension",
"description": "Generate a red triangle with WebGPU in an extension service worker.",
"version": "1.0",
"action": {
"default_title": "Click to see a red triangle"
},
"background": {
"service_worker": "service-worker.js"
}
}

View File

@@ -0,0 +1,44 @@
// Renders a red triangle with WebGPU on an OffscreenCanvas and opens the
// result as a data-URL image in a new tab when the extension's action icon
// is clicked.
chrome.action.onClicked.addListener(async () => {
  // requestAdapter() resolves to null when WebGPU is unavailable (e.g. the
  // "Experimental Web Platform Features" flag is off), which would make the
  // requestDevice() call below throw an unhelpful TypeError. Warn and bail
  // out instead. Optional chaining also covers navigator.gpu being absent.
  const adapter = await navigator.gpu?.requestAdapter();
  if (!adapter) {
    console.warn('WebGPU is not supported in this service worker.');
    return;
  }
  const device = await adapter.requestDevice();

  // Configure an offscreen canvas as the render target.
  const canvas = new OffscreenCanvas(256, 256);
  const context = canvas.getContext('webgpu');
  const format = navigator.gpu.getPreferredCanvasFormat();
  context.configure({ device, format });

  // WGSL: a hard-coded triangle in clip space, filled solid red.
  const code = `
  @vertex fn vertexMain(@builtin(vertex_index) i : u32) ->
    @builtin(position) vec4f {
    const pos = array(vec2f(0, 1), vec2f(-1, -1), vec2f(1, -1));
    return vec4f(pos[i], 0, 1);
  }
  @fragment fn fragmentMain() -> @location(0) vec4f {
    return vec4f(1, 0, 0, 1);
  }`;
  const module = device.createShaderModule({ code });
  const pipeline = await device.createRenderPipelineAsync({
    layout: 'auto',
    vertex: { module },
    fragment: { module, targets: [{ format }] }
  });

  // Record and submit a single render pass that draws the triangle.
  const commandEncoder = device.createCommandEncoder();
  const colorAttachments = [
    {
      view: context.getCurrentTexture().createView(),
      loadOp: 'clear',
      storeOp: 'store'
    }
  ];
  const passEncoder = commandEncoder.beginRenderPass({ colorAttachments });
  passEncoder.setPipeline(pipeline);
  passEncoder.draw(3);
  passEncoder.end();
  device.queue.submit([commandEncoder.finish()]);

  // Open canvas as an image in a new tab. The FileReader is wrapped in a
  // Promise so read errors reject instead of being silently dropped, and
  // the tab creation is awaited rather than left as a floating promise.
  const blob = await canvas.convertToBlob();
  const url = await new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.onload = () => resolve(reader.result);
    reader.onerror = () => reject(reader.error);
    reader.readAsDataURL(blob);
  });
  await chrome.tabs.create({ url });
});