├── .gitignore ├── bun.lockb ├── deploy.sh ├── favicon.svg ├── index.html ├── package.json ├── readme.md ├── samples ├── assets.ts ├── index.css ├── index.ts └── utils.ts ├── src ├── index.ts ├── style.css ├── utils │ ├── texture.ts │ ├── triangle_mesh.ts │ ├── type.ts │ └── utils.ts ├── vite-env.d.ts ├── webgpu │ ├── index.ts │ └── rect.ts └── wgsl │ ├── blur.wgsl │ ├── compute_blur.wgsl │ ├── copy.wgsl │ ├── noise.wgsl │ ├── vert.wgsl │ └── warp.wgsl ├── tsconfig.json └── vite.config.ts /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | pnpm-debug.log* 8 | lerna-debug.log* 9 | 10 | node_modules 11 | dist 12 | dist-ssr 13 | *.local 14 | 15 | # Editor directories and files 16 | .vscode/* 17 | !.vscode/extensions.json 18 | .idea 19 | .DS_Store 20 | *.suo 21 | *.ntvs* 22 | *.njsproj 23 | *.sln 24 | *.sw? -------------------------------------------------------------------------------- /bun.lockb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/quarksb/webgpu-image-filter/56214bf06169368aaa62c508ef2f2d999b8bc054/bun.lockb -------------------------------------------------------------------------------- /deploy.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | 3 | # 确保脚本抛出遇到的错误 4 | set -e 5 | 6 | # 生成静态文件 7 | pnpm build 8 | 9 | # 进入生成的文件夹 10 | cd dist 11 | 12 | git init 13 | git add -A 14 | git commit -m 'deploy' 15 | 16 | # 发布到 https://.github.io/ 17 | git push -f git@github.com:quarksb/image-cooker.git master:gh-pages 18 | 19 | cd - -------------------------------------------------------------------------------- /favicon.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 
-------------------------------------------------------------------------------- /index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 12 | 13 | 17 | webgpu-image-filter 18 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "webgpu-image-filter", 3 | "private": false, 4 | "version": "0.0.0", 5 | "type": "module", 6 | "scripts": { 7 | "dev": "bunx --bun vite", 8 | "build": "vite build --mode demo", 9 | "build:demo": "vite build --mode demo", 10 | "build:lib": "vite build --mode lib", 11 | "release": "pnpm build:lib && yarn publish", 12 | "deploy": "sh deploy.sh" 13 | }, 14 | "devDependencies": { 15 | "@tweakpane/core": "^2.0.5", 16 | "lru_map": "^0.4.1", 17 | "tweakpane": "^4.0.5", 18 | "vite": "^6.0.3", 19 | "vite-plugin-glsl": "1.0.1" 20 | }, 21 | "dependencies": { 22 | "@webgpu/types": "^0.1.51" 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /readme.md: -------------------------------------------------------------------------------- 1 | # WebGPU image filter 2 | 3 | [Preview online](https://quarksb.github.io/webgpu-image-filter/) 4 | This is a project to demonstrate the application of webgpu in image processing 5 | 6 | 7 | ## LightSpot 8 | - wgsl shader auto parsing 9 | - Free combination of filter chains 10 | 11 | ## To be realized 12 | - Support for custom filters 13 | - Unified resource management, using only a small amount of memory, copying and transferring data only when needed for updates 14 | 15 | ## How to use 16 | Todo 17 | 18 | 19 | 20 | ## Info 21 | If you want to see more webgpu related materials please check [awesome-webgpu](https://github.com/mikbry/awesome-webgpu) 22 | 23 | 24 | 
-------------------------------------------------------------------------------- /samples/assets.ts: -------------------------------------------------------------------------------- 1 | export const ImageUrls = [ 2 | 'https://st0.dancf.com/gaoding-material/0/images/223463/20191107-203725-leuLE.jpg', 3 | 'https://st0.dancf.com/gaoding-material/0/images/372765/20200428-184157-CC9C5C33-7193-54B7-8D32-FB0D309706DD.jpg', 4 | 'https://st0.dancf.com/gaoding-material/0/images/354320/20200108-213408-ADZNv.jpg', 5 | 'https://gd-filems.dancf.com/gaoding/gaoding/15004/88567b2b-9e8b-4e15-9347-03aef654e1d813604450.png', 6 | 'https://gd-filems.dancf.com/gaoding/gaoding/15004/0aef368c-7fb8-4d97-98db-abb053b69a9313604467.png', 7 | 'https://gd-filems-fat.my-static.dancf.com/saas/40eds5/43368/76512881-9bf9-4f95-9355-23cdee5f032d4184.png', 8 | 'https://gd-filems.dancf.com/saas/xi19e5/2269/f9b57bad-de39-4a10-ae93-93ab27428cab17679446.jpg', 9 | 'https://gd-filems.dancf.com/gaoding/gaoding/2269/d1275156-5a66-44a0-8109-4a639c77ac3d92.jpeg', 10 | 'https://gd-filems.dancf.com/gaoding/gaoding/2269/5b043889-248a-487d-82db-54f07f5d472a3294867.jpeg', 11 | ]; -------------------------------------------------------------------------------- /samples/index.css: -------------------------------------------------------------------------------- 1 | body { 2 | display: grid; 3 | background-color: aqua; 4 | } 5 | 6 | #canvas { 7 | /* transform: scale(0.2); */ 8 | scale: 1; 9 | max-width: 70vw; 10 | max-height: 95vh; 11 | } 12 | -------------------------------------------------------------------------------- /samples/index.ts: -------------------------------------------------------------------------------- 1 | import { BasicRenderer } from "../src/webgpu"; 2 | import { Pane } from "tweakpane"; 3 | import { uploadFile, download, getImageBitmap } from "./utils"; 4 | import "./index.css"; 5 | import { getGpuDevice } from "../src/utils/utils"; 6 | import { ImageUrls } from "./assets"; 7 | import { 
NoiseFilterParam, WarpFilterParam, BlurFilterParam, FilterParam } from "../src/utils/type"; 8 | import { BindingApi } from "@tweakpane/core"; 9 | 10 | const basicCanvas = document.getElementById("canvas")!; 11 | const w = 1200; 12 | const h = 675; 13 | 14 | basicCanvas.width = w; 15 | basicCanvas.height = h; 16 | 17 | const PARAMS = { 18 | blur: 10, 19 | warp: 1, 20 | seed: 0, 21 | noise: 40, 22 | granularity: 10, 23 | shadow: 10, 24 | center: { x: 0, y: 0 }, 25 | backgroundColor: "#88ddff", 26 | imageIndex: 0, 27 | }; 28 | 29 | const pane = new Pane(); 30 | const bacInputs: BindingApi[] = []; 31 | const f1 = pane.addFolder({ title: "background" }); 32 | bacInputs.push(f1.addBinding(PARAMS, "backgroundColor", { view: "color" })); 33 | const baseInputs: BindingApi[] = []; 34 | 35 | const f2 = pane.addFolder({ title: "Ablation" }); 36 | const f3 = pane.addFolder({ title: "Blur" }); 37 | const f4 = pane.addFolder({ title: "Twist" }); 38 | baseInputs.push(f2.addBinding(PARAMS, "noise", { label: "strength", min: 0, max: 100 })); 39 | baseInputs.push(f2.addBinding(PARAMS, "granularity", { label: "scale", min: 0, max: 100 })); 40 | baseInputs.push(f2.addBinding(PARAMS, "seed", { label: "seed", min: 0, max: 1 })); 41 | baseInputs.push(f3.addBinding(PARAMS, "blur", { label: "strength", min: 0, max: 300 })); 42 | baseInputs.push(f4.addBinding(PARAMS, "warp", { label: "strength", min: -100, max: 100 })); 43 | baseInputs.push( 44 | f4.addBinding(PARAMS, "center", { 45 | label: "center", 46 | picker: "inline", 47 | expanded: true, 48 | x: { step: 1, min: -100, max: 100 }, 49 | y: { step: 1, min: -100, max: 100 }, 50 | }) 51 | ); 52 | 53 | const imageInputs: BindingApi[] = []; 54 | imageInputs.push(pane.addBinding(PARAMS, "imageIndex", { label: "image", min: 0, max: ImageUrls.length - 1, step: 1 })); 55 | const button1 = pane.addButton({ title: "upload image" }); 56 | const button2 = pane.addButton({ title: "download image" }); 57 | 58 | const ctx = 
basicCanvas.getContext("2d")!; 59 | let imgBitmap: ImageBitmap; 60 | 61 | let url = ImageUrls[PARAMS.imageIndex]; 62 | const input: HTMLInputElement = document.createElement("input"); 63 | input.type = "file"; 64 | input.accept = "image/png"; 65 | input.style.display = "none"; 66 | 67 | input.addEventListener("change", async () => { 68 | const files = input.files; 69 | if (files && files.length) { 70 | const { url: currentUrl } = await uploadFile(files[0]); 71 | url = currentUrl; 72 | deepRender(); 73 | } 74 | }); 75 | button1.on("click", () => { 76 | input.click(); 77 | }); 78 | button2.on("click", () => { 79 | download(basicCanvas); 80 | }); 81 | deepRender(); 82 | 83 | let device; 84 | let renderer; 85 | 86 | async function render() { 87 | if (!renderer) { 88 | device = (await getGpuDevice()).device; 89 | renderer = new BasicRenderer(device); 90 | } 91 | 92 | const { width, height } = imgBitmap; 93 | basicCanvas.width = width; 94 | basicCanvas.height = height; 95 | 96 | const noiseParam: NoiseFilterParam = { 97 | filterType: "noise", 98 | enable: true, 99 | properties: [ 100 | { key: "intensity", value: 100 - PARAMS.noise }, 101 | { key: "seed", value: PARAMS.seed }, 102 | { key: "granularity", value: PARAMS.granularity }, 103 | ], 104 | }; 105 | 106 | const warpParam: WarpFilterParam = { 107 | filterType: "warp", 108 | enable: true, 109 | properties: [ 110 | { key: "intensity", value: PARAMS.warp }, 111 | { key: "center", value: [PARAMS.center.x * 0.01 + 0.5, PARAMS.center.y * 0.01 + 0.5] }, 112 | ], 113 | }; 114 | 115 | const blurParam: BlurFilterParam = { 116 | filterType: "blur", 117 | enable: true, 118 | properties: [{ key: "intensity", value: PARAMS.blur }], 119 | }; 120 | 121 | const dataArray: FilterParam[] = [noiseParam, warpParam, blurParam]; 122 | 123 | console.time("render"); 124 | const outCanvas = renderer.render(imgBitmap, dataArray, url); 125 | console.timeEnd("render"); 126 | 127 | // copyImage(imgBitmap); 128 | ctx.clearRect(0, 0, width, 
height); 129 | ctx.drawImage(outCanvas, 0, 0); 130 | } 131 | 132 | async function deepRender() { 133 | url = ImageUrls[PARAMS.imageIndex]; 134 | imgBitmap = await getImageBitmap(url); 135 | render(); 136 | } 137 | const body = document.querySelector("body")!; 138 | bacInputs.forEach((input) => { 139 | input.on("change", () => { 140 | body.style.backgroundColor = PARAMS.backgroundColor; 141 | }); 142 | }); 143 | 144 | baseInputs.forEach((input) => { 145 | input.on("change", () => { 146 | render(); 147 | }); 148 | }); 149 | 150 | imageInputs.forEach((input) => { 151 | input.on("change", () => { 152 | deepRender(); 153 | }); 154 | }); 155 | -------------------------------------------------------------------------------- /samples/utils.ts: -------------------------------------------------------------------------------- 1 | import { LRUMap } from "lru_map"; 2 | 3 | const array = [ 4 | "自强不息", 5 | "厚德载物", 6 | "精益求精", 7 | "上善若水" 8 | ] 9 | 10 | export async function download(canvas: HTMLCanvasElement) { 11 | if (!canvas) return; 12 | const link = document.createElement("a"); 13 | canvas.toBlob((blob) => { 14 | const url = URL.createObjectURL(blob!); 15 | link.download = `夸克${array[Math.floor(Math.random() * array.length)]}.png`; 16 | link.href = url; 17 | link.click(); 18 | }); 19 | } 20 | 21 | export function uploadFile(file): Promise<{ url: string }> { 22 | return new Promise((resolve) => { 23 | // 使用 FileReader 读取文件对象 24 | const reader = new FileReader(); 25 | // 把文件对象作为一个 dataURL 读入 26 | reader.readAsDataURL(file); 27 | reader.onload = (event: ProgressEvent) => { 28 | const url = event?.target?.result as string; 29 | resolve({ url }); 30 | }; 31 | }); 32 | } 33 | 34 | const ImageBitmapCache: LRUMap = new LRUMap(100); 35 | 36 | export async function getImageBitmap(url: string): Promise { 37 | const image = new Image(); 38 | image.crossOrigin = ''; 39 | image.src = url; 40 | return new Promise(resolve => { 41 | image.onload = () => { 42 | let imageBitmap = 
ImageBitmapCache.get(url); 43 | if (!imageBitmap) { 44 | createImageBitmap(image).then(bitmap => { 45 | imageBitmap = bitmap; 46 | ImageBitmapCache.set(url, imageBitmap); 47 | resolve(imageBitmap); 48 | }).catch(err => { 49 | console.error(err); 50 | }); 51 | } 52 | else { 53 | resolve(imageBitmap); 54 | } 55 | }; 56 | }, 57 | ); 58 | } 59 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | export * from './webgpu'; -------------------------------------------------------------------------------- /src/style.css: -------------------------------------------------------------------------------- 1 | #app { 2 | font-family: Avenir, Helvetica, Arial, sans-serif; 3 | -webkit-font-smoothing: antialiased; 4 | -moz-osx-font-smoothing: grayscale; 5 | text-align: center; 6 | color: #2c3e50; 7 | margin-top: 60px; 8 | } 9 | -------------------------------------------------------------------------------- /src/utils/texture.ts: -------------------------------------------------------------------------------- 1 | export function getTextureSize(source: GPUCopyExternalImageSourceInfo["source"]) { 2 | let width = 1; 3 | let height = 1; 4 | 5 | if (source instanceof HTMLCanvasElement || source instanceof ImageBitmap || source instanceof OffscreenCanvas || source instanceof HTMLImageElement) { 6 | width = source.width; 7 | height = source.height; 8 | } else if (source instanceof HTMLVideoElement) { 9 | width = source.videoWidth; 10 | height = source.videoHeight; 11 | } else { 12 | throw new Error("Invalid source type"); 13 | } 14 | 15 | return { width, height }; 16 | } 17 | 18 | // Defining this as a separate function because we'll be re-using it a lot. 
19 | export function webGPUTextureFromImageBitmapOrCanvas(gpuDevice: GPUDevice, source: GPUCopyExternalImageSourceInfo["source"]) { 20 | const size = getTextureSize(source); 21 | 22 | const textureDescriptor: GPUTextureDescriptor = { 23 | // Unlike in WebGL, the size of our texture must be set at texture creation time. 24 | // This means we have to wait until the image is loaded to create the texture, since we won't 25 | // know the size until then. 26 | label: 'Texture from image', 27 | size, 28 | format: 'rgba8unorm', 29 | usage: GPUTextureUsage.TEXTURE_BINDING | GPUTextureUsage.COPY_DST 30 | }; 31 | const texture = gpuDevice.createTexture(textureDescriptor); 32 | 33 | gpuDevice.queue.copyExternalImageToTexture({ source }, { texture }, textureDescriptor.size); 34 | 35 | return texture; 36 | } 37 | 38 | // Note that this is an async function 39 | export async function webGPUTextureFromImageUrl(gpuDevice: GPUDevice, url: string) { 40 | const response = await fetch(url); 41 | const blob = await response.blob(); 42 | const imgBitmap = await createImageBitmap(blob); 43 | 44 | return webGPUTextureFromImageBitmapOrCanvas(gpuDevice, imgBitmap); 45 | } -------------------------------------------------------------------------------- /src/utils/triangle_mesh.ts: -------------------------------------------------------------------------------- 1 | import { getBuffer } from "./utils"; 2 | 3 | export class TriangleMesh { 4 | 5 | buffer: GPUBuffer 6 | count: number; 7 | 8 | constructor(device: GPUDevice) { 9 | const vertices: Float32Array = new Float32Array( 10 | // posx posy texcordx texcordy 11 | [ 12 | 0.0, 3.0, 0.5, -1.0, 13 | -2.0, -1.0, -0.5, 1.0, 14 | 2.0, -1.0, 1.5, 1.0 15 | ] 16 | ); 17 | 18 | const usage: GPUBufferUsageFlags = GPUBufferUsage.VERTEX | GPUBufferUsage.COPY_DST; 19 | // VERTEX: the buffer can be used as a vertex buffer 20 | // COPY_DST: data can be copied to the buffer 21 | 22 | this.buffer = getBuffer(device, vertices, usage); 23 | this.count = 3; 24 | } 
25 | } -------------------------------------------------------------------------------- /src/utils/type.ts: -------------------------------------------------------------------------------- 1 | export enum Name { 2 | sigma = 'sigma', 3 | canvasSize = 'canvasSize', 4 | direction = 'direction', 5 | texture = "img" 6 | } 7 | 8 | export interface Blur { 9 | direction: Vec2 10 | } 11 | 12 | export interface TexTure { 13 | textureSize: Vec2, 14 | } 15 | 16 | export type Vec2 = [number, number]; 17 | export type Vec3 = [number, number, number]; 18 | 19 | // todo external GPUTexture 20 | export type BindingData = { type: 'buffer', buffer: GPUBuffer } | { type: 'sampler', sampler: GPUSampler } | { type: 'texture', texture: GPUTexture }; 21 | export interface BindingInfo { 22 | groupIndex: number; 23 | bindingIndex: number; 24 | bindingData: BindingData; 25 | } 26 | export type BindingInfos = BindingInfo[]; 27 | 28 | export type BindingType = 'buffer' | 'sampler' | 'texture' | 'storageTexture'; 29 | export interface BindingTypeInfo { 30 | groupIndex: number; 31 | bindingIndex: number; 32 | bindingType: BindingType; 33 | name: string; 34 | visibility: number; 35 | viewDimension: GPUTextureViewDimension; 36 | format: GPUTextureFormat; 37 | } 38 | export type BindingTypeInfos = BindingTypeInfo[]; 39 | 40 | type BaseData = number | number[]; 41 | interface Property { 42 | key: string; 43 | value: BaseData; 44 | } 45 | 46 | export interface FilterParam { 47 | filterType: string; 48 | enable?: boolean; 49 | code?: string 50 | properties: Property[]; 51 | } 52 | 53 | export interface NoiseFilterParam extends FilterParam { 54 | filterType: 'noise'; 55 | properties: [ 56 | { key: 'intensity', value: number }, 57 | { key: 'seed', value: number }, 58 | { key: 'granularity', value: number } 59 | ]; 60 | } 61 | 62 | export interface Point { 63 | x: number; 64 | y: number; 65 | } 66 | 67 | export interface WarpFilterParam extends FilterParam { 68 | filterType: 'warp'; 69 | properties: [ 70 
| { key: 'intensity', value: number }, 71 | { key: 'center', value: number[] }, 72 | ]; 73 | } 74 | export interface BlurFilterParam extends FilterParam { 75 | filterType: 'blur'; 76 | properties: [ 77 | { key: 'intensity', value: number }, 78 | ]; 79 | } 80 | export interface GPUBindGroupLayoutEntryInfo extends GPUBindGroupLayoutEntry { 81 | bindingType: BindingType; 82 | name: string; 83 | } 84 | 85 | export interface GroupInfo { 86 | groupIndex: number; 87 | groupLayoutDescriptor: { 88 | entries: GPUBindGroupLayoutEntryInfo[] 89 | } 90 | } 91 | export interface pipelineData { 92 | groupInfos: GroupInfo[], 93 | pipeline: GPURenderPipeline 94 | } 95 | 96 | export type CommonArray = Float32Array | Uint32Array; 97 | 98 | 99 | 100 | -------------------------------------------------------------------------------- /src/utils/utils.ts: -------------------------------------------------------------------------------- 1 | import type { BindingType, BindingTypeInfos, GPUBindGroupLayoutEntryInfo, GroupInfo, pipelineData } from "./type"; 2 | 3 | let adapter: GPUAdapter; 4 | let device: GPUDevice; 5 | export async function getGpuDevice() { 6 | if (device) { 7 | return { adapter, device }; 8 | } else { 9 | try { 10 | adapter = (await navigator.gpu.requestAdapter())!; 11 | device = (await adapter!.requestDevice())!; 12 | } catch (e) { 13 | alert("your browser don‘t support webgpu\n你的浏览器不支持 webgpu, 请使用 chrome 113+ 或者 edge 113+"); 14 | open("https://www.google.com/chrome/"); 15 | } 16 | return { adapter, device }; 17 | } 18 | } 19 | 20 | export const DefaultFormat: GPUTextureFormat = "bgra8unorm"; 21 | 22 | export function getBuffer(device: GPUDevice, arr: Float32Array | Uint32Array, usage = GPUBufferUsage.STORAGE) { 23 | const desc = { 24 | size: Math.max(Math.ceil(arr.byteLength / 4) * 4, 16), 25 | usage, 26 | mappedAtCreation: true, 27 | }; 28 | const buffer = device.createBuffer(desc); 29 | const mappedRange = buffer.getMappedRange(); 30 | const writeArray = arr instanceof 
Uint32Array ? new Uint32Array(mappedRange) : new Float32Array(mappedRange); 31 | writeArray.set(arr); 32 | buffer.unmap(); 33 | return buffer; 34 | } 35 | 36 | export function getUniformBuffer(device: GPUDevice, type = "float", value = 0, usage = GPUBufferUsage.UNIFORM, size = 4) { 37 | const buffer = device.createBuffer({ size, mappedAtCreation: true, usage }); 38 | const mappedRange = buffer.getMappedRange(); 39 | switch (type) { 40 | case "uint": 41 | new Uint32Array(mappedRange)[0] = value; 42 | break; 43 | case "int": 44 | new Int32Array(mappedRange)[0] = value; 45 | break; 46 | default: 47 | new Float32Array(mappedRange)[0] = value; 48 | } 49 | buffer.unmap(); 50 | return buffer; 51 | } 52 | 53 | export function getTexture( 54 | device: GPUDevice, 55 | { 56 | width = 1, 57 | height = 1, 58 | format, 59 | usage = GPUTextureUsage.COPY_DST | GPUTextureUsage.STORAGE_BINDING | GPUTextureUsage.TEXTURE_BINDING | GPUTextureUsage.RENDER_ATTACHMENT, 60 | }: { 61 | width?: number; 62 | height?: number; 63 | format: GPUTextureFormat; 64 | usage?: GPUTextureDescriptor["usage"]; 65 | } 66 | ): GPUTexture { 67 | return device.createTexture({ 68 | label: `w:${Math.ceil(width)} x h:${Math.ceil(height)}`, 69 | size: { width, height }, 70 | format, 71 | usage, 72 | }); 73 | } 74 | 75 | export function getOffTexture(device: GPUDevice, { width, height, format }: { width: number; height: number; format: GPUTextureFormat }): GPUTexture { 76 | const usage = GPUTextureUsage.COPY_SRC | GPUTextureUsage.COPY_DST | GPUTextureUsage.TEXTURE_BINDING | GPUTextureUsage.RENDER_ATTACHMENT; 77 | return getTexture(device, { usage, width, height, format }); 78 | } 79 | 80 | export function getRenderPassEncoder(commandEncoder: GPUCommandEncoder, view: GPUTextureView): GPURenderPassEncoder { 81 | return commandEncoder.beginRenderPass({ 82 | colorAttachments: [ 83 | { 84 | view, 85 | loadOp: "clear", 86 | storeOp: "store", 87 | }, 88 | ], 89 | })!; 90 | } 91 | 92 | export function getCanvas(width: 
number, height: number): OffscreenCanvas; 93 | export function getCanvas(width: number, height: number, isOnScreen: true): HTMLCanvasElement; 94 | export function getCanvas(width: number = 1, height: number = 1, isOnScreen?: boolean): HTMLCanvasElement | OffscreenCanvas { 95 | // @ts-ignore 96 | if (!isOnScreen && window.OffscreenCanvas) { 97 | // @ts-ignore 98 | const canvas = new OffscreenCanvas(width, height); 99 | return canvas; 100 | } else { 101 | const canvas = document.createElement("canvas"); 102 | canvas.width = width; 103 | canvas.height = height; 104 | return canvas; 105 | } 106 | } 107 | 108 | export function initCode(code: string, device: GPUDevice, stage = GPUShaderStage.VERTEX | GPUShaderStage.FRAGMENT): pipelineData { 109 | const { vertexEntryPoint, fragmentEntryPoint, bindingTypeInfos } = parseWGSL(code); 110 | const groupInfos = getGroupInfos(bindingTypeInfos); 111 | 112 | // const bindGroupLayoutMap = new Map(); 113 | groupInfos.forEach(({ groupLayoutDescriptor }) => { 114 | const entries: GPUBindGroupLayoutEntry[] = []; 115 | for (let entry of groupLayoutDescriptor.entries) { 116 | const { bindingType, binding, visibility } = entry; 117 | let entryFilled: GPUBindGroupLayoutEntry = { 118 | binding, 119 | visibility, 120 | [`${bindingType}`]: {}, 121 | }; 122 | entries.push(entryFilled); 123 | } 124 | // const bindGroupLayout = device.createBindGroupLayout({ entries }); 125 | // bindGroupLayoutMap.set(groupIndex, bindGroupLayout); 126 | }); 127 | 128 | // todo analyzing from code 129 | const bufferLayout: GPUVertexBufferLayout = { 130 | arrayStride: 4 * 4, 131 | attributes: [ 132 | { 133 | shaderLocation: 0, 134 | format: "float32x2", 135 | offset: 0, 136 | }, 137 | { 138 | shaderLocation: 1, 139 | format: "float32x2", 140 | offset: 4 * 2, 141 | }, 142 | ], 143 | }; 144 | 145 | // const bindGroupLayouts = bindGroupLayoutMap.values(); 146 | // const pipelineLayout = device.createPipelineLayout({ bindGroupLayouts }); 147 | const module = 
device.createShaderModule({ code }); 148 | const descriptor: GPURenderPipelineDescriptor = { 149 | // layout: pipelineLayout, 150 | layout: "auto", 151 | vertex: { 152 | module, 153 | entryPoint: vertexEntryPoint, 154 | buffers: [bufferLayout], 155 | }, 156 | fragment: { 157 | module, 158 | entryPoint: fragmentEntryPoint, 159 | targets: [{ format: DefaultFormat }], 160 | }, 161 | primitive: { 162 | topology: "triangle-list", 163 | frontFace: "ccw", // ccw(counter clock wise 逆时针) or cw (clock wise 顺时针) 164 | cullMode: "back", // none or front or back 165 | }, 166 | }; 167 | const pipeline = device.createRenderPipeline(descriptor); 168 | 169 | return { groupInfos, pipeline }; 170 | } 171 | 172 | export function getSampler(device: GPUDevice, { magFilter = "linear", minFilter = "linear" }: GPUSamplerDescriptor) { 173 | return device.createSampler({ magFilter, minFilter }); 174 | } 175 | 176 | export function getGroupInfos(bindingTypeInfos: BindingTypeInfos) { 177 | let groupInfos: GroupInfo[] = []; 178 | bindingTypeInfos.forEach(({ groupIndex, bindingIndex, bindingType, name }) => { 179 | let groupInfo = groupInfos.find((groupInfo) => groupInfo.groupIndex === groupIndex); 180 | if (typeof groupInfo === "undefined") { 181 | groupInfo = { 182 | groupIndex, 183 | groupLayoutDescriptor: { 184 | entries: [], 185 | }, 186 | }; 187 | groupInfos.push(groupInfo); 188 | } 189 | 190 | (groupInfo.groupLayoutDescriptor.entries as GPUBindGroupLayoutEntryInfo[]).push({ 191 | binding: bindingIndex, 192 | // todo 自动根据 code 分析出 visibility 193 | visibility: GPUShaderStage.VERTEX | GPUShaderStage.FRAGMENT, 194 | bindingType, 195 | name, 196 | }); 197 | }); 198 | return groupInfos; 199 | } 200 | 201 | export function parseWGSL(code: string) { 202 | // todo 正则表达式不完善,没有 cover 所有场景 203 | const computeEntryData = code.matchAll(/@compute\s*@workgroup_size\(\s?([0-9]*),\s?([0-9]*)\s?,\s?([0-9]*)\s?\)\s*fn\s*(\w+)\(/g).next().value; 204 | 205 | let vertexEntryPoint = ""; 206 | let 
fragmentEntryPoint = ""; 207 | let computeEntryPoint = ""; 208 | let visibility: number = 0; 209 | let workgroupSize: number[] = []; 210 | 211 | if (!computeEntryData) { 212 | const vertexEntryData = code.matchAll(/@vertex\s*fn (\w+)\(/g).next().value; 213 | 214 | if (!vertexEntryData) { 215 | console.error("no vertex entry point"); 216 | } else { 217 | vertexEntryPoint = vertexEntryData[1]; 218 | visibility |= GPUShaderStage.VERTEX; 219 | } 220 | 221 | const fragmentEntryData = code.matchAll(/@fragment\s*fn (\w+)\(/g).next().value; 222 | 223 | if (!fragmentEntryData) { 224 | console.error("no fragment entry point"); 225 | } else { 226 | fragmentEntryPoint = fragmentEntryData[1]; 227 | visibility |= GPUShaderStage.FRAGMENT; 228 | } 229 | } else { 230 | const getInt = (i: number) => parseInt(computeEntryData[i]); 231 | workgroupSize = [getInt(1), getInt(2), getInt(3)]; 232 | computeEntryPoint = computeEntryData[4]; 233 | visibility |= GPUShaderStage.COMPUTE; 234 | } 235 | 236 | const datas = code.matchAll(/@group\(([0-9])\)\s+@binding\(([0-9])\)\s+var(<\w+\s*(,\s*\w+\s*)*>)?\s+(\w+)\s*:\s*(\w+)(<\s*(\w+)(\s*,\s*\w+)?>)?;/g); 237 | 238 | const bindingTypeInfos: BindingTypeInfos = []; 239 | for (let data of datas) { 240 | const groupIndex = parseInt(data[1]); 241 | const bindingIndex = parseInt(data[2]); 242 | const isUniform = !!data[3]; 243 | const name = data[5]; 244 | const type = data[6]; 245 | 246 | // console.log("isUniform:", isUniform); 247 | // console.log(" name:", name); 248 | // console.log(" type:", type); 249 | 250 | let bindingType: BindingType | undefined; 251 | let viewDimension: GPUTextureViewDimension = "2d"; 252 | let textureFormat: GPUTextureFormat = DefaultFormat; 253 | if (type === "sampler") { 254 | bindingType = "sampler"; 255 | } else if (type.includes("texture")) { 256 | const arr = type.split("_"); 257 | if (arr[1] === "storage") { 258 | bindingType = "storageTexture"; 259 | } else { 260 | bindingType = "texture"; 261 | } 262 | 
viewDimension = arr[arr.length - 1] as GPUTextureViewDimension; 263 | textureFormat = data[8] as GPUTextureFormat; 264 | } else if (isUniform) { 265 | bindingType = "buffer"; 266 | } else { 267 | console.error(`can't analyze @group(${groupIndex} @binding(${bindingIndex}) in your wgsl`); 268 | console.error("your wgsl: ", code); 269 | } 270 | if (bindingType) { 271 | bindingTypeInfos.push({ 272 | groupIndex, 273 | bindingIndex, 274 | bindingType, 275 | name, 276 | visibility, 277 | viewDimension, 278 | format: textureFormat, 279 | }); 280 | } 281 | } 282 | return { 283 | computeEntryPoint, 284 | vertexEntryPoint, 285 | fragmentEntryPoint, 286 | bindingTypeInfos, 287 | workgroupSize, 288 | }; 289 | } 290 | -------------------------------------------------------------------------------- /src/vite-env.d.ts: -------------------------------------------------------------------------------- 1 | /// 2 | declare module '*.wgsl' { 3 | const content: string; 4 | export default content; 5 | } -------------------------------------------------------------------------------- /src/webgpu/index.ts: -------------------------------------------------------------------------------- 1 | import { TriangleMesh } from "../utils/triangle_mesh"; 2 | import { getTexture, getRenderPassEncoder, getCanvas, getOffTexture, getBuffer, getSampler, DefaultFormat as format, initCode } from "../utils/utils"; 3 | import noiseCode from "../wgsl/noise.wgsl"; 4 | import warpCode from "../wgsl/warp.wgsl"; 5 | import copyCode from "../wgsl/copy.wgsl"; 6 | import blurCode from "../wgsl/blur.wgsl"; 7 | import type { BlurFilterParam, CommonArray, FilterParam, GroupInfo, pipelineData } from "../utils/type"; 8 | import { getTextureSize } from "../utils/texture"; 9 | 10 | interface CommandData { 11 | commandEncoder: GPUCommandEncoder; 12 | pipelineData: pipelineData; 13 | targetTexture: GPUTexture; 14 | inputTexture: GPUTexture; 15 | index?: number; 16 | } 17 | 18 | interface VertexData { 19 | count: number; 20 | 
buffer: GPUBuffer;
}

/**
 * GPU-side image filter chain runner.
 *
 * Owns a WebGPU canvas, the uploaded source texture and a pair of ping-pong
 * offscreen textures. Each filter records one (or more) passes into a shared
 * command encoder; `render` drives the chain and blits the result to the canvas.
 *
 * NOTE(review): every generic type argument in this file was stripped by the
 * copy that produced this source (angle-bracket spans removed). The type
 * arguments below are reconstructed from the usages in this class — confirm
 * against project history.
 */
export class BasicRenderer {
    /** Cache key of the currently loaded source image; guards re-upload in `load`. */
    cacheKey: string | undefined;
    /** Output canvas, resized to the source image in `load`. */
    canvas = getCanvas(1, 1);
    ctx = this.canvas.getContext("webgpu")!;
    device: GPUDevice;
    width = 1;
    height = 1;
    /** Texture holding the uploaded source image. */
    inputTexture: GPUTexture | undefined;
    /** Ping-pong offscreen targets for chained filter passes. */
    offTexture0: GPUTexture | undefined;
    offTexture1: GPUTexture | undefined;
    /** Named GPU resources consumed by `getBindGroups` (sampler, views, buffers, vertex data). */
    resourceMap: Map<string, GPUBindingResource | GPUBindingResource[] | VertexData>;
    /** Compiled pipeline cache, keyed by filter type. */
    pipelineDataMap: Map<string, CommandData["pipelineData"]> = new Map();
    /** WGSL source cache, keyed by filter type. */
    filterCodeMap: Map<string, string> = new Map();
    /** -1: next pass reads `inputTexture`; 0/1: index of the off texture written last. */
    activeIndex = -1;

    constructor(device: GPUDevice) {
        this.device = device;

        this.resourceMap = new Map();
        const triangleMesh: TriangleMesh = new TriangleMesh(this.device);
        this.resourceMap.set("vertex", { buffer: triangleMesh.buffer, count: triangleMesh.count });
        this.resourceMap.set("mySampler", getSampler(device, {}));
        this.filterCodeMap.set("noise", noiseCode);
        this.filterCodeMap.set("warp", warpCode);
        this.filterCodeMap.set("copy", copyCode);
        this.filterCodeMap.set("blur", blurCode);
        // Horizontal / vertical pass directions for the separable blur (index 0 / 1).
        this.updateBuffer("direction", [new Float32Array([1, 0]), new Float32Array([0, 1])]);

        const config: GPUCanvasConfiguration = {
            device,
            alphaMode: "premultiplied",
            format,
        };
        this.ctx.configure(config);
    }

    /**
     * Uploads `sourceImage` into a GPU texture and (re)allocates the ping-pong
     * offscreen targets. Skipped when `cacheKey` matches the previous call.
     */
    load(sourceImage: GPUImageCopyExternalImage["source"], cacheKey?: string) {
        if (cacheKey !== this.cacheKey || !cacheKey) {
            this.cacheKey = cacheKey;
            // TODO: texture size is over the largest texture size
            const { width, height } = getTextureSize(sourceImage);
            const usage = GPUTextureUsage.COPY_DST | GPUTextureUsage.TEXTURE_BINDING | GPUTextureUsage.RENDER_ATTACHMENT;
            const texture = getTexture(this.device, { width, height, format, usage });
            this.device.queue.copyExternalImageToTexture({ source: sourceImage }, { texture }, { width, height });

            this.canvas.width = width;
            this.canvas.height = height;
            this.width = width;
            this.height = height;
            this.inputTexture = texture;
            this.resourceMap.set("myTexture", texture.createView());

            // TODO: should replace with smarter texture manager
            // NOTE(review): previous textures are not destroy()ed here — possible leak on reload.
            this.offTexture0 = getOffTexture(this.device, { width, height, format });
            this.offTexture1 = getOffTexture(this.device, { width, height, format });
        }
        this.activeIndex = -1;
    }

    /** Flips the ping-pong index and returns the texture NOT written by the last pass. */
    getOutTexture() {
        if (this.activeIndex === 1) {
            this.activeIndex = 0;
            return this.offTexture0!;
        } else {
            this.activeIndex = 1;
            return this.offTexture1!;
        }
    }

    // TODO: should replace with smarter texture manager
    /**
     * Returns the (read, write) texture pair for the next pass and advances the
     * ping-pong index. On the very first pass the source image texture is read.
     */
    getTexture() {
        if (this.activeIndex === 0) {
            this.activeIndex = 1;
            return { inputTexture: this.offTexture0!, targetTexture: this.offTexture1! };
        } else if (this.activeIndex === 1) {
            this.activeIndex = 0;
            return { inputTexture: this.offTexture1!, targetTexture: this.offTexture0! };
        } else {
            this.activeIndex = 0;
            return { inputTexture: this.inputTexture!, targetTexture: this.offTexture0! };
        }
    }

    /** Registers (or replaces) named binding resources used by the shaders. */
    updateResource(resources: { name: string; resource: GPUBindingResource }[]) {
        resources.forEach(({ name, resource }) => {
            this.resourceMap.set(name, resource);
        });
    }

    /**
     * Builds one bind group per group layout, resolving each entry by `name`
     * from `resourceMap`. When a name maps to an array of resources (e.g. the
     * two blur directions), `index` selects which one to bind.
     * NOTE(review): compute pipelines also flow through here; `getBindGroupLayout`
     * exists on both pipeline kinds, so the annotation is looser at runtime.
     */
    getBindGroups(groupInfos: GroupInfo[], pipeline: GPURenderPipeline, index = 0) {
        return groupInfos.map(({ groupIndex, groupLayoutDescriptor }) => {
            const entries: GPUBindGroupEntry[] = [];
            for (const { binding, name } of groupLayoutDescriptor.entries) {
                let resource = this.resourceMap.get(name)! as GPUBindingResource | GPUBindingResource[];

                if (!resource) {
                    console.error(`param “${name}” hasn't assigned value`);
                }

                if (resource instanceof Array) {
                    resource = resource[index];
                }

                entries.push({ binding, resource });
            }
            const groupDescriptor: GPUBindGroupDescriptor = {
                layout: pipeline.getBindGroupLayout(groupIndex)!,
                entries: entries,
            };

            const bindGroup = this.device.createBindGroup(groupDescriptor);
            return {
                groupIndex,
                bindGroup,
            };
        });
    }

    /**
     * Records one filter pass: binds `inputTexture` as "myTexture", builds the
     * bind groups, then encodes either a render pass (fullscreen mesh draw) or a
     * compute pass depending on the pipeline kind.
     */
    setCommandBuffer({ commandEncoder, pipelineData, targetTexture, inputTexture, index }: CommandData) {
        const { groupInfos, pipeline } = pipelineData;

        this.resourceMap.set("myTexture", inputTexture.createView());
        const bindGroups = this.getBindGroups(groupInfos, pipeline, index);

        if (pipeline instanceof GPURenderPipeline) {
            // FIX: previously the render pass was begun (and setPipeline called on it)
            // unconditionally, before this branch — even for compute pipelines — leaving
            // an unfinished render pass behind on the compute path. The render pass now
            // lives entirely inside the render branch.
            const passEncoder = getRenderPassEncoder(commandEncoder, targetTexture.createView());
            passEncoder.setPipeline(pipeline);
            bindGroups.forEach(({ groupIndex, bindGroup }) => passEncoder.setBindGroup(groupIndex, bindGroup));
            const vertexData = this.resourceMap.get("vertex") as VertexData;
            passEncoder.setVertexBuffer(0, vertexData.buffer);
            passEncoder.draw(vertexData.count);
            passEncoder.end();
        } else {
            const passEncoder: GPUComputePassEncoder = commandEncoder.beginComputePass();
            passEncoder.setPipeline(pipeline);
            bindGroups.forEach(({ groupIndex, bindGroup }) => passEncoder.setBindGroup(groupIndex, bindGroup));
            // NOTE(review): one workgroup per pixel — confirm this matches the compute
            // shader's @workgroup_size; it looks oversized for the tiled blur kernel.
            passEncoder.dispatchWorkgroups(this.canvas.width, this.canvas.height, 1);
            passEncoder.end();
        }
    }

    /**
     * Creates (or updates in place) the uniform buffer(s) stored under `key`.
     * Accepts a single array, or a list of arrays producing one buffer each —
     * addressed later by `index` in `getBindGroups` (used for blur directions).
     */
    updateBuffer(key: string, arr: CommonArray | CommonArray[], usage = GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST) {
        const bufferBinding = this.resourceMap.get(key) as GPUBufferBinding | GPUBufferBinding[] | undefined;
        if (arr instanceof Array) {
            // Write `data` into `existing` when given, otherwise allocate a new buffer.
            const createBuffer = (bufferBindings: GPUBufferBinding[], data: CommonArray, existing?: GPUBufferBinding) => {
                if (!existing) {
                    const buffer = getBuffer(this.device, data, usage);
                    existing = { buffer };
                } else {
                    this.device.queue.writeBuffer(existing.buffer, 0, data);
                }
                bufferBindings.push(existing);
            };
            const bufferBindings: GPUBufferBinding[] = [];
            if (!bufferBinding) {
                arr.forEach((data) => createBuffer(bufferBindings, data));
            } else if (!(bufferBinding instanceof Array)) {
                // A single binding existed before: reuse it for the first entry…
                this.device.queue.writeBuffer(bufferBinding.buffer, 0, arr[0]);
                bufferBindings.push(bufferBinding);
                // …and allocate fresh buffers for the remaining entries.
                for (let index = 1; index < arr.length; index++) {
                    createBuffer(bufferBindings, arr[index]);
                }
            } else {
                for (let index = 0; index < arr.length; index++) {
                    createBuffer(bufferBindings, arr[index], bufferBinding[index]);
                }
            }

            this.resourceMap.set(key, bufferBindings);
        } else if (!bufferBinding) {
            const buffer = getBuffer(this.device, arr, usage);
            this.resourceMap.set(key, { buffer });
        } else {
            this.device.queue.writeBuffer((bufferBinding as GPUBufferBinding).buffer, 0, arr);
        }
    }

    /**
     * Resolves the compiled pipeline for `filterType`, compiling `code` on first use.
     * An explicitly passed `code` also refreshes the code cache.
     * NOTE(review): new `code` does NOT recompile an already-cached pipeline — confirm intended.
     * @throws when no code is passed and none is cached for `filterType`.
     */
    getPipelineData({ filterType, code }: { filterType: string; code: string | undefined }) {
        let filterCode = code;
        if (!filterCode) {
            filterCode = this.filterCodeMap.get(filterType);
            if (!filterCode) {
                throw new Error(`filter ${filterType} code is not exist`);
            }
        } else {
            this.filterCodeMap.set(filterType, filterCode);
        }
        let pipelineData = this.pipelineDataMap.get(filterType);
        if (!pipelineData) {
            pipelineData = initCode(filterCode, this.device);
            this.pipelineDataMap.set(filterType, pipelineData);
        }
        return pipelineData;
    }

    /**
     * Separable Gaussian blur: one horizontal and one vertical pass, `index`
     * 0/1 selecting the matching "direction" uniform buffer.
     */
    blur(commandEncoder: GPUCommandEncoder, filterParam: BlurFilterParam) {
        const { filterType, properties } = filterParam;

        // FIX: write the full uniform block BEFORE the generic filter() call below.
        // filter() packs `properties` into "blur_uniforms" as a single float; on the
        // first blur it therefore created a 4-byte buffer, and the 16-byte write
        // below would overflow it. Creating the buffer full-size first is safe:
        // filter()'s later write only rewrites the first float with the same value.
        const { value } = properties[0];
        // Padding 0 after `value` keeps the layout in sync with the shader struct
        // (the scalar occupies the same stride as the widest member).
        this.updateBuffer("blur_uniforms", new Float32Array([value, 0, this.width, this.height]));

        const blurFilterParam: FilterParam = { filterType, code: blurCode, properties };
        this.filter(commandEncoder, blurFilterParam);

        const pipelineData = this.getPipelineData({ filterType: "blur", code: blurCode });
        for (let i = 0; i < 2; i++) {
            const { inputTexture, targetTexture } = this.getTexture();
            this.setCommandBuffer({ commandEncoder, pipelineData, inputTexture, targetTexture, index: i });
        }
    }

    /**
     * Generic single-pass filter: packs `properties` into `${filterType}_uniforms`
     * (every property padded to the widest property's length) and records one pass.
     */
    filter(commandEncoder: GPUCommandEncoder, filterParams: FilterParam) {
        const { filterType, code, properties } = filterParams;

        if (properties) {
            let baseDataLength = 1;
            properties.forEach(({ value }) => {
                if (value instanceof Array) {
                    baseDataLength = Math.max(baseDataLength, value.length);
                }
            });
            const arr = new Float32Array(baseDataLength * properties.length);

            properties.forEach(({ value }, index) => {
                if (value instanceof Array) {
                    arr.set(value, baseDataLength * index);
                } else {
                    // FIX: scalars must land at their property's slot (stride
                    // baseDataLength), not at a densely packed index — otherwise a
                    // scalar following a vector property is written to the wrong offset.
                    arr[baseDataLength * index] = value;
                }
            });
            this.updateBuffer(`${filterType}_uniforms`, arr);
        }

        if (filterType) {
            const pipelineData = this.getPipelineData({ filterType, code });
            this.setCommandBuffer({ commandEncoder, pipelineData, ...this.getTexture() });
        }
    }

    /** Blits the current ping-pong result into `texture` (or the next ping-pong target). */
    copy(commandEncoder: GPUCommandEncoder, texture?: GPUTexture) {
        const pipelineData = this.getPipelineData({ filterType: "copy", code: copyCode });
        const { inputTexture, targetTexture } = this.getTexture();
        const target = texture || targetTexture;
        this.setCommandBuffer({ commandEncoder, pipelineData, targetTexture: target, inputTexture });
    }

    /**
     * Runs the enabled filters of `params` in order over `sourceImage` and
     * presents the result on the canvas. Returns the canvas.
     */
    render(sourceImage: GPUImageCopyExternalImage["source"], params: FilterParam[], cacheKey: string) {
        this.load(sourceImage, cacheKey);
        const commandEncoder = this.device.createCommandEncoder();

        for (let i = 0; i < params.length; i++) {
            const filterParam = params[i];
            if (filterParam.enable) {
                const { filterType } = filterParam;
                switch (filterType) {
                    case "blur":
                        this.blur(commandEncoder, filterParam as BlurFilterParam);
                        break;
                    default:
                        this.filter(commandEncoder, filterParam);
                }
            }
        }

        this.copy(commandEncoder, this.ctx.getCurrentTexture());
        this.device.queue.submit([commandEncoder.finish()]);
        return this.canvas;
    }
}
-------------------------------------------------------------------------------- /src/webgpu/rect.ts: --------------------------------------------------------------------------------
export const rectVertexSize = 4 * 6; // Byte size of one rect vertex.
export const rectPositionOffset = 0;
export const rectUVOffset = 4 * 4;
export const rectVertexCount = 6;

// prettier-ignore
export const rectVertexArray = new Float32Array([
    // float4 position, float2 uv  (FIX: stale comment claimed a float4 color
    // attribute; each vertex is 6 floats = vec4 position + vec2 uv, matching
    // rectVertexSize = 24 bytes and rectUVOffset = 16 bytes)
    1, 1, 0, 1, 1, 1,
    -1, 1, 0, 1, 0, 1,
    -1, -1, 0, 1, 0, 0,
    1, -1, 0, 1, 1, 0,
    1, 1, 0, 1, 1, 1,
    -1, -1, 0, 1, 0, 0,
]);
-------------------------------------------------------------------------------- /src/wgsl/blur.wgsl: --------------------------------------------------------------------------------
#include vert.wgsl;

// NOTE(review): all template arguments and address spaces in this file were
// stripped by the copy that produced this source; restored below — confirm
// against project history.
struct Uniforms {
    sigma: f32,
};
struct Direction {
    // Blur direction in texels: (1, 0) horizontal or (0, 1) vertical
    // (see the "direction" buffers created in BasicRenderer).
    value: vec2<f32>,
};

@group(0) @binding(0) var mySampler: sampler;
@group(0) @binding(1) var myTexture: texture_2d<f32>;
@group(1) @binding(0) var<uniform> blur_uniforms: Uniforms;
@group(2) @binding(0) var<uniform> direction: Direction;

// One direction of a separable Gaussian blur.
@fragment
fn frag_main(@location(0) fragUV: vec2<f32>) -> @location(0) vec4<f32> {
    // Kernel radius r = k * sigma; the interval (μ-3σ, μ+3σ) holds 99.73% of the
    // Gaussian's area, so k = 3 is sufficient.
    const k: f32 = 3.0;
    const maxKernelSize: f32 = 1000.0;
    let uv = fragUV;
    let kernelRadius = blur_uniforms.sigma * k;
    let scale2X = -0.5 / (blur_uniforms.sigma * blur_uniforms.sigma); // used in exp() below

    // Center texel color and weight.
    var rgba = textureSample(myTexture, mySampler, uv);
    var weightSum: f32 = 1.0;
    let canvasSize = vec2<f32>(textureDimensions(myTexture));
    // Exploit linear sampling to merge two taps into one:
    // https://www.rastergrid.com/blog/2010/09/efficient-gaussian-blur-with-linear-sampling/
    for (var y: f32 = 0.; y < maxKernelSize; y = y + 2.) {
        if y >= kernelRadius { break; }
        var offset1 = y + 1.;
        var offset2 = y + 2.;
        var x1 = scale2X * offset1 * offset1;
        var x2 = scale2X * offset2 * offset2;
        var weight1 = exp(x1);
        var weight2 = exp(x2);

        var weight = weight1 + weight2;
        var offset = (weight1 * offset1 + weight2 * offset2) / weight;
        var offsetVec = direction.value * offset;

        var srcTmp = textureSample(myTexture, mySampler, uv + offsetVec / canvasSize);
        weightSum = weightSum + weight;
        rgba = rgba + srcTmp * weight;

        // The Gaussian is symmetric: the mirrored offset carries the same weight.
        srcTmp = textureSample(myTexture, mySampler, uv - offsetVec / canvasSize);
        weightSum = weightSum + weight;
        rgba = rgba + srcTmp * weight;
    }

    // (FIX: removed an unused debug sample `var sb = textureSample(...)` and its
    // commented-out `var color = sb;` leftover.)
    var color = clamp(rgba / weightSum, vec4(0.), vec4(1.));
    return color;
}
-------------------------------------------------------------------------------- /src/wgsl/compute_blur.wgsl: --------------------------------------------------------------------------------
struct Params {
    filterDim : i32,
    blockDim : u32,
}

@group(0) @binding(0) var mySampler : sampler;
@group(0) @binding(1) var<uniform> params : Params;
@group(1) @binding(1) var myTexture : texture_2d<f32>;
// NOTE(review): the storage texture's format parameter was stripped in this copy;
// rgba8unorm assumed — confirm it matches the project's `format`.
@group(1) @binding(2) var outputTex : texture_storage_2d<rgba8unorm, write>;

struct Flip {
    value : u32,
}
@group(1) @binding(3) var<uniform> flip : Flip;

// This shader blurs the input texture in one direction, depending on whether
// |flip.value| is 0 or 1.
// It does so by running (128 / 4) threads per workgroup to load 128
// texels into 4 rows of shared memory. Each thread loads a
// 4 x 4 block of texels to take advantage of the texture sampling
// hardware.
// Then, each thread computes the blur result by averaging the adjacent texel values
// in shared memory.
// Because we're operating on a subset of the texture, we cannot compute all of the
// results since not all of the neighbors are available in shared memory.
// Specifically, with 128 x 128 tiles, we can only compute and write out
// square blocks of size 128 - (filterSize - 1). We compute the number of blocks
// needed in Javascript and dispatch that amount.

// Shared-memory tile: 4 rows of 128 rgb texels.
// NOTE(review): the address space and template arguments were stripped in this
// copy; restored as var<workgroup> array<array<vec3<f32>, 128>, 4> — confirm.
var<workgroup> tile : array<array<vec3<f32>, 128>, 4>;

@compute @workgroup_size(32, 1, 1)
fn main(
    @builtin(workgroup_id) WorkGroupID : vec3<u32>,
    @builtin(local_invocation_id) LocalInvocationID : vec3<u32>
) {
    let filterOffset = (params.filterDim - 1) / 2;
    // FIX: the bindings above are declared as myTexture / mySampler (the names the
    // TypeScript resourceMap uses), but this body referenced undeclared inputTex /
    // samp — renamed to the declared bindings.
    let dims = vec2<i32>(textureDimensions(myTexture, 0));
    let baseIndex = vec2<i32>(WorkGroupID.xy * vec2(params.blockDim, 4) +
                              LocalInvocationID.xy * vec2(4, 1))
                    - vec2(filterOffset, 0);

    // Cooperative load: each thread fetches a 4x4 block of texels into shared memory.
    for (var r = 0; r < 4; r++) {
        for (var c = 0; c < 4; c++) {
            var loadIndex = baseIndex + vec2(c, r);
            if (flip.value != 0u) {
                loadIndex = loadIndex.yx;
            }

            tile[r][4 * LocalInvocationID.x + u32(c)] = textureSampleLevel(
                myTexture,
                mySampler,
                (vec2<f32>(loadIndex) + vec2(0.25, 0.25)) / vec2<f32>(dims),
                0.0
            ).rgb;
        }
    }

    workgroupBarrier();

    // Box-filter each row held in shared memory and write the result out.
    for (var r = 0; r < 4; r++) {
        for (var c = 0; c < 4; c++) {
            var writeIndex = baseIndex + vec2(c, r);
            if (flip.value != 0u) {
                writeIndex = writeIndex.yx;
            }

            let center = i32(4 * LocalInvocationID.x) + c;
            if (center >= filterOffset &&
                center < 128 - filterOffset &&
                all(writeIndex < dims)) {
                var acc = vec3(0.0, 0.0, 0.0);
                for (var f = 0; f < params.filterDim; f++) {
                    var i = center + f - filterOffset;
                    acc = acc + (1.0 / f32(params.filterDim)) * tile[r][i];
                }
                textureStore(outputTex, writeIndex, vec4(acc, 1.0));
            }
        }
    }
}
-------------------------------------------------------------------------------- /src/wgsl/copy.wgsl:
--------------------------------------------------------------------------------
#include vert.wgsl;

@group(0) @binding(0) var mySampler: sampler;
@group(0) @binding(1) var myTexture: texture_2d<f32>;

// Pass-through copy: sample the input texture and return it unchanged.
// NOTE(review): template arguments / address spaces in this file were stripped
// by the copy that produced this source; restored — confirm against history.
@fragment
fn frag_main(@location(0) TexCoord: vec2<f32>) -> @location(0) vec4<f32> {
    var rgba = textureSample(myTexture, mySampler, TexCoord);
    return rgba;
}

-------------------------------------------------------------------------------- /src/wgsl/noise.wgsl: --------------------------------------------------------------------------------
#include vert.wgsl;

struct Uniforms {
    ratio: f32,
    seed: f32,
    granularity: f32,
}

@group(0) @binding(0) var mySampler: sampler;
@group(0) @binding(1) var myTexture: texture_2d<f32>;
@group(1) @binding(0) var<uniform> noise_uniforms: Uniforms;

// Seeded pseudo-random hash in [0, 1).
fn random(st: vec2<f32>) -> f32 {
    return fract(sin(noise_uniforms.seed + dot(st.xy, vec2(12.9898, 78.233))) * 43758.5453123);
}

// Value noise. Based on Morgan McGuire @morgan3d
// https://www.shadertoy.com/view/4dS3Wd
fn noise(st: vec2<f32>) -> f32 {
    var i = floor(st);
    var f = fract(st);

    // Four corners in 2D of a tile
    var a = random(i);
    var b = random(i + vec2(1.0, 0.0));
    var c = random(i + vec2(0.0, 1.0));
    var d = random(i + vec2(1.0, 1.0));

    var u = f * f * (3.0 - 2.0 * f);

    return mix(a, b, u.x) + (c - a) * u.y * (1.0 - u.x) + (d - b) * u.x * u.y;
}

const OCTAVES = 6u;
// Fractal Brownian motion: OCTAVES layers of value noise at doubling frequency.
// (FIX: removed the unused local `frequency`.)
fn fbm(st: vec2<f32>) -> f32 {
    var value = 0.0;
    var amplitude = .5;
    var uv = st;
    for (var i = 0u; i < OCTAVES; i = i + 1u) {
        value = value + amplitude * noise(uv);
        uv = uv * 2.;
        amplitude = amplitude * .5;
    }
    return value;
}

// Dissolve effect: make texels transparent where fbm noise exceeds the ratio threshold.
@fragment
fn frag_main(@location(0) fragUV: vec2<f32>) -> @location(0) vec4<f32> {
    let uv = fragUV;
    let rgba = textureSample(myTexture, mySampler, uv);

    let p = uv * noise_uniforms.granularity;
    let value = fbm(p);

    let k = value - 0.01 * noise_uniforms.ratio;
    if k > 0.0 {
        // discard;
        return vec4(rgba.rgb, 0.);
    }
    return rgba;
}
-------------------------------------------------------------------------------- /src/wgsl/vert.wgsl: --------------------------------------------------------------------------------
struct VertexOutput {
    @builtin(position) Position: vec4<f32>,
    @location(0) TexCoord: vec2<f32>,
};

// Fullscreen pass-through vertex stage, shared (via #include) by every filter shader.
@vertex
fn vert_main(@location(0) vertexPosition: vec2<f32>, @location(1) vertexTexCoord: vec2<f32>) -> VertexOutput {
    var output: VertexOutput;
    output.Position = vec4(vertexPosition, 0.0, 1.0);
    output.TexCoord = vertexTexCoord;
    return output;
}
-------------------------------------------------------------------------------- /src/wgsl/warp.wgsl: --------------------------------------------------------------------------------
#include vert.wgsl;

struct Uniforms {
    angle: f32,
    center: vec2<f32>,
};

@group(0) @binding(0) var mySampler: sampler;
@group(0) @binding(1) var myTexture: texture_2d<f32>;
@group(1) @binding(0) var<uniform> warp_uniforms: Uniforms;

// Swirl warp: rotate each texel around `center` by an angle that falls off with distance.
@fragment
fn frag_main(@location(0) TexCoord: vec2<f32>) -> @location(0) vec4<f32> {
    var center = warp_uniforms.center;
    // var center = vec2(0.5, 0.5);
    let offset = TexCoord - center;
    var l = length(offset) * 2.;
    l = clamp(1. - l, 0., 1.);
    var theta = l * warp_uniforms.angle;
    var s = sin(theta);
    var c = cos(theta);
    var matrix2 = mat2x2(c, s, -s, c);
    var uv = matrix2 * offset + center;
    var rgba = textureSample(myTexture, mySampler, uv);
    // wgpu bug ?
26 | return vec4(rgba.rgb * rgba.a, rgba.a); 27 | } -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ESNext", 4 | "useDefineForClassFields": true, 5 | "module": "ESNext", 6 | "lib": ["ESNext", "DOM"], 7 | "moduleResolution": "Node", 8 | "strict": true, 9 | "sourceMap": true, 10 | "resolveJsonModule": true, 11 | "esModuleInterop": true, 12 | "noEmit": true, 13 | "noUnusedLocals": true, 14 | "noUnusedParameters": true, 15 | "noImplicitReturns": true, 16 | "typeRoots": [ "./node_modules/@webgpu/types", "./node_modules/@types"] 17 | }, 18 | "include": ["src"] 19 | } 20 | -------------------------------------------------------------------------------- /vite.config.ts: -------------------------------------------------------------------------------- 1 | import { defineConfig } from "vite"; 2 | import glsl from "vite-plugin-glsl"; 3 | 4 | export default defineConfig({ 5 | plugins: [glsl({ compress: false })], 6 | root: "./", 7 | base: "./", 8 | }); 9 | --------------------------------------------------------------------------------