Skip to content

Commit 4e66d78

Browse files
committed
Add video texture sample
This sample shows how to upload video frames to WebGPU and render them on a canvas. It can easily be modified to check the upload performance of copyImageBitmapToTexture.
1 parent f252bd8 commit 4e66d78

File tree

3 files changed

+184
-0
lines changed

3 files changed

+184
-0
lines changed
2.06 MB
Binary file not shown.

build/exampleList.js

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,4 +8,5 @@ module.exports = [
88
'./examples/fractalCube',
99
'./examples/computeBoids',
1010
'./examples/animometer',
11+
'./examples/videoUploading',
1112
];

src/examples/videoUploading.ts

Lines changed: 183 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,183 @@
1+
import glslangModule from '../glslang';
2+
3+
// Display name and short blurb consumed by the sample gallery UI.
export const title = 'Video Texture';
export const description = 'This example shows how to upload video frame to WebGPU.';
5+
6+
export async function init(canvas: HTMLCanvasElement) {
7+
// Set video element
8+
const video: HTMLVideoElement = document.createElement('video');
9+
video.loop = true;
10+
video.autoplay = true;
11+
video.muted = true;
12+
video.src = "assets/video/big-buck-bunny_trailer.webm";
13+
await video.play();
14+
15+
const vertexShaderGLSL = `#version 450
16+
layout(location = 0) in vec3 position;
17+
layout(location = 1) in vec2 uv;
18+
19+
layout(location = 0) out vec2 fragUV;
20+
21+
void main() {
22+
gl_Position = vec4(position, 1.0);
23+
fragUV = uv;
24+
}
25+
`;
26+
27+
const fragmentShaderGLSL = `#version 450
28+
layout(set = 0, binding = 0) uniform sampler mySampler;
29+
layout(set = 0, binding = 1) uniform texture2D myTexture;
30+
31+
layout(location = 0) in vec2 fragUV;
32+
layout(location = 0) out vec4 outColor;
33+
34+
void main() {
35+
outColor = texture(sampler2D(myTexture, mySampler), fragUV);
36+
}
37+
`;
38+
39+
const adapter = await navigator.gpu.requestAdapter();
40+
const device = await adapter.requestDevice();
41+
const glslang = await glslangModule();
42+
const context = canvas.getContext('gpupresent');
43+
44+
const swapChainFormat = "bgra8unorm";
45+
46+
var rectVerts = new Float32Array([
47+
1.0, 1.0, 0.0, 1.0, 0.0,
48+
1.0, -1.0, 0.0, 1.0, 1.0,
49+
-1.0, -1.0, 0.0, 0.0, 1.0,
50+
1.0, 1.0, 0.0, 1.0, 0.0,
51+
-1.0, -1.0, 0.0, 0.0, 1.0,
52+
-1.0, 1.0, 0.0, 0.0, 0.0,
53+
]);
54+
55+
const verticesBuffer = device.createBuffer({
56+
size: rectVerts.byteLength,
57+
usage: GPUBufferUsage.VERTEX,
58+
mappedAtCreation: true,
59+
});
60+
new Float32Array(verticesBuffer.getMappedRange()).set(rectVerts);
61+
verticesBuffer.unmap();
62+
63+
const bindGroupLayout = device.createBindGroupLayout({
64+
entries: [{
65+
// Sampler
66+
binding: 0,
67+
visibility: GPUShaderStage.FRAGMENT,
68+
type: "sampler"
69+
}, {
70+
// Texture view
71+
binding: 1,
72+
visibility: GPUShaderStage.FRAGMENT,
73+
type: "sampled-texture"
74+
}]
75+
});
76+
77+
// @ts-ignore:
78+
const swapChain = context.configureSwapChain({
79+
device,
80+
format: swapChainFormat,
81+
});
82+
83+
const pipeline = device.createRenderPipeline({
84+
layout: device.createPipelineLayout({ bindGroupLayouts: [bindGroupLayout] }),
85+
86+
vertexStage: {
87+
module: device.createShaderModule({
88+
code: glslang.compileGLSL(vertexShaderGLSL, "vertex"),
89+
90+
// @ts-ignore
91+
source: vertexShaderGLSL,
92+
transform: source => glslang.compileGLSL(source, "vertex"),
93+
}),
94+
entryPoint: "main"
95+
},
96+
fragmentStage: {
97+
module: device.createShaderModule({
98+
code: glslang.compileGLSL(fragmentShaderGLSL, "fragment"),
99+
100+
// @ts-ignore
101+
source: fragmentShaderGLSL,
102+
transform: source => glslang.compileGLSL(source, "fragment"),
103+
}),
104+
entryPoint: "main"
105+
},
106+
107+
primitiveTopology: "triangle-list",
108+
vertexState: {
109+
vertexBuffers: [{
110+
arrayStride: 20,
111+
attributes: [{
112+
// position
113+
shaderLocation: 0,
114+
offset: 0,
115+
format: "float3"
116+
}, {
117+
// uv
118+
shaderLocation: 1,
119+
offset: 12,
120+
format: "float2"
121+
}]
122+
}],
123+
},
124+
125+
colorStates: [{
126+
format: swapChainFormat,
127+
}],
128+
});
129+
130+
const sampler = device.createSampler({
131+
magFilter: "linear",
132+
minFilter: "linear",
133+
});
134+
135+
const videoTexture: GPUTexture = device.createTexture({
136+
size: {
137+
width: video.videoWidth,
138+
height: video.videoHeight,
139+
depth: 1,
140+
},
141+
format: 'rgba8unorm',
142+
usage: GPUTextureUsage.COPY_DST | GPUTextureUsage.SAMPLED,
143+
});
144+
145+
const uniformBindGroup: GPUBindGroup = device.createBindGroup({
146+
layout: bindGroupLayout,
147+
entries: [{
148+
binding: 0,
149+
resource: sampler,
150+
}, {
151+
binding: 1,
152+
resource: videoTexture.createView(),
153+
}],
154+
});
155+
156+
return async function frame() {
157+
const commandEncoder = device.createCommandEncoder({});
158+
const textureView = swapChain.getCurrentTexture().createView();
159+
160+
const videoImageBitmap = await createImageBitmap(video);
161+
162+
device.defaultQueue.copyImageBitmapToTexture(
163+
{imageBitmap:videoImageBitmap, origin: {x:0, y: 0} },
164+
{texture: videoTexture},
165+
{width: video.videoWidth, height:video.videoHeight, depth: 1}
166+
);
167+
168+
const renderPassDescriptor = {
169+
colorAttachments: [{
170+
attachment: textureView,
171+
loadValue: { r: 0.0, g: 0.0, b: 0.0, a: 1.0 },
172+
}],
173+
};
174+
175+
const passEncoder = commandEncoder.beginRenderPass(renderPassDescriptor);
176+
passEncoder.setPipeline(pipeline);
177+
passEncoder.setVertexBuffer(0, verticesBuffer);
178+
passEncoder.setBindGroup(0, uniformBindGroup);
179+
passEncoder.draw(6, 1, 0, 0);
180+
passEncoder.endPass();
181+
device.defaultQueue.submit([commandEncoder.finish()]);
182+
}
183+
}

0 commit comments

Comments
 (0)