diff --git a/Cargo.toml b/Cargo.toml
index 47d8ea3c9fe93..d47dab1d6f7af 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1385,11 +1385,21 @@ name = "post_processing"
 path = "examples/shader/post_processing.rs"
 
 [package.metadata.example.post_processing]
-name = "Post Processing"
+name = "Post Processing - Render To Texture"
 description = "A custom post processing effect, using two cameras, with one reusing the render texture of the first one"
 category = "Shaders"
 wasm = true
 
+[[example]]
+name = "post_process_pass"
+path = "examples/shader/post_process_pass.rs"
+
+[package.metadata.example.post_process_pass]
+name = "Post Processing - Custom Render Pass"
+description = "A custom post processing effect, using a custom render pass that runs after the main pass"
+category = "Shaders"
+wasm = true
+
 [[example]]
 name = "shader_defs"
 path = "examples/shader/shader_defs.rs"
diff --git a/assets/shaders/post_process_pass.wgsl b/assets/shaders/post_process_pass.wgsl
new file mode 100644
index 0000000000000..b25b5788cc8a6
--- /dev/null
+++ b/assets/shaders/post_process_pass.wgsl
@@ -0,0 +1,48 @@
+// This shader computes the chromatic aberration effect
+
+#import bevy_pbr::utils
+
+// Since post processing is a fullscreen effect, we use the fullscreen vertex shader provided by bevy.
+// This will import a vertex shader that renders a single fullscreen triangle.
+//
+// A fullscreen triangle is a single triangle that covers the entire screen.
+// The box in the top left of the diagram below is the screen. The four `x`s are the corners of the screen.
+//
+// Y axis
+//  1 |  x-----x......
+//  0 |  |  s  |  . ´
+// -1 |  x_____x´
+// -2 |  :  .´
+// -3 |  :´
+//    +---------------  X axis
+//      -1  0  1  2  3
+//
+// As you can see, the triangle ends up bigger than the screen.
+//
+// You don't need to worry about this too much since bevy will compute the correct UVs for you.
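+//
+// The imported vertex shader outputs a `FullscreenVertexOutput`, which carries the clip-space
+// position and the screen-space UV that the fragment shader below receives as input.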
+#import bevy_core_pipeline::fullscreen_vertex_shader
+
+@group(0) @binding(0)
+var screen_texture: texture_2d<f32>;
+@group(0) @binding(1)
+var texture_sampler: sampler;
+struct PostProcessSettings {
+    intensity: f32,
+}
+@group(0) @binding(2)
+var<uniform> settings: PostProcessSettings;
+
+@fragment
+fn fragment(in: FullscreenVertexOutput) -> @location(0) vec4<f32> {
+    // Chromatic aberration strength
+    let offset_strength = settings.intensity;
+
+    // Sample each color channel with an arbitrary shift
+    return vec4<f32>(
+        textureSample(screen_texture, texture_sampler, in.uv + vec2<f32>(offset_strength, -offset_strength)).r,
+        textureSample(screen_texture, texture_sampler, in.uv + vec2<f32>(-offset_strength, 0.0)).g,
+        textureSample(screen_texture, texture_sampler, in.uv + vec2<f32>(0.0, offset_strength)).b,
+        1.0
+    );
+}
+
diff --git a/examples/README.md b/examples/README.md
index 8fd8f7c29b8d0..6b30aad3ede15 100644
--- a/examples/README.md
+++ b/examples/README.md
@@ -278,7 +278,8 @@ Example | Description
 [Material - GLSL](../examples/shader/shader_material_glsl.rs) | A shader that uses the GLSL shading language
 [Material - Screenspace Texture](../examples/shader/shader_material_screenspace_texture.rs) | A shader that samples a texture with view-independent UV coordinates
 [Material Prepass](../examples/shader/shader_prepass.rs) | A shader that uses the various textures generated by the prepass
-[Post Processing](../examples/shader/post_processing.rs) | A custom post processing effect, using two cameras, with one reusing the render texture of the first one
+[Post Processing - Custom Render Pass](../examples/shader/post_process_pass.rs) | A custom post processing effect, using a custom render pass that runs after the main pass
+[Post Processing - Render To Texture](../examples/shader/post_processing.rs) | A custom post processing effect, using two cameras, with one reusing the render texture of the first one
 [Shader Defs](../examples/shader/shader_defs.rs) | A shader that uses "shaders defs" (a bevy tool to selectively toggle parts of a shader)
 [Texture Binding Array (Bindless Textures)](../examples/shader/texture_binding_array.rs) | A shader that shows how to bind and sample multiple textures as a binding array (a.k.a. bindless textures).
diff --git a/examples/shader/post_process_pass.rs b/examples/shader/post_process_pass.rs
new file mode 100644
index 0000000000000..ca91b7ee44028
--- /dev/null
+++ b/examples/shader/post_process_pass.rs
@@ -0,0 +1,412 @@
+//! This example shows how to create a custom render pass that runs after the main pass
+//! and reads the texture generated by the main pass.
+//!
+//! The example shader is a very simple implementation of chromatic aberration.
+//!
+//! This is a fairly low level example and assumes some familiarity with rendering concepts and wgpu.
+
+use bevy::{
+    core_pipeline::{
+        clear_color::ClearColorConfig, core_3d,
+        fullscreen_vertex_shader::fullscreen_shader_vertex_state,
+    },
+    prelude::*,
+    render::{
+        extract_component::{
+            ComponentUniforms, ExtractComponent, ExtractComponentPlugin, UniformComponentPlugin,
+        },
+        render_graph::{Node, NodeRunError, RenderGraph, RenderGraphContext, SlotInfo, SlotType},
+        render_resource::{
+            BindGroupDescriptor, BindGroupEntry, BindGroupLayout, BindGroupLayoutDescriptor,
+            BindGroupLayoutEntry, BindingResource, BindingType, CachedRenderPipelineId,
+            ColorTargetState, ColorWrites, FragmentState, MultisampleState, Operations,
+            PipelineCache, PrimitiveState, RenderPassColorAttachment, RenderPassDescriptor,
+            RenderPipelineDescriptor, Sampler, SamplerBindingType, SamplerDescriptor, ShaderStages,
+            ShaderType, TextureFormat, TextureSampleType, TextureViewDimension,
+        },
+        renderer::{RenderContext, RenderDevice},
+        texture::BevyDefault,
+        view::{ExtractedView, ViewTarget},
+        RenderApp,
+    },
+};
+
+fn main() {
+    App::new()
+        .add_plugins(DefaultPlugins.set(AssetPlugin {
+            // Enable hot reloading so edits to the shader are picked up while the example runs
+            watch_for_changes: true,
+            ..default()
+        }))
+        .add_plugin(PostProcessPlugin)
+        .add_startup_system(setup)
+        .add_system(rotate)
+        .add_system(update_settings)
+        .run();
+}
+
+/// It is generally encouraged to set up post processing effects as a plugin
+struct PostProcessPlugin;
+impl Plugin for PostProcessPlugin {
+    fn build(&self, app: &mut App) {
+        app
+            // The settings will be a component that lives in the main world but will
+            // be extracted to the render world every frame.
+            // This makes it possible to control the effect from the main world.
+            // This plugin will take care of extracting it automatically.
+            // It's important to derive [`ExtractComponent`] on [`PostProcessSettings`] for this plugin to work correctly.
+            .add_plugin(ExtractComponentPlugin::<PostProcessSettings>::default())
+            // The settings will also be the data used in the shader.
+            // This plugin will prepare the component for the GPU by creating a uniform buffer
+            // and writing the data to that buffer every frame.
+            .add_plugin(UniformComponentPlugin::<PostProcessSettings>::default());
+
+        // We need to get the render app from the main app
+        let Ok(render_app) = app.get_sub_app_mut(RenderApp) else {
+            return;
+        };
+
+        // Initialize the pipeline
+        render_app.init_resource::<PostProcessPipeline>();
+
+        // Bevy's renderer uses a render graph, which is a collection of nodes in a directed acyclic graph.
+        // It currently runs on each view/camera and executes each node in the specified order.
+        // It will make sure that any node that needs a dependency from another node only runs when that dependency is done.
+        //
+        // Each node can execute arbitrary work, but it generally runs at least one render pass.
+        // A node only has access to the render world, so if you need data from the main world
+        // you need to extract it manually or with a plugin like above.
+
+        // Create the node with the render world
+        let node = PostProcessNode::new(&mut render_app.world);
+
+        // Get the render graph for the entire app
+        let mut graph = render_app.world.resource_mut::<RenderGraph>();
+
+        // Get the render graph for 3d cameras/views
+        let core_3d_graph = graph.get_sub_graph_mut(core_3d::graph::NAME).unwrap();
+
+        // Register the post process node in the 3d render graph
+        core_3d_graph.add_node(PostProcessNode::NAME, node);
+
+        // A slot edge tells the render graph which input/output value should be passed to the node.
+        // In this case, the view entity, which is the entity associated with the
+        // camera on which the graph is running.
+        core_3d_graph.add_slot_edge(
+            core_3d_graph.input_node().id,
+            core_3d::graph::input::VIEW_ENTITY,
+            PostProcessNode::NAME,
+            PostProcessNode::IN_VIEW,
+        );
+
+        // We now need to add an edge between our node and the nodes from bevy
+        // to make sure our node is ordered correctly relative to other nodes.
+        //
+        // Here we want our effect to run after tonemapping and before the end of the main pass post processing
+        core_3d_graph.add_node_edge(core_3d::graph::node::TONEMAPPING, PostProcessNode::NAME);
+        core_3d_graph.add_node_edge(
+            PostProcessNode::NAME,
+            core_3d::graph::node::END_MAIN_PASS_POST_PROCESSING,
+        );
+    }
+}
+
+/// The post process node used for the render graph
+struct PostProcessNode {
+    // The node needs a query to gather data from the ECS in order to do its rendering,
+    // but it's not a normal system so we need to define it manually.
+    query: QueryState<&'static ViewTarget, With<ExtractedView>>,
+}
+
+impl PostProcessNode {
+    pub const IN_VIEW: &str = "view";
+    pub const NAME: &str = "post_process";
+
+    fn new(world: &mut World) -> Self {
+        Self {
+            query: QueryState::new(world),
+        }
+    }
+}
+
+impl Node for PostProcessNode {
+    // This defines the input slot of the node and tells the render graph what
+    // we will need when running the node.
+    fn input(&self) -> Vec<SlotInfo> {
+        // In this case we tell the graph that our node will use the view entity.
+        // Currently, every node in bevy uses this pattern, so it's safe to just copy it.
+        vec![SlotInfo::new(PostProcessNode::IN_VIEW, SlotType::Entity)]
+    }
+
+    // This will run every frame before the run() method.
+    // The important difference is that `self` is `mut` here.
+    fn update(&mut self, world: &mut World) {
+        // Since this is not a system we need to update the query manually.
+        // This is mostly boilerplate. There are plans to remove this in the future.
+        // For now, you can just copy it.
+        self.query.update_archetypes(world);
+    }
+
+    // Runs the node logic.
+    // This is where you encode draw commands.
+    //
+    // This will run on every view on which the graph is running. If you don't want your effect to run on every camera,
+    // you'll need to make sure you have a marker component to identify which camera(s) should run the effect.
+    fn run(
+        &self,
+        graph_context: &mut RenderGraphContext,
+        render_context: &mut RenderContext,
+        world: &World,
+    ) -> Result<(), NodeRunError> {
+        // Get the entity of the view for the render graph where this node is running
+        let view_entity = graph_context.get_input_entity(PostProcessNode::IN_VIEW)?;
+
+        // We get the data we need from the world based on the view entity passed to the node.
+        // The data is the query that was defined earlier in the [`PostProcessNode`].
+        let Ok(view_target) = self.query.get_manual(world, view_entity) else {
+            return Ok(());
+        };
+
+        // Get the pipeline resource that contains the global data we need to create the render pipeline
+        let post_process_pipeline = world.resource::<PostProcessPipeline>();
+
+        // The pipeline cache is a cache of all previously created pipelines.
+        // It is required to avoid creating a new pipeline each frame, which is expensive due to shader compilation.
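+        // Pipelines are compiled asynchronously in the background, so `get_render_pipeline`
+        // below returns `None` until the pipeline is ready, in which case we skip the frame.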
+        let pipeline_cache = world.resource::<PipelineCache>();
+
+        // Get the pipeline from the cache
+        let Some(pipeline) = pipeline_cache.get_render_pipeline(post_process_pipeline.pipeline_id) else {
+            return Ok(());
+        };
+
+        // Get the settings uniform binding
+        let settings_uniforms = world.resource::<ComponentUniforms<PostProcessSettings>>();
+        let Some(settings_binding) = settings_uniforms.uniforms().binding() else {
+            return Ok(());
+        };
+
+        // This will start a new "post process write", obtaining two texture
+        // views from the view target - a `source` and a `destination`.
+        // `source` is the "current" main texture and you _must_ write into
+        // `destination` because calling `post_process_write()` on the
+        // [`ViewTarget`] will internally flip the [`ViewTarget`]'s main
+        // texture to the `destination` texture. Failing to do so will cause
+        // the current main texture information to be lost.
+        let post_process = view_target.post_process_write();
+
+        // The bind_group gets created each frame.
+        //
+        // Normally, you would create a bind_group in the Queue stage, but this doesn't work with the post_process_write().
+        // The reason it doesn't work is because each post_process_write will alternate the source/destination.
+        // The only way to have the correct source/destination for the bind_group is to make sure you get it during the node execution.
+        let bind_group = render_context
+            .render_device()
+            .create_bind_group(&BindGroupDescriptor {
+                label: Some("post_process_bind_group"),
+                layout: &post_process_pipeline.layout,
+                // It's important for this to match the BindGroupLayout defined in the PostProcessPipeline
+                entries: &[
+                    BindGroupEntry {
+                        binding: 0,
+                        // Make sure to use the source view
+                        resource: BindingResource::TextureView(post_process.source),
+                    },
+                    BindGroupEntry {
+                        binding: 1,
+                        // Use the sampler created for the pipeline
+                        resource: BindingResource::Sampler(&post_process_pipeline.sampler),
+                    },
+                    BindGroupEntry {
+                        binding: 2,
+                        // Set the settings binding
+                        resource: settings_binding.clone(),
+                    },
+                ],
+            });
+
+        // Begin the render pass
+        let mut render_pass = render_context.begin_tracked_render_pass(RenderPassDescriptor {
+            label: Some("post_process_pass"),
+            color_attachments: &[Some(RenderPassColorAttachment {
+                // We need to specify the post process destination view here
+                // to make sure we write to the appropriate texture.
+                view: post_process.destination,
+                resolve_target: None,
+                ops: Operations::default(),
+            })],
+            depth_stencil_attachment: None,
+        });
+
+        // This is mostly just wgpu boilerplate for drawing a fullscreen triangle,
+        // using the pipeline/bind_group created above
+        render_pass.set_render_pipeline(pipeline);
+        render_pass.set_bind_group(0, &bind_group, &[]);
+        render_pass.draw(0..3, 0..1);
+
+        Ok(())
+    }
+}
+
+// This contains global data used by the render pipeline. This will be created once on startup.
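+// Since it implements [`FromWorld`], it is constructed the first time it is accessed,
+// which happens when the plugin calls `init_resource::<PostProcessPipeline>()` above.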
+#[derive(Resource)]
+struct PostProcessPipeline {
+    layout: BindGroupLayout,
+    sampler: Sampler,
+    pipeline_id: CachedRenderPipelineId,
+}
+
+impl FromWorld for PostProcessPipeline {
+    fn from_world(world: &mut World) -> Self {
+        let render_device = world.resource::<RenderDevice>();
+
+        // We need to define the bind group layout used for our pipeline
+        let layout = render_device.create_bind_group_layout(&BindGroupLayoutDescriptor {
+            label: Some("post_process_bind_group_layout"),
+            entries: &[
+                // The screen texture
+                BindGroupLayoutEntry {
+                    binding: 0,
+                    visibility: ShaderStages::FRAGMENT,
+                    ty: BindingType::Texture {
+                        sample_type: TextureSampleType::Float { filterable: true },
+                        view_dimension: TextureViewDimension::D2,
+                        multisampled: false,
+                    },
+                    count: None,
+                },
+                // The sampler that will be used to sample the screen texture
+                BindGroupLayoutEntry {
+                    binding: 1,
+                    visibility: ShaderStages::FRAGMENT,
+                    ty: BindingType::Sampler(SamplerBindingType::Filtering),
+                    count: None,
+                },
+                // The settings uniform that will control the effect
+                BindGroupLayoutEntry {
+                    binding: 2,
+                    visibility: ShaderStages::FRAGMENT,
+                    ty: BindingType::Buffer {
+                        ty: bevy::render::render_resource::BufferBindingType::Uniform,
+                        has_dynamic_offset: false,
+                        min_binding_size: None,
+                    },
+                    count: None,
+                },
+            ],
+        });
+
+        // We can create the sampler here since it won't change at runtime and doesn't depend on the view
+        let sampler = render_device.create_sampler(&SamplerDescriptor::default());
+
+        // Get the shader handle
+        let shader = world
+            .resource::<AssetServer>()
+            .load("shaders/post_process_pass.wgsl");
+
+        let pipeline_id = world
+            .resource_mut::<PipelineCache>()
+            // This will add the pipeline to the cache and queue its creation
+            .queue_render_pipeline(RenderPipelineDescriptor {
+                label: Some("post_process_pipeline".into()),
+                layout: vec![layout.clone()],
+                // This will set up a fullscreen triangle for the vertex state
+                vertex: fullscreen_shader_vertex_state(),
+                fragment: Some(FragmentState {
+                    shader,
+                    shader_defs: vec![],
+                    // Make sure this matches the entry point of your shader.
+                    // It can be anything as long as it matches here and in the shader.
+                    entry_point: "fragment".into(),
+                    targets: vec![Some(ColorTargetState {
+                        format: TextureFormat::bevy_default(),
+                        blend: None,
+                        write_mask: ColorWrites::ALL,
+                    })],
+                }),
+                // All of the following properties are not important for this effect, so just use the default values.
+                // This struct doesn't implement the Default trait because not all fields can have a default value.
+                primitive: PrimitiveState::default(),
+                depth_stencil: None,
+                multisample: MultisampleState::default(),
+                push_constant_ranges: vec![],
+            });
+
+        Self {
+            layout,
+            sampler,
+            pipeline_id,
+        }
+    }
+}
+
+// This is the component that will get passed to the shader
+#[derive(Component, Default, Clone, Copy, ExtractComponent, ShaderType)]
+struct PostProcessSettings {
+    intensity: f32,
+}
+
+/// Set up a simple 3D scene
+fn setup(
+    mut commands: Commands,
+    mut meshes: ResMut<Assets<Mesh>>,
+    mut materials: ResMut<Assets<StandardMaterial>>,
+) {
+    // camera
+    commands.spawn((
+        Camera3dBundle {
+            transform: Transform::from_translation(Vec3::new(0.0, 0.0, 5.0))
+                .looking_at(Vec3::default(), Vec3::Y),
+            camera_3d: Camera3d {
+                clear_color: ClearColorConfig::Custom(Color::WHITE),
+                ..default()
+            },
+            ..default()
+        },
+        // Add the setting to the camera.
+        // This component is also used to determine on which camera to run the post processing effect.
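+        // The intensity value is used directly as a UV offset in the shader,
+        // so 0.02 shifts the color channels by roughly 2% of the screen.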
+        PostProcessSettings { intensity: 0.02 },
+    ));
+
+    // cube
+    commands.spawn((
+        PbrBundle {
+            mesh: meshes.add(Mesh::from(shape::Cube { size: 1.0 })),
+            material: materials.add(Color::rgb(0.8, 0.7, 0.6).into()),
+            transform: Transform::from_xyz(0.0, 0.5, 0.0),
+            ..default()
+        },
+        Rotates,
+    ));
+    // light
+    commands.spawn(PointLightBundle {
+        transform: Transform::from_translation(Vec3::new(0.0, 0.0, 10.0)),
+        ..default()
+    });
+}
+
+#[derive(Component)]
+struct Rotates;
+
+/// Rotates any entity around the x and y axes
+fn rotate(time: Res