Path: blob/main/examples/shader_advanced/custom_post_processing.rs
//! This example shows how to create a custom render pass that runs after the main pass
//! and reads the texture generated by the main pass.
//!
//! The example shader is a very simple implementation of chromatic aberration.
//! To adapt this example for 2D, replace all instances of 3D structures (such as `Core3d`, etc.)
//! with their corresponding 2D counterparts.
//!
//! This is a fairly low level example and assumes some familiarity with rendering concepts and wgpu.

use bevy::{
    core_pipeline::{
        core_3d::graph::{Core3d, Node3d},
        FullscreenShader,
    },
    ecs::query::QueryItem,
    prelude::*,
    render::{
        extract_component::{
            ComponentUniforms, DynamicUniformIndex, ExtractComponent, ExtractComponentPlugin,
            UniformComponentPlugin,
        },
        render_graph::{
            NodeRunError, RenderGraphContext, RenderGraphExt, RenderLabel, ViewNode,
            ViewNodeRunner,
        },
        render_resource::{
            binding_types::{sampler, texture_2d, uniform_buffer},
            *,
        },
        renderer::{RenderContext, RenderDevice},
        view::ViewTarget,
        RenderApp, RenderStartup,
    },
};

/// This example uses a shader source file from the assets subdirectory
const SHADER_ASSET_PATH: &str = "shaders/post_processing.wgsl";

fn main() {
    App::new()
        .add_plugins((DefaultPlugins, PostProcessPlugin))
        .add_systems(Startup, setup)
        .add_systems(Update, (rotate, update_settings))
        .run();
}

/// It is generally encouraged to set up post processing effects as a plugin
struct PostProcessPlugin;

impl Plugin for PostProcessPlugin {
    fn build(&self, app: &mut App) {
        app.add_plugins((
            // The settings will be a component that lives in the main world but will
            // be extracted to the render world every frame.
            // This makes it possible to control the effect from the main world.
            // This plugin will take care of extracting it automatically.
            // It's important to derive [`ExtractComponent`] on [`PostProcessSettings`]
            // for this plugin to work correctly.
            ExtractComponentPlugin::<PostProcessSettings>::default(),
            // The settings will also be the data used in the shader.
            // This plugin will prepare the component for the GPU by creating a uniform buffer
            // and writing the data to that buffer every frame.
            UniformComponentPlugin::<PostProcessSettings>::default(),
        ));

        // We need to get the render app from the main app
        let Some(render_app) = app.get_sub_app_mut(RenderApp) else {
            return;
        };

        // RenderStartup runs once on startup after all plugins are built.
        // It is useful to initialize data that will only live in the RenderApp.
        render_app.add_systems(RenderStartup, init_post_process_pipeline);

        render_app
            // Bevy's renderer uses a render graph, which is a collection of nodes in a directed acyclic graph.
            // It currently runs on each view/camera and executes each node in the specified order.
            // It will make sure that any node that needs a dependency from another node
            // only runs when that dependency is done.
            //
            // Each node can execute arbitrary work, but it generally runs at least one render pass.
            // A node only has access to the render world, so if you need data from the main world
            // you need to extract it manually or with the plugin like above.
            //
            // Add a [`Node`] to the [`RenderGraph`]. The Node needs to impl FromWorld.
            //
            // The [`ViewNodeRunner`] is a special [`Node`] that will automatically run the node
            // for each view matching the [`ViewQuery`].
            .add_render_graph_node::<ViewNodeRunner<PostProcessNode>>(
                // Specify the label of the graph; in this case we want the graph for 3d
                Core3d,
                // It also needs the label of the node
                PostProcessLabel,
            )
            .add_render_graph_edges(
                Core3d,
                // Specify the node ordering.
                // This will automatically create all required node edges to enforce the given ordering.
                (
                    Node3d::Tonemapping,
                    PostProcessLabel,
                    Node3d::EndMainPassPostProcessing,
                ),
            );
    }
}
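
// As the module docs note, adapting this example to 2D only changes which render
// graph the node is registered in. A sketch of the equivalent registration, assuming
// the `Core2d`/`Node2d` labels from `bevy::core_pipeline::core_2d::graph`:
//
//     .add_render_graph_node::<ViewNodeRunner<PostProcessNode>>(Core2d, PostProcessLabel)
//     .add_render_graph_edges(
//         Core2d,
//         (
//             Node2d::Tonemapping,
//             PostProcessLabel,
//             Node2d::EndMainPassPostProcessing,
//         ),
//     );
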
#[derive(Debug, Hash, PartialEq, Eq, Clone, RenderLabel)]
struct PostProcessLabel;

// The post process node used for the render graph
#[derive(Default)]
struct PostProcessNode;

// The ViewNode trait is required by the ViewNodeRunner
impl ViewNode for PostProcessNode {
    // The node needs a query to gather data from the ECS in order to do its rendering,
    // but it's not a normal system so we need to define it manually.
    //
    // This query will only run on the view entity
    type ViewQuery = (
        &'static ViewTarget,
        // This makes sure the node only runs on cameras with the PostProcessSettings component
        &'static PostProcessSettings,
        // As there could be multiple post processing components sent to the GPU (one per camera),
        // we need to get the index of the one that is associated with the current view.
        &'static DynamicUniformIndex<PostProcessSettings>,
    );

    // Runs the node logic.
    // This is where you encode draw commands.
    //
    // This will run on every view on which the graph is running.
    // If you don't want your effect to run on every camera,
    // you'll need to make sure you have a marker component as part of [`ViewQuery`]
    // to identify which camera(s) should run the effect.
    fn run(
        &self,
        _graph: &mut RenderGraphContext,
        render_context: &mut RenderContext,
        (view_target, _post_process_settings, settings_index): QueryItem<Self::ViewQuery>,
        world: &World,
    ) -> Result<(), NodeRunError> {
        // Get the pipeline resource that contains the global data we need
        // to create the render pipeline
        let post_process_pipeline = world.resource::<PostProcessPipeline>();

        // The pipeline cache is a cache of all previously created pipelines.
        // It is required to avoid creating a new pipeline each frame,
        // which is expensive due to shader compilation.
        let pipeline_cache = world.resource::<PipelineCache>();

        // Get the pipeline from the cache
        let Some(pipeline) = pipeline_cache.get_render_pipeline(post_process_pipeline.pipeline_id)
        else {
            return Ok(());
        };

        // Get the settings uniform binding
        let settings_uniforms = world.resource::<ComponentUniforms<PostProcessSettings>>();
        let Some(settings_binding) = settings_uniforms.uniforms().binding() else {
            return Ok(());
        };

        // This will start a new "post process write", obtaining two texture
        // views from the view target - a `source` and a `destination`.
        // `source` is the "current" main texture and you _must_ write into
        // `destination` because calling `post_process_write()` on the
        // [`ViewTarget`] will internally flip the [`ViewTarget`]'s main
        // texture to the `destination` texture. Failing to do so will cause
        // the current main texture information to be lost.
        let post_process = view_target.post_process_write();

        // The bind_group gets created each frame.
        //
        // Normally, you would create a bind_group in the Queue set,
        // but this doesn't work with the post_process_write().
        // The reason it doesn't work is because each post_process_write will alternate the source/destination.
        // The only way to have the correct source/destination for the bind_group
        // is to make sure you get it during the node execution.
        let bind_group = render_context.render_device().create_bind_group(
            "post_process_bind_group",
            &post_process_pipeline.layout,
            // It's important for this to match the BindGroupLayout defined in the PostProcessPipeline
            &BindGroupEntries::sequential((
                // Make sure to use the source view
                post_process.source,
                // Use the sampler created for the pipeline
                &post_process_pipeline.sampler,
                // Set the settings binding
                settings_binding.clone(),
            )),
        );

        // Begin the render pass
        let mut render_pass = render_context.begin_tracked_render_pass(RenderPassDescriptor {
            label: Some("post_process_pass"),
            color_attachments: &[Some(RenderPassColorAttachment {
                // We need to specify the post process destination view here
                // to make sure we write to the appropriate texture.
                view: post_process.destination,
                depth_slice: None,
                resolve_target: None,
                ops: Operations::default(),
            })],
            depth_stencil_attachment: None,
            timestamp_writes: None,
            occlusion_query_set: None,
        });

        // This is mostly just wgpu boilerplate for drawing a fullscreen triangle,
        // using the pipeline/bind_group created above
        render_pass.set_render_pipeline(pipeline);
        // By passing in the index of the post process settings on this view, we ensure
        // that in the event that multiple settings were sent to the GPU (as would be the
        // case with multiple cameras), we use the correct one.
        render_pass.set_bind_group(0, &bind_group, &[settings_index.index()]);
        render_pass.draw(0..3, 0..1);

        Ok(())
    }
}
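
// Note that in this example `PostProcessSettings` already doubles as the marker that
// restricts the node to specific cameras. For an effect without per-view settings,
// a plain marker component works the same way; a sketch, using a hypothetical
// `PostProcessMarker` component (which would also need to be extracted to the
// render world, e.g. via `ExtractComponentPlugin`):
//
//     type ViewQuery = (&'static ViewTarget, &'static PostProcessMarker);
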
// This contains global data used by the render pipeline.
// This will be created once on startup.
#[derive(Resource)]
struct PostProcessPipeline {
    layout: BindGroupLayout,
    sampler: Sampler,
    pipeline_id: CachedRenderPipelineId,
}
fn init_post_process_pipeline(
    mut commands: Commands,
    render_device: Res<RenderDevice>,
    asset_server: Res<AssetServer>,
    fullscreen_shader: Res<FullscreenShader>,
    pipeline_cache: Res<PipelineCache>,
) {
    // We need to define the bind group layout used for our pipeline
    let layout = render_device.create_bind_group_layout(
        "post_process_bind_group_layout",
        &BindGroupLayoutEntries::sequential(
            // The layout entries will only be visible in the fragment stage
            ShaderStages::FRAGMENT,
            (
                // The screen texture
                texture_2d(TextureSampleType::Float { filterable: true }),
                // The sampler that will be used to sample the screen texture
                sampler(SamplerBindingType::Filtering),
                // The settings uniform that will control the effect
                uniform_buffer::<PostProcessSettings>(true),
            ),
        ),
    );

    // We can create the sampler here since it won't change at runtime and doesn't depend on the view
    let sampler = render_device.create_sampler(&SamplerDescriptor::default());

    // Get the shader handle
    let shader = asset_server.load(SHADER_ASSET_PATH);

    // This will set up a fullscreen triangle for the vertex state.
    let vertex_state = fullscreen_shader.to_vertex_state();

    let pipeline_id = pipeline_cache
        // This will add the pipeline to the cache and queue its creation
        .queue_render_pipeline(RenderPipelineDescriptor {
            label: Some("post_process_pipeline".into()),
            layout: vec![layout.clone()],
            vertex: vertex_state,
            fragment: Some(FragmentState {
                shader,
                // Make sure this matches the entry point of your shader.
                // It can be anything as long as it matches here and in the shader.
                targets: vec![Some(ColorTargetState {
                    format: TextureFormat::bevy_default(),
                    blend: None,
                    write_mask: ColorWrites::ALL,
                })],
                ..default()
            }),
            ..default()
        });

    commands.insert_resource(PostProcessPipeline {
        layout,
        sampler,
        pipeline_id,
    });
}
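
// The shader at `SHADER_ASSET_PATH` must declare group 0 bindings that match the
// `BindGroupLayout` above: the screen texture, the sampler, and the settings uniform.
// A minimal sketch of a matching chromatic aberration fragment shader (the shipped
// asset may differ in its details):
//
//     #import bevy_core_pipeline::fullscreen_vertex_shader::FullscreenVertexOutput
//
//     @group(0) @binding(0) var screen_texture: texture_2d<f32>;
//     @group(0) @binding(1) var texture_sampler: sampler;
//     struct PostProcessSettings { intensity: f32 }
//     @group(0) @binding(2) var<uniform> settings: PostProcessSettings;
//
//     @fragment
//     fn fragment(in: FullscreenVertexOutput) -> @location(0) vec4<f32> {
//         // Offset each color channel by a slightly different amount to
//         // produce the chromatic aberration effect.
//         let offset = settings.intensity;
//         let r = textureSample(screen_texture, texture_sampler, in.uv + vec2<f32>(offset, -offset)).r;
//         let g = textureSample(screen_texture, texture_sampler, in.uv + vec2<f32>(-offset, 0.0)).g;
//         let b = textureSample(screen_texture, texture_sampler, in.uv + vec2<f32>(0.0, offset)).b;
//         return vec4<f32>(r, g, b, 1.0);
//     }
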
// This is the component that will get passed to the shader
#[derive(Component, Default, Clone, Copy, ExtractComponent, ShaderType)]
struct PostProcessSettings {
    intensity: f32,
    // WebGL2 structs must be 16 byte aligned.
    #[cfg(feature = "webgl2")]
    _webgl2_padding: Vec3,
}

/// Set up a simple 3D scene
fn setup(
    mut commands: Commands,
    mut meshes: ResMut<Assets<Mesh>>,
    mut materials: ResMut<Assets<StandardMaterial>>,
) {
    // camera
    commands.spawn((
        Camera3d::default(),
        Transform::from_translation(Vec3::new(0.0, 0.0, 5.0)).looking_at(Vec3::default(), Vec3::Y),
        Camera {
            clear_color: Color::WHITE.into(),
            ..default()
        },
        // Add the setting to the camera.
        // This component is also used to determine on which camera to run the post processing effect.
        PostProcessSettings {
            intensity: 0.02,
            ..default()
        },
    ));

    // cube
    commands.spawn((
        Mesh3d(meshes.add(Cuboid::default())),
        MeshMaterial3d(materials.add(Color::srgb(0.8, 0.7, 0.6))),
        Transform::from_xyz(0.0, 0.5, 0.0),
        Rotates,
    ));
    // light
    commands.spawn(DirectionalLight {
        illuminance: 1_000.,
        ..default()
    });
}

#[derive(Component)]
struct Rotates;

/// Rotates any entity with the `Rotates` component around the x and z axes
fn rotate(time: Res<Time>, mut query: Query<&mut Transform, With<Rotates>>) {
    for mut transform in &mut query {
        transform.rotate_x(0.55 * time.delta_secs());
        transform.rotate_z(0.15 * time.delta_secs());
    }
}

// Change the intensity over time to show that the effect is controlled from the main world
fn update_settings(mut settings: Query<&mut PostProcessSettings>, time: Res<Time>) {
    for mut setting in &mut settings {
        let mut intensity = time.elapsed_secs();
        // Make it loop periodically
        intensity = ops::sin(intensity);
        // Remap it to 0..1 because the intensity can't be negative
        intensity = intensity * 0.5 + 0.5;
        // Scale it to a more reasonable level
        intensity *= 0.015;

        // Set the intensity.
        // This will then be extracted to the render world and uploaded to the GPU
        // automatically by the [`UniformComponentPlugin`].
        setting.intensity = intensity;
    }
}
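
// Because the node only runs on views matching [`ViewQuery`], the effect can be
// toggled at runtime by inserting or removing `PostProcessSettings` on the camera.
// A sketch of a hypothetical toggle system (not part of this example; it would need
// to be added to the `Update` schedule):
//
//     fn toggle_effect(
//         mut commands: Commands,
//         keys: Res<ButtonInput<KeyCode>>,
//         cameras: Query<(Entity, Has<PostProcessSettings>), With<Camera>>,
//     ) {
//         if keys.just_pressed(KeyCode::Space) {
//             for (entity, enabled) in &cameras {
//                 if enabled {
//                     commands.entity(entity).remove::<PostProcessSettings>();
//                 } else {
//                     commands
//                         .entity(entity)
//                         .insert(PostProcessSettings { intensity: 0.02, ..default() });
//                 }
//             }
//         }
//     }
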