├── .gitignore
├── cornell_box.png
├── Cargo.toml
├── README.md
├── LICENSE-MIT
├── examples
│   ├── simple.rs
│   └── cornell_box.rs
├── src
│   ├── node.rs
│   ├── edge_detection.wgsl
│   └── lib.rs
├── LICENSE-APACHE
└── Cargo.lock

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
/target

--------------------------------------------------------------------------------
/cornell_box.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IceSentry/bevy_mod_edge_detection/HEAD/cornell_box.png

--------------------------------------------------------------------------------
/Cargo.toml:
--------------------------------------------------------------------------------
[package]
name = "bevy_mod_edge_detection"
version = "0.1.0"
edition = "2021"
license = "MIT OR Apache-2.0"
description = "A simple plugin to add an edge detection shader to bevy"
repository = "https://github.com/IceSentry/bevy_mod_edge_detection"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
bevy = "0.13"

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# bevy_mod_edge_detection

A simple plugin that adds full-screen edge detection.

![image](cornell_box.png)

## Implementation details

The implementation is mostly based on what is described in this article:

[https://alexanderameye.github.io/notes/rendering-outlines/#edge-detection](https://alexanderameye.github.io/notes/rendering-outlines/#edge-detection)

Essentially, it runs the Sobel operator on the depth, normal, and color textures. The Sobel operator detects discontinuities in those textures, and the shader simply draws those discontinuities as edges.

## Getting Started

See the [examples/simple.rs](examples/simple.rs) example.
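A minimal setup, condensed from [examples/simple.rs](examples/simple.rs): add the plugin, turn MSAA off, and mark a 3D camera with the depth and normal prepasses plus `EdgeDetectionCamera` (the inline startup closure here is just for brevity; the example uses a regular system):

```rust
use bevy::core_pipeline::prepass::{DepthPrepass, NormalPrepass};
use bevy::prelude::*;
use bevy_mod_edge_detection::{EdgeDetectionCamera, EdgeDetectionConfig, EdgeDetectionPlugin};

fn main() {
    App::new()
        // MSAA currently doesn't work correctly with the plugin
        .insert_resource(Msaa::Off)
        .add_plugins((DefaultPlugins, EdgeDetectionPlugin))
        .init_resource::<EdgeDetectionConfig>()
        .add_systems(Startup, |mut commands: Commands| {
            commands.spawn((
                Camera3dBundle::default(),
                // the effect reads the depth and normal prepass textures
                DepthPrepass,
                NormalPrepass,
                EdgeDetectionCamera,
            ));
        })
        .run();
}
```

The edge color and the depth/normal/color thresholds can be tuned at runtime by modifying the `EdgeDetectionConfig` resource; [examples/cornell_box.rs](examples/cornell_box.rs) shows this by toggling the debug view and the effect from the keyboard.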

--------------------------------------------------------------------------------
/LICENSE-MIT:
--------------------------------------------------------------------------------
MIT License

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

--------------------------------------------------------------------------------
/examples/simple.rs:
--------------------------------------------------------------------------------
use bevy::{
    core_pipeline::{
        fxaa::{Fxaa, Sensitivity},
        prepass::{DepthPrepass, NormalPrepass},
    },
    prelude::*,
};
use bevy_mod_edge_detection::{EdgeDetectionCamera, EdgeDetectionConfig, EdgeDetectionPlugin};

fn main() {
    App::new()
        // MSAA currently doesn't work correctly with the plugin
        .insert_resource(Msaa::Off)
        .add_plugins((DefaultPlugins, EdgeDetectionPlugin))
        .init_resource::<EdgeDetectionConfig>()
        .add_systems(Startup, setup)
        .run();
}

/// set up a simple 3D scene
fn setup(
    mut commands: Commands,
    mut meshes: ResMut<Assets<Mesh>>,
    mut materials: ResMut<Assets<StandardMaterial>>,
) {
    // set up the camera
    commands.spawn((
        Camera3dBundle {
            transform: Transform::from_xyz(-2.5, 4.5, 9.0).looking_at(Vec3::ZERO, Vec3::Y),
            ..default()
        },
        // The edge detection effect requires the depth and normal prepass
        DepthPrepass,
        NormalPrepass,
        // Add some anti-aliasing because the lines can be really harsh otherwise
        // This isn't required, but some form of AA is recommended
        Fxaa {
            enabled: true,
            edge_threshold: Sensitivity::Extreme,
            edge_threshold_min: Sensitivity::Extreme,
        },
        EdgeDetectionCamera,
    ));

    // set up basic scene

    // circular base
    commands.spawn(PbrBundle {
        mesh: meshes.add(Circle::new(4.0)),
        material: materials.add(Color::WHITE),
        transform: Transform::from_rotation(Quat::from_rotation_x(-std::f32::consts::FRAC_PI_2)),
        ..default()
    });
    // cube
    commands.spawn(PbrBundle {
        mesh: meshes.add(Cuboid::new(1.0, 1.0, 1.0)),
        material: materials.add(Color::rgb_u8(124, 144, 255)),
        transform: Transform::from_xyz(0.0, 0.5, 0.0),
        ..default()
    });
    // light
    commands.spawn(PointLightBundle {
        point_light: PointLight {
            shadows_enabled: true,
            ..default()
        },
        transform: Transform::from_xyz(4.0, 8.0, 4.0),
        ..default()
    });
}

--------------------------------------------------------------------------------
/src/node.rs:
--------------------------------------------------------------------------------
use bevy::{
    core_pipeline::prepass::ViewPrepassTextures,
    prelude::*,
    render::{
        render_graph::{NodeRunError, RenderGraphContext, RenderLabel, ViewNode},
        render_resource::{
            BindGroupEntries, Operations, PipelineCache, RenderPassColorAttachment,
            RenderPassDescriptor,
        },
        renderer::RenderContext,
        view::{ViewTarget, ViewUniformOffset, ViewUniforms},
    },
};

use crate::{ConfigBuffer, EdgeDetectionCamera, EdgeDetectionPipeline};

#[derive(Debug, Hash, PartialEq, Eq, Clone, RenderLabel)]
pub struct EdgeDetetctionNodeLabel;

#[derive(Default)]
pub struct EdgeDetectionNode;

impl ViewNode for EdgeDetectionNode {
    type ViewQuery = (
        &'static ViewTarget,
        &'static ViewPrepassTextures,
        &'static ViewUniformOffset,
        &'static EdgeDetectionCamera,
    );

    fn run(
        &self,
        _graph: &mut RenderGraphContext,
        render_context: &mut RenderContext,
        (view_target, prepass_textures, view_uniform, _): bevy::ecs::query::QueryItem<
            Self::ViewQuery,
        >,
        world: &World,
    ) -> Result<(), NodeRunError> {
        let edge_detection_pipeline = world.resource::<EdgeDetectionPipeline>();
        let pipeline_cache = world.resource::<PipelineCache>();
        let Some(pipeline) =
            pipeline_cache.get_render_pipeline(edge_detection_pipeline.pipeline_id)
        else {
            return Ok(());
        };

        let post_process = view_target.post_process_write();
        let view_uniforms = world.resource::<ViewUniforms>();
        let config_buffer = world.resource::<ConfigBuffer>();

        let Some(view_uniforms) = view_uniforms.uniforms.binding() else {
            return Ok(());
        };

        let (Some(depth_texture), Some(normal_texture)) =
            (&prepass_textures.depth, &prepass_textures.normal)
        else {
            return Ok(());
        };

        let bind_group = render_context.render_device().create_bind_group(
            "edge_detection_bind_group",
            &edge_detection_pipeline.layout,
            &BindGroupEntries::sequential((
                post_process.source,
                &edge_detection_pipeline.sampler,
                &depth_texture.texture.default_view,
                &normal_texture.texture.default_view,
                view_uniforms,
                &config_buffer.buffer,
            )),
        );

        let mut render_pass = render_context.begin_tracked_render_pass(RenderPassDescriptor {
            label: Some("edge_detection_pass"),
            color_attachments: &[Some(RenderPassColorAttachment {
                view: post_process.destination,
                resolve_target: None,
                ops: Operations::default(),
            })],
            depth_stencil_attachment: None,
            timestamp_writes: None,
            occlusion_query_set: None,
        });

        render_pass.set_render_pipeline(pipeline);
        render_pass.set_bind_group(0, &bind_group, &[view_uniform.offset]);
        // draw the fullscreen triangle set up by fullscreen_shader_vertex_state
        render_pass.draw(0..3, 0..1);

        Ok(())
    }
}

--------------------------------------------------------------------------------
/src/edge_detection.wgsl:
--------------------------------------------------------------------------------
#import bevy_core_pipeline::fullscreen_vertex_shader::FullscreenVertexOutput
#import bevy_render::view::View

struct Config {
    depth_threshold: f32,
    normal_threshold: f32,
    color_threshold: f32,
    edge_color: vec4f,
    debug: u32,
    enabled: u32,
};

@group(0) @binding(0) var screen_texture: texture_2d<f32>;
@group(0) @binding(1) var texture_sampler: sampler;
@group(0) @binding(2) var depth_prepass_texture: texture_depth_2d;
@group(0) @binding(3) var normal_prepass_texture: texture_2d<f32>;
@group(0) @binding(4) var<uniform> view: View;
@group(0) @binding(5) var<uniform> config: Config;

/// Retrieve the perspective camera near clipping plane
fn perspective_camera_near() -> f32 {
    return view.projection[3][2];
}

/// Convert ndc depth to linear view z.
/// Note: Depth values in front of the camera will be negative as -z is forward
fn depth_ndc_to_view_z(ndc_depth: f32) -> f32 {
#ifdef VIEW_PROJECTION_PERSPECTIVE
    return perspective_camera_near() / ndc_depth;
#else ifdef VIEW_PROJECTION_ORTHOGRAPHIC
    return -(view.projection[3][2] - ndc_depth) / view.projection[2][2];
#else
    let view_pos = view.inverse_projection * vec4(0.0, 0.0, ndc_depth, 1.0);
    return view_pos.z / view_pos.w;
#endif
}

fn prepass_depth(frag_coord: vec2f) -> f32 {
    return textureLoad(depth_prepass_texture, vec2i(frag_coord), 0);
}

fn prepass_normal(frag_coord: vec2f) -> vec3f {
    return textureLoad(normal_prepass_texture, vec2i(frag_coord), 0).xyz;
}

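// 3x3 Sobel kernels, flattened in the same row-major order as the `neighbours`
// offsets below. For each input (depth, normal, color) the shader convolves the
// nine samples with both kernels and uses the gradient magnitude
// sqrt(gx * gx + gy * gy) as the edge strength (see detect_edge_f32 / detect_edge_vec3).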
var<private> sobel_x: array<f32, 9> = array(
    1.0, 0.0, -1.0,
    2.0, 0.0, -2.0,
    1.0, 0.0, -1.0,
);

var<private> sobel_y: array<f32, 9> = array(
    1.0, 2.0, 1.0,
    0.0, 0.0, 0.0,
    -1.0, -2.0, -1.0,
);

var<private> neighbours: array<vec2f, 9> = array(
    vec2f(-1.0, 1.0),  vec2f(0.0, 1.0),  vec2f(1.0, 1.0),
    vec2f(-1.0, 0.0),  vec2f(0.0, 0.0),  vec2f(1.0, 0.0),
    vec2f(-1.0, -1.0), vec2f(0.0, -1.0), vec2f(1.0, -1.0),
);

var<private> thickness: f32 = 0.8;

fn detect_edge_f32(samples: ptr<function, array<f32, 9>>) -> f32 {
    var horizontal = vec3f(0.0);
    for (var i = 0; i < 9; i++) {
        horizontal += (*samples)[i] * sobel_x[i];
    }
    var vertical = vec3f(0.0);
    for (var i = 0; i < 9; i++) {
        vertical += (*samples)[i] * sobel_y[i];
    }
    var edge = sqrt(dot(horizontal, horizontal) + dot(vertical, vertical));
    return edge;
}

fn detect_edge_vec3(samples: ptr<function, array<vec3f, 9>>) -> f32 {
    var horizontal = vec3f(0.0);
    for (var i = 0; i < 9; i++) {
        horizontal += (*samples)[i].xyz * sobel_x[i];
    }
    var vertical = vec3f(0.0);
    for (var i = 0; i < 9; i++) {
        vertical += (*samples)[i].xyz * sobel_y[i];
    }
    var edge = sqrt(dot(horizontal, horizontal) + dot(vertical, vertical));
    return edge;
}

/// returns the (0.0, 0.0) .. (1.0, 1.0) position within the viewport for the current render target
/// [0 .. render target viewport size] eg. [(0.0, 0.0) .. (1280.0, 720.0)] to [(0.0, 0.0) .. (1.0, 1.0)]
fn frag_coord_to_uv(frag_coord: vec2<f32>) -> vec2<f32> {
    return (frag_coord - view.viewport.xy) / view.viewport.zw;
}

/// Convert uv [0.0 .. 1.0] coordinate to ndc space xy [-1.0 .. 1.0]
fn uv_to_ndc(uv: vec2<f32>) -> vec2<f32> {
    return uv * vec2(2.0, -2.0) + vec2(-1.0, 1.0);
}

/// Convert a ndc space position to view space
fn position_ndc_to_view(ndc_pos: vec3<f32>) -> vec3<f32> {
    let view_pos = view.inverse_projection * vec4(ndc_pos, 1.0);
    return view_pos.xyz / view_pos.w;
}

fn detect_edge_depth(frag_coord: vec2f) -> f32 {
    if config.depth_threshold == 0.0 {
        return 0.0;
    }

    var samples = array<f32, 9>();
    for (var i = 0; i < 9; i++) {
        samples[i] = depth_ndc_to_view_z(prepass_depth(frag_coord + neighbours[i] * thickness));
    }

    let edge = detect_edge_f32(&samples);

    // let ndc = uv_to_ndc(frag_coord_to_uv(frag_coord));
    // let pos = position_ndc_to_view(vec3(ndc, -1.0));
    // let dir = normalize(pos);
    // let n = prepass_normal(frag_coord);
    // let t1 = smoothstep(0.8, 1.0, 1.0 - dot(n, dir));
    // let t2 = mix(0.1, 1000.0, t1);

    // Make the threshold change based on depth
    let d = depth_ndc_to_view_z(prepass_depth(frag_coord));
    if edge < config.depth_threshold * d {
        return 0.0;
    }
    return edge;
}

fn detect_edge_normal(frag_coord: vec2f) -> f32 {
    if config.normal_threshold == 0.0 {
        return 0.0;
    }

    var samples = array<vec3f, 9>();
    for (var i = 0; i < 9; i++) {
        samples[i] = prepass_normal(frag_coord + neighbours[i] * thickness);
    }

    let edge = detect_edge_vec3(&samples);
    if edge < config.normal_threshold {
        return 0.0;
    }
    return edge;
}

fn detect_edge_color(frag_coord: vec2f) -> f32 {
    if config.color_threshold == 0.0 {
        return 0.0;
    }

    var samples = array<vec3f, 9>();
    for (var i = 0; i < 9; i++) {
        samples[i] = textureLoad(screen_texture, vec2i(frag_coord + neighbours[i] * thickness), 0).rgb;
    }

    let edge = detect_edge_vec3(&samples);
    if edge < config.color_threshold {
        return 0.0;
    }
    return edge;
}

@fragment
fn fragment(in: FullscreenVertexOutput) -> @location(0) vec4f {
    let color = textureSample(screen_texture, texture_sampler, in.uv);

    if config.enabled == 1u {
        let frag_coord = in.position.xy;
        let edge_depth = detect_edge_depth(frag_coord);
        let edge_normal = detect_edge_normal(frag_coord);
        let edge_color = detect_edge_color(frag_coord);
        let edge = max(edge_depth, max(edge_normal, edge_color));

        if config.debug == 1u {
            return vec4(edge_depth, edge_normal, edge_color, 1.0);
        }

        if edge > 0.01 {
            return config.edge_color;
        }
    }

    return color;
}

--------------------------------------------------------------------------------
/src/lib.rs:
--------------------------------------------------------------------------------
use bevy::{
    asset::load_internal_asset,
    core_pipeline::{
        core_3d::graph::{Core3d, Node3d},
        fullscreen_vertex_shader::fullscreen_shader_vertex_state,
    },
    prelude::*,
    render::{
        extract_component::{ExtractComponent, ExtractComponentPlugin},
        render_graph::{RenderGraphApp, ViewNodeRunner},
        render_resource::{
            binding_types::{
                sampler, texture_2d, texture_depth_2d, uniform_buffer, uniform_buffer_sized,
            },
            BindGroupLayout, BindGroupLayoutEntries, CachedRenderPipelineId, ColorTargetState,
            ColorWrites, FragmentState, MultisampleState, PipelineCache, PrimitiveState,
            RenderPipelineDescriptor, Sampler, SamplerBindingType, SamplerDescriptor, ShaderStages,
            ShaderType, TextureFormat, TextureSampleType, UniformBuffer,
        },
        renderer::{RenderDevice, RenderQueue},
        texture::BevyDefault,
        view::ViewUniform,
        Extract, Render, RenderApp, RenderSet,
    },
};
use node::EdgeDetectionNode;

use crate::node::EdgeDetetctionNodeLabel;

mod node;

pub const SHADER_HANDLE: Handle<Shader> = Handle::weak_from_u128(410592619790336);

pub struct EdgeDetectionPlugin;
impl Plugin for EdgeDetectionPlugin {
    fn build(&self, app: &mut App) {
        load_internal_asset!(app, SHADER_HANDLE, "edge_detection.wgsl", Shader::from_wgsl);
        // app.add_systems(Update, print_projection);

        app.add_plugins(ExtractComponentPlugin::<EdgeDetectionCamera>::default());

        let Ok(render_app) = app.get_sub_app_mut(RenderApp) else {
            return;
        };

        render_app
            .add_systems(ExtractSchedule, extract_config)
            .add_systems(Render, prepare_config_buffer.in_set(RenderSet::Prepare));

        render_app
            .add_render_graph_node::<ViewNodeRunner<EdgeDetectionNode>>(
                Core3d,
                EdgeDetetctionNodeLabel,
            )
            .add_render_graph_edges(
                Core3d,
                (
                    Node3d::EndMainPass,
                    EdgeDetetctionNodeLabel,
                    Node3d::Tonemapping,
                ),
            );
    }
    fn finish(&self, app: &mut App) {
        let Ok(render_app) = app.get_sub_app_mut(RenderApp) else {
            return;
        };
        render_app
            .init_resource::<EdgeDetectionPipeline>()
            .init_resource::<ConfigBuffer>();
    }
}

#[derive(Component, Clone, Copy, ExtractComponent)]
pub struct EdgeDetectionCamera;

#[derive(Resource, ShaderType, Clone, Copy)]
pub struct EdgeDetectionConfig {
    pub depth_threshold: f32,
    pub normal_threshold: f32,
    pub color_threshold: f32,
    pub edge_color: Color,
    pub debug: u32,
    pub enabled: u32,
}

impl Default for EdgeDetectionConfig {
    fn default() -> Self {
        Self {
            depth_threshold: 0.2,
            normal_threshold: 0.05,
            color_threshold: 1.0,
            edge_color: Color::BLACK,
            debug: 0,
            enabled: 1,
        }
    }
}

#[derive(Resource)]
struct ConfigBuffer {
    buffer: UniformBuffer<EdgeDetectionConfig>,
}

impl FromWorld for ConfigBuffer {
    fn from_world(world: &mut World) -> Self {
        let render_device = world.resource::<RenderDevice>();
        let render_queue = world.resource::<RenderQueue>();

        let config = EdgeDetectionConfig::default();
        let mut buffer = UniformBuffer::default();
        buffer.set(config);
        buffer.write_buffer(render_device, render_queue);

        ConfigBuffer { buffer }
    }
}

fn extract_config(mut commands: Commands, config: Extract<Res<EdgeDetectionConfig>>) {
    commands.insert_resource(**config);
}

fn prepare_config_buffer(
    render_device: Res<RenderDevice>,
    render_queue: Res<RenderQueue>,
    mut config_buffer: ResMut<ConfigBuffer>,
    config: Res<EdgeDetectionConfig>,
) {
    let buffer = config_buffer.buffer.get_mut();
    *buffer = *config;
    config_buffer
        .buffer
        .write_buffer(&render_device, &render_queue);
}

#[derive(Resource)]
struct EdgeDetectionPipeline {
    layout: BindGroupLayout,
    sampler: Sampler,
    pipeline_id: CachedRenderPipelineId,
}

impl FromWorld for EdgeDetectionPipeline {
    fn from_world(world: &mut World) -> Self {
        let render_device = world.resource::<RenderDevice>();

        let layout = render_device.create_bind_group_layout(
"edge_detection_bind_group_layout", 148 | &BindGroupLayoutEntries::sequential( 149 | ShaderStages::FRAGMENT, 150 | ( 151 | // screen_texture 152 | texture_2d(TextureSampleType::Float { filterable: true }), 153 | sampler(SamplerBindingType::Filtering), 154 | // depth prepass 155 | texture_depth_2d(), 156 | // normal prepass 157 | texture_2d(TextureSampleType::Float { filterable: true }), 158 | // view 159 | uniform_buffer::(true), 160 | // config 161 | uniform_buffer_sized(false, None), 162 | ), 163 | ), 164 | ); 165 | 166 | let sampler = render_device.create_sampler(&SamplerDescriptor::default()); 167 | 168 | let pipeline_id = 169 | world 170 | .resource_mut::() 171 | .queue_render_pipeline(RenderPipelineDescriptor { 172 | label: Some("edge_detection_pipeline".into()), 173 | layout: vec![layout.clone()], 174 | // This will setup a fullscreen triangle for the vertex state 175 | vertex: fullscreen_shader_vertex_state(), 176 | fragment: Some(FragmentState { 177 | shader: SHADER_HANDLE, 178 | shader_defs: vec!["VIEW_PROJECTION_PERSPECTIVE".into()], // TODO detect projection 179 | entry_point: "fragment".into(), 180 | targets: vec![Some(ColorTargetState { 181 | format: TextureFormat::bevy_default(), 182 | blend: None, 183 | write_mask: ColorWrites::ALL, 184 | })], 185 | }), 186 | primitive: PrimitiveState::default(), 187 | depth_stencil: None, 188 | multisample: MultisampleState::default(), 189 | push_constant_ranges: vec![], 190 | }); 191 | 192 | Self { 193 | layout, 194 | sampler, 195 | pipeline_id, 196 | } 197 | } 198 | } 199 | -------------------------------------------------------------------------------- /examples/cornell_box.rs: -------------------------------------------------------------------------------- 1 | use std::f32::consts::{FRAC_PI_2, PI}; 2 | 3 | use bevy::{ 4 | core_pipeline::{ 5 | fxaa::{Fxaa, Sensitivity}, 6 | prepass::{DepthPrepass, NormalPrepass}, 7 | }, 8 | diagnostic::{Diagnostic, DiagnosticsStore, FrameTimeDiagnosticsPlugin}, 9 | math::vec3, 10 | prelude::*, 11 | window::{PresentMode, WindowResolution}, 12 | }; 13 | use bevy_mod_edge_detection::{EdgeDetectionCamera, EdgeDetectionConfig, EdgeDetectionPlugin}; 14 | 15 | fn main() { 16 | App::new() 17 | .insert_resource(Msaa::Off) 18 | .add_plugins(( 19 | DefaultPlugins.set(WindowPlugin { 20 | primary_window: Some(Window { 21 | resolution: WindowResolution::new(720.0, 720.0), 22 | present_mode: PresentMode::AutoNoVsync, 23 | ..default() 24 | }), 25 | ..default() 26 | }), 27 | FrameTimeDiagnosticsPlugin, 28 | EdgeDetectionPlugin, 29 | )) 30 | .insert_resource(EdgeDetectionConfig { 31 | depth_threshold: 0.0, 32 | normal_threshold: 1.0, 33 | color_threshold: 0.0, 34 | debug: 0, 35 | ..default() 36 | }) 37 | .add_systems( 38 | Startup, 39 | (setup_camera, setup_ui, spawn_cornell_box, spawn_boxes), 40 | ) 41 | .add_systems(PostStartup, set_unlit) 42 | .add_systems( 43 | Update, 44 | (update_diagnostic_display, update_config, update_camera), 45 | ) 46 | .run(); 47 | } 48 | 49 | fn setup_camera(mut commands: Commands) { 50 | commands.spawn(( 51 | Camera3dBundle { 52 | transform: Transform::from_xyz(0.0, 2.5, -8.75) 53 | .looking_at(vec3(0.0, 2.5, 0.0), Vec3::Y), 54 | ..default() 55 | }, 56 | DepthPrepass, 57 | NormalPrepass, 58 | Fxaa { 59 | enabled: true, 60 | edge_threshold: Sensitivity::Extreme, 61 | edge_threshold_min: Sensitivity::Extreme, 62 | }, 63 | EdgeDetectionCamera, 64 | )); 65 | } 66 | 67 | fn setup_ui(mut commands: Commands) { 68 | let style = TextStyle { 69 | font_size: 16.0, 70 | color: Color::WHITE, 71 | 
        ..default()
    };
    commands
        .spawn(
            TextBundle::from_sections([
                TextSection::from_style(style.clone()),
                TextSection::new(" fps\n", style.clone()),
                TextSection::from_style(style.clone()),
                TextSection::new(" ms", style),
            ])
            .with_style(Style {
                position_type: PositionType::Absolute,
                top: Val::Px(5.0),
                left: Val::Px(5.0),
                ..default()
            }),
        )
        .insert(BackgroundColor(Color::BLACK.with_a(0.75)));
}

fn spawn_cornell_box(
    mut commands: Commands,
    mut meshes: ResMut<Assets<Mesh>>,
    mut materials: ResMut<Assets<StandardMaterial>>,
) {
    let white = materials.add(Color::WHITE);
    let plane_size = 5.0;
    let plane = meshes.add(Plane3d::default().mesh().size(plane_size, plane_size));

    // bottom
    commands.spawn(PbrBundle {
        mesh: plane.clone(),
        material: white.clone(),
        transform: Transform::from_xyz(0.0, 0.0, 0.0),
        ..default()
    });
    // top
    commands.spawn(PbrBundle {
        mesh: plane.clone(),
        material: white.clone(),
        transform: Transform::from_xyz(0.0, 5.0, 0.0).with_rotation(Quat::from_rotation_x(PI)),
        ..default()
    });
    // back
    commands.spawn(PbrBundle {
        mesh: plane.clone(),
        material: white,
        transform: Transform::from_xyz(0.0, 2.5, 2.5)
            .with_rotation(Quat::from_rotation_x(-FRAC_PI_2)),
        ..default()
    });
    // left
    commands.spawn(PbrBundle {
        mesh: plane.clone(),
        material: materials.add(Color::RED),
        transform: Transform::from_xyz(2.5, 2.5, 0.0)
            .with_rotation(Quat::from_rotation_z(FRAC_PI_2)),
        ..default()
    });
    // right
    commands.spawn(PbrBundle {
        mesh: plane,
        material: materials.add(Color::GREEN),
        transform: Transform::from_xyz(-2.5, 2.5, 0.0)
            .with_rotation(Quat::from_rotation_z(-FRAC_PI_2)),
        ..default()
    });

    // Light
    commands.spawn(PointLightBundle {
        point_light: PointLight {
            intensity: 1000.0,
            shadows_enabled: true,
            ..default()
        },
        transform: Transform::from_xyz(0.0, 5.0 - 0.005, 0.0)
            .with_rotation(Quat::from_rotation_x(PI)),
        ..default()
    });
}

fn spawn_boxes(
    mut commands: Commands,
    mut meshes: ResMut<Assets<Mesh>>,
    mut materials: ResMut<Assets<StandardMaterial>>,
) {
    let box_size = 1.25;
    let half_box_size = box_size / 2.0;

    commands.spawn(PbrBundle {
        mesh: meshes.add(Cuboid::new(box_size, box_size * 2.0, box_size)),
        material: materials.add(Color::WHITE),
        transform: Transform::from_xyz(half_box_size, half_box_size * 2.0, half_box_size)
            .with_rotation(Quat::from_rotation_y(std::f32::consts::FRAC_PI_6)),
        ..default()
    });

    commands.spawn(PbrBundle {
        mesh: meshes.add(Cuboid::new(box_size, box_size, box_size)),
        material: materials.add(Color::WHITE),
        transform: Transform::from_xyz(-half_box_size, half_box_size, -half_box_size)
            .with_rotation(Quat::from_rotation_y(-std::f32::consts::FRAC_PI_6)),
        ..default()
    });
}

fn set_unlit(
    material_handles: Query<&Handle<StandardMaterial>>,
    mut materials: ResMut<Assets<StandardMaterial>>,
) {
    for id in &material_handles {
        if let Some(material) = materials.get_mut(id) {
            material.unlit = true;
        }
    }
}

fn update_diagnostic_display(diagnostics: Res<DiagnosticsStore>, mut query: Query<&mut Text>) {
    for mut text in &mut query {
        if let Some(fps_smoothed) = diagnostics
            .get(&FrameTimeDiagnosticsPlugin::FPS)
            .and_then(Diagnostic::smoothed)
        {
            text.sections[0].value = format!("{fps_smoothed:.1}");
        }

        if let Some(frame_time_smoothed) = diagnostics
            .get(&FrameTimeDiagnosticsPlugin::FRAME_TIME)
            .and_then(Diagnostic::smoothed)
        {
            text.sections[2].value = format!("{frame_time_smoothed:.3}");
        }
    }
}

fn update_config(mut config: ResMut<EdgeDetectionConfig>, key_input: Res<ButtonInput<KeyCode>>) {
    if key_input.just_pressed(KeyCode::KeyX) {
        config.debug = (config.debug + 1) % 2;
        println!("debug: {:?}", config.debug != 0);
    }
    if key_input.just_pressed(KeyCode::KeyC) {
        config.enabled = (config.enabled + 1) % 2;
        println!("enabled: {:?}", config.enabled != 0);
    }
}

fn update_camera(
    key_input: Res<ButtonInput<KeyCode>>,
    mut cam: Query<&mut Transform, With<Camera>>,
    time: Res